hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
78828049aba38b64bd6be75f136de208bad3519a | 3,750 | py | Python | src/TheLanguage/Grammars/v0_0_1/Statements/TypeAliasStatement.py | davidbrownell/DavidBrownell_TheLanguage | 07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98 | [
"BSL-1.0"
] | null | null | null | src/TheLanguage/Grammars/v0_0_1/Statements/TypeAliasStatement.py | davidbrownell/DavidBrownell_TheLanguage | 07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98 | [
"BSL-1.0"
] | null | null | null | src/TheLanguage/Grammars/v0_0_1/Statements/TypeAliasStatement.py | davidbrownell/DavidBrownell_TheLanguage | 07170b448a0ebd7fa2325c9ccd4cefdb3cf7eb98 | [
"BSL-1.0"
] | 1 | 2021-06-18T18:58:57.000Z | 2021-06-18T18:58:57.000Z | # ----------------------------------------------------------------------
# |
# | TypeAliasStatement.py
# |
# | David Brownell <db@DavidBrownell.com>
# | 2021-10-14 13:22:30
# |
# ----------------------------------------------------------------------
# |
# | Copyright David Brownell 2021
# | Distributed under the Boost Software License, Version 1.0. See
# | accompanying file LICENSE_1_0.txt or copy at
# | http://www.boost.org/LICENSE_1_0.txt.
# |
# ----------------------------------------------------------------------
"""Contains the TypeAliasStatement object"""
import os
from typing import Callable, cast, Tuple, Union
import CommonEnvironment
from CommonEnvironment import Interface
from CommonEnvironmentEx.Package import InitRelativeImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
with InitRelativeImports():
from ..Common import Tokens as CommonTokens
from ...GrammarInfo import AST, DynamicPhrasesType, GrammarPhrase, ParserInfo
from ....Lexer.Phrases.DSL import (
CreatePhrase,
ExtractDynamic,
ExtractSequence,
ExtractToken,
)
from ....Parser.Parser import CreateParserRegions, GetParserInfo
from ....Parser.Statements.TypeAliasStatementParserInfo import (
TypeAliasStatementParserInfo,
TypeParserInfo,
)
# ----------------------------------------------------------------------
class TypeAliasStatement(GrammarPhrase):
"""\
Create a new type name.
'using' <name> '=' <type>
Examples:
using PositiveInt = Int<min_value=0>
"""
PHRASE_NAME = "Type Alias Statement"
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
@staticmethod
@Interface.override
| 30.737705 | 82 | 0.436533 | # ----------------------------------------------------------------------
# |
# | TypeAliasStatement.py
# |
# | David Brownell <db@DavidBrownell.com>
# | 2021-10-14 13:22:30
# |
# ----------------------------------------------------------------------
# |
# | Copyright David Brownell 2021
# | Distributed under the Boost Software License, Version 1.0. See
# | accompanying file LICENSE_1_0.txt or copy at
# | http://www.boost.org/LICENSE_1_0.txt.
# |
# ----------------------------------------------------------------------
"""Contains the TypeAliasStatement object"""
import os
from typing import Callable, cast, Tuple, Union
import CommonEnvironment
from CommonEnvironment import Interface
from CommonEnvironmentEx.Package import InitRelativeImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
with InitRelativeImports():
from ..Common import Tokens as CommonTokens
from ...GrammarInfo import AST, DynamicPhrasesType, GrammarPhrase, ParserInfo
from ....Lexer.Phrases.DSL import (
CreatePhrase,
ExtractDynamic,
ExtractSequence,
ExtractToken,
)
from ....Parser.Parser import CreateParserRegions, GetParserInfo
from ....Parser.Statements.TypeAliasStatementParserInfo import (
TypeAliasStatementParserInfo,
TypeParserInfo,
)
# ----------------------------------------------------------------------
class TypeAliasStatement(GrammarPhrase):
    """\
    Create a new type name.
    'using' <name> '=' <type>
    Examples:
        using PositiveInt = Int<min_value=0>
    """

    PHRASE_NAME = "Type Alias Statement"

    # ----------------------------------------------------------------------
    def __init__(self):
        # Grammar: 'using' <name> '=' <type> <newline>
        phrase_items = [
            # 'using'
            "using",

            # <name>
            CommonTokens.TypeName,

            # '='
            "=",

            # <type>
            DynamicPhrasesType.Types,

            CommonTokens.Newline,
        ]

        super().__init__(
            DynamicPhrasesType.Statements,
            CreatePhrase(
                name=self.PHRASE_NAME,
                item=phrase_items,
            ),
        )

    # ----------------------------------------------------------------------
    @staticmethod
    @Interface.override
    def ExtractParserInfo(
        node: AST.Node,
    ) -> Union[
        None,
        ParserInfo,
        Callable[[], ParserInfo],
        Tuple[ParserInfo, Callable[[], ParserInfo]],
    ]:
        """Return a callable that lazily builds the parser info for `node`."""

        # ----------------------------------------------------------------------
        def CreateInfo():
            # The phrase is a 5-element sequence: 'using', name, '=', type, newline.
            elements = ExtractSequence(node)
            assert len(elements) == 5

            # <name>
            leaf = cast(AST.Leaf, elements[1])
            alias_name = cast(str, ExtractToken(leaf))

            # <type>
            aliased_type_node = cast(AST.Node, ExtractDynamic(cast(AST.Node, elements[3])))
            aliased_type_info = cast(TypeParserInfo, GetParserInfo(aliased_type_node))

            return TypeAliasStatementParserInfo(
                CreateParserRegions(node, leaf, aliased_type_node),  # type: ignore
                alias_name,
                aliased_type_info,
            )

        # ----------------------------------------------------------------------
        return CreateInfo
| 1,529 | 0 | 54 |
ca2f427df7f70436052f6a3dad6d5dd3ff36e62f | 4,832 | py | Python | Basics/E02_Elements/E20_UseBorders.py | freder/PageBotExamples | eb4ced53a673b9376e8357afa9ea0795b022b13c | [
"Ruby",
"MIT"
] | 5 | 2020-06-20T22:01:23.000Z | 2021-08-06T04:39:50.000Z | Basics/E02_Elements/E20_UseBorders.py | freder/PageBotExamples | eb4ced53a673b9376e8357afa9ea0795b022b13c | [
"Ruby",
"MIT"
] | 5 | 2020-05-17T09:32:27.000Z | 2021-03-15T19:45:52.000Z | Basics/E02_Elements/E20_UseBorders.py | freder/PageBotExamples | eb4ced53a673b9376e8357afa9ea0795b022b13c | [
"Ruby",
"MIT"
] | 2 | 2021-02-25T19:07:45.000Z | 2022-01-09T21:14:06.000Z | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
#
# P A G E B O T E X A M P L E S
#
# www.pagebot.io
# Licensed under MIT conditions
#
# -----------------------------------------------------------------------------
#
# E20_UseBorders.py
#
from random import random
'''
# FIXME: shouldn't import DrawBot.
from drawBot import Variable
from drawBot.misc import DrawBotError
'''
from pagebot import getContext
from pagebot.constants import (A4, CENTER,TOP, BOTTOM, INLINE, OUTLINE, ONLINE,
EXPORT)
from pagebot.elements import *
from pagebot.document import Document
from pagebot.style import getRootStyle
from pagebot.toolbox.color import color, noColor
from pagebot.toolbox.units import pt
from pagebot.toolbox.transformer import path2FileName
ViewPadding = 64
PageSize = 500
GUTTER = 24 # Distance between the squares.
SQUARE = 3 * GUTTER # Size of the squares
DashWhite = 4
DashBlack = 4
LineType = ONLINE
FILENAME = path2FileName(__file__)
def draw(contextName):
    """Render the border demo with the named drawing context.

    Builds a one-page document holding a centered grid of colored squares;
    each square receives differently styled borders (dashed left/bottom,
    stroke widths scaled by grid position) and the page is exported as a
    PDF whose name embeds the context name.
    """
    exportPath = '%s/%s-%s.pdf' % (EXPORT, FILENAME, contextName)
    context = getContext(contextName)

    page_w = page_h = PageSize
    cell = SQUARE + GUTTER  # One square plus the gap that follows it.

    # Whole number of cells that fit horizontally / vertically.
    cols = int(page_w / cell)
    rows = int(page_h / cell)

    # Paddings that center the grid of fitting cells on the page.
    pad_x = (page_w - cols * cell + GUTTER) / 2
    pad_y = (page_h - rows * cell + GUTTER) / 2

    doc = Document(title='Color Squares', w=page_w, h=page_h, context=context)
    doc.view.padding = 0  # Don't show cropmarks in this example.

    page = doc.getPage(1)  # Get page on pageNumber, first in row (only one here).
    page.name = 'This demo page'
    page.w = page_w
    page.h = page_h
    page.padding3D = pad_x  # Set all 3 paddings to the same value.
    page.gutter3D = GUTTER  # Set all 3 gutters to the same value.

    for col in range(cols):
        for row in range(rows):
            # Two random fills are drawn per cell (as in the original demo,
            # which kept the second unused) so the random sequence — and
            # therefore the rendered colors — stays identical.
            fill_color = color(random() * 0.5 + 0.5, 0.1, 0.6)
            spare_color = color(random() * 0.5 + 0.5, 0.1, 0.6)

            # Position of this cell, offset by the centering paddings.
            position = pad_x + col * cell, pad_y + row * cell

            # Create the square with its border dicts initialized
            # (borders=1 sets all four border line widths).
            square = newRect(xy=position, w=SQUARE, h=SQUARE, parent=page,
                fill=fill_color, stroke=noColor, borders=1)

            square.borderLeft['line'] = ONLINE
            square.borderLeft['stroke'] = color(0, 0, 0, 0.5)
            square.borderLeft['dash'] = (DashWhite, DashBlack)

            square.borderBottom['strokeWidth'] = pt((col + 1) * 4)
            square.borderBottom['line'] = ONLINE
            square.borderBottom['stroke'] = color(0, 1, 0)
            square.borderBottom['dash'] = (DashWhite, DashBlack)

            square.borderTop['strokeWidth'] = pt((row + 1) * 4)
            square.borderTop['line'] = ONLINE
            square.borderTop['stroke'] = color(1, 1, 0, 0.5)

            square.borderRight['strokeWidth'] = pt((row + 1) * 4)
            square.borderRight['line'] = ONLINE
            square.borderRight['stroke'] = color(0, 0, 1, 0.5)

    page.solve()
    doc.export(exportPath)
for contextName in ('DrawBot', 'Flat'):
draw(contextName)
'''
if __name__ == '__main__': # If running from DrawBot
Variable([
dict(name="LineType", ui="RadioGroup", args=dict(titles=[INLINE, ONLINE, OUTLINE],
isVertical=True)),
dict(name='DashWhite', ui='Slider', args=dict(minValue=0, value=8, maxValue=8)),
dict(name='DashBlack', ui='Slider', args=dict(minValue=0, value=0, maxValue=8)),
dict(name='PageSize', ui='Slider', args=dict(minValue=100, value=400, maxValue=800)),
], globals())
d = makeDocument()
d.export(EXPORT_PATH)
'''
| 39.284553 | 108 | 0.614031 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
#
# P A G E B O T E X A M P L E S
#
# www.pagebot.io
# Licensed under MIT conditions
#
# -----------------------------------------------------------------------------
#
# E20_UseBorders.py
#
from random import random
'''
# FIXME: shouldn't import DrawBot.
from drawBot import Variable
from drawBot.misc import DrawBotError
'''
from pagebot import getContext
from pagebot.constants import (A4, CENTER,TOP, BOTTOM, INLINE, OUTLINE, ONLINE,
EXPORT)
from pagebot.elements import *
from pagebot.document import Document
from pagebot.style import getRootStyle
from pagebot.toolbox.color import color, noColor
from pagebot.toolbox.units import pt
from pagebot.toolbox.transformer import path2FileName
ViewPadding = 64
PageSize = 500
GUTTER = 24 # Distance between the squares.
SQUARE = 3 * GUTTER # Size of the squares
DashWhite = 4
DashBlack = 4
LineType = ONLINE
FILENAME = path2FileName(__file__)
def draw(contextName):
    """Make a new document, using the rs as root style.

    Renders a centered grid of colored squares with per-square border
    styling and exports the page as '<EXPORT>/<FILENAME>-<contextName>.pdf'.
    """
    exportPath = '%s/%s-%s.pdf' % (EXPORT, FILENAME, contextName)
    context = getContext(contextName)
    #W = H = 120 # Get the standard a4 width and height in points.
    W = H = PageSize
    # Hard coded SQUARE and GUTTER, just for simple demo, instead of filling
    # padding and columns in the root style. Page size decides on the amount of
    # squares that is visible. Page padding is centered then.
    sqx = int(W/(SQUARE + GUTTER)) # Whole amount of squares that fit on the page.
    sqy = int(H/(SQUARE + GUTTER))
    # Calculate centered paddings for the amount of fitting squares.
    # Set values in the rootStyle, so we can compare with column calculated square position and sizes.
    #rs['colH'] = rs['colW'] = SQUARE # Make default colW and colH square.
    padX = (W - sqx*(SQUARE + GUTTER) + GUTTER)/2
    my = (H - sqy*(SQUARE + GUTTER) + GUTTER)/2  # Vertical (y) centering padding.
    doc = Document(title='Color Squares', w=W, h=H, context=context)
    doc.view.padding = 0 # Don't show cropmarks in this example.
    # Get list of pages with equal y, then equal x.
    #page = doc[1][0] # Get the single page from te document.
    page = doc.getPage(1) # Get page on pageNumber, first in row (this is only one now).
    page.name = 'This demo page'
    page.w = W
    page.h = H
    page.padding3D = padX # Set all 3 paddings to same value
    page.gutter3D = GUTTER # Set all 3 gutters to same value
    #newRect((0, 0), w=square, h=square, parent=page, fill=color(1, 0, 0), stroke=noColor)
    for ix in range(sqx): # Run through the range of (0, 1, ...) number of horizontal squares
        for iy in range(sqy): # Same with vertical squares
            # Place squares in random colors
            color1 = color(random()*0.5+0.5, 0.1, 0.6)
            # NOTE(review): color2 is never used below; it only advances the
            # random sequence. Removing it would change the rendered colors.
            color2 = color(random()*0.5+0.5, 0.1, 0.6)
            # Calculate the position for each square as combination
            # of paddings and (ix, iy)
            p = padX + ix * (SQUARE + GUTTER), my + iy * (SQUARE + GUTTER) # Make 2-dimensional point tuple.
            # Create Rect object and place it in the page on position p
            # Initialize the borders dicts on lineWidth == 0
            e = newRect(xy=p, w=SQUARE, h=SQUARE, parent=page,
                fill=color1, stroke=noColor, borders=1) # border=1 also works, identical.
            #lineType = {-1:ONLINE, 0:INLINE, 1:ONLINE, 2:OUTLINE}[LineType]
            e.borderLeft['line'] = ONLINE
            e.borderLeft['stroke'] = color(0, 0, 0, 0.5)
            e.borderLeft['dash'] = (DashWhite, DashBlack)
            # Bottom border width grows with the horizontal index ...
            e.borderBottom['strokeWidth'] = pt((ix+1)*4)
            e.borderBottom['line'] = ONLINE
            e.borderBottom['stroke'] = color(0, 1, 0)
            e.borderBottom['dash'] = (DashWhite, DashBlack)
            # ... while top and right widths grow with the vertical index.
            e.borderTop['strokeWidth'] = pt((iy+1)*4)
            e.borderTop['line'] = ONLINE
            e.borderTop['stroke'] = color(1, 1, 0, 0.5)
            e.borderRight['strokeWidth'] = pt((iy+1)*4)
            e.borderRight['line'] = ONLINE
            e.borderRight['stroke'] = color(0, 0, 1, 0.5)
    page.solve()
    doc.export(exportPath)
for contextName in ('DrawBot', 'Flat'):
draw(contextName)
'''
if __name__ == '__main__': # If running from DrawBot
Variable([
dict(name="LineType", ui="RadioGroup", args=dict(titles=[INLINE, ONLINE, OUTLINE],
isVertical=True)),
dict(name='DashWhite', ui='Slider', args=dict(minValue=0, value=8, maxValue=8)),
dict(name='DashBlack', ui='Slider', args=dict(minValue=0, value=0, maxValue=8)),
dict(name='PageSize', ui='Slider', args=dict(minValue=100, value=400, maxValue=800)),
], globals())
d = makeDocument()
d.export(EXPORT_PATH)
'''
| 0 | 0 | 0 |
87e1f1c93082000cf5ef0697c1149761ade4f004 | 127 | py | Python | np/__init__.py | iyanmv/galois | a5e6386a684e3e0b47af608217002795dc25c702 | [
"MIT"
] | 65 | 2021-02-20T04:07:59.000Z | 2022-03-13T10:14:58.000Z | np/__init__.py | iyanmv/galois | a5e6386a684e3e0b47af608217002795dc25c702 | [
"MIT"
] | 303 | 2021-02-22T19:36:25.000Z | 2022-03-31T14:48:15.000Z | np/__init__.py | iyanmv/galois | a5e6386a684e3e0b47af608217002795dc25c702 | [
"MIT"
] | 9 | 2021-03-11T07:40:51.000Z | 2022-03-06T20:13:17.000Z | from . import linalg
from .advanced import *
from .arithmetic import *
from .functions import *
from .linear_algebra import *
| 18.142857 | 29 | 0.76378 | from . import linalg
from .advanced import *
from .arithmetic import *
from .functions import *
from .linear_algebra import *
| 0 | 0 | 0 |
429f28d16735e254ac0ce64ef367c5668302a233 | 1,104 | py | Python | setup.py | Jaimedlrm/entropytriangle | 46076aa6e9e06777df4dcf885cd951afdf1de168 | [
"MIT"
] | 2 | 2019-08-07T07:13:38.000Z | 2019-08-07T07:13:41.000Z | setup.py | Jaimedlrm/entropytriangle | 46076aa6e9e06777df4dcf885cd951afdf1de168 | [
"MIT"
] | null | null | null | setup.py | Jaimedlrm/entropytriangle | 46076aa6e9e06777df4dcf885cd951afdf1de168 | [
"MIT"
] | 1 | 2021-03-13T18:24:14.000Z | 2021-03-13T18:24:14.000Z | from setuptools import setup,find_packages
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
]
with open('README.txt') as file:
long_description = file.read()
with open('requirements.txt') as reqs:
install_requires = reqs.read().splitlines()
print(find_packages())
setup(
name="entropytriangle",
version="1.0.2",
packages= find_packages(),
python_requires='>=3',
install_requires = install_requires,
author="Jaime de los Rios Mouvet",
author_email="jaime.delosriosmouvet@gmail.com",
classifiers=classifiers,
description="Calculation of the entropy triangles",
long_description=long_description,
keywords="Entropy Triangle Information Theory",
license="MIT",
url="https://github.com/Jaimedlrm/entropytriangle",
download_url="https://github.com/Jaimedlrm/entropytriangle.git",
)
from setuptools import setup,find_packages
# Trove classifiers describing the project's maturity, intended audience,
# license, and supported Python versions (see https://pypi.org/classifiers/).
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.6",
]
# The long description shown on PyPI is read verbatim from README.txt.
with open('README.txt') as file:
    long_description = file.read()
# Runtime dependencies: one requirement specifier per line in requirements.txt.
with open('requirements.txt') as reqs:
    install_requires = reqs.read().splitlines()
print(find_packages())  # Debug aid: show which packages will be bundled.
setup(
    name="entropytriangle",
    version="1.0.2",
    packages= find_packages(),
    python_requires='>=3',
    install_requires = install_requires,
    author="Jaime de los Rios Mouvet",
    author_email="jaime.delosriosmouvet@gmail.com",
    classifiers=classifiers,
    description="Calculation of the entropy triangles",
    long_description=long_description,
    keywords="Entropy Triangle Information Theory",
    license="MIT",
    url="https://github.com/Jaimedlrm/entropytriangle",
    download_url="https://github.com/Jaimedlrm/entropytriangle.git",
)
| 0 | 0 | 0 |
56a1c891ca9f0be4f3c633acdb48c8f723bb7a30 | 20,138 | py | Python | CPAC/pipeline/cpac_runner.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | CPAC/pipeline/cpac_runner.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | CPAC/pipeline/cpac_runner.py | danlurie/C-PAC | 5ddc2d4fa71eb13728d6156f73cb6e7621dda69d | [
"BSD-3-Clause"
] | null | null | null | from multiprocessing import Process
import os
from CPAC.utils.utils import create_seeds_, create_group_log_template
from CPAC.utils import Configuration
import yaml
import time
from time import strftime
| 34.961806 | 376 | 0.5942 | from multiprocessing import Process
import os
from CPAC.utils.utils import create_seeds_, create_group_log_template
from CPAC.utils import Configuration
import yaml
import time
from time import strftime
def validate(config_obj):
#check for path lengths
working_dir = config_obj.workingDirectory
try:
if len(working_dir) > 70:
print "\n\n" + "WARNING: Path to working directory should NOT be more than 70 characters."
print "Please update your configuration. Working directory: ", working_dir, "\n\n"
raise Exception
except:
print "\n\n" + "ERROR: Your directories in Output Settings are empty." + "\n" + \
"Error name: cpac_runner_0002" + "\n\n"
raise Exception
def get_vectors(strat):
    """Build the list of '#'-joined option paths for a strategy mapping.

    The values of `strat` (taken in sorted-key order) are consumed from the
    back; each contributes the string form of its option list to the front
    of the growing path.  A value whose first element is a float is reduced
    to a one-element list of that element's string form first.  A `None`
    value prunes the whole branch, yielding no paths.
    """
    paths = []

    # ------------------------------------------------------------------
    def _walk(pending, trail):
        # `pending` holds the value lists not yet folded into `trail`.
        if not pending:
            paths.append(trail)
            return

        remaining = list(pending)
        option = remaining.pop()

        # A None entry for one of the iterables prunes this branch entirely.
        if option is None:
            return

        # Floats are normalized to a single string element so the rendered
        # path does not change across Python/IPython repr differences.
        if isinstance(option[0], float):
            option = [str(option[0])]

        step = str(option)
        _walk(remaining, step if trail == '' else step + '#' + trail)

    ordered_values = [strat[key] for key in sorted(strat.keys())]
    _walk(ordered_values, '')
    return paths
def make_entries(paths, path_iterables):
entries = []
idx = 1
for path in sorted(paths):
sub_entries = []
values = path.split('#')
indx = 0
for value in values:
if '[' or '(' in value:
value = value.strip('[]')
value = value.strip('()')
if ',' in value:
import re
value = re.sub(r',', '.', value)
value = re.sub(r' ', '', value)
sub_entries.append(path_iterables[indx] + '_' + value)
indx += 1
### remove single quote in the paths
sub_entries = map(lambda x: x.replace("'", ""), sub_entries)
print "sub entries: "
print sub_entries
entries.append(sub_entries)
return entries
def build_strategies(configuration):
    """Enumerate every pipeline strategy implied by the configuration.

    Takes the cartesian product (via get_vectors) of the configured tissue
    thresholds, scrubbing thresholds, nuisance-correction selectors, and
    median-angle targets, then converts each combination into a list of
    '<iterable>_<value>' folder-name entries (via make_entries).

    Raises Exception when no strategies can be built.
    """
    import collections  # NOTE(review): unused import.
    ### make paths shorter
    path_iterables = ['_gm_threshold', '_wm_threshold', '_csf_threshold', '_threshold', '_compcor', '_target_angle_deg']
    non_strategy_iterables = ['_fwhm', '_hp', '_lp', '_bandpass_freqs']
    proper_names = {'_threshold':'Scrubbing Threshold = ', '_csf_threshold':'Cerebral Spinal Fluid Threshold = ',
                    '_gm_threshold':'Gray Matter Threshold = ',
                    'nc':'Compcor: Number Of Components = ', '_compcor':'Nuisance Signal Corrections = ',
                    '_target_angle_deg':'Median Angle Correction: Target Angle in Degree = ', '_wm_threshold':'White Matter Threshold = '}
    # NOTE(review): eval() on fixed attribute names is unnecessary here —
    # plain attribute access (configuration.grayMatterThreshold, ...) would do.
    config_iterables = {'_gm_threshold': eval('configuration.grayMatterThreshold'), '_wm_threshold': eval('configuration.whiteMatterThreshold'), '_csf_threshold': eval('configuration.cerebralSpinalFluidThreshold'), '_threshold': eval('configuration.scrubbingThreshold'), '_compcor': eval('configuration.Corrections'), '_target_angle_deg': eval('configuration.targetAngleDeg')}
    """
    path_iterables = ['_gm_threshold', '_wm_threshold', '_csf_threshold', '_threshold', '_compcor', '_target_angle_deg']
    non_strategy_iterables = ['_fwhm', '_hp', '_lp', '_bandpass_freqs']
    proper_names = {'_threshold':'Scrubbing Threshold = ', '_csf_threshold':'Cerebral Spinal Fluid Threshold = ',
                    '_gm_threshold':'Gray Matter Threshold = ',
                    'nc':'Compcor: Number Of Components = ', '_compcor':'Nuisance Signal Corrections = ',
                    '_target_angle_deg':'Median Angle Correction: Traget Angle in Degree = ', '_wm_threshold':'White Matter Threshold = '}
    config_iterables = {'_gm_threshold': eval('configuration.grayMatterThreshold'), '_wm_threshold': eval('configuration.whiteMatterThreshold'), '_csf_threshold': eval('configuration.cerebralSpinalFluidThreshold'), '_threshold': eval('configuration.scrubbingThreshold'), '_compcor': eval('configuration.Corrections'), '_target_angle_deg': eval('configuration.targetAngleDeg')}
    """
    ### This is really dirty code and ordering of corrections in
    ### in output directory is dependant on the nuisance workflow
    ### when the workflow is changed , change this section as well
    corrections_order = ['pc1', 'linear', 'wm', 'global', 'motion', 'quadratic', 'gm', 'compcor', 'csf']
    corrections_dict_list = config_iterables['_compcor']
    print "corrections dictionary list: "
    print corrections_dict_list
    main_all_options = []
    if corrections_dict_list != None:
        # Render every corrections selector as a fixed-order
        # 'name<flag>.name<flag>...' string, combined with each configured
        # number of CompCor components.
        for corrections_dict in corrections_dict_list:
            string = ""
            for correction in corrections_order:
                string += correction + str(corrections_dict[correction]) + '.'
            string = string[0:len(string) -1]  # Drop the trailing '.'.
            cmpcor_components = eval('configuration.nComponents')
            all_options = []
            for comp in cmpcor_components:
                comp = int(comp)
                all_options.append('ncomponents_%d' %comp + '_selector_' + string)
            main_all_options.append(str(str(all_options).strip('[]')).strip('\'\''))
    config_iterables['_compcor'] = main_all_options
    ############
    try:
        paths = get_vectors(config_iterables)
    except:
        print "\n\n" + "ERROR: There are no strategies to build." + "\n" + \
        "Error name: cpac_runner_0003" + "\n\n"
        raise Exception
    strategy_entries = make_entries(paths, sorted(path_iterables))
    print 'strategy_entries: ', strategy_entries, '\n\n'
    return strategy_entries
def run_sge_jobs(c, config_file, strategies_file, subject_list_file, p_name):
    """Write an SGE array-job script (one task per subject) and qsub it.

    The submitted job-array id is written to <outputDirectory>/pid.txt.
    Raises Exception when the subject list cannot be parsed or when qsub's
    output does not contain a job-array id.
    """
    import commands
    from time import strftime
    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        raise Exception ("Subject list is not in proper YAML format. Please check your file")
    shell = commands.getoutput('echo $SHELL')
    # NOTE(review): assumes ./cluster_temp_files already exists — the open()
    # below fails otherwise; confirm the caller creates it.
    temp_files_dir = os.path.join(os.getcwd(), 'cluster_temp_files')
    subject_bash_file = os.path.join(temp_files_dir, 'submit_%s.sge' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    f = open(subject_bash_file, 'w')
    # SGE directives: array task per subject, user-configured queue,
    # parallel environment, and per-run stderr/stdout files.
    print >>f, '#! %s' % shell
    print >>f, '#$ -cwd'
    print >>f, '#$ -S %s' % shell
    print >>f, '#$ -V'
    print >>f, '#$ -t 1-%d' % len(sublist)
    print >>f, '#$ -q %s' % c.queue
    print >>f, '#$ -pe %s %d' % (c.parallelEnvironment, c.numCoresPerSubject)
    print >>f, '#$ -e %s' % os.path.join(temp_files_dir, 'c-pac_%s.err' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    print >>f, '#$ -o %s' % os.path.join(temp_files_dir, 'c-pac_%s.out' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    print >>f, 'source ~/.bashrc'
    # print >>f, "python CPAC.pipeline.cpac_pipeline.py -c ", str(config_file), " -s ", subject_list_file, " -indx $SGE_TASK_ID -strategies ", strategies_file
    # Each array task runs the pipeline for its $SGE_TASK_ID-th subject.
    print >>f, "python -c \"import CPAC; CPAC.pipeline.cpac_pipeline.run(\\\"%s\\\" , \\\"%s\\\", \\\"$SGE_TASK_ID\\\" , \\\"%s\\\", \\\"%s\\\" , \\\"%s\\\", \\\"%s\\\", \\\"%s\\\") \" " % (str(config_file), \
    subject_list_file, strategies_file, c.maskSpecificationFile, c.roiSpecificationFile, c.templateSpecificationFile, p_name)
    f.close()
    commands.getoutput('chmod +x %s' % subject_bash_file )
    p = open(os.path.join(c.outputDirectory, 'pid.txt'), 'w')
    out = commands.getoutput('qsub %s ' % (subject_bash_file))
    import re
    # qsub reports "Your job-array <id> ..." on success; anything else means
    # the submission failed.
    if re.search("(?<=Your job-array )\d+", out) == None:
        print "Error: Running of 'qsub' command in terminal failed. Please troubleshoot your SGE configuration with your system administrator and then try again."
        print "The command run was: qsub %s" % subject_bash_file
        raise Exception
    pid = re.search("(?<=Your job-array )\d+", out).group(0)
    print >> p, pid
    p.close()
def run_condor_jobs(c, config_file, strategies_file, subject_list_file, p_name):
    """Write an HTCondor submit-description file (one queued job per
    subject) and submit it with condor_submit.

    Raises Exception when the subject list cannot be parsed.
    """
    import commands
    from time import strftime
    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        raise Exception ("Subject list is not in proper YAML format. Please check your file")
    # NOTE(review): assumes ./cluster_temp_files already exists — the open()
    # below fails otherwise; confirm the caller creates it.
    temp_files_dir = os.path.join(os.getcwd(), 'cluster_temp_files')
    subject_bash_file = os.path.join(temp_files_dir, 'submit_%s.condor' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    f = open(subject_bash_file, 'w')
    print >>f, "Executable = /usr/bin/python"
    print >>f, "Universe = vanilla"
    print >>f, "transfer_executable = False"
    print >>f, "getenv = True"
    print >>f, "log = %s" % os.path.join(temp_files_dir, 'c-pac_%s.log' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    # NOTE(review): the subject list was already loaded above; this second
    # yaml.load is redundant.
    sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    # One error/output/arguments/queue stanza per subject index (1-based).
    for sidx in range(1,len(sublist)+1):
        print >>f, "error = %s" % os.path.join(temp_files_dir, 'c-pac_%s.%s.err' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))
        print >>f, "output = %s" % os.path.join(temp_files_dir, 'c-pac_%s.%s.out' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))
        print >>f, "arguments = \"-c 'import CPAC; CPAC.pipeline.cpac_pipeline.run( ''%s'',''%s'',''%s'',''%s'', ''%s'',''%s'',''%s'',''%s'')\'\"" % (str(config_file), subject_list_file, str(sidx), strategies_file, c.maskSpecificationFile, c.roiSpecificationFile, c.templateSpecificationFile, p_name)
        print >>f, "queue"
    f.close()
    #commands.getoutput('chmod +x %s' % subject_bash_file )
    print commands.getoutput("condor_submit %s " % (subject_bash_file))
def run_pbs_jobs(c, config_file, strategies_file, subject_list_file, p_name):
    """Write a PBS/Torque array-job script (one task per subject).

    Raises Exception when the subject list cannot be parsed.
    NOTE(review): unlike run_sge_jobs, the actual submission (qsub) is
    commented out at the end — this function only writes the script.
    """
    import commands
    from time import strftime
    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        raise Exception ("Subject list is not in proper YAML format. Please check your file")
    # NOTE(review): assumes ./cluster_temp_files already exists — the open()
    # below fails otherwise; confirm the caller creates it.
    temp_files_dir = os.path.join(os.getcwd(), 'cluster_temp_files')
    shell = commands.getoutput('echo $SHELL')
    subject_bash_file = os.path.join(temp_files_dir, 'submit_%s.pbs' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    f = open(subject_bash_file, 'w')
    # PBS directives: array task per subject, user-configured queue,
    # cores per subject, and per-run stderr/stdout files.
    print >>f, '#! %s' % shell
    print >>f, '#PBS -S %s' % shell
    print >>f, '#PBS -V'
    print >>f, '#PBS -t 1-%d' % len(sublist)
    print >>f, '#PBS -q %s' % c.queue
    print >>f, '#PBS -l nodes=1:ppn=%d' % c.numCoresPerSubject
    print >>f, '#PBS -e %s' % os.path.join(temp_files_dir, 'c-pac_%s.err' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    print >>f, '#PBS -o %s' % os.path.join(temp_files_dir, 'c-pac_%s.out' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    print >>f, 'source ~/.bashrc'
    # Each array task runs the pipeline for its ${PBS_ARRAYID}-th subject.
    print >>f, "python -c \"import CPAC; CPAC.pipeline.cpac_pipeline.run(\\\"%s\\\",\\\"%s\\\",\\\"${PBS_ARRAYID}\\\",\\\"%s\\\", \\\"%s\\\" , \\\"%s\\\", \\\"%s\\\", \\\"%s\\\") \" " % (str(config_file), \
    subject_list_file, strategies_file, c.maskSpecificationFile, c.roiSpecificationFile, c.templateSpecificationFile, p_name)
    # print >>f, "python -c \"import CPAC; CPAC.pipeline.cpac_pipeline.py -c %s -s %s -indx ${PBS_ARRAYID} -strategies %s \" " %(str(config_file), subject_list_file, strategies_file)
    #print >>f, "python CPAC.pipeline.cpac_pipeline.py -c ", str(config_file), "-s ", subject_list_file, " -indx ${PBS_ARRAYID} -strategies ", strategies_file
    f.close()
    commands.getoutput('chmod +x %s' % subject_bash_file )
    #logger.info(commands.getoutput('qsub %s ' % (subject_bash_file)))
def append_seeds_to_file(working_dir, seed_list, seed_file):
    """Append any seeds from seed_list that are missing from seed_file.

    Returns the path of the file the seeds ended up in: seed_file itself
    when it could be appended to, otherwise a fresh temp file created in
    working_dir containing all of seed_list (also used when nothing new
    needs appending or any error occurs while reading/writing seed_file).
    """
    existing_seeds = []
    filtered_list = []
    try:
        if os.path.isfile(seed_file):
            # Collect the seeds already present in the file.
            # NOTE(review): the filter uses 'and', so a line is excluded only
            # if it both starts with '#' AND equals '\n' — i.e. never; this
            # probably was meant to be 'or'. Confirm before changing.
            existing_seeds += [line.rstrip('\r\n') for line in open(seed_file, 'r').readlines() if not (line.startswith('#') and line == '\n')]
        # Keep only the seeds that are not in the file yet.
        for seed in seed_list:
            if not seed in existing_seeds:
                filtered_list.append(seed)
        if not len(filtered_list) == 0:
            f = open(seed_file, 'a')
            for seed in filtered_list:
                f.write("%s\n" % seed)
            f.close()
            return seed_file
        else:
            # Nothing new to append: deliberately fall through to the
            # temp-file path below via the bare except.
            raise
    except:
        #make tempfile and add seeds to it
        import tempfile
        try:
            if not os.path.exists(working_dir):
                os.makedirs(working_dir)
        except Exception, e:
            print 'error encountered : ', e
            raise
        some_number, f_name = tempfile.mkstemp(suffix='.txt', prefix='temp_roi_seeds', dir=working_dir, text=True)
        f_handle = open(f_name, 'w')
        for seed in seed_list:
            f_handle.write('%s\n' % seed)
        f_handle.close()
        return f_name
def run(config_file, subject_list_file, p_name = None):
    """Top-level C-PAC runner: execute the pipeline for every subject.

    Loads the pipeline configuration and subject list (both YAML), builds
    the sorted strategy list, optionally appends generated seed ROIs to
    the relevant specification files, then either forks one
    multiprocessing.Process per subject locally or pickles the strategies
    and delegates submission to an SGE/PBS/Condor helper.

    Parameters
    ----------
    config_file : str
        Path to the pipeline configuration YAML file.
    subject_list_file : str
        Path to the subject list YAML file.
    p_name : str, optional
        Pipeline name; note it is unconditionally overwritten by
        ``c.pipelineName`` below.

    NOTE(review): Python 2 code (print statements, ``commands`` module).
    """
    # Import packages
    import time
    # take date+time stamp for run identification purposes
    unique_pipeline_id = strftime("%Y%m%d%H%M%S")
    pipeline_start_stamp = strftime("%Y-%m-%d_%H:%M:%S")
    # Load the pipeline configuration; a missing file and an unparsable
    # file are reported as distinct errors.
    try:
        if not os.path.exists(config_file):
            raise IOError
        else:
            c = Configuration(yaml.load(open(os.path.realpath(config_file), 'r')))
    except IOError:
        print "config file %s doesn't exist" % config_file
        raise
    except Exception:
        print "Error reading config file - %s" % config_file
        raise Exception
    #do some validation
    validate(c)
    # get the pipeline name (overrides the p_name argument)
    p_name = c.pipelineName
    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        print "Subject list is not in proper YAML format. Please check your file"
        raise Exception
    # NOTE: strategies list is only needed in cpac_pipeline prep_workflow for
    # creating symlinks
    strategies = sorted(build_strategies(c))
    print "strategies ---> "
    print strategies
    sub_scan_map ={}
    print "subject list: "
    print sublist
    # Build "<subject_id>[_<unique_id>]" -> scan-id list for the group log.
    try:
        for sub in sublist:
            if sub['unique_id']:
                s = sub['subject_id']+"_" + sub["unique_id"]
            else:
                s = sub['subject_id']
            scan_ids = ['scan_anat']
            for id in sub['rest']:
                scan_ids.append('scan_'+ str(id))
            sub_scan_map[s] = scan_ids
    except:
        print "\n\n" + "ERROR: Subject list file not in proper format - check if you loaded the correct file?" + "\n" + \
            "Error name: cpac_runner_0001" + "\n\n"
        raise Exception
    create_group_log_template(sub_scan_map, os.path.join(c.outputDirectory, 'logs'))
    # Optionally create seed ROI files and append them to whichever
    # specification files the enabled analyses will read.
    seeds_created = []
    if not (c.seedSpecificationFile is None):
        try:
            if os.path.exists(c.seedSpecificationFile):
                seeds_created = create_seeds_(c.seedOutputLocation, c.seedSpecificationFile, c.FSLDIR)
                print 'seeds created %s -> ' % seeds_created
        except:
            raise IOError('Problem in seedSpecificationFile')
    if 1 in c.runVoxelTimeseries:
        if 'roi_voxelwise' in c.useSeedInAnalysis:
            c.maskSpecificationFile = append_seeds_to_file(c.workingDirectory, seeds_created, c.maskSpecificationFile)
    if 1 in c.runROITimeseries:
        if 'roi_average' in c.useSeedInAnalysis:
            c.roiSpecificationFile = append_seeds_to_file(c.workingDirectory, seeds_created, c.roiSpecificationFile)
    if 1 in c.runSCA:
        if 'roi_average' in c.useSeedInAnalysis:
            c.roiSpecificationFileForSCA = append_seeds_to_file(c.workingDirectory, seeds_created, c.roiSpecificationFileForSCA)
    if 1 in c.runNetworkCentrality:
        if 'centrality_outputs_smoothed' in c.useSeedInAnalysis:
            c.templateSpecificationFile = append_seeds_to_file(c.workingDirectory, seeds_created, c.templateSpecificationFile)
    # Timing info handed to every per-subject workflow.
    pipeline_timing_info = []
    pipeline_timing_info.append(unique_pipeline_id)
    pipeline_timing_info.append(pipeline_start_stamp)
    pipeline_timing_info.append(len(sublist))
    if not c.runOnGrid:
        # Import packages
        from CPAC.pipeline.cpac_pipeline import prep_workflow
        # Init variables: one process per subject, started lazily below.
        procss = [Process(target=prep_workflow,
                          args=(sub, c, strategies, 1,
                                pipeline_timing_info, p_name)) \
                  for sub in sublist]
        pid = open(os.path.join(c.outputDirectory, 'pid.txt'), 'w')
        # Init job queue
        jobQueue = []
        # If we're allocating more processes than are subjects, run them all
        if len(sublist) <= c.numSubjectsAtOnce:
            """
            Stream all the subjects as sublist is
            less than or equal to the number of
            subjects that need to run
            """
            for p in procss:
                p.start()
                print >>pid,p.pid
        # Otherwise manage resources to run processes incrementally
        else:
            """
            Stream the subject workflows for preprocessing.
            At Any time in the pipeline c.numSubjectsAtOnce
            will run, unless the number remaining is less than
            the value of the parameter stated above
            """
            idx = 0
            while(idx < len(sublist)):
                # If the job queue is empty and we haven't started indexing
                if len(jobQueue) == 0 and idx == 0:
                    # Init subject process index
                    idc = idx
                    # Launch processes (one for each subject)
                    for p in procss[idc: idc + c.numSubjectsAtOnce]:
                        p.start()
                        print >>pid,p.pid
                        jobQueue.append(p)
                        idx += 1
                # Otherwise, jobs are running - check them
                else:
                    # Check every job in the queue's status
                    for job in jobQueue:
                        # If the job is not alive
                        if not job.is_alive():
                            # Find job and delete it from queue
                            print 'found dead job ', job
                            loc = jobQueue.index(job)
                            del jobQueue[loc]
                            # ...and start the next available process
                            # (subject). NOTE(review): no bounds check on
                            # idx — if several jobs finish in one sweep
                            # with fewer subjects left to start,
                            # procss[idx] raises IndexError.
                            procss[idx].start()
                            # Append this to job queue and increment index
                            jobQueue.append(procss[idx])
                            idx += 1
                    # Add sleep so while loop isn't consuming 100% of CPU
                    time.sleep(2)
        pid.close()
    else:
        # Grid execution: persist the strategies for the worker nodes and
        # hand off to the matching resource-manager submitter.
        import commands
        import pickle
        temp_files_dir = os.path.join(os.getcwd(), 'cluster_temp_files')
        print commands.getoutput("mkdir -p %s" % temp_files_dir)
        strategies_file = os.path.join(temp_files_dir, 'strategies.obj')
        f = open(strategies_file, 'w')
        pickle.dump(strategies, f)
        f.close()
        if 'sge' in c.resourceManager.lower():
            run_sge_jobs(c, config_file, strategies_file, subject_list_file, p_name)
        elif 'pbs' in c.resourceManager.lower():
            run_pbs_jobs(c, config_file, strategies_file, subject_list_file, p_name)
        elif 'condor' in c.resourceManager.lower():
            run_condor_jobs(c, config_file, strategies_file, subject_list_file, p_name)
| 19,710 | 0 | 207 |
063699326fe17c0b5498a656ff0dc5e99dadd27d | 841 | py | Python | novice/03-03/kasus/app.py | blackjokie/praxis-academy | 594d24fcf18cea4f76c9889030eba4aa3f834b7a | [
"MIT"
] | 2 | 2019-08-11T16:58:04.000Z | 2019-08-27T17:01:40.000Z | novice/03-03/kasus/app.py | blackjokie/praxis-academy | 594d24fcf18cea4f76c9889030eba4aa3f834b7a | [
"MIT"
] | null | null | null | novice/03-03/kasus/app.py | blackjokie/praxis-academy | 594d24fcf18cea4f76c9889030eba4aa3f834b7a | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request
import pymysql as asu
app = Flask(__name__)
@app.route('/')
if __name__ == '__main__':
app.run(debug = True) | 32.346154 | 154 | 0.62069 | from flask import Flask, render_template, request
import pymysql as asu
app = Flask(__name__)
@app.route('/')
def list():
    """Render ``template.html`` with the movies of member 'Janet Jones'.

    NOTE(review): the view name shadows the builtin ``list``; it is kept
    because renaming would change the Flask endpoint name.
    """
    # Open database connection (placeholders are filled in by the deployer).
    db = asu.connect("<hostname>", "<username>", "<password>", "<db_name>")
    try:
        cursor = db.cursor()
        # Join members to their movies for a single, fixed member name.
        sql = "SELECT member.f_name, movie.n_movie from member INNER JOIN movie ON member.id_member = movie.id_member WHERE member.f_name = 'Janet Jones'"
        cursor.execute(sql)
        # Fetch all the rows in a list of lists.
        isi = cursor.fetchall()
    finally:
        # Bug fix: the connection was previously never closed, leaking one
        # DB connection per request.
        db.close()
    return render_template('template.html', rows=isi)
if __name__ == '__main__':
app.run(debug = True) | 654 | 0 | 22 |
c5c6acefdfda2711c271d081ecd0efdbac1c3611 | 8,637 | py | Python | lenspack/utils.py | sfarrens/lenspack | b7a8d6dfd8a1fd4d026a16c0f9f447964d3f0581 | [
"MIT"
] | 6 | 2019-11-06T15:51:52.000Z | 2021-07-11T14:37:14.000Z | lenspack/utils.py | sfarrens/lenspack | b7a8d6dfd8a1fd4d026a16c0f9f447964d3f0581 | [
"MIT"
] | 1 | 2021-06-14T13:40:48.000Z | 2021-06-14T13:40:48.000Z | lenspack/utils.py | sfarrens/lenspack | b7a8d6dfd8a1fd4d026a16c0f9f447964d3f0581 | [
"MIT"
] | 5 | 2019-11-14T15:32:46.000Z | 2022-01-03T15:25:45.000Z | # -*- coding: utf-8 -*-
"""UTILS MODULE
This module contains utility functions globally available to lenspack.
"""
import numpy as np
from astropy.units.core import Unit
from astropy.constants import G as G_newton
from astropy.constants import c as c_light
def round_up_to_odd(x):
    """Round up to the nearest odd integer.

    ``ceil(x) // 2 * 2`` is the even floor of ``ceil(x)``; adding 1 yields
    the smallest odd integer at or above ``x``.
    """
    even_floor = np.ceil(x) // 2 * 2
    return (even_floor + 1).astype(int)
def convert_units(x, target):
    """Convert or attach units to a variable.

    Parameters
    ----------
    x : float or astropy.units.quantity.Quantity
        Quantity to convert. A bare number simply has `target` attached.
    target : str
        Target units given as an acceptable astropy.units string (e.g. 'km').

    Returns
    -------
    astropy.units.quantity.Quantity
        `x` expressed in `target` units.

    Raises
    ------
    Exception
        If the conversion fails (e.g. incompatible units); such errors
        propagate unchanged.

    Examples
    --------
    >>> convert_units(5, 'kpc')
    <Quantity 5. kpc>
    >>> x = 4e14
    >>> x = convert_units(x, 'solMass')
    >>> convert_units(x, 'kg')
    <Quantity 7.95390166e+44 kg>
    """
    try:
        x = x.to(Unit(target))
    except AttributeError:
        # A plain number has no `.to` method: attach the units instead.
        x = x * Unit(target)
    # Fix: the former `except Exception as e: raise` clause was a no-op
    # re-raise with an unused binding and has been removed; any other
    # conversion error still propagates exactly as before.
    return x
def sigma_critical(zl, zs, cosmology):
    """Critical surface mass density between a lens and source galaxy(-ies).

    Sigma_critical = [c^2 / (4 * pi * G)] * D_os / (D_ol * D_ls)

    Angular diameter distances D are calculated in a universe specified by
    an instance of astropy.cosmology.core.Cosmology.

    Parameters
    ----------
    zl : float
        Redshift of the lens.
    zs : array_like
        Redshift(s) of the source galaxies.
    cosmology : astropy.cosmology.core.Cosmology
        Cosmological model.

    Returns
    -------
    astropy.units.quantity.Quantity
        Critical surface mass density between a lens (i.e. cluster or DM
        halo) and each source redshift in units of solar masses per square
        parsec. For sources at the redshift of the halo and below,
        Sigma_critical is set to np.inf.

    TODO
    ----
    Include the option for source redshift probability distributions.
    """
    # Ensure vectorization
    zs = np.atleast_1d(zs).astype(float)
    assert (zs >= 0).all(), "Redshifts must be positive."
    # Fix: removed the dead `result = np.zeros_like(zs)` pre-allocation —
    # it was overwritten unconditionally below.

    # Compute angular diameter distances observer-lens, observer-source,
    # and lens-source.
    d_ol = cosmology.angular_diameter_distance(zl)
    d_os = cosmology.angular_diameter_distance(zs)
    d_ls = cosmology.angular_diameter_distance_z1z2(zl, zs)

    # Avoid division by zero for sources exactly at the lens redshift.
    d_ls[d_ls == 0] = np.inf

    # Compute Sigma_crit
    factor = np.power(c_light, 2) / (4 * np.pi * G_newton)
    result = factor * d_os / (d_ol * d_ls)

    # Sources at lower z than the halo are not lensed
    result[result <= 0] = np.inf

    # Unwrap scalar input
    if len(zs) == 1:
        result = result[0]
    return convert_units(result, "solMass / pc2")
def bin2d(x, y, npix=10, v=None, w=None, extent=None, verbose=False):
    """Bin scattered samples of a spatially varying quantity onto a grid.

    Positions (`x`, `y`) are histogrammed onto a regular 2D grid. Without
    `v`, the per-pixel sample count is returned. With `v` (one or several
    value arrays), the (optionally `w`-weighted) mean of the values
    falling in each pixel is returned, one map per input array.

    Parameters
    ----------
    x, y : array_like
        1D position arrays.
    npix : int or list or tuple as (nx, ny), optional
        Number of bins per axis; an int N means (N, N). Default is 10.
    v : array_like, optional
        Value array(s) at positions (`x`, `y`); several arrays
        (v1, v2, ...) of len(`x`) may be given to bin simultaneously.
        If None, the per-pixel bin count is returned.
    w : array_like, optional
        Averaging weights, shared by every input `v` array.
    extent : array_like, optional
        Grid boundaries as (xmin, xmax, ymin, ymax); defaults to the
        min/max of the position arrays.
    verbose : bool, optional
        If True, print details of the binning.

    Returns
    -------
    ndarray or tuple of ndarray
        2D map(s) of binned values, one output per input `v` array.
    """
    # np.histogram2d expects the range as ((xmin, xmax), (ymin, ymax)).
    if extent is not None:
        assert len(extent) == 4
        extent = [extent[:2], extent[2:]]

    if v is None:
        # No values supplied: the map is simply the per-pixel sample count.
        counts, xbins, ybins = np.histogram2d(x, y, bins=npix, range=extent)
        result = counts.T
    else:
        # Normalize `v` into a 2D stack of value arrays.
        values = np.atleast_1d(v)
        if values.ndim == 1:
            values = values.reshape(1, -1)
        has_weights = w is not None
        weights = np.atleast_1d(w) if has_weights else np.ones_like(x)
        # Total weight per pixel; empty pixels become inf so the division
        # below yields 0 there instead of a divide-by-zero.
        wmap, xbins, ybins = np.histogram2d(x, y, bins=npix, range=extent,
                                            weights=weights)
        wmap[wmap == 0] = np.inf
        maps = []
        for vv in values:
            vsum = np.histogram2d(x, y, bins=npix, range=extent,
                                  weights=(vv * weights))[0]
            maps.append((vsum / wmap).T)
        result = tuple(maps)
        # A single value array comes back as a bare map, not a 1-tuple.
        if len(result) == 1:
            result = result[0]

    if verbose:
        if v is not None:
            print("Binning {} array{} with{} weights.".format(len(values),
                  ['', 's'][(len(values) > 1)], ['out', ''][has_weights]))
        else:
            print("Returning bin count map.")
        print("npix : {}".format(npix))
        print("extent : {}".format([xbins[0], xbins[-1], ybins[0], ybins[-1]]))
        print("(dx, dy) : ({}, {})".format(xbins[1] - xbins[0],
                                           ybins[1] - ybins[0]))

    return result
def radius2d(N, center=None, mode='exact'):
    """Distance of every pixel from a fixed center in an (N, N) grid.

    Parameters
    ----------
    N : int
        Number of pixels to a side.
    center : array_like, optional
        Integer indices (x0, y0) of the central pixel; when given, `mode`
        is ignored. Non-integer centers are not supported.
    mode : {'exact', 'fft'}
        Center convention for even N when `center` is None: 'exact' uses
        the true fractional center (N - 1) / 2, while 'fft' follows the
        numpy.fft.fftfreq convention and uses N / 2.

    Returns
    -------
    numpy array
        2D matrix of distances.
    """
    N = int(N)
    assert mode in ('exact', 'fft'), "Mode must be either 'exact' or 'fft'."

    # Index grids along both axes.
    xi, yi = np.indices((N, N))

    # Resolve the center coordinates.
    if center is not None:
        x0, y0 = (int(val) for val in center)
    elif mode == 'fft' and N % 2 == 0:
        x0 = y0 = N / 2.
    else:
        x0 = y0 = (N - 1) / 2.

    return np.hypot(xi - x0, yi - y0)
| 31.180505 | 79 | 0.555749 | # -*- coding: utf-8 -*-
"""UTILS MODULE
This module contains utility functions globally available to lenspack.
"""
import numpy as np
from astropy.units.core import Unit
from astropy.constants import G as G_newton
from astropy.constants import c as c_light
def round_up_to_odd(x):
"""Round up to the nearest odd integer."""
return (np.ceil(x) // 2 * 2 + 1).astype(int)
def convert_units(x, target):
"""Convert or attach units to a variable.
Parameters
----------
x : float
Quantity to convert.
target : str
Target units given as an acceptable astropy.units string (e.g. 'km').
Raises
------
Exception
If the conversion fails.
Examples
--------
>>> conv(5, 'kpc')
<Quantity 5. kpc>
>>> x = 4e14
>>> x = conv(x, 'solMass')
>>> conv(x, 'kg')
<Quantity 7.95390166e+44 kg>
"""
try:
x = x.to(Unit(target))
except AttributeError:
x = x * Unit(target)
except Exception as e:
raise
return x
def sigma_critical(zl, zs, cosmology):
"""Critical surface mass density between a lens and source galaxy(-ies).
Sigma_critical = [c^2 / (4 * pi * G)] * D_os / (D_ol * D_ls)
Angular diameter distances D are calculated in a universe specified by
an instance of astropy.cosmology.core.Cosmology.
Parameters
----------
zl : float
Redshift of the lens.
zs : array_like
Redshift(s) of the source galaxies.
cosmology : astropy.cosmology.core.Cosmology
Cosmological model.
Returns
-------
astropy.units.quantity.Quantity
Critical surface mass density between a lens (i.e. cluster or DM halo)
and each source redshift in units of solar masses per square parsec.
For sources at the redshift of the halo and below, Sigma_critical is
set to np.inf.
Examples
--------
...
TODO
----
Include the option for source redshift probability distributions.
"""
# Ensure vectorization
zs = np.atleast_1d(zs).astype(float)
assert (zs >= 0).all(), "Redshifts must be positive."
result = np.zeros_like(zs)
# Compute distances
d_ol = cosmology.angular_diameter_distance(zl)
d_os = cosmology.angular_diameter_distance(zs)
d_ls = cosmology.angular_diameter_distance_z1z2(zl, zs)
# Avoid division by zero
d_ls[d_ls == 0] = np.inf
# Compute Sigma_crit
factor = np.power(c_light, 2) / (4 * np.pi * G_newton)
result = factor * d_os / (d_ol * d_ls)
# Sources at lower z than the halo are not lensed
result[result <= 0] = np.inf
# Clean up
if len(zs) == 1:
result = result[0]
return convert_units(result, "solMass / pc2")
def bin2d(x, y, npix=10, v=None, w=None, extent=None, verbose=False):
"""Bin samples of a spatially varying quantity according to position.
The (weighted) average is taken of values falling into the same bin. This
function is relatively general, but it is mainly used within this package
to produce maps of the two components of shear from a galaxy catalog.
Parameters
----------
x, y : array_like
1D position arrays.
npix : int or list or tuple as (nx, ny), optional
Number of bins in the `x` and `y` directions. If an int N is given,
use (N, N). Binning defaults to (10, 10) if not provided.
v : array_like, optional
Values at positions (`x`, `y`). This can be given as many arrays
(v1, v2, ...) of len(`x`) to bin simultaneously. If None, the bin
count in each pixel is returned.
w : array_like, optional
Weights for `v` during averaging. If provided, the same weights are
applied to each input `v`.
extent : array_like, optional
Boundaries of the resulting grid, given as (xmin, xmax, ymin, ymax).
If None, bin edges are set as the min/max coordinate values of the
input position arrays.
verbose : boolean, optional
If True, print details of the binning.
Returns
-------
ndarray or tuple of ndarray
2D numpy arrays of values `v` binned into pixels. The number of
outputs matches the number of input `v` arrays.
Examples
--------
>>> # 100 values at random positions within the ranges -0.5 < x, y < 0.5
>>> # and binned within -1 < x, y < 1 to a (5, 5) grid.
>>> x = np.random.random(100) - 0.5
>>> y = np.random.random(100) - 0.5
>>> v = np.random.randn(100) * 5
>>> bin2d(x, y, v=v, npix=5, extent=(-1, 1, -1, 1))
array([[ 0. , 0. , 0. , 0. , 0. ],
[ 0. , 4.43560619, -2.33308373, 0.48447844, 0. ],
[ 0. , 1.94903524, -0.29253335, 1.3694618 , 0. ],
[ 0. , -1.0202718 , 0.37112266, -1.43062585, 0. ],
[ 0. , 0. , 0. , 0. , 0. ]])
"""
# Regroup extent if necessary
if extent is not None:
assert len(extent) == 4
extent = [extent[:2], extent[2:]]
if v is None:
# Return the simple bin count map
bincount, xbins, ybins = np.histogram2d(x, y, bins=npix, range=extent)
result = bincount.T
else:
# Prepare values to bin
v = np.atleast_1d(v)
if len(v.shape) == 1:
v = v.reshape(1, len(v))
# Prepare weights
if w is not None:
w = np.atleast_1d(w)
has_weights = True
else:
w = np.ones_like(x)
has_weights = False
# Compute weighted bin count map
wmap, xbins, ybins = np.histogram2d(x, y, bins=npix, range=extent,
weights=w)
# Handle division by zero (i.e., empty pixels)
wmap[wmap == 0] = np.inf
# Compute mean values per pixel
result = tuple((np.histogram2d(x, y, bins=npix, range=extent,
weights=(vv * w))[0] / wmap).T for vv in v)
# Clean up
if len(result) == 1:
result = result[0]
if verbose:
if v is not None:
print("Binning {} array{} with{} weights.".format(len(v),
['', 's'][(len(v) > 1)], ['out', ''][has_weights]))
else:
print("Returning bin count map.")
print("npix : {}".format(npix))
print("extent : {}".format([xbins[0], xbins[-1], ybins[0], ybins[-1]]))
print("(dx, dy) : ({}, {})".format(xbins[1] - xbins[0],
ybins[1] - ybins[0]))
return result
def radius2d(N, center=None, mode='exact'):
"""Distances from every pixel to a fixed center in a square matrix.
Parameters
----------
N : int
Number of pixels to a side.
center : array_like, optional
Incides of the central pixel, given as (x0, y0). If not given, the
center is taken to be (N / 2, N / 2) (though see `mode` description).
mode : {'exact', 'fft'}
How to treat the case when N is even. If 'exact', compute distances
from the true (fractional) central pixel location. If 'fft', use the
numpy.fft.fftfreq convention such that the central pixel location
is rounded up to the nearest integer.
Returns
-------
numpy array
2D matrix of distances.
Notes
-----
Non-integer center coordinates are not supported. If a `center` is
provided, `mode` is ignored.
Examples
--------
>>> radius2d(4, mode='exact')
array([[ 2.12132034, 1.58113883, 1.58113883, 2.12132034],
[ 1.58113883, 0.70710678, 0.70710678, 1.58113883],
[ 1.58113883, 0.70710678, 0.70710678, 1.58113883],
[ 2.12132034, 1.58113883, 1.58113883, 2.12132034]])
>>> radius2d(4, mode='fft')
array([[ 2.82842712, 2.23606798, 2. , 2.23606798],
[ 2.23606798, 1.41421356, 1. , 1.41421356],
[ 2. , 1. , 0. , 1. ],
[ 2.23606798, 1.41421356, 1. , 1.41421356]])
"""
# Verify inputs
N = int(N)
assert mode in ('exact', 'fft'), "Mode must be either 'exact' or 'fft'."
# Generate index grids
x, y = np.indices((N, N))
# Determine center
if center is not None:
x0, y0 = map(int, center)
else:
if mode == 'fft' and N % 2 == 0:
x0 = N / 2.
y0 = N / 2.
else:
x0 = (N - 1) / 2.
y0 = (N - 1) / 2.
# Compute radii
return np.hypot(x - x0, y - y0)
| 0 | 0 | 0 |
e02fae9ac7476c2c210c7b0dfbe9721cf2ee01de | 4,450 | py | Python | test/models/test_general.py | HansBug/pyspj | ed776cf7d2d1766ee4c2152221d1d3dbdd18d93a | [
"Apache-2.0"
] | null | null | null | test/models/test_general.py | HansBug/pyspj | ed776cf7d2d1766ee4c2152221d1d3dbdd18d93a | [
"Apache-2.0"
] | null | null | null | test/models/test_general.py | HansBug/pyspj | ed776cf7d2d1766ee4c2152221d1d3dbdd18d93a | [
"Apache-2.0"
] | null | null | null | import pytest
from pyspj.models import load_result, SimpleSPJResult, ContinuitySPJResult, ResultType
@pytest.mark.unittest
| 52.352941 | 114 | 0.644944 | import pytest
from pyspj.models import load_result, SimpleSPJResult, ContinuitySPJResult, ResultType
@pytest.mark.unittest
class TestModelsGeneral:
    """Tests for ``load_result`` coercion and ``ResultType`` parsing."""

    def test_simple(self):
        """load_result without a forced type accepts SimpleSPJResult-shaped input."""
        result = SimpleSPJResult(True, '123', '12345')
        # Instances and their JSON form round-trip unchanged.
        assert load_result(result) == result
        assert load_result(result.to_json()) == result
        # Bare bools / tuples are promoted; None maps to a failed result.
        assert load_result((True,)) == SimpleSPJResult(True, )
        assert load_result(True) == SimpleSPJResult(True, )
        assert load_result(None) == SimpleSPJResult(False, )
        assert load_result((True, '123')) == SimpleSPJResult(True, '123')
        assert load_result((True, '123', '12345')) == result

    def test_continuity(self):
        """Tuples whose head is (success, score) load as ContinuitySPJResult."""
        result = ContinuitySPJResult(True, 0.5, '123', '12345')
        assert load_result(result) == result
        assert load_result(result.to_json()) == result
        assert load_result(((True, 0.5),)) == ContinuitySPJResult(True, 0.5)
        assert load_result(((True, 0.5), '123')) == ContinuitySPJResult(True, 0.5, '123')
        assert load_result(((True, 0.5), '123', '12345')) == result

    def test_simple_force(self):
        """Forcing 'simple' keeps simple results and downcasts continuity ones."""
        result = SimpleSPJResult(True, '123', '12345')
        assert load_result(result, 'simple') == result
        assert load_result(result.to_json(), 'simple') == result
        assert load_result((True,), 'simple') == SimpleSPJResult(True, )
        assert load_result(True, 'simple') == SimpleSPJResult(True, )
        assert load_result(None, 'simple') == SimpleSPJResult(False, )
        assert load_result((True, '123'), 'simple') == SimpleSPJResult(True, '123')
        assert load_result((True, '123', '12345'), 'simple') == result
        # Downcast: the continuity score is dropped.
        result = ContinuitySPJResult(True, 0.5, '123', '12345')
        assert load_result(result, 'simple') == SimpleSPJResult(True, '123', '12345')
        assert load_result(result.to_json(), 'simple') == SimpleSPJResult(True, '123', '12345')
        assert load_result(((True, 0.5),), 'simple') == SimpleSPJResult(True)
        assert load_result(((True, 0.5), '123'), 'simple') == SimpleSPJResult(True, '123')
        assert load_result(((True, 0.5), '123', '12345'), 'simple') == SimpleSPJResult(True, '123', '12345')

    def test_continuity_force(self):
        """Forcing 'continuity' upcasts simple results with a 0.0 score."""
        result = SimpleSPJResult(True, '123', '12345')
        assert load_result(result, 'continuity') == ContinuitySPJResult(True, 0.0, '123', '12345')
        assert load_result(result.to_json(), 'continuity') == ContinuitySPJResult(True, 0.0, '123', '12345')
        assert load_result((True,), 'continuity') == ContinuitySPJResult(True, 0.0)
        assert load_result(True, 'continuity') == ContinuitySPJResult(True, 0.0)
        assert load_result(None, 'continuity') == ContinuitySPJResult(False, 0.0, )
        assert load_result((True, '123'), 'continuity') == ContinuitySPJResult(True, 0.0, '123')
        assert load_result((True, '123', '12345'), 'continuity') == ContinuitySPJResult(True, 0.0, '123', '12345')
        # Continuity input is preserved as-is.
        result = ContinuitySPJResult(True, 0.5, '123', '12345')
        assert load_result(result, 'continuity') == result
        assert load_result(result.to_json(), 'continuity') == result
        assert load_result(((True, 0.5),), 'continuity') == ContinuitySPJResult(True, 0.5)
        assert load_result(((True, 0.5), '123'), 'continuity') == ContinuitySPJResult(True, 0.5, '123')
        assert load_result(((True, 0.5), '123', '12345'), 'continuity') == result

    def test_invalid(self):
        """Empty or oversized tuples are rejected with ValueError."""
        with pytest.raises(ValueError):
            assert load_result(())
        with pytest.raises(ValueError):
            assert load_result((1, 2, 3, 4))

    def test_result_type(self):
        """ResultType.loads accepts enum members, names, and ordinals."""
        assert ResultType.loads(ResultType.FREE) == ResultType.FREE
        assert ResultType.loads(ResultType.SIMPLE) == ResultType.SIMPLE
        assert ResultType.loads(ResultType.CONTINUITY) == ResultType.CONTINUITY
        assert ResultType.loads('free') == ResultType.FREE
        assert ResultType.loads('simple') == ResultType.SIMPLE
        assert ResultType.loads('continuity') == ResultType.CONTINUITY
        # Unknown names/values raise KeyError; unsupported types TypeError.
        with pytest.raises(KeyError):
            ResultType.loads('sdkfjlsd')
        assert ResultType.loads(0) == ResultType.FREE
        assert ResultType.loads(1) == ResultType.SIMPLE
        assert ResultType.loads(2) == ResultType.CONTINUITY
        with pytest.raises(KeyError):
            ResultType.loads(-100)
        with pytest.raises(TypeError):
            ResultType.loads([])
| 4,138 | 3 | 183 |
a4dfbe8bf3c713b2f61de253c0f1eb25783ea050 | 2,905 | py | Python | rl_games/algos_torch/model_builder.py | yzqin/rl_games | 6e09fec1e60d70c1dc1934ec65ed3265950a8c34 | [
"MIT"
] | null | null | null | rl_games/algos_torch/model_builder.py | yzqin/rl_games | 6e09fec1e60d70c1dc1934ec65ed3265950a8c34 | [
"MIT"
] | null | null | null | rl_games/algos_torch/model_builder.py | yzqin/rl_games | 6e09fec1e60d70c1dc1934ec65ed3265950a8c34 | [
"MIT"
] | null | null | null | from rl_games.common import object_factory
import rl_games.algos_torch
from rl_games.algos_torch import network_builder, pn_network_builder
from rl_games.algos_torch import models
NETWORK_REGISTRY = {}
MODEL_REGISTRY = {}
| 48.416667 | 118 | 0.687091 | from rl_games.common import object_factory
import rl_games.algos_torch
from rl_games.algos_torch import network_builder, pn_network_builder
from rl_games.algos_torch import models
NETWORK_REGISTRY = {}
MODEL_REGISTRY = {}
def register_network(name, target_class):
    """Register *target_class* under *name* in the global network registry."""
    def _build(**kwargs):
        # kwargs accepted for a uniform builder signature; unused here.
        return target_class()
    NETWORK_REGISTRY[name] = _build
def register_model(name, target_class):
    """Register *target_class* under *name* in the global model registry."""
    def _build(network, **kwargs):
        # The model wraps an already-built network; extra kwargs unused.
        return target_class(network)
    MODEL_REGISTRY[name] = _build
class NetworkBuilder:
    """Factory wrapper mapping network names to builder instances."""

    def __init__(self):
        self.network_factory = object_factory.ObjectFactory()
        # Externally registered builders first, then the built-in ones.
        self.network_factory.set_builders(NETWORK_REGISTRY)
        builtin_builders = {
            'actor_critic': lambda **kwargs: network_builder.A2CBuilder(),
            'resnet_actor_critic': lambda **kwargs: network_builder.A2CResnetBuilder(),
            'rnd_curiosity': lambda **kwargs: network_builder.RNDCuriosityBuilder(),
            'soft_actor_critic': lambda **kwargs: network_builder.SACBuilder(),
            'pn_actor_critic': lambda **kwargs: pn_network_builder.A2CPNBuilder(),
        }
        for builder_name, builder_fn in builtin_builders.items():
            self.network_factory.register_builder(builder_name, builder_fn)

    def load(self, params):
        """Create the network named in *params* and load its configuration."""
        net = self.network_factory.create(params['name'])
        net.load(params)
        return net
class ModelBuilder:
    """Factory wrapper that instantiates models around a built network."""

    def __init__(self):
        self.model_factory = object_factory.ObjectFactory()
        # Externally registered models first, then the built-in ones.
        self.model_factory.set_builders(MODEL_REGISTRY)

        def _wrap(model_cls):
            # Bind model_cls now to avoid the late-binding closure pitfall.
            return lambda network, **kwargs: model_cls(network)

        builtin_models = {
            'discrete_a2c': models.ModelA2C,
            'multi_discrete_a2c': models.ModelA2CMultiDiscrete,
            'continuous_a2c': models.ModelA2CContinuous,
            'continuous_a2c_logstd': models.ModelA2CContinuousLogStd,
            'soft_actor_critic': models.ModelSACContinuous,
            'central_value': models.ModelCentralValue,
        }
        for model_name, model_cls in builtin_models.items():
            self.model_factory.register_builder(model_name, _wrap(model_cls))
        self.network_builder = NetworkBuilder()

    def get_network_builder(self):
        """Return the nested NetworkBuilder instance."""
        return self.network_builder

    def load(self, params):
        """Build the network described by *params* and wrap it in its model."""
        model_name = params['model']['name']
        network = self.network_builder.load(params['network'])
        return self.model_factory.create(model_name, network=network)
| 2,457 | -2 | 225 |
0a0173adfbd6865ffe8cdcab826239cb84836335 | 3,408 | py | Python | win/devkit/other/pymel/extras/completion/py/pymel/util/mathutils.py | leegoonz/Maya-devkit | b81fe799b58e854e4ef16435426d60446e975871 | [
"ADSL"
] | 10 | 2018-03-30T16:09:02.000Z | 2021-12-07T07:29:19.000Z | win/devkit/other/pymel/extras/completion/py/pymel/util/mathutils.py | leegoonz/Maya-devkit | b81fe799b58e854e4ef16435426d60446e975871 | [
"ADSL"
] | null | null | null | win/devkit/other/pymel/extras/completion/py/pymel/util/mathutils.py | leegoonz/Maya-devkit | b81fe799b58e854e4ef16435426d60446e975871 | [
"ADSL"
] | 9 | 2018-06-02T09:18:49.000Z | 2021-12-20T09:24:35.000Z | import math
from __builtin__ import round as _round
def clamp(x=0.0, min=0.0, max=1.0):
    """
    Clamps the value x between min and max
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body is intentionally
    # just `pass`; the real implementation lives in pymel itself.
    pass
def gamma(c, g):
    """
    Gamma color correction of c with a single scalar gamma value g
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def round(value, ndigits=0):
    """
    round(number[, ndigits]) -> float
    Round a number to a given precision in decimal digits (default 0 digits).
    This always returns a floating point number. Precision may be negative.
    This builtin function was overloaded in mathutils to work on complex numbers,
    in that case real and imaginary values are rounded separately
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def linmap(min, max, x):
    """
    Returns the value of a linear remapping function.
    performs a linear interpolation between 0 and 1 in the interval min to max,
    but does not clamp the range
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def blend(a, b, weight=0.5):
    """
    blend(a, b[, weight=0.5]) :
    Blends values a and b according to normalized weight w,
    returns a for weight == 0.0 and b for weight = 1.0, a*(1.0-weight)+b*weight in between
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def imag(x):
    """
    the imaginary part of x
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def conjugate(x):
    """
    the conjugate part of x
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def hermite(x=0.0, v0=0.0, v1=0.0, s0=0.0, s1=0.0):
    """
    As the MEL command : This command returns a point along a hermite curve from the five given control arguments.
    The first two arguments are the start and end points of the curve, respectively.
    The next two arguments are the tangents of the curve at the start point and end point of the curve, respectively.
    The fifth argument, parameter, specifies the point on the hermite curve that is returned by this function.
    This parameter is the unitized distance along the curve from the start point to the end point.
    A parameter value of 0.0 corresponds to the start point and a parameter value of 1.0 corresponds to the end point of the curve.
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def smoothstep(min, max, x):
    """
    Returns the value of a smooth step function.
    Returns 0 if x < min, 1 if x > max, and performs a smooth Hermite
    interpolation between 0 and 1 in the interval min to max.
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def smoothmap(min, max, x):
    """
    Returns the value of a smooth remapping function.
    performs a smooth Hermite interpolation between 0 and 1 in the interval min to max,
    but does not clamp the range
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def hermiteInterp(x=0.0, y0=0.0, y1=1.0, s0=0.0, s1=0.0):
    """
    Hermite interpolation of x between points y0 and y1 of tangent slope s0 and s1
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def linstep(min, max, x):
    """
    Returns the value of a linear step function.
    Returns 0 if x < min, 1 if x > max, and performs a linear
    interpolation between 0 and 1 in the interval min to max.
    :rtype: float
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def real(x):
    """
    the real part of x
    """
    # NOTE(review): auto-generated completion stub — body intentionally empty.
    pass
def setRange(x=0.0, oldmin=0.0, oldmax=1.0, newmin=0.0, newmax=1.0):
    """
    Remaps x by linear interpolation from the range [oldmin, oldmax] to the range [newmin, newmax].
    :rtype: float
    """
    pass
| 21.167702 | 131 | 0.634977 | import math
from __builtin__ import round as _round
def clamp(x=0.0, min=0.0, max=1.0):
    """
    Clamps the value x between min and max.
    :rtype: float
    """
    pass
def gamma(c, g):
    """
    Gamma color correction of c with a single scalar gamma value g.
    :rtype: float
    """
    pass
def round(value, ndigits=0):
    """
    round(number[, ndigits]) -> float
    Round a number to a given precision in decimal digits (default 0 digits).
    This always returns a floating point number. Precision may be negative.
    This builtin function was overloaded in mathutils to work on complex numbers;
    in that case real and imaginary values are rounded separately.
    """
    pass
def linmap(min, max, x):
    """
    Returns the value of a linear remapping function.
    Performs a linear interpolation between 0 and 1 in the interval min to max,
    but does not clamp the range.
    :rtype: float
    """
    pass
def blend(a, b, weight=0.5):
    """
    blend(a, b[, weight=0.5]) :
        Blends values a and b according to the normalized weight,
        returns a for weight == 0.0 and b for weight == 1.0, a*(1.0-weight)+b*weight in between
    :rtype: float
    """
    pass
def imag(x):
    """
    Returns the imaginary part of x.
    :rtype: float
    """
    pass
def conjugate(x):
    """
    Returns the complex conjugate of x.
    :rtype: float
    """
    pass
def hermite(x=0.0, v0=0.0, v1=0.0, s0=0.0, s1=0.0):
    """
    As the MEL command : This command returns a point along on a hermite curve from the five given control arguments.
    The first two arguments are the start and end points of the curve, respectively.
    The next two arguments are the tangents of the curve at the start point and end point of the curve, respectively.
    The fifth argument, parameter, specifies the point on the hermite curve that is returned by this function.
    This parameter is the unitized distance along the curve from the start point to the end point.
    A parameter value of 0.0 corresponds to the start point and a parameter value of 1.0 corresponds to the end point of the curve.
    :rtype: float
    """
    pass
def smoothstep(min, max, x):
    """
    Returns the value of a smooth step function.
    Returns 0 if x < min, 1 if x > max, and performs a smooth Hermite
    interpolation between 0 and 1 in the interval min to max.
    :rtype: float
    """
    pass
def smoothmap(min, max, x):
    """
    Returns the value of a smooth remapping function.
    Performs a smooth Hermite interpolation between 0 and 1 in the interval min to max,
    but does not clamp the range.
    :rtype: float
    """
    pass
def hermiteInterp(x=0.0, y0=0.0, y1=1.0, s0=0.0, s1=0.0):
    """
    Hermite interpolation of x between points y0 and y1 of tangent slope s0 and s1.
    :rtype: float
    """
    pass
def linstep(min, max, x):
    """
    Returns the value of a linear step function.
    Returns 0 if x < min, 1 if x > max, and performs a linear
    interpolation between 0 and 1 in the interval min to max.
    :rtype: float
    """
    pass
def real(x):
    """
    Returns the real part of x.
    :rtype: float
    """
    pass
def setRange(x=0.0, oldmin=0.0, oldmax=1.0, newmin=0.0, newmax=1.0):
    """
    Remaps x by linear interpolation from the range [oldmin, oldmax] to the range [newmin, newmax].
    :rtype: float
    """
    pass
| 0 | 0 | 0 |
17a103eeb3bd2216f66db043bd9af26af28e43b4 | 81 | py | Python | app/reader.py | andreiavrammsd/newrelicpy | 677f91024d1475dd90b5520e2c8f6f656b97cca1 | [
"MIT"
] | 1 | 2018-02-01T23:17:10.000Z | 2018-02-01T23:17:10.000Z | app/reader.py | andreiavrammsd/py-newrelic | 677f91024d1475dd90b5520e2c8f6f656b97cca1 | [
"MIT"
] | null | null | null | app/reader.py | andreiavrammsd/py-newrelic | 677f91024d1475dd90b5520e2c8f6f656b97cca1 | [
"MIT"
] | null | null | null | import re
| 13.5 | 40 | 0.555556 | import re
def read(arg: str) -> list:
    """Return every run of digits/dots found in *arg* (e.g. version numbers)."""
    number_like = re.compile(r'([\d\.]+)')
    return number_like.findall(arg)
| 47 | 0 | 23 |
3a1d246ea8433f7464347db25ad117f762fbd461 | 2,238 | py | Python | algorithms/dfs/optimal_account_balancing.py | kevinshenyang07/Data-Structure-and-Algo | 36b02feea04b892f1256de090c4fcf7b6aa98873 | [
"MIT"
] | null | null | null | algorithms/dfs/optimal_account_balancing.py | kevinshenyang07/Data-Structure-and-Algo | 36b02feea04b892f1256de090c4fcf7b6aa98873 | [
"MIT"
] | null | null | null | algorithms/dfs/optimal_account_balancing.py | kevinshenyang07/Data-Structure-and-Algo | 36b02feea04b892f1256de090c4fcf7b6aa98873 | [
"MIT"
] | null | null | null | # Optimal Account Balancing
# given a list of transactions between a group of people, with each transaction
# as a tuple (x, y, z), meaining person x send person y amount z of money
# assume x != y and z > 0, id x and y might not be linear
# return the minimum number of transactions required to settle the debt
# Optimal Account Balancing
# given a list of transactions between a group of people, with each transaction
# as a tuple (x, y, z), meaining person x send person y amount z of money
# assume x != y and z > 0, id x and y might not be linear
# return the minimum number of transactions required to settle the debt
| 39.263158 | 79 | 0.550045 | # Optimal Account Balancing
# given a list of transactions between a group of people, with each transaction
# as a tuple (x, y, z), meaining person x send person y amount z of money
# assume x != y and z > 0, id x and y might not be linear
# return the minimum number of transactions required to settle the debt
# Optimal Account Balancing
# given a list of transactions between a group of people, with each transaction
# as a tuple (x, y, z), meaining person x send person y amount z of money
# assume x != y and z > 0, id x and y might not be linear
# return the minimum number of transactions required to settle the debt
class Solution(object):
    """Settle group debts with the fewest transfers.

    Each transaction (x, y, z) means person x paid person y amount z.
    Only each person's net balance matters; the answer is the minimum
    number of transfers that zeroes every balance.
    """
    def minTransfers(self, transactions):
        """
        :type transactions: List[List[int]]
        :rtype: int
        """
        # Net balance per person: the payer's balance goes down, the
        # receiver's goes up.
        acc = {}
        for p1, p2, amount in transactions:
            acc[p1] = acc.get(p1, 0) - amount
            acc[p2] = acc.get(p2, 0) + amount
        # cancel out balance pairs with equal amount but different sign:
        # each such pair is optimally settled by exactly one transfer.
        # list(...) is required for Python 3, where dict.values() is a view
        # that supports neither indexing nor item assignment.
        bal = list(acc.values())
        trans = 0
        for i in range(len(bal)):
            for j in range(i):
                if bal[i] * bal[j] != 0 and bal[i] + bal[j] == 0:
                    bal[i] = bal[j] = 0
                    trans += 1
                    break
        # then filter out zero balances; only unsettled people remain
        bal = [b for b in bal if b != 0]
        return self.dfs(bal, 0, trans)

    # min number of transactions to settle starting from bal[i]
    # trans: transactions made so far
    def dfs(self, bal, i, trans):
        """Depth-first search over "who pays whom next", returning the
        minimum total number of transactions."""
        n = len(bal)
        # find the next balance that needs to be settled
        while i < n and bal[i] == 0:
            i += 1
        # end condition: everyone before i (hence everyone) is settled
        if i >= len(bal):
            return trans
        res = float('inf')
        for j in range(i + 1, n):
            if bal[i] * bal[j] < 0: # different sign
                # a transaction that sets balance at i to 0 (settled)
                # and balance at j to bal[j] + bal[i]
                # values before bal[i + 1] are virtually 0 and then added back
                bal[j] += bal[i]
                res = min(res, self.dfs(bal, i + 1, trans + 1))
                # rollback
                bal[j] -= bal[i]
        return res
| 712 | 880 | 22 |
dc770c3c12eb279da2f1f44d53d4f054d75e3568 | 1,664 | py | Python | btemu/install.py | pedersen/nerfgun | 136b2b1448eee3f6cc9e0cb88a31eb9cf321fe49 | [
"MIT"
] | null | null | null | btemu/install.py | pedersen/nerfgun | 136b2b1448eee3f6cc9e0cb88a31eb9cf321fe49 | [
"MIT"
] | null | null | null | btemu/install.py | pedersen/nerfgun | 136b2b1448eee3f6cc9e0cb88a31eb9cf321fe49 | [
"MIT"
] | null | null | null | import logging
import os
import shutil
from importlib.resources import read_text
from subprocess import check_call
import btemu.resources
# Script entry point: run the installer when executed directly.
if __name__ == '__main__':
    main()
| 35.404255 | 113 | 0.623798 | import logging
import os
import shutil
from importlib.resources import read_text
from subprocess import check_call
import btemu.resources
def write_resource(fname, resource_module, resource_name):
    """(Re)create *fname* with the text of a packaged resource.

    :param fname: destination path on disk
    :param resource_module: package object/name holding the resource
    :param resource_name: file name of the resource inside the package
    """
    logging.info(f'(Re)Creating {fname}')
    data = read_text(resource_module, resource_name)
    # Context manager guarantees the handle is closed even if write() raises
    # (the original leaked the open file on error).
    with open(fname, 'w') as f:
        f.write(data)
def main():
    """Install btemu configuration, D-Bus policy and systemd units, then
    enable and (re)start the related services. Requires root privileges."""
    logging.basicConfig(level=logging.DEBUG)
    if not os.path.exists('/etc/btemu'):
        os.makedirs('/etc/btemu')
    # Destination path -> packaged resource name.
    resource_map = [
        ('/etc/btemu/btemu.conf', 'btemu.conf'),
        ('/etc/btemu/btemu-logging.ini', 'btemu-logging.ini'),
        ('/etc/dbus-1/system.d/org.thanhle.btkbservice.conf', 'org.thanhle.btkbservice.conf'),
        ('/lib/systemd/system/btemu-hid.service', 'btemu-hid.service'),
        ('/lib/systemd/system/btemu-agent.service', 'btemu-agent.service'),
        ('/lib/systemd/system/bluetooth.service', 'bluetooth.service'),
    ]
    for target, resource in resource_map:
        write_resource(target, btemu.resources, resource)
    # A config dropped on the boot partition overrides the packaged default.
    if os.path.exists('/boot/btemu.conf'):
        logging.info('Found /boot/btemu.conf, installing it')
        shutil.copy('/boot/btemu.conf', '/etc/btemu/btemu.conf')
    logging.info('Reloading SystemD State')
    check_call(['systemctl', 'daemon-reload'])
    for daemon in ['bluetooth', 'btemu-hid', 'btemu-agent']:
        logging.info(f'Enabling {daemon}')
        check_call(['systemctl', 'enable', daemon])
        logging.info(f'(Re)Starting {daemon}')
        check_call(['systemctl', 'restart', daemon])
# Script entry point: run the installer when executed directly.
if __name__ == '__main__':
    main()
| 1,437 | 0 | 46 |
983480b5218ad736d2b4e7dba40780990a35cd85 | 22,652 | py | Python | uliweb/contrib/jsonql/__init__.py | timgates42/uliweb | 80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1 | [
"BSD-2-Clause"
] | 202 | 2015-01-12T08:10:48.000Z | 2021-11-08T09:04:32.000Z | uliweb/contrib/jsonql/__init__.py | timgates42/uliweb | 80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1 | [
"BSD-2-Clause"
] | 30 | 2015-01-01T09:07:17.000Z | 2021-06-03T12:58:45.000Z | uliweb/contrib/jsonql/__init__.py | timgates42/uliweb | 80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1 | [
"BSD-2-Clause"
] | 58 | 2015-01-12T03:28:54.000Z | 2022-01-14T01:58:08.000Z | #coding=utf8
from uliweb import settings
from uliweb.utils.common import safe_str, import_attr
from uliweb.utils.storage import Storage
from uliweb.orm import do_, get_model
from uliweb.utils.sorteddict import SortedDict
from sqlalchemy import __version__ as sa_version, select, true, text, literal
import logging
DEBUG = False  # when True, generated SQL is emitted through the module logger
__schemas__ = {}  # name -> schema class registry, filled by after_init_apps
__relations__ = None  # global relation registry, rebound to Relations() below
__default_limit__ = 10  # default row limit applied to schema queries
log = logging.getLogger(__name__)
__relations__ = Relations()
def get_relation_condition(key):
    """
    Get relation condition
    :param key: should be (schema_a, schema_b)
    :return: the join condition registered for the schema pair, or None when
        no relation was registered
    """
    global __relations__
    return __relations__.get_condition(key)
def query(d):
    """
    Query schema
    :param d: dict describing the query (schema entries plus @-options)
    :return: dict mapping each entry's alias to its result
    """
    q = Query(d)
    return q.run()
| 33.360825 | 136 | 0.50181 | #coding=utf8
from uliweb import settings
from uliweb.utils.common import safe_str, import_attr
from uliweb.utils.storage import Storage
from uliweb.orm import do_, get_model
from uliweb.utils.sorteddict import SortedDict
from sqlalchemy import __version__ as sa_version, select, true, text, literal
import logging
DEBUG = False  # when True, generated SQL is emitted through the module logger
__schemas__ = {}  # name -> schema class registry, filled by after_init_apps
__relations__ = None  # global relation registry, rebound to Relations() further below
__default_limit__ = 10  # default row limit applied to schema queries
log = logging.getLogger(__name__)
class ModelError(Exception): pass  # raised when a referenced uliweb Model cannot be loaded
class SchemaError(Exception): pass  # raised when a schema name is not registered
class RelationError(Exception): pass  # raised for inconsistent or missing schema relations
class Type(object):
    """Field descriptor used on Schema subclasses.

    Records the logical type name, a human label, an optional mapping to a
    differently named table column, and a creation counter so that fields
    can later be sorted in declaration order.
    """
    # Class-wide monotonic counter; each instance takes the current value.
    creation_counter = 1
    def __init__(self, type='str', label='', field_name=None, **kwargs):
        self.type = type
        self.label = label
        self.field_name = field_name
        # Attribute name; assigned later by the schema metaclass.
        self.name = None
        self.kwargs = kwargs
        # Stamp this instance with the current counter, then advance it.
        counter = Type.creation_counter
        self.creation_counter = counter
        Type.creation_counter = counter + 1
class SchemaMetaClass(type):
    """Metaclass for Schema subclasses.

    Collects declared Type fields into ``cls.properties``, records their
    declaration order in ``cls._fields_list`` and binds the class to its
    database table and default select query.
    """
    def __init__(cls, name, bases, dct):
        super(SchemaMetaClass, cls).__init__(name, bases, dct)
        # The abstract Schema base class itself carries no fields.
        if name == 'Schema':
            return
        cls.properties = {}
        cls._fields_list = []
        cls._collection_names = {}
        cls._bind()
        for attr_name in dct.keys():
            attr = dct[attr_name]
            if isinstance(attr, Type):
                attr.name = attr_name
                cls.properties[attr_name] = attr
        fields_list = [(k, v) for k, v in cls.properties.items()]
        # Sort by declaration order.  Bug fix: list.sort() only accepted a
        # bare cmp function on Python 2; the key= form works on Python 2.4+
        # and Python 3 alike.
        fields_list.sort(key=lambda item: item[1].creation_counter)
        cls._fields_list = [k for k, v in fields_list]
        cls._bind_query()
def reflect_column(column):
    """Map a SQLAlchemy column to a (field_type, kwargs) pair usable to
    build a Type field.

    :param column: a reflected SQLAlchemy Column
    :return: tuple of (field type name, keyword options such as max_length,
        precision, scale and label)
    :raises ValueError: for column types this module does not understand
    """
    type_name = column.type.__class__.__name__.lower()
    kwargs = SortedDict()
    field_type = type_name
    if type_name in ('char', 'varchar'):
        kwargs['max_length'] = column.type.length
    elif type_name in ('text', 'blob', 'integer', 'float', 'bigint'):
        pass
    elif type_name == 'long':
        field_type = 'bigint'
    elif type_name in ('clob',):
        field_type = 'text'
    elif type_name in ('decimal', 'float'):
        kwargs['precision'] = column.type.precision
        kwargs['scale'] = column.type.scale
    elif type_name == 'raw': # oracle
        field_type = 'binary'
        # Bug fix: `column_type` was an undefined name here (NameError on
        # Oracle RAW columns); the length lives on column.type as in the
        # other branches.
        kwargs['max_length'] = column.type.length
    elif type_name == 'number':
        if column.type.scale:
            kwargs['precision'] = column.type.precision
            kwargs['scale'] = column.type.scale
            field_type = 'decimal'
        else:
            field_type = 'int'
    elif type_name == 'numeric':
        field_type = 'decimal'
        kwargs['precision'] = column.type.precision
        kwargs['scale'] = column.type.scale
    elif type_name in ('timestamp',):
        field_type = 'timestamp'
    elif type_name in ('datetime', 'date', 'time'):
        pass
    # for tinyint will be treated as bool
    elif type_name in ('tinyint', 'boolean'):
        field_type = 'bool'
    else:
        raise ValueError("Don't support column [{0}] for type [{1}] when parsing {2}".format(column.name, type_name, column.table.name))
    # Column comments are only available from SQLAlchemy 1.2 onwards.
    if sa_version >= '1.2' and column.comment:
        kwargs['label'] = column.comment
    if not kwargs.get('label'):
        kwargs['label'] = column.name
    return field_type, kwargs
class Schema(object):
    """Declarative description of a queryable table.

    Subclasses either declare Type fields explicitly or set ``__model__`` /
    ``__table__`` so that fields are reflected from the database table.
    SchemaMetaClass populates ``properties`` and ``_fields_list``.
    """
    __metaclass__ = SchemaMetaClass
    __model__ = None #Model name
    __table__ = None #table name
    __fields__ = []
    __query__ = None
    @classmethod
    def __repr__(cls):
        # Render a readable summary of the schema and its declared fields.
        d = []
        d.append('{}{{'.format(cls.__name__))
        for name in cls._fields_list:
            f = cls.properties[name]
            field_name = ''
            if f.field_name:
                field_name = ' ,field_name={}'.format(f.field_name)
            d.append(' {}(type=\'{}\', label=\'{}\'{})'.format(f.name, f.type, safe_str(f.label), field_name))
        d.append('}')
        return '\n'.join(d)
    @classmethod
    def _bind(cls):
        # Resolve __model__/__table__ to a reflected table and build Type
        # fields for the declared (or all) columns.
        from uliweb.orm import reflect_table
        if not cls.__table__ is not None and cls.__model__:
            model = get_model(cls.__model__)
            if not model:
                raise ModelError('Model {} can not be found'.format(cls.__model__))
            cls.__table__ = model.table
        if cls.__table__ is not None:
            cls.__table__ = reflect_table(cls.__table__)
            for f in (cls.__fields__ or cls.__table__.columns.keys()):
                col = cls.__table__.columns.get(f)
                if col is not None:
                    field_type, kwargs = reflect_column(col)
                    field = Type(field_type, **kwargs)
                    field.name = f
                    cls.properties[f] = field
                else:
                    # NOTE(review): FieldError is not defined in this module and
                    # `cls.table` should likely be `cls.__table__`; this error
                    # path would itself raise NameError -- confirm and fix.
                    raise FieldError('Field {} can not be found in table {}'.format(f, cls.table.name))
    @classmethod
    def _bind_query(cls):
        # Build the default SELECT over all mapped columns, once.
        if cls.__table__ is not None and not cls.__query__:
            fields = []
            for f in cls.properties.values():
                name = f.field_name or f.name
                col = cls.__table__.columns.get(name)
                if col is not None:
                    fields.append(col)
            cls.__query__ = select(fields, from_obj=[cls.__table__])
    @classmethod
    def get_column(cls, name):
        # Resolve 'name' or 'name:alias' to a (possibly labelled) column;
        # unknown names fall back to a raw text() expression.
        alias = ''
        if ':' in name:
            name, alias = [x.strip() for x in name.split(':')]
        col = cls.__table__.columns.get(name)
        if col is None:
            if alias:
                col = text(name + ' as ' + alias)
            else:
                col = text(name)
        else:
            if alias:
                col = col.label(alias)
        return col
class Relation(object):
    """A join relation between two schemas.

    Built from specs like ``'User.id = Group.user'`` (a string or list of
    strings, all between the same two schemas) and able to produce the
    SQLAlchemy join condition for either direction of the pair.
    """
    def __init__(self, relation):
        self._schema_a = None # schema class
        self._schema_b = None
        self._schema_a_name = None
        self._schema_b_name = None
        self._fields_a = set()
        self._fields_b = set()
        self.relation_key = None # saving [(schema_a, schema_b), (schema_b, schema_a)]
        self.cached = {}
        if not isinstance(relation, (tuple, list)):
            relation = [relation]
        for v in relation:
            # Each spec looks like 'SchemaA.field = SchemaB.field'.
            t1, t2 = [x.strip() for x in v.split('=')]
            schema_a_name, field_a_name = t1.split('.')
            schema_b_name, field_b_name = t2.split('.')
            key = (schema_a_name, schema_b_name)
            # All specs must connect the same schema pair.
            if self.relation_key and key not in self.relation_key:
                raise RelationError('Relation {!r} is not matched with before value {!r}'.format(
                    key, self.relation_key))
            self._schema_a_name = schema_a_name
            self._schema_b_name = schema_b_name
            self.relation_key = [key, (schema_b_name, schema_a_name)]
            self._fields_a.add((field_a_name, field_b_name))
            self._fields_b.add((field_b_name, field_a_name))
    @property
    def schema_a(self):
        # Lazily resolved schema class for the left side.
        if not self._schema_a:
            self._schema_a = get_schema(self._schema_a_name)
        return self._schema_a
    @property
    def schema_b(self):
        # Lazily resolved schema class for the right side.
        if not self._schema_b:
            self._schema_b = get_schema(self._schema_b_name)
        return self._schema_b
    def __eq__(self, key):
        """
        :param key: (schema_a, schema_b)
        :return: True if the pair matches this relation in either direction
        """
        return key in self.relation_key
    def get_condition(self, key):
        """Build (and cache) the join condition for the given ordered pair,
        or return None when the pair does not match this relation."""
        condition = None
        a, b = key
        if not self == key:
            return condition
        condition = self.cached.get(key)
        if not condition:
            condition = true()
            # Orientation matters only for which field set we iterate.
            if a == self._schema_a_name:
                for fa, fb in self._fields_a:
                    condition = (self.schema_a.get_column(fa) == self.schema_b.get_column(fb)) & condition
            else:
                for fb, fa in self._fields_b:
                    condition = (self.schema_b.get_column(fb) == self.schema_a.get_column(fa)) & condition
            self.cached[key] = condition
        return condition
class Relations(object):
    """Registry mapping (schema_a, schema_b) pairs to Relation objects,
    indexed under both orderings of the pair."""
    def __init__(self):
        self.relations = {}
    def add(self, relation):
        """
        relation is a string list, just like:
            ['User.id = Group.user', 'User.username = Group.username']
        :param relation:
        :return:
        """
        rel = Relation(relation)
        forward = rel.relation_key[0]
        if forward in self.relations:
            # Already registered; keep the first definition.
            return
        backward = rel.relation_key[1]
        self.relations[forward] = rel
        self.relations[backward] = rel
    def get_condition(self, relation):
        """
        :param relation: (schema_a, schema_b)
        :return: the join condition for the pair, or None when unknown
        """
        rel = self.relations.get(relation)
        if not rel:
            return None
        return rel.get_condition(relation)
# Module-level relation registry used by add_relation / get_relation_condition.
__relations__ = Relations()
def add_relation(relation):
    """Register a relation spec (e.g. 'User.id = Group.user') in the global registry."""
    global __relations__
    __relations__.add(relation)
def get_relation_condition(key):
    """
    Get relation condition
    :param key: should be (schema_a, schema_b)
    :return: the join condition registered for the schema pair, or None when
        no relation was registered
    """
    global __relations__
    return __relations__.get_condition(key)
def get_schema(name, exception=True):
    """Look up a registered schema class by name.

    :param name: schema name as registered in settings
    :param exception: when True, raise SchemaError for unknown names
    :return: the schema class, or a false value when missing and
        ``exception`` is False
    """
    global __schemas__
    found = __schemas__.get(name)
    if exception and not found:
        raise SchemaError('Schema {} can not be found in settings.'.format(name))
    return found
class Query(object):
    """Execute a jsonql query description (a dict of schema entries) and
    collect the results keyed by each entry's alias.

    NOTE(review): string conditions are evaluated with ``eval`` in
    parse_condition -- never feed this class untrusted input.
    """
    def __init__(self, data):
        self.data = data

    def run(self):
        """Run every schema entry of the query and gather results by alias."""
        data = {}
        for name, param in self.data.items():
            k, result = self.query_schema(name, param)
            data[k] = result
        return data

    def parse_entry(self, name):
        """
        Parse query entry name, just like:
            {
                'User[]:user'
            }
        'User[]:user' is an entry name.
        :param name:
        :return: (alias, schema name, need_list flag)
        """
        # calculate schema mode
        # if ':name' or '' or '[]:name' or '[]' found, it'll be treat as multiple Schema query
        alias = name
        if ':' in name:
            name, alias = name.split(':')
        if name.endswith('[]'):
            need_list = True
            name = name[:-2]
        else:
            need_list = False
        return alias, name, need_list

    def query_schema(self, name, param):
        """
        If name includes '[]', then it'll return a list
        :param name: schema name
        :param param: json parameters
        :return: (alias, result)
        """
        alias, name, need_list = self.parse_entry(name)
        if not name:
            result = self.process_multiple_query(need_list, param)
        else:
            result = self.process_single_query(name, need_list, param)
        return alias, result

    def parse_condition(self, schema, name, v):
        """
        Parse name = 'value' to condition
        :param name: column name
        :param schema: schema class
        :param v: column value (string prefixes such as '>=', 'like',
            'between', 'in' select the operator; '%' in a plain string
            means LIKE; lists mean IN)
        :return: SQLAlchemy condition or None when the column is unknown
        """
        S = schema
        col = S.get_column(name)
        condition = None
        if col is not None: # can create condition
            if isinstance(v, (str, unicode)):
                # SECURITY NOTE: operator payloads are eval()'d -- the query
                # description must come from a trusted source only.
                if v.startswith('>='):
                    condition = (col >= eval(v[2:].strip()))
                elif v.startswith('>'):
                    condition = (col > eval(v[1:].strip()))
                elif v.startswith('<='):
                    condition = (col <= eval(v[2:].strip()))
                elif v.startswith('<'):
                    condition = (col < eval(v[1:].strip()))
                elif v.startswith('='):
                    condition = (col == eval(v[1:].strip()))
                elif v.startswith('!='):
                    condition = (col != eval(v[2:].strip()))
                elif v.startswith('like'):
                    condition = col.like(v[4:].strip())
                elif v.startswith('between'):
                    _v = eval(v[7:].strip())
                    if not isinstance(_v, (tuple, list)):
                        raise ValueError("Between operation should be a list, but {!r} found".format(v))
                    condition = (col.between(*_v))
                elif v.startswith('in'):
                    condition = (col.in_(eval(v[2:].strip())))
                else:
                    if '%' in v: # like
                        condition = col.like(v)
                    else:
                        condition = (col == v)
            elif isinstance(v, (tuple, list)):
                condition = (col.in_(v))
            else:
                condition = (col == v)
        return condition

    def parse_param(self, name, param):
        """
        Parse schema parameter, it'll return
        {
            condition
            columns
            limit
            order_by
            group_by
            total
            page
            table
            name #schema name
        }
        :param name: schema name
        :param param: schema query parameter
        :return: Storage (dict-like) config
        """
        S = get_schema(name)
        # prepare condition
        condition = true()
        fields = []
        columns = []
        columns_param = {}
        limit = __default_limit__
        order_by = []
        group_by = []
        total = None
        page = 0
        table = S.__table__
        relation = None
        for k, v in param.items():
            if k.startswith('@'):
                # '@' keys are query options rather than columns.
                if k == '@columns':
                    fields = v[:]
                elif k == '@limit':
                    limit = v
                elif k == '@page':
                    page = v
                elif k == '@order_by':
                    if isinstance(v, (str, unicode)):
                        orders = v.split(',')
                    else:
                        orders = v
                    for c in orders:
                        # 'col.desc' selects descending order.
                        if '.' in c:
                            col_name, dir = c.split('.')
                        else:
                            col_name = c
                            dir = 'asc'
                        col = S.get_column(col_name)
                        if dir == 'desc':
                            order_by.append(col.desc())
                        else:
                            order_by.append(col)
                elif k == '@group_by':
                    if isinstance(v, (str, unicode)):
                        groups = v.split(',')
                    else:
                        groups = v
                    for c in groups:
                        col = S.get_column(c)
                        group_by.append(col)
                elif k == '@total':
                    total = v
                elif k == '@relation':
                    relation_key = name, v
                    relation = get_relation_condition(relation_key)
            elif k.startswith('$'): # condition
                c = self.parse_condition(S, k[1:], v)
                if c is not None:
                    condition = c & condition
            elif isinstance(v, dict): # guest schema
                # todo nested schema
                # if there is not one row, it'll using left join otherwise using standalone
                # query
                nested_alias, nested_name, nested_need_list = self.parse_entry(k)
                # Bug fix: the original passed the undefined name `value`
                # here (NameError); the nested parameters are in `v`.
                nested_config = self.parse_param(nested_name, v)
                if nested_need_list:
                    # insert resolve function
                    pass
                else:
                    # NOTE(review): `relation` is rebound here to a plain
                    # tuple, unlike the '@relation' branch which stores a
                    # condition -- confirm downstream usage.
                    relation = name, nested_config.name
                    outerjoin_condition = get_relation_condition(relation)
                    if outerjoin_condition is None:
                        raise RelationError("Relation between {!r} can not be found".format(relation))
                    # Bug fix: Table.outerjoin() returns a new Join object
                    # and does not mutate `table`; the original discarded
                    # the result, losing the join entirely.
                    table = table.outerjoin(nested_config.table, outerjoin_condition)
                    condition = nested_config.condition & condition
                    columns.extend(nested_config.columns)
            else:
                # columns
                if k not in fields:
                    fields.append(k)
        columns.extend([S.get_column(x) for x in fields or S._fields_list]) # used for select
        config = Storage({})
        config.table = table
        config.condition = condition
        config.columns = columns
        config.columns_param = columns_param
        config.total = total
        config.limit = limit
        config.page = page
        config.order_by = order_by
        config.group_by = group_by
        config.name = name
        config.schema = S
        config.relation = relation
        return config

    def parse_multiple_query(self, param):
        """Parse a multi-schema entry: each dict value is a schema block,
        '@' keys are shared query options (order/group may be qualified as
        'Schema.column')."""
        tables = []
        condition = true()
        order_by = []
        group_by = []
        limit = __default_limit__
        total = None
        page = 0
        columns = []
        for k, v in param.items():
            if isinstance(v, dict): # Schema
                c = self.parse_param(k, v)
                tables.append(c.table)
                columns.extend(c.columns)
                condition = c.condition & condition
                if c.relation is not None:
                    condition = c.relation & condition
            else:
                if k.startswith('@'):
                    if k == '@limit':
                        limit = v
                    elif k == '@page':
                        page = v
                    elif k == '@order_by':
                        if isinstance(v, (str, unicode)):
                            orders = v.split(',')
                        else:
                            orders = v
                        for c in orders:
                            if '.' in c:
                                v = c.split('.')
                                if len(v) == 3:
                                    schema_name, col_name, dir = v
                                else:
                                    schema_name, col_name = v
                                    dir = 'asc'
                            else:
                                # NOTE(review): with an unqualified column
                                # name, `schema_name` keeps its previous
                                # value (or is unbound) -- confirm intent.
                                col_name = c
                                dir = 'asc'
                            S = get_schema(schema_name)
                            col = S.get_column(col_name)
                            if dir == 'desc':
                                order_by.append(col.desc())
                            else:
                                order_by.append(col)
                    elif k == '@group_by':
                        if isinstance(v, (str, unicode)):
                            groups = v.split(',')
                        else:
                            groups = v
                        for c in groups:
                            if '.' in c:
                                schema_name, col_name = c.split('.')
                                S = get_schema(schema_name)
                                col = S.get_column(col_name)
                                group_by.append(col)
                    elif k == '@total':
                        total = v
        config = Storage({})
        config.tables = tables
        config.condition = condition
        config.columns = columns
        config.order_by = order_by
        config.group_by = group_by
        config.page = page
        config.limit = limit
        config.total = total
        return config

    def process_multiple_query(self, need_list, param):
        """Run a multi-schema query; returns {'data': rows[, 'total': n]}
        when a list was requested, otherwise the first row as a dict."""
        config = self.parse_multiple_query(param)
        count = 0
        query = select(config.columns, config.condition, from_obj=config.tables)
        if need_list:
            if config.order_by:
                query = query.order_by(*config.order_by)
            if config.group_by:
                query = query.group_by(*config.group_by)
            if config.total:
                if DEBUG:
                    log.debug('Query Schema {} Count:'.format(config.name))
                    log.debug(query.count())
                count = do_(query.count()).scalar()
            if config.page > 0:
                query = query.limit(config.limit).offset((config.page-1)*config.limit)
            if DEBUG:
                log.debug('Query Schema {}:'.format(config.name))
                log.debug(query)
            result = {'data': [dict(row) for row in do_(query)]}
            if config.total:
                result['total'] = count
        else:
            query = query.limit(1)
            if DEBUG:
                log.debug('Query Schema {}:'.format(config.name))
            result = list(do_(query))
            if result:
                result = dict(result[0])
            else:
                result = {}
        return result

    def process_single_query(self, name, need_list, param):
        """Run a single-schema query; returns {'data': rows[, 'total': n]}
        when a list was requested, otherwise the first row as a dict."""
        config = self.parse_param(name, param)
        count = 0
        query = select(config.columns, config.condition, from_obj=[config.table])
        if need_list:
            if config.order_by:
                query = query.order_by(*config.order_by)
            if config.group_by:
                query = query.group_by(*config.group_by)
            if config.total:
                if DEBUG:
                    log.debug('Query Schema {} Count:'.format(config.name))
                    log.debug(query.count())
                count = do_(query.count()).scalar()
            if config.page > 0:
                query = query.limit(config.limit).offset((config.page-1)*config.limit)
            if DEBUG:
                log.debug('Query Schema {}:'.format(config.name))
                log.debug(query)
            result = {'data': [dict(row) for row in do_(query)]}
            if config.total:
                result['total'] = count
        else:
            query = query.limit(1)
            if DEBUG:
                log.debug('Query Schema {}:'.format(config.name))
            result = list(do_(query))
            if result:
                result = dict(result[0])
            else:
                result = {}
        return result
def query(d):
    """
    Query schema
    :param d: dict options
    :return:
    """
    return Query(d).run()
def after_init_apps(sender):
    """uliweb hook: load schema classes from settings once apps are ready.

    Reads settings.JSONQL_SCHEMA (name -> import path) into the global
    __schemas__ registry and picks up the default row limit.
    """
    global __schemas__, __default_limit__
    if 'JSONQL_SCHEMA' in settings:
        for name, model_path in settings.JSONQL_SCHEMA.items():
            if not model_path: continue
            if isinstance(model_path, (str, unicode)):
                path = model_path
            else:
                raise Exception("Schema path should be a string but %r found" % model_path)
            # NOTE(review): `path` above is assigned but never used;
            # import_attr is called with model_path directly.
            __schemas__[name] = import_attr(model_path)
    __default_limit__ = settings.JSONQL.get('limit', 10)
c3a1eff16ee4d748fa6ad17642207484a4f9e3b0 | 8,317 | py | Python | Bayesian-HM-DenseED/test_BayesianNN_UP.py | zabaras/bayesmultiscale | c2f7d36e8ff08a28e5da0809029143a9dd0e2777 | [
"MIT"
] | null | null | null | Bayesian-HM-DenseED/test_BayesianNN_UP.py | zabaras/bayesmultiscale | c2f7d36e8ff08a28e5da0809029143a9dd0e2777 | [
"MIT"
] | null | null | null | Bayesian-HM-DenseED/test_BayesianNN_UP.py | zabaras/bayesmultiscale | c2f7d36e8ff08a28e5da0809029143a9dd0e2777 | [
"MIT"
] | 1 | 2021-09-20T16:25:43.000Z | 2021-09-20T16:25:43.000Z | """
Stein Variational Gradient Descent for Deep ConvNet on GPU.
Current implementation is mainly using for-loops over model instances.
"""
import torch
import numpy as np
from time import time
from args import args, device
import h5py
import os
from models.model_det import DenseED
from models.Bayesian_model_NN import Bayesian_model_NN
from models.model_train import Bayesian_model_train
from utils.misc import mkdirs, logger
from utils.plot1 import plot_prediction_det1
from utils.plot import plot_prediction_det
from utils.mcs_data_upload import mcs_load_data
import json
import scipy.io as io
import sys
# Total output pixel counts over the train/test sets (each sample is a
# 128x128 pressure grid).
n_out_pixels_train = args.ntrain*128*128
n_out_pixels_test = args.ntest*128*128
# NOTE(review): `dir` shadows the builtin of the same name.
dir = './models'
# Bayesian NN
Bayesian_model = torch.load('model_%d.pt'%args.ntrain)
KLE_val = 100
# load data
test_loader = mcs_load_data()
print('Loaded data!')
def test(epoch, logger, test_fixed=None):
    """Evaluate model during training.

    Print predictions including 4 rows:
        1. target
        2. predictive mean
        3. error of the above two
        4. two sigma of predictive variance

    Args:
        test_fixed (Tensor): (2, N, *), `test_fixed[0]` is the fixed test input,
            `test_fixed[1]` is the corresponding target

    Returns:
        tuple of (accumulated MSE, accumulated NLP, predictions array,
        targets array) over the whole test loader.
    """
    Bayesian_model.eval()
    mse_test, nlp_test = 0., 0.
    mse_test_final = 0.
    nlp_test_final = 0.
    # NOTE(review): `final_predict` is never used, and mse_test/nlp_test are
    # re-initialised on the next line below.
    final_predict = []
    mse_test, nlp_test = 0., 0.
    final_target_UQ = []
    final_predict_UQ = []
    nlp_test_val = []
    for batch_idx, (input,basis_patch,A_matrix, B_matrix,target_P, q_matrix) in enumerate(test_loader):
        # Cast every batch tensor to float and move it to the compute device.
        input_rr,output_basis,A1_transformed1,B1_transformed, target_pressure, q1_transformed \
            = input.float(),basis_patch.float(),A_matrix.float(),B_matrix.float(), target_P.float(), q_matrix.float()
        input_rr,output_basis,A1_transformed1,B1_transformed, target_pressure, q1_transformed \
            = input_rr.to(device),output_basis.to(device),A1_transformed1.to(device),B1_transformed.to(device), target_pressure.to(device), q1_transformed.to(device)
        #================================================================================
        tocc = time()
        # Flatten to (144 patches per sample, 1, 15, 15).
        output_basis = output_basis.view(144*args.batchs,1,15,15)
        input_rr = input_rr.view(144*args.batchs,1,15,15)
        # Rebuild the sparse 16384x16384 system matrix for each sample from
        # its (row, col, value) triplets.
        A_app = []
        for i in range(args.batchs):
            A_torch = A1_transformed1[i,:,:]
            A_torch1 = A_torch[:,0:2]
            A_torch2 = A_torch[:,2]
            A_torch1 = A_torch1.type(torch.LongTensor).to(device)
            A_torch_final = torch.sparse.FloatTensor(A_torch1.t(), A_torch2, torch.Size([16384,16384]))
            A_app.append(A_torch_final)
        A1_transformed = torch.stack(A_app,dim=0).to(device)
        #================================================================================
        # Load the MATLAB-exported basis index map (shared across batches).
        C = io.loadmat(dir+'/matlab_index_save_1.mat')
        C = C['basis_save']
        C = np.squeeze(C)
        X = np.empty((C.shape[0], C[0].shape[0], C[0].shape[1]))
        for i in range(X.shape[0]):
            X[i] = C[i]
        # -1 because of matlab and python
        X1 = X.reshape(144,225)-1
        #==============
        # Expand each patch's starting index into the 15x15 block of global
        # grid indices it covers (stride 128 between rows).
        #==============
        X2 = np.zeros((144,225))
        for i in range(144):
            var2 = np.zeros((15,15))
            ele = X1[i,0]
            for varu in range(15):
                var1 = ele+128*(varu)
                for vm in range(15):
                    var2[varu,vm] = var1+vm
            var3 = var2.reshape(1,225)
            X2[i,:] = var3
        X2 = torch.Tensor(X2)
        mse, nlp, output, target = Bayesian_model.test_model(A1_transformed, B1_transformed,q1_transformed,input_rr, target_pressure,batch_idx, X2,
                        size_average=True, out=True)
        # NOTE(review): `y_noise_var` is computed but never used.
        y_noise_var = (- Bayesian_model.log_beta).exp().mean()
        mse_test += mse.item()
        nlp_test += nlp.item()
        nlp1 = nlp.cpu().detach().numpy()
        nlp_test_val.append(nlp1)
        final_predict_UQ.append(output)
        final_target_UQ.append(target)
        ticc = time()
        print('total time',ticc-tocc)
    save_pred = np.array(final_predict_UQ)
    save_tar = np.array(final_target_UQ)
    mse_test_final += mse_test
    nlp_test_final += nlp_test
    nlp_test_val = np.array(nlp_test_val)
    return mse_test_final, nlp_test_final, save_pred, save_tar
#==========================================================
#==========================================================
# NOTE(review): `main` is not defined anywhere in this script (only `test`
# is); running the file directly will raise NameError. Presumably this
# should call test(...) with appropriate arguments -- confirm.
if __name__ == "__main__":
    main()
| 37.129464 | 165 | 0.625105 | """
Stein Variational Gradient Descent for Deep ConvNet on GPU.
Current implementation is mainly using for-loops over model instances.
"""
import torch
import numpy as np
from time import time
from args import args, device
import h5py
import os
from models.model_det import DenseED
from models.Bayesian_model_NN import Bayesian_model_NN
from models.model_train import Bayesian_model_train
from utils.misc import mkdirs, logger
from utils.plot1 import plot_prediction_det1
from utils.plot import plot_prediction_det
from utils.mcs_data_upload import mcs_load_data
import json
import scipy.io as io
import sys
# Total output pixel counts over the train/test sets (each sample is a
# 128x128 pressure grid).
n_out_pixels_train = args.ntrain*128*128
n_out_pixels_test = args.ntest*128*128
# NOTE(review): `dir` shadows the builtin of the same name.
dir = './models'
# Bayesian NN
Bayesian_model = torch.load('model_%d.pt'%args.ntrain)
KLE_val = 100
# load data
test_loader = mcs_load_data()
print('Loaded data!')
def test(epoch, logger, test_fixed=None):
    """Evaluate model during training.
    Print predictions including 4 rows:
    1. target
    2. predictive mean
    3. error of the above two
    4. two sigma of predictive variance
    Args:
        test_fixed (Tensor): (2, N, *), `test_fixed[0]` is the fixed test input,
            `test_fixed[1]` is the corresponding target
    Returns:
        tuple: (accumulated MSE, accumulated NLP, array of per-batch
        predictions, array of per-batch targets) over the whole test loader.
    """
    Bayesian_model.eval()
    mse_test, nlp_test = 0., 0.
    mse_test_final = 0.
    nlp_test_final = 0.
    final_predict = []  # NOTE(review): never used below
    mse_test, nlp_test = 0., 0.  # NOTE(review): duplicate of the init two lines up
    final_target_UQ = []
    final_predict_UQ = []
    nlp_test_val = []
    for batch_idx, (input,basis_patch,A_matrix, B_matrix,target_P, q_matrix) in enumerate(test_loader):
        # Cast every batch tensor to float32, then move to the active device.
        input_rr,output_basis,A1_transformed1,B1_transformed, target_pressure, q1_transformed \
        = input.float(),basis_patch.float(),A_matrix.float(),B_matrix.float(), target_P.float(), q_matrix.float()
        input_rr,output_basis,A1_transformed1,B1_transformed, target_pressure, q1_transformed \
        = input_rr.to(device),output_basis.to(device),A1_transformed1.to(device),B1_transformed.to(device), target_pressure.to(device), q1_transformed.to(device)
        #================================================================================
        tocc = time()
        # 144 patches of 15x15 per sample — presumably tiles of the 128x128
        # grid; TODO confirm against the data pipeline.
        output_basis = output_basis.view(144*args.batchs,1,15,15)
        input_rr = input_rr.view(144*args.batchs,1,15,15)
        # Rebuild one sparse 16384x16384 system matrix per sample from its
        # (row, col, value) triplet encoding stored in A1_transformed1.
        A_app = []
        for i in range(args.batchs):
            A_torch = A1_transformed1[i,:,:]
            A_torch1 = A_torch[:,0:2]  # (row, col) index pairs
            A_torch2 = A_torch[:,2]    # nonzero values
            A_torch1 = A_torch1.type(torch.LongTensor).to(device)
            A_torch_final = torch.sparse.FloatTensor(A_torch1.t(), A_torch2, torch.Size([16384,16384]))
            A_app.append(A_torch_final)
        A1_transformed = torch.stack(A_app,dim=0).to(device)
        #================================================================================
        # Patch index table exported from MATLAB (1-based indices there).
        C = io.loadmat(dir+'/matlab_index_save_1.mat')
        C = C['basis_save']
        C = np.squeeze(C)
        X = np.empty((C.shape[0], C[0].shape[0], C[0].shape[1]))
        for i in range(X.shape[0]):
            X[i] = C[i]
        # -1 because of matlab and python
        X1 = X.reshape(144,225)-1
        #==============
        # If training, un-comment below part
        #==============
        # Expand each patch's top-left flat index into its full 15x15 window
        # of flat grid indices (row stride 128).
        X2 = np.zeros((144,225))
        for i in range(144):
            var2 = np.zeros((15,15))
            ele = X1[i,0]
            for varu in range(15):
                var1 = ele+128*(varu)
                for vm in range(15):
                    var2[varu,vm] = var1+vm
            var3 = var2.reshape(1,225)
            X2[i,:] = var3
        X2 = torch.Tensor(X2)
        mse, nlp, output, target = Bayesian_model.test_model(A1_transformed, B1_transformed,q1_transformed,input_rr, target_pressure,batch_idx, X2,
                        size_average=True, out=True)
        y_noise_var = (- Bayesian_model.log_beta).exp().mean()  # NOTE(review): unused
        mse_test += mse.item()
        nlp_test += nlp.item()
        nlp1 = nlp.cpu().detach().numpy()
        nlp_test_val.append(nlp1)
        final_predict_UQ.append(output)
        final_target_UQ.append(target)
        ticc = time()
        print('total time',ticc-tocc)
    save_pred = np.array(final_predict_UQ)
    save_tar = np.array(final_target_UQ)
    mse_test_final += mse_test
    nlp_test_final += nlp_test
    nlp_test_val = np.array(nlp_test_val)
    return mse_test_final, nlp_test_final, save_pred, save_tar
def UP(Bayesian_model, y):
    """Uncertainty propagation over Monte-Carlo prediction samples.

    Given samples *y* (MC dimension assumed to be axis 1 — TODO confirm),
    computes the conditional mean and the conditional predictive variance
    (second moment minus squared mean, plus the model's learned noise
    variance exp(-log_beta)), then returns the mean and variance of each
    of those two statistics over axis 0.

    Returns:
        tuple: (E[E[y]], Var[E[y]], E[Var[y]], Var[Var[y]]).
    """
    samples = torch.tensor(y).to(device)
    cond_mean = samples.mean(1)
    second_moment = samples.pow(2).mean(1)
    noise_var = (- Bayesian_model.log_beta).exp()
    print('Noise variances: {}'.format(noise_var))
    cond_pred_var = (second_moment - cond_mean ** 2
                     + noise_var.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1))
    # Statistics of the conditional statistics.
    return (cond_mean.mean(0), cond_mean.var(0),
            cond_pred_var.mean(0), cond_pred_var.var(0))
#==========================================================
def mkdir(path):
    """Create *path* (including any missing parents) if it does not exist.

    Uses ``exist_ok=True`` instead of the original check-then-create
    (LBYL) pattern, which could raise ``FileExistsError`` if another
    process created the directory between the check and the call.
    """
    os.makedirs(path, exist_ok=True)
#==========================================================
def main():
    """Run the full MC evaluation pipeline: test the model, propagate
    uncertainty through the MC samples, plot mean/variance predictions,
    and save all artifacts (HDF5 + .mat files) to disk."""
    args.batchs = 100
    print('Start training.........................................................')
    tic = time()
    #result_plot
    mkdir('result_plot')
    #results
    mkdir('results')
    for epoch in range(1):
        print ('epoch number .......................................',epoch)
        with torch.no_grad():
            mse_tot_test, nlp_tot_test, predict_val,target_val = test(epoch, logger)
    predict_val = np.array(predict_val)
    target_val = np.array(target_val)
    nlp_tot_test  # NOTE(review): bare expression — has no effect
    print('MSE:',mse_tot_test)
    RMSE = np.sqrt(mse_tot_test/n_out_pixels_test)
    print('RMSE:',RMSE)
    training_time = time() - tic
    print('Finished testing:\n{} epochs\n{} data\n{} samples (SVGD)\n{} seconds'
        .format(args.epochs, args.ntrain, args.n_samples, training_time))
    # NOTE(review): names look swapped here (final_tar gets predictions,
    # final_pred gets targets); all three are unused afterwards.
    final_tar = predict_val
    final_pred = target_val
    final_mnlp = np.array(nlp_tot_test)
    # Reshape predictions to (nmc, 20, 1, 128, 128), then move the MC axis
    # to position 1 and transpose H/W for plotting.
    predict_val1 = predict_val.reshape(args.nmc,20,1,128,128)
    predict_val1 = np.swapaxes(predict_val1,0,1)
    predict_val1 = np.swapaxes(predict_val1,3,4)
    target_val1 = target_val.reshape(args.nmc,1,128,128)
    target_val1 = np.swapaxes(target_val1,2,3)
    # Persist raw predictions/targets for later analysis.
    hf = h5py.File('predict_val1.hdf5', 'w')
    hf.create_dataset('predict', data=predict_val1)
    hf.close()
    hf = h5py.File('target_val1.hdf5', 'w')
    hf.create_dataset('target', data=target_val1)
    hf.close()
    predict_val2 = torch.Tensor(predict_val1)
    # Uncertainty propagation: statistics of conditional mean/variance.
    y_pred_EE, y_pred_VE, y_pred_EV, y_pred_VV = UP(Bayesian_model, predict_val2)
    y_pred_EE = y_pred_EE.cpu().detach().numpy()
    y_pred_VE = y_pred_VE.cpu().detach().numpy()
    y_pred_EV = y_pred_EV.cpu().detach().numpy()
    y_pred_VV = y_pred_VV.cpu().detach().numpy()
    # Two-sigma bands for the variance and mean estimates respectively.
    two_sigma = 2 * np.sqrt(y_pred_VV)
    two_sigma_VE = 2 * np.sqrt(y_pred_VE)
    actual_mean = np.mean(target_val1,axis=0)
    actual_mean = actual_mean.reshape(128,128)
    actual_var = np.var(target_val1,axis=0)
    actual_var = actual_var.reshape(128,128)
    # Plot predictive mean vs. target mean ...
    target1 = actual_mean.reshape(1,128,128)
    mean_predict1 = y_pred_EE.reshape(1,128,128)
    std_predict1 = two_sigma_VE.reshape(1,128,128)
    plot_prediction_det(target1, mean_predict1, std_predict1, args.ntrain, KLE_val, plot_fn='imshow')
    # ... and predictive variance vs. target variance.
    target1v = actual_var.reshape(1,128,128)
    mean_predict1v = y_pred_EV.reshape(1,128,128)
    std_predict1v = two_sigma.reshape(1,128,128)
    plot_prediction_det1(target1v, mean_predict1v, std_predict1v, args.ntrain, KLE_val, plot_fn='imshow')
    #Save files
    io.savemat('./result_plot/target1.mat', dict([('target1',np.array(target1))]))
    io.savemat('./result_plot/mean_predict1.mat', dict([('mean_predict1',np.array(mean_predict1))]))
    io.savemat('./result_plot/std_predict1.mat', dict([('std_predict1',np.array(std_predict1))]))
    io.savemat('./result_plot/target1v.mat', dict([('target1v',np.array(target1v))]))
    io.savemat('./result_plot/mean_predict1v.mat', dict([('mean_predict1v',np.array(mean_predict1v))]))
    io.savemat('./result_plot/std_predict1v.mat', dict([('std_predict1v',np.array(std_predict1v))]))
if __name__ == "__main__":
main()
| 3,659 | 0 | 68 |
9cd182a154ad67bc80a62c610648170da0218fab | 47,302 | py | Python | src/action_handlers/manage_bill_handler.py | ZAFW/whopaybot | a2d51de69223efc10ae09f0a70962b8397a5dd93 | [
"MIT"
] | null | null | null | src/action_handlers/manage_bill_handler.py | ZAFW/whopaybot | a2d51de69223efc10ae09f0a70962b8397a5dd93 | [
"MIT"
] | null | null | null | src/action_handlers/manage_bill_handler.py | ZAFW/whopaybot | a2d51de69223efc10ae09f0a70962b8397a5dd93 | [
"MIT"
] | null | null | null | from action_handlers.action_handler import ActionHandler, Action
from telegram.inlinekeyboardmarkup import InlineKeyboardMarkup
from telegram.inlinekeyboardbutton import InlineKeyboardButton
from telegram.ext import Filters
from telegram.parsemode import ParseMode
from telegram.error import BadRequest
import constants as const
import utils
import datetime
import logging
import counter
import math
import random
MODULE_ACTION_TYPE = const.TYPE_MANAGE_BILL
ACTION_GET_MANAGE_BILL = 0
ACTION_GET_MANAGE_BILL_KB = 1
ACTION_SHARE_BILL = 2
ACTION_CALCULATE_SPLIT = 3
ACTION_REFRESH_BILL = 4
ACTION_SEND_DEBTS_BILL_ADMIN = 5
ACTION_GET_CONFIRM_PAYMENTS_KB = 6
ACTION_CONFIRM_BILL_PAYMENT = 7
ACTION_SEND_DEBTS_BILL = 8
ACTION_SEND_BILL = 9
ACTION_SHARE_BILL_ITEM = 10
ACTION_SHARE_ALL_ITEMS = 11
ACTION_GET_SHARE_ITEMS_KB = 12
ACTION_GET_PAY_ITEMS_KB = 13
ACTION_PAY_DEBT = 14
ACTION_GET_INSPECT_BILL_KB = 15
ACTION_GET_FORCE_CONFIRM_PAYMENTS_KB = 16
ACTION_FORCE_CONFIRM_PAYMENT = 17
ACTION_ADD_SOMEONE = 18
ERROR_ITEMS_NOT_SHARED = "The bill cannot be split because the following items are not shared:\n{}"
REQUEST_CALC_SPLIT_CONFIRMATION = "You are about to calculate the splitting of the bill. Once this is done, no new person can be added to the bill anymore. Do you wish to continue? Reply /yes or /no."
ERROR_INVALID_CONTACT = "Sorry, invalid Contact or name sent. Name can only be 250 characters long. Please try again."
REQUEST_PAY_CONFIRMATION = "You are about to confirm <b>{}'s</b> payment of {}{:.2f}. This action is irreversible. Do you wish to continue? Reply /yes or /no."
REQUEST_FORCE_PAY_CONFIRMATION = "You are about to forcibly confirm <b>{}'s</b> payment of {}{:.2f}. This person has not indicated payment yet. This action is irreversible. Do you wish to continue? Reply /yes or /no."
REQUEST_CONTACT = "Please send me the <b>Contact</b> or name of the person. However, this person might <b>not</b> be able to indicate payment for this bill later on. You will have to force confirm his/her payment. To stop this, reply /no."
YES_WITH_QUOTES = "'yes'"
YES = 'yes'
NO_WITH_QUOTES = "'no'"
NO = 'no'
| 35.998478 | 239 | 0.586571 | from action_handlers.action_handler import ActionHandler, Action
from telegram.inlinekeyboardmarkup import InlineKeyboardMarkup
from telegram.inlinekeyboardbutton import InlineKeyboardButton
from telegram.ext import Filters
from telegram.parsemode import ParseMode
from telegram.error import BadRequest
import constants as const
import utils
import datetime
import logging
import counter
import math
import random
MODULE_ACTION_TYPE = const.TYPE_MANAGE_BILL
ACTION_GET_MANAGE_BILL = 0
ACTION_GET_MANAGE_BILL_KB = 1
ACTION_SHARE_BILL = 2
ACTION_CALCULATE_SPLIT = 3
ACTION_REFRESH_BILL = 4
ACTION_SEND_DEBTS_BILL_ADMIN = 5
ACTION_GET_CONFIRM_PAYMENTS_KB = 6
ACTION_CONFIRM_BILL_PAYMENT = 7
ACTION_SEND_DEBTS_BILL = 8
ACTION_SEND_BILL = 9
ACTION_SHARE_BILL_ITEM = 10
ACTION_SHARE_ALL_ITEMS = 11
ACTION_GET_SHARE_ITEMS_KB = 12
ACTION_GET_PAY_ITEMS_KB = 13
ACTION_PAY_DEBT = 14
ACTION_GET_INSPECT_BILL_KB = 15
ACTION_GET_FORCE_CONFIRM_PAYMENTS_KB = 16
ACTION_FORCE_CONFIRM_PAYMENT = 17
ACTION_ADD_SOMEONE = 18
ERROR_ITEMS_NOT_SHARED = "The bill cannot be split because the following items are not shared:\n{}"
REQUEST_CALC_SPLIT_CONFIRMATION = "You are about to calculate the splitting of the bill. Once this is done, no new person can be added to the bill anymore. Do you wish to continue? Reply /yes or /no."
ERROR_INVALID_CONTACT = "Sorry, invalid Contact or name sent. Name can only be 250 characters long. Please try again."
REQUEST_PAY_CONFIRMATION = "You are about to confirm <b>{}'s</b> payment of {}{:.2f}. This action is irreversible. Do you wish to continue? Reply /yes or /no."
REQUEST_FORCE_PAY_CONFIRMATION = "You are about to forcibly confirm <b>{}'s</b> payment of {}{:.2f}. This person has not indicated payment yet. This action is irreversible. Do you wish to continue? Reply /yes or /no."
REQUEST_CONTACT = "Please send me the <b>Contact</b> or name of the person. However, this person might <b>not</b> be able to indicate payment for this bill later on. You will have to force confirm his/her payment. To stop this, reply /no."
YES_WITH_QUOTES = "'yes'"
YES = 'yes'
NO_WITH_QUOTES = "'no'"
NO = 'no'
class BillManagementHandler(ActionHandler):
    """Top-level dispatcher mapping manage-bill action ids to Action objects."""
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE)

    @staticmethod
    def _make_action(mapping, action_id):
        """Instantiate the Action registered for *action_id*, or None."""
        action_cls = mapping.get(action_id)
        return action_cls() if action_cls is not None else None

    def execute(self, bot, update, trans, action_id,
                subaction_id=0, data=None):
        """Dispatch a regular callback action to its handler."""
        action = self._make_action({
            ACTION_SEND_BILL: SendBill,
            ACTION_GET_MANAGE_BILL: SendCompleteBill,
            ACTION_REFRESH_BILL: RefreshBill,
            ACTION_CALCULATE_SPLIT: CalculateBillSplit,
            ACTION_GET_CONFIRM_PAYMENTS_KB: DisplayConfirmPaymentsKB,
            ACTION_CONFIRM_BILL_PAYMENT: ConfirmPayment,
            ACTION_SHARE_BILL_ITEM: ShareBillItem,
            ACTION_SHARE_ALL_ITEMS: ShareAllItems,
            ACTION_GET_MANAGE_BILL_KB: DisplayManageBillKB,
            ACTION_GET_SHARE_ITEMS_KB: DisplayShareItemsKB,
            ACTION_PAY_DEBT: PayDebt,
            ACTION_FORCE_CONFIRM_PAYMENT: ForceConfirmPayment,
            ACTION_GET_FORCE_CONFIRM_PAYMENTS_KB: DisplayForceConfirmPaymentsKB,
            ACTION_ADD_SOMEONE: AddSomeone,
        }, action_id)
        # An unknown id leaves action as None and raises AttributeError,
        # exactly as the original if-chain did.
        action.execute(bot, update, trans, subaction_id, data)

    def execute_yes(self, bot, update, trans, action_id,
                    subaction_id=0, data=None):
        """Dispatch a /yes confirmation to the pending action."""
        action = self._make_action({
            ACTION_CONFIRM_BILL_PAYMENT: ConfirmPayment,
            ACTION_CALCULATE_SPLIT: CalculateBillSplit,
            ACTION_FORCE_CONFIRM_PAYMENT: ForceConfirmPayment,
        }, action_id)
        action.yes(bot, update, trans, subaction_id, data)

    def execute_no(self, bot, update, trans, action_id,
                   subaction_id=0, data=None):
        """Dispatch a /no rejection to the pending action."""
        action = self._make_action({
            ACTION_CONFIRM_BILL_PAYMENT: ConfirmPayment,
            ACTION_CALCULATE_SPLIT: CalculateBillSplit,
            ACTION_FORCE_CONFIRM_PAYMENT: ForceConfirmPayment,
            ACTION_ADD_SOMEONE: AddSomeone,
        }, action_id)
        action.no(bot, update, trans, subaction_id, data)
class SendBill(Action):
    """Entry action: sends the live bill view, or the debts summary once
    the bill has been split (closed)."""
    ACTION_SEND_BILL = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_SEND_BILL)

    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Send the appropriate bill message to the requesting chat."""
        bill_id = data.get(const.JSON_BILL_ID)
        message = update.message
        __, __, __, closed_at = trans.get_bill_gen_info(bill_id)
        if closed_at is not None:
            # Bill already split — show the debts view instead.
            return SendDebtsBill().execute(bot, update, trans, 0, data)
        text, parse_mode, keyboard = SendCompleteBill.get_appropriate_response(
            bill_id, message.from_user.id, trans
        )
        bot.sendMessage(
            text=text,
            chat_id=message.chat_id,
            parse_mode=parse_mode,
            reply_markup=keyboard
        )
class SendCompleteBill(Action):
    """Renders the full (still-open) bill in place of the callback message."""
    ACTION_MANAGE_BILL = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_MANAGE_BILL)
    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Edit the callback message into the complete-bill view (open bills only)."""
        if subaction_id == self.ACTION_MANAGE_BILL:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            __, owner_id, __, is_closed = trans.get_bill_gen_info(bill_id)
            if is_closed is not None:
                # Closed bills are handled elsewhere; silently ignore.
                return
            self.send_bill_response(bot, cbq, bill_id, trans)
    @staticmethod
    def get_appropriate_response(bill_id, user_id, trans):
        """Return (text, parse_mode, keyboard): owners get the manage
        keyboard, everyone else the share-items keyboard."""
        text, pm = utils.get_complete_bill_text(bill_id, trans)
        kb = None
        __, owner_id, __, is_closed = trans.get_bill_gen_info(bill_id)
        if user_id == owner_id:
            kb = DisplayManageBillKB.get_manage_bill_keyboard(
                bill_id, trans
            )
        else:
            kb = DisplayShareItemsKB.get_share_items_keyboard(
                bill_id, trans, user_id
            )
        return text, pm, kb
    def send_bill_response(self, bot, cbq, bill_id, trans):
        """Edit the callback message in place, resetting the user's session first."""
        try:
            chat_id = cbq.message.chat_id
            text, pm, kb = self.get_appropriate_response(
                bill_id, cbq.from_user.id, trans
            )
            trans.reset_session(chat_id, cbq.from_user.id)
            cbq.answer()
            cbq.edit_message_text(
                text=text,
                parse_mode=pm,
                reply_markup=kb
            )
        except BadRequest as e:
            # Typically "message is not modified" when nothing changed.
            print(e)
        except Exception as e:
            logging.exception('SendCompleteBill')
class DisplayManageBillKB(Action):
    """Shows the bill owner's management keyboard on an existing message."""
    ACTION_DISPLAY_NEW_BILL_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_MANAGE_BILL_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Swap in the manage-bill keyboard, provided the caller has rights."""
        has_rights, chat_id, text = evaluate_rights(update, trans, data)
        if not has_rights:
            # Notify the user (if we know where to reach them) and stop.
            if chat_id is not None:
                if update.callback_query is not None:
                    update.callback_query.answer()
                bot.sendMessage(chat_id=chat_id, text=text)
            return
        if subaction_id != self.ACTION_DISPLAY_NEW_BILL_KB:
            return
        cbq = update.callback_query
        bill_id = data.get(const.JSON_BILL_ID)
        return cbq.edit_message_reply_markup(
            reply_markup=self.get_manage_bill_keyboard(bill_id, trans)
        )

    @staticmethod
    def get_manage_bill_keyboard(bill_id, trans):
        """Build the owner's inline keyboard: a share-via-inline-query row
        followed by one callback row per management action."""
        bill_name, __, __, __ = trans.get_bill_gen_info(bill_id)
        rows = [[InlineKeyboardButton(
            text="📮 Share Bill for Collaboration",
            switch_inline_query=bill_name
        )]]
        for label, action_id in (
            ("🔄 Refresh Bill", ACTION_REFRESH_BILL),
            ("🙋 Add yourself to Item(s)", ACTION_GET_SHARE_ITEMS_KB),
            ("💁 Add someone to Item(s)", ACTION_ADD_SOMEONE),
            ("⚖ Calculate Split", ACTION_CALCULATE_SPLIT),
        ):
            rows.append([InlineKeyboardButton(
                text=label,
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    action_id,
                    {const.JSON_BILL_ID: bill_id}
                )
            )])
        return InlineKeyboardMarkup(rows)
class DisplayShareItemsKB(Action):
    """Builds the per-item share/unshare keyboard for an open bill."""
    ACTION_DISPLAY_SHARE_ITEMS_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_SHARE_ITEMS_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Swap the message's keyboard for the share-items keyboard."""
        if subaction_id == self.ACTION_DISPLAY_SHARE_ITEMS_KB:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            kb = self.get_appropriate_keyboard(
                bill_id, cbq.from_user.id, trans
            )
            return cbq.edit_message_reply_markup(reply_markup=kb)

    @staticmethod
    def get_appropriate_keyboard(bill_id, user_id, trans, proxy_uid=None):
        """Pick the admin or regular keyboard variant.

        Args:
            user_id: the person whose item shares are being toggled.
            proxy_uid: the user driving the UI (defaults to user_id); the
                owner gets the admin variant even when toggling on behalf
                of someone else.
        """
        if proxy_uid is None:
            proxy_uid = user_id
        __, owner_id, __, closed_at = trans.get_bill_gen_info(bill_id)
        if owner_id == proxy_uid:
            return DisplayShareItemsKB.get_share_items_admin_keyboard(
                bill_id, trans, user_id
            )
        return DisplayShareItemsKB.get_share_items_keyboard(
            bill_id, trans, user_id
        )

    @staticmethod
    def _item_share_rows(bill_id, trans, user_id):
        """One keyboard row per bill item (share/unshare toggle) plus a
        final share-all/unshare-all row.

        Extracted from the two keyboard builders below, which previously
        duplicated this code verbatim.
        """
        rows = []
        items = trans.get_bill_items(bill_id)
        for item_id, item_name, __ in items:
            if trans.has_bill_share(bill_id, item_id, user_id):
                text = "👋 Unshare " + item_name
            else:
                text = '☝️ Share ' + item_name
            rows.append([InlineKeyboardButton(
                text=text,
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    ACTION_SHARE_BILL_ITEM,
                    {const.JSON_ITEM_ID: item_id,
                     const.JSON_USER_ID: user_id}
                )
            )])
        # Offer "share all" unless the user already shares every item.
        text = "🙅 Unshare all items"
        for item_id, item_name, __ in items:
            if not trans.has_bill_share(bill_id, item_id, user_id):
                text = '🙌 Share all items'
                break
        rows.append([InlineKeyboardButton(
            text=text,
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_SHARE_ALL_ITEMS,
                {const.JSON_BILL_ID: bill_id,
                 const.JSON_USER_ID: user_id}
            )
        )])
        return rows

    @staticmethod
    def get_share_items_keyboard(bill_id, trans, user_id):
        """Regular variant: a Refresh row on top, then the item toggles."""
        keyboard = [[InlineKeyboardButton(
            text='🔄 Refresh',
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_REFRESH_BILL,
                {const.JSON_BILL_ID: bill_id}
            )
        )]]
        keyboard.extend(DisplayShareItemsKB._item_share_rows(
            bill_id, trans, user_id
        ))
        return InlineKeyboardMarkup(keyboard)

    @staticmethod
    def get_share_items_admin_keyboard(bill_id, trans, user_id):
        """Admin variant: item toggles first, a Back row at the bottom."""
        keyboard = DisplayShareItemsKB._item_share_rows(
            bill_id, trans, user_id
        )
        keyboard.append([InlineKeyboardButton(
            text='🔙 Back',
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_GET_MANAGE_BILL_KB,
                {const.JSON_BILL_ID: bill_id}
            )
        )])
        return InlineKeyboardMarkup(keyboard)
class DisplayPayItemsKB(Action):
    """Builds/refreshes the keyboard from which debtors indicate payment."""
    ACTION_DISPLAY_PAY_ITEMS_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_PAY_ITEMS_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Replace the message's keyboard with the payment keyboard."""
        if subaction_id == self.ACTION_DISPLAY_PAY_ITEMS_KB:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            kb = self.get_appropriate_keyboard(
                bill_id, cbq.from_user.id, trans
            )
            return cbq.edit_message_reply_markup(reply_markup=kb)

    @staticmethod
    def get_appropriate_keyboard(bill_id, user_id, trans):
        """Owner gets the admin variant (Back row); others get Refresh."""
        __, owner_id, __, closed_at = trans.get_bill_gen_info(bill_id)
        if owner_id == user_id:
            return DisplayPayItemsKB.get_pay_items_admin_keyboard(
                bill_id, user_id, trans
            )
        return DisplayPayItemsKB.get_pay_items_keyboard(
            bill_id, user_id, trans
        )

    @staticmethod
    def get_pay_items_keyboard(bill_id, user_id, trans):
        """Regular variant: payment buttons plus a Refresh row.

        BUG FIX: the original declared this ``@staticmethod`` but kept a
        ``self`` first parameter, so the call in get_appropriate_keyboard
        shifted every argument by one and omitted ``trans`` entirely
        (TypeError at runtime). The stray parameter is removed.
        """
        keyboard = []
        keyboard.extend(DisplayPayItemsKB.get_payment_buttons(
            bill_id, user_id, trans
        ))
        refresh_btn = InlineKeyboardButton(
            text='🔄 Refresh',
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_REFRESH_BILL,
                {const.JSON_BILL_ID: bill_id}
            )
        )
        keyboard.append([refresh_btn])
        return InlineKeyboardMarkup(keyboard)

    @staticmethod
    def get_pay_items_admin_keyboard(bill_id, user_id, trans):
        """Admin variant: payment buttons plus a Back row."""
        keyboard = []
        keyboard.extend(DisplayPayItemsKB.get_payment_buttons(
            bill_id, user_id, trans
        ))
        back_btn = InlineKeyboardButton(
            text='🔙 Back',
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_GET_MANAGE_BILL_KB,
                {const.JSON_BILL_ID: bill_id}
            )
        )
        keyboard.append([back_btn])
        return InlineKeyboardMarkup(keyboard)

    @staticmethod
    def get_payment_buttons(bill_id, user_id, trans, debts=None):
        """Build [refresh, pay-creditor] rows for each outstanding debt.

        The button text flips to "Unpay" when *user_id* has already
        indicated (pending) payment toward that creditor.

        NOTE(review): a refresh row is appended once *per debt* inside the
        loop; if a single refresh row was intended it should be hoisted
        out — kept as-is to preserve current behavior.
        """
        kb = []
        if debts is None:
            debts, __ = utils.calculate_remaining_debt(
                bill_id, trans
            )
        for debt in debts:
            text = '💸 Pay '
            for debtor in debt['debtors']:
                if (debtor['debtor'][0] == user_id and
                        debtor['status'] == '(Pending)'):
                    text = '💰 Unpay '
                    break
            credtr = debt['creditor']
            refresh_btn = InlineKeyboardButton(
                text="🔄 Refresh Bill",
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    ACTION_REFRESH_BILL,
                    {const.JSON_BILL_ID: bill_id}
                )
            )
            pay_btn = InlineKeyboardButton(
                text=text + utils.format_name(
                    credtr[3], credtr[1], credtr[2]
                ),
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    ACTION_PAY_DEBT,
                    {const.JSON_BILL_ID: bill_id,
                     const.JSON_CREDITOR_ID: credtr[0]}
                )
            )
            kb.append([refresh_btn])
            kb.append([pay_btn])
        return kb
class ShareBillItem(Action):
    """Toggles a single item's share state for a user; if the bill has
    already been split, shows the debts view instead."""
    ACTION_SHARE_ITEM = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_SHARE_BILL_ITEM)
    def execute(self, bot, update, trans, subaction_id, data=None):
        """Handle the share/unshare-item callback.

        Raises:
            Exception: if the callback data carries no user id.
        """
        if subaction_id == self.ACTION_SHARE_ITEM:
            # Timing prints appear to be ad-hoc latency instrumentation.
            print("3. Parsing: " + str(datetime.datetime.now().time()))
            cbq = update.callback_query
            item_id = data.get(const.JSON_ITEM_ID)
            bill_id = trans.get_bill_id_of_item(item_id)
            __, __, __, is_closed = trans.get_bill_gen_info(bill_id)
            if is_closed is not None:
                # Bill already split: items can no longer be toggled, so
                # replace the message with the current debts summary.
                debts, unique_users = utils.calculate_remaining_debt(
                    bill_id, trans
                )
                text, pm = utils.format_debts_bill_text(
                    bill_id, debts, unique_users, trans
                )
                btns = DisplayPayItemsKB.get_payment_buttons(
                    bill_id, cbq.from_user.id, trans, debts=debts
                )
                kb = InlineKeyboardMarkup(btns)
                cbq.answer()
                return cbq.edit_message_text(
                    text=text,
                    parse_mode=pm,
                    reply_markup=kb
                )
            user_id = data.get(const.JSON_USER_ID)
            if user_id is None:
                raise Exception('Missing user_id')
            self.share_bill_item(bot, cbq, bill_id, item_id, user_id, trans)
            print("7. Sent: " + str(datetime.datetime.now().time()))
            counter.Counter.remove_count()
    @staticmethod
    def share_bill_item(bot, cbq, bill_id, item_id, user_id, trans):
        """Toggle the share in the DB, then re-render the bill message."""
        print("4. Toggle share: " + str(datetime.datetime.now().time()))
        trans.toggle_bill_share(bill_id, item_id, user_id)
        print("5. Toggled: " + str(datetime.datetime.now().time()))
        text, pm = utils.get_complete_bill_text(bill_id, trans)
        kb = DisplayShareItemsKB.get_appropriate_keyboard(
            bill_id, user_id, trans, proxy_uid=cbq.from_user.id
        )
        print("6. Prepared: " + str(datetime.datetime.now().time()))
        cbq.edit_message_text(
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
class ShareAllItems(Action):
    """Toggles all item shares at once for a user; if the bill has already
    been split, shows the debts view instead."""
    ACTION_SHARE_ALL = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_SHARE_ALL_ITEMS)
    def execute(self, bot, update, trans, subaction_id, data=None):
        """Handle the share-all/unshare-all callback.

        Raises:
            Exception: if the callback data carries no user id.
        """
        if subaction_id == self.ACTION_SHARE_ALL:
            # Timing prints appear to be ad-hoc latency instrumentation.
            print("3. Parsing: " + str(datetime.datetime.now().time()))
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            __, __, __, is_closed = trans.get_bill_gen_info(bill_id)
            if is_closed is not None:
                # Bill already split: shares can no longer change, so
                # replace the message with the current debts summary.
                debts, unique_users = utils.calculate_remaining_debt(
                    bill_id, trans
                )
                text, pm = utils.format_debts_bill_text(
                    bill_id, debts, unique_users, trans
                )
                btns = DisplayPayItemsKB.get_payment_buttons(
                    bill_id, cbq.from_user.id, trans, debts=debts
                )
                kb = InlineKeyboardMarkup(btns)
                cbq.answer()
                return cbq.edit_message_text(
                    text=text,
                    parse_mode=pm,
                    reply_markup=kb
                )
            user_id = data.get(const.JSON_USER_ID)
            if user_id is None:
                raise Exception('Missing user_id')
            self.share_all_items(bot, cbq, bill_id, user_id, trans)
            print("7. Sent: " + str(datetime.datetime.now().time()))
            counter.Counter.remove_count()
    def share_all_items(self, bot, cbq, bill_id, user_id, trans):
        """Toggle every item's share in the DB, then re-render the bill."""
        print("4. Toggle share: " + str(datetime.datetime.now().time()))
        trans.toggle_all_bill_shares(bill_id, user_id)
        print("5. Toggled: " + str(datetime.datetime.now().time()))
        text, pm = utils.get_complete_bill_text(bill_id, trans)
        kb = DisplayShareItemsKB.get_appropriate_keyboard(
            bill_id, user_id, trans, proxy_uid=cbq.from_user.id
        )
        print("6. Prepared: " + str(datetime.datetime.now().time()))
        cbq.edit_message_text(
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
class RefreshBill(Action):
    """Re-renders a bill message in place: the live bill while open, the
    debts summary once the bill is closed."""
    # NOTE: this class attribute (subaction id 0) shadows the module-level
    # ACTION_REFRESH_BILL constant (4) when accessed via self.
    ACTION_REFRESH_BILL = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_REFRESH_BILL)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Route the refresh to the open-bill or debts-bill renderer."""
        if subaction_id != self.ACTION_REFRESH_BILL:
            return
        bill_id = data.get(const.JSON_BILL_ID)
        __, __, __, closed_at = trans.get_bill_gen_info(bill_id)
        if closed_at is None:
            return SendCompleteBill().execute(
                bot, update, trans, data=data
            )
        return self.refresh_debts_bill(update, trans, data)

    def refresh_debts_bill(self, update, trans, data):
        """Redraw the debts summary over the callback message."""
        try:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            body, parse_mode, markup = SendDebtsBill.get_debts_bill_msg(
                bill_id, cbq.from_user.id, trans
            )
            cbq.answer()
            cbq.edit_message_text(
                text=body,
                parse_mode=parse_mode,
                reply_markup=markup
            )
        except BadRequest as err:
            # Typically "message is not modified" — nothing to redraw.
            print(err)
        except Exception:
            logging.exception('RefreshBill')
class CalculateBillSplit(Action):
    """Owner-only flow that confirms and then performs the bill split,
    computing per-person debts and closing the bill."""
    ACTION_REQUEST_CONFIRMATION = 0
    ACTION_PROCESS_SPLIT_BILL = 1
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_CALCULATE_SPLIT)
    def execute(self, bot, update, trans, subaction_id, data=None):
        """Start the split: verify rights, reject bills with unshared
        items, otherwise ask for /yes-/no confirmation."""
        has_rights, chat_id, text = evaluate_rights(update, trans, data)
        if not has_rights:
            if chat_id is not None:
                if update.callback_query is not None:
                    update.callback_query.answer()
                bot.sendMessage(
                    chat_id=chat_id,
                    text=text
                )
            return
        if subaction_id == self.ACTION_REQUEST_CONFIRMATION:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            unshared_items = self.get_unshared_items(bill_id, trans)
            if len(unshared_items) > 0:
                return self.reject_incomplete_bill(bot, cbq, unshared_items)
            return self.send_confirmation(bot, cbq, bill_id, trans)
    def yes(self, bot, update, trans, subaction_id, data=None):
        """User confirmed: perform the split."""
        if subaction_id == self.ACTION_PROCESS_SPLIT_BILL:
            return self.split_bill(bot, update, trans, data)
    def no(self, bot, update, trans, subaction_id, data=None):
        """User declined: return to the manage-bill view."""
        if subaction_id == self.ACTION_PROCESS_SPLIT_BILL:
            msg = update.message
            bill_id = data.get(const.JSON_BILL_ID)
            return self.send_manage_bill(
                bot, bill_id, msg.chat_id, msg.from_user.id, trans
            )
    def get_unshared_items(self, bill_id, trans):
        """Return (index, name, price) tuples for items nobody shares."""
        items = trans.get_bill_items(bill_id)
        items_dict = {}
        for idx, item in enumerate(items):
            item_id, item_name, item_price = item
            items_dict[item_id] = (idx, item_name, item_price)
        sharers = trans.get_sharers(bill_id)
        # Remove every item that has at least one sharer.
        for item_id, __, __, __, __ in sharers:
            if item_id in items_dict:
                del items_dict[item_id]
        return items_dict.values()
    def reject_incomplete_bill(self, bot, cbq, unshared_items):
        """Tell the owner which items must be shared before splitting."""
        formatted_items = [
            '<i>{}. {} {}{:.2f}</i>'.format(str(idx + 1), name, const.EMOJI_MONEY_BAG, price)
            for idx, name, price in unshared_items
        ]
        bot.sendMessage(
            chat_id=cbq.message.chat_id,
            text=ERROR_ITEMS_NOT_SHARED.format('\n'.join(formatted_items)),
            parse_mode=ParseMode.HTML
        )
    def send_confirmation(self, bot, cbq, bill_id, trans):
        """Store a pending-confirmation session and prompt for /yes or /no."""
        self.set_session(
            cbq.message.chat_id,
            cbq.from_user,
            self.action_type,
            self.action_id,
            self.ACTION_PROCESS_SPLIT_BILL,
            trans,
            data={const.JSON_BILL_ID: bill_id}
        )
        cbq.answer()
        bot.sendMessage(
            chat_id=cbq.message.chat_id,
            text=REQUEST_CALC_SPLIT_CONFIRMATION
        )
    def send_manage_bill(self, bot, bill_id, chat_id, user_id, trans):
        """Re-send the complete bill with the manage keyboard (session reset)."""
        text, pm = utils.get_complete_bill_text(bill_id, trans)
        keyboard = DisplayManageBillKB.get_manage_bill_keyboard(bill_id, trans)
        trans.reset_session(chat_id, user_id)
        bot.sendMessage(
            chat_id=chat_id,
            text=text,
            parse_mode=pm,
            reply_markup=keyboard
        )
    def split_bill(self, bot, update, trans, data):
        """Compute each sharer's debt (in integer cents, taxes compounded
        multiplicatively), persist the debts, close the bill, create the
        payment rows and show the admin debts view.

        Remainder cents that don't divide evenly among an item's sharers
        are assigned to randomly chosen sharers, one cent each.
        """
        try:
            bill_id = data[const.JSON_BILL_ID]
            bill = trans.get_bill_details(bill_id)
            taxes = bill['taxes']
            # Compound all tax percentages into a single multiplier.
            tax_amt = 1
            for __, __, amt in taxes:
                tax_amt *= (1 + amt / 100)
            sharers = trans.get_sharers(bill_id)
            items = bill['items']
            debtors = {}
            for item in items:
                item_id, title, price = item
                item_sharers = []
                for i_id, u_id, __, __, __ in sharers:
                    if i_id == item_id:
                        item_sharers.append(u_id)
                if len(item_sharers) == 0:
                    continue
                num_sharers = len(item_sharers)
                # convert to cents
                item_amount = math.floor(price * tax_amt * 100)
                debt = item_amount // num_sharers
                remainder = item_amount % num_sharers
                # get random users to get remainder
                selected = random.sample(range(num_sharers), remainder)
                for i, sharer in enumerate(item_sharers):
                    amt_to_pay = debt
                    if i in selected:
                        amt_to_pay += 1
                    if debtors.get(sharer) is None:
                        debtors[sharer] = amt_to_pay / 100
                    else:
                        debtors[sharer] += amt_to_pay / 100
            trans.add_debtors(bill_id, bill['owner_id'], debtors)
            trans.close_bill(bill_id)
            for debtor_id, amt in debtors.items():
                # The owner's own share is auto-confirmed; everyone else's
                # payment row starts hidden (is_deleted) until activated.
                auto_confirm = debtor_id == bill['owner_id']
                is_deleted = debtor_id != bill['owner_id']
                trans.add_payment_by_bill(
                    const.PAY_TYPE_NORMAL,
                    bill_id,
                    bill['owner_id'],
                    debtor_id,
                    auto_confirm=auto_confirm,
                    is_deleted=is_deleted
                )
            return SendDebtsBillAdmin().execute(bot, update, trans, data=data)
        except Exception as e:
            logging.exception('split_bill')
class DisplayInspectBillKB(Action):
    """Offers 'inspect by person' / 'inspect by item' views of a bill."""
    ACTION_DISPLAY_INSPECT_BILL_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_INSPECT_BILL_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Swap in the inspect-bill keyboard."""
        if subaction_id == self.ACTION_DISPLAY_INSPECT_BILL_KB:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            return cbq.edit_message_reply_markup(
                reply_markup=self.get_inspect_bill_keyboard(bill_id)
            )

    @staticmethod
    def get_inspect_bill_keyboard(bill_id):
        """Build the inspect-bill keyboard.

        BUG FIX: this was a plain method taking only ``bill_id``, so the
        ``self.get_inspect_bill_keyboard(bill_id)`` call above passed two
        arguments and raised TypeError; it is now a @staticmethod.
        The Back button was also mislabeled "Inspect Bill by Item".

        NOTE(review): all three callbacks point at ACTION_REFRESH_BILL —
        this looks like placeholder wiring; confirm the intended actions.
        """
        refresh_callback = utils.get_action_callback_data(
            MODULE_ACTION_TYPE,
            ACTION_REFRESH_BILL,
            {const.JSON_BILL_ID: bill_id}
        )
        by_user_btn = InlineKeyboardButton(
            text="Inspect Bill by Person",
            callback_data=refresh_callback
        )
        by_item_btn = InlineKeyboardButton(
            text="Inspect Bill by Item",
            callback_data=refresh_callback
        )
        back_btn = InlineKeyboardButton(
            text="🔙 Back",
            callback_data=refresh_callback
        )
        kb = [
            [by_user_btn],
            [by_item_btn],
            [back_btn]
        ]
        return InlineKeyboardMarkup(kb)
class DisplayConfirmPaymentsKB(Action):
    """Keyboard for the creditor to confirm payments debtors have indicated."""
    ACTION_DISPLAY_PAYMENTS_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_CONFIRM_PAYMENTS_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Replace the message keyboard with the pending-payments list."""
        if subaction_id != self.ACTION_DISPLAY_PAYMENTS_KB:
            return
        cbq = update.callback_query
        markup = self.get_confirm_payments_keyboard(
            data.get(const.JSON_BILL_ID), cbq.from_user.id, trans
        )
        return cbq.edit_message_reply_markup(reply_markup=markup)

    @staticmethod
    def get_confirm_payments_keyboard(bill_id, creditor_id, trans):
        """One confirm button per pending payment, plus a Back row."""
        rows = [
            [InlineKeyboardButton(
                text='✅ {} {}{:.2f}'.format(
                    utils.format_name(pmt[5], pmt[3], pmt[4]),
                    const.EMOJI_MONEY_BAG,
                    pmt[1],
                ),
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    ACTION_CONFIRM_BILL_PAYMENT,
                    {const.JSON_BILL_ID: bill_id,
                     const.JSON_PAYMENT_ID: pmt[0]}
                )
            )]
            for pmt in trans.get_pending_payments(bill_id, creditor_id)
        ]
        rows.append([InlineKeyboardButton(
            text="🔙 Back",
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_REFRESH_BILL,
                {const.JSON_BILL_ID: bill_id}
            )
        )])
        return InlineKeyboardMarkup(rows)
class DisplayForceConfirmPaymentsKB(Action):
    """Shows the keyboard the owner uses to force-confirm unpaid payments."""
    ACTION_DISPLAY_PAYMENTS_KB = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_GET_FORCE_CONFIRM_PAYMENTS_KB)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Swap the message's inline keyboard for the force-confirm menu."""
        if subaction_id != self.ACTION_DISPLAY_PAYMENTS_KB:
            return
        cbq = update.callback_query
        keyboard = self.get_force_confirm_payments_keyboard(
            data.get(const.JSON_BILL_ID), cbq.from_user.id, trans
        )
        return cbq.edit_message_reply_markup(reply_markup=keyboard)

    @staticmethod
    def get_force_confirm_payments_keyboard(bill_id, creditor_id, trans):
        """Build one force-confirm row per unpaid payment plus a back row.

        Rows come from trans.get_unpaid_payments; each tuple is indexed as
        (payment_id, amount, _, first, last, username).
        """
        rows = []
        for payment in trans.get_unpaid_payments(bill_id, creditor_id):
            debtor = utils.format_name(payment[5], payment[3], payment[4])
            rows.append([InlineKeyboardButton(
                text='✅ {} {}{:.2f}'.format(
                    debtor,
                    const.EMOJI_MONEY_BAG,
                    payment[1],
                ),
                callback_data=utils.get_action_callback_data(
                    MODULE_ACTION_TYPE,
                    ACTION_FORCE_CONFIRM_PAYMENT,
                    {const.JSON_BILL_ID: bill_id,
                     const.JSON_PAYMENT_ID: payment[0]}
                )
            )])
        rows.append([InlineKeyboardButton(
            text="🔙 Back",
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_REFRESH_BILL,
                {const.JSON_BILL_ID: bill_id}
            )
        )])
        return InlineKeyboardMarkup(rows)
class SendDebtsBill(Action):
    """Sends the debts view of a closed bill to the requesting user,
    delegating to SendDebtsBillAdmin when the requester owns the bill."""
    ACTION_SEND_DEBTS_BILL = 0
    def __init__(self):
        # NOTE: inside a method/def body the class attribute is not in
        # scope, so this resolves to the module-level ACTION_SEND_DEBTS_BILL.
        super().__init__(MODULE_ACTION_TYPE, ACTION_SEND_DEBTS_BILL)
    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Send the debts bill; silently no-op if the bill is not closed."""
        if subaction_id == self.ACTION_SEND_DEBTS_BILL:
            bill_id = data.get(const.JSON_BILL_ID)
            __, owner_id, __, is_closed = trans.get_bill_gen_info(bill_id)
            if not is_closed:
                return
            msg = update.message
            if msg.from_user.id == owner_id:
                return SendDebtsBillAdmin().execute(
                    bot, update, trans, subaction_id, data
                )
            return self.send_debts_bill(bot, bill_id, msg, trans)
    def send_debts_bill(self, bot, bill_id, msg, trans):
        """Render the debts message, clear the session, and send it."""
        text, pm, kb = self.get_debts_bill_msg(bill_id, msg.from_user.id, trans)
        trans.reset_session(msg.chat_id, msg.from_user.id)
        bot.sendMessage(
            chat_id=msg.chat_id,
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
    @staticmethod
    def get_debts_bill_msg(bill_id, user_id, trans):
        """Return (text, parse_mode, keyboard) for the debts view.

        Owners get the admin variant; everyone else gets their remaining
        debts with pay buttons.
        """
        __, owner_id, __, __ = trans.get_bill_gen_info(bill_id)
        if user_id == owner_id:
            return SendDebtsBillAdmin.get_debts_bill_msg(bill_id, trans)
        debts, unique_users = utils.calculate_remaining_debt(bill_id, trans)
        text, pm = utils.format_debts_bill_text(
            bill_id, debts, unique_users, trans
        )
        kb = DisplayPayItemsKB.get_payment_buttons(
            bill_id, user_id, trans, debts=debts
        )
        return text, pm, InlineKeyboardMarkup(kb)
class SendDebtsBillAdmin(Action):
    """Sends the owner's (admin) view of a debts bill, with share/refresh
    and payment-confirmation controls."""
    ACTION_SEND_DEBTS_BILL = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_SEND_DEBTS_BILL_ADMIN)
    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Send the admin debts view for the bill referenced in data."""
        if subaction_id == self.ACTION_SEND_DEBTS_BILL:
            bill_id = data.get(const.JSON_BILL_ID)
            msg = update.message
            self.send_debts_bill(bot, bill_id, msg, trans)
    def send_debts_bill(self, bot, bill_id, msg, trans):
        """Render the admin message, clear the session, and send it."""
        text, pm, kb = self.get_debts_bill_msg(bill_id, trans)
        trans.reset_session(msg.chat_id, msg.from_user.id)
        bot.sendMessage(
            chat_id=msg.chat_id,
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
    @staticmethod
    def get_debts_bill_msg(bill_id, trans):
        """Return (text, parse_mode, keyboard) for the owner's debts view.

        Keyboard rows: share (inline query with the bill name), refresh,
        confirm payments, force-confirm payments.
        """
        bill_name, __, __, __ = trans.get_bill_gen_info(bill_id)
        share_btn = InlineKeyboardButton(
            text="📮 Share Bill",
            switch_inline_query=bill_name
        )
        refresh_btn = InlineKeyboardButton(
            text="🔄 Refresh Bill",
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_REFRESH_BILL,
                {const.JSON_BILL_ID: bill_id}
            )
        )
        confirm_btn = InlineKeyboardButton(
            text="🤑 Confirm Payments",
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_GET_CONFIRM_PAYMENTS_KB,
                {const.JSON_BILL_ID: bill_id}
            )
        )
        f_confirm_btn = InlineKeyboardButton(
            text="😵 Force Confirm Payments",
            callback_data=utils.get_action_callback_data(
                MODULE_ACTION_TYPE,
                ACTION_GET_FORCE_CONFIRM_PAYMENTS_KB,
                {const.JSON_BILL_ID: bill_id}
            )
        )
        kb = InlineKeyboardMarkup(
            [[share_btn],
             [refresh_btn],
             [confirm_btn],
             [f_confirm_btn]]
        )
        text, pm = utils.get_debts_bill_text(bill_id, trans)
        return text, pm, kb
class PayDebt(Action):
    """Records a payment from the tapping user to a creditor, then
    refreshes the bill message."""
    ACTION_PAY_DEBT = 0

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_PAY_DEBT)

    def execute(self, bot, update, trans, subaction_id, data=None):
        """Persist the payment and redraw the bill."""
        if subaction_id != self.ACTION_PAY_DEBT:
            return
        callback = update.callback_query
        self.pay_debt(
            bot,
            callback,
            data.get(const.JSON_BILL_ID),
            data.get(const.JSON_CREDITOR_ID),
            trans
        )
        RefreshBill().execute(bot, update, trans, 0, data)

    def pay_debt(self, bot, cbq, bill_id, creditor_id, trans):
        """Persist a normal-type payment from the callback's sender."""
        trans.add_payment_by_bill(
            const.PAY_TYPE_NORMAL, bill_id, creditor_id, cbq.from_user.id
        )
class ConfirmPayment(Action):
    """Two-step confirmation of a single pending payment by the bill
    owner: first ask yes/no, then mark the payment confirmed."""
    ACTION_REQUEST_CONFIRMATION = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_CONFIRM_BILL_PAYMENT)
    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Start the confirmation dialog for the selected payment."""
        if subaction_id == self.ACTION_REQUEST_CONFIRMATION:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            payment_id = data.get(const.JSON_PAYMENT_ID)
            return self.send_confirmation(bot, cbq, bill_id, payment_id, trans)
    def send_confirmation(self, bot, cbq, bill_id, payment_id, trans):
        """Store a session so the next yes/no reply routes back here,
        then prompt the owner with the payer's name and amount."""
        self.set_session(
            cbq.message.chat_id,
            cbq.from_user,
            self.action_type,
            self.action_id,
            0,
            trans,
            data={const.JSON_BILL_ID: bill_id,
                  const.JSON_PAYMENT_ID: payment_id}
        )
        amt, fname, lname, uname = trans.get_payment(payment_id)
        cbq.answer()
        bot.sendMessage(
            chat_id=cbq.message.chat_id,
            text=REQUEST_PAY_CONFIRMATION.format(
                utils.escape_html(
                    utils.format_name(uname, fname, lname)
                ),
                const.EMOJI_MONEY_BAG,
                amt
            ),
            parse_mode=ParseMode.HTML
        )
    def yes(self, bot, update, trans, subaction_id, data=None):
        """Owner answered yes: confirm the payment."""
        bill_id = data.get(const.JSON_BILL_ID)
        payment_id = data.get(const.JSON_PAYMENT_ID)
        self.confirm_payment(
            bot, bill_id, payment_id, update.message, trans
        )
    def no(self, bot, update, trans, subaction_id, data=None):
        """Owner answered no: fall back to re-sending the debts bill."""
        return SendDebtsBill().execute(bot, update, trans, 0, data)
    def confirm_payment(self, bot, bill_id, payment_id, msg, trans):
        """Mark the payment confirmed and send the refreshed admin view."""
        trans.confirm_payment(payment_id)
        text, pm = utils.get_debts_bill_text(bill_id, trans)
        kb = DisplayConfirmPaymentsKB.get_confirm_payments_keyboard(
            bill_id, msg.from_user.id, trans
        )
        trans.reset_session(msg.chat_id, msg.from_user.id)
        bot.sendMessage(
            chat_id=msg.chat_id,
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
class ForceConfirmPayment(Action):
    """Two-step force-confirmation of an unpaid payment by the bill
    owner (mirror of ConfirmPayment for payments never initiated)."""
    ACTION_REQUEST_CONFIRMATION = 0
    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_FORCE_CONFIRM_PAYMENT)
    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Start the force-confirmation dialog for the selected payment."""
        if subaction_id == self.ACTION_REQUEST_CONFIRMATION:
            cbq = update.callback_query
            bill_id = data.get(const.JSON_BILL_ID)
            payment_id = data.get(const.JSON_PAYMENT_ID)
            return self.send_confirmation(bot, cbq, bill_id, payment_id, trans)
    def send_confirmation(self, bot, cbq, bill_id, payment_id, trans):
        """Store a session so the next yes/no reply routes back here,
        then prompt the owner with the debtor's name and amount."""
        self.set_session(
            cbq.message.chat_id,
            cbq.from_user,
            self.action_type,
            self.action_id,
            0,
            trans,
            data={const.JSON_BILL_ID: bill_id,
                  const.JSON_PAYMENT_ID: payment_id}
        )
        amt, fname, lname, uname = trans.get_payment(payment_id)
        cbq.answer()
        bot.sendMessage(
            chat_id=cbq.message.chat_id,
            text=REQUEST_FORCE_PAY_CONFIRMATION.format(
                utils.escape_html(
                    utils.format_name(uname, fname, lname)
                ),
                const.EMOJI_MONEY_BAG,
                amt
            ),
            parse_mode=ParseMode.HTML
        )
    def yes(self, bot, update, trans, subaction_id, data=None):
        """Owner answered yes: force-confirm the payment."""
        bill_id = data.get(const.JSON_BILL_ID)
        payment_id = data.get(const.JSON_PAYMENT_ID)
        self.force_confirm_payment(
            bot, bill_id, payment_id, update.message, trans
        )
    def no(self, bot, update, trans, subaction_id, data=None):
        """Owner answered no: fall back to re-sending the debts bill."""
        return SendDebtsBill().execute(bot, update, trans, 0, data)
    def force_confirm_payment(self, bot, bill_id, payment_id, msg, trans):
        """Mark the payment force-confirmed and send the refreshed view."""
        trans.force_confirm_payment(payment_id)
        text, pm = utils.get_debts_bill_text(bill_id, trans)
        kb = DisplayForceConfirmPaymentsKB.get_force_confirm_payments_keyboard(
            bill_id, msg.from_user.id, trans
        )
        trans.reset_session(msg.chat_id, msg.from_user.id)
        bot.sendMessage(
            chat_id=msg.chat_id,
            text=text,
            parse_mode=pm,
            reply_markup=kb
        )
class AddSomeone(Action):
    """Lets the bill owner add a participant by sharing a Telegram
    contact or typing a free-text name, then re-sends the items list."""
    ACTION_REQUEST_CONTACT = 0
    ACTION_DISPLAY_ITEMS = 1

    def __init__(self):
        super().__init__(MODULE_ACTION_TYPE, ACTION_ADD_SOMEONE)

    def execute(self, bot, update, trans, subaction_id=0, data=None):
        """Route between requesting a contact and displaying the items."""
        has_rights, chat_id, text = evaluate_rights(update, trans, data)
        if not has_rights:
            # NOTE(review): when chat_id is None we fall through and keep
            # going even without rights -- preserved as-is, verify intent.
            if chat_id is not None:
                if update.callback_query is not None:
                    update.callback_query.answer()
                return bot.sendMessage(
                    chat_id=chat_id,
                    text=text
                )
        bill_id = data.get(const.JSON_BILL_ID)
        if subaction_id == self.ACTION_REQUEST_CONTACT:
            cbq = update.callback_query
            return self.request_contact(bot, cbq, bill_id, trans)
        if subaction_id == self.ACTION_DISPLAY_ITEMS:
            return self.send_items_list(bot, update.message, bill_id, trans)

    def no(self, bot, update, trans, subaction_id, data=None):
        """User declined -- fall back to re-sending the plain bill."""
        return SendBill().execute(bot, update, trans, subaction_id, data)

    def request_contact(self, bot, cbq, bill_id, trans):
        """Store a session expecting a contact, then prompt the owner."""
        self.set_session(
            cbq.message.chat_id,
            cbq.from_user,
            self.action_type,
            self.action_id,
            self.ACTION_DISPLAY_ITEMS,
            trans,
            data={const.JSON_BILL_ID: bill_id}
        )
        cbq.answer()
        bot.sendMessage(
            chat_id=cbq.message.chat_id,
            text=REQUEST_CONTACT,
            parse_mode=ParseMode.HTML
        )

    def send_items_list(self, bot, msg, bill_id, trans):
        """Validate the contact/text reply, register the user, and show
        the sharing keyboard; on failure report the error to the chat."""
        try:
            is_valid = False
            user_id = 0
            fname = None
            lname = None
            if Filters.contact.filter(msg):
                is_valid = True
                contact = msg.contact
                if contact is None:
                    raise Exception(ERROR_INVALID_CONTACT)
                user_id = contact.user_id
                fname = contact.first_name
                lname = contact.last_name
            if Filters.text.filter(msg):
                is_valid = True
                text = msg.text
                if (text is None or len(text) < 1 or len(text) > 250):
                    # BUG FIX: the exception was constructed but never
                    # raised, so empty/over-long names slipped through.
                    raise Exception(ERROR_INVALID_CONTACT)
                fname = text
            if not is_valid:
                raise Exception(ERROR_INVALID_CONTACT)
            # user_id == 0 means a free-text (non-Telegram) participant.
            user_id = trans.add_user(
                user_id,
                fname,
                lname,
                None,
                is_ignore_id=(user_id == 0)
            )
            text, pm = utils.get_complete_bill_text(bill_id, trans)
            kb = DisplayShareItemsKB.get_appropriate_keyboard(
                bill_id, user_id, trans, proxy_uid=msg.from_user.id
            )
            bot.sendMessage(
                chat_id=msg.chat_id,
                text=text,
                parse_mode=pm,
                reply_markup=kb
            )
            trans.reset_session(msg.chat_id, msg.from_user.id)
        except Exception as e:
            return bot.sendMessage(
                chat_id=msg.chat_id,
                text=str(e)
            )
def evaluate_rights(update, trans, data):
    """Check whether the current update may run an owner-only bill action.

    Returns a (has_rights, chat_id, error_text) triple. When data carries
    no bill id the action is unrestricted. Otherwise the sender (callback
    query or message) must be the bill owner, and the bill must not be
    closed yet.
    """
    if data is None:
        return True, None, None
    bill_id = data.get(const.JSON_BILL_ID)
    if bill_id is None:
        return True, None, None
    __, owner_id, __, is_closed = trans.get_bill_gen_info(bill_id)
    no_permission = 'Sorry, you do not have permission for this action.'
    chat_id = None
    if update.callback_query is not None:
        has_rights = update.callback_query.from_user.id == owner_id
        chat_id = update.callback_query.message.chat_id
        if not has_rights:
            update.callback_query.answer()
            return has_rights, chat_id, no_permission
    if chat_id is None and update.message is not None:
        has_rights = update.message.from_user.id == owner_id
        chat_id = update.message.chat_id
        if not has_rights:
            return has_rights, chat_id, no_permission
    # BUG FIX: was `if is_closed is not None`, which flagged a falsy
    # closed-marker (0/False) as closed. Use truthiness, consistent with
    # the `if not is_closed` check in SendDebtsBill.execute.
    if is_closed:
        return False, chat_id, 'Sorry, bill is already calculated and closed.'
    return True, None, None
| 41,564 | 3,116 | 567 |
df422463a9f784dc2d6e065bfa91ba5deeaf0dbc | 4,031 | py | Python | tests/zoomus/components/webinar/test_register.py | karthikkommindala1995/zoomus | bf994aee5656c88a4f53e78fe8fb80c39fd737db | [
"Apache-2.0"
] | 1 | 2019-11-05T06:02:20.000Z | 2019-11-05T06:02:20.000Z | tests/zoomus/components/webinar/test_register.py | karthikkommindala1995/zoomus | bf994aee5656c88a4f53e78fe8fb80c39fd737db | [
"Apache-2.0"
] | null | null | null | tests/zoomus/components/webinar/test_register.py | karthikkommindala1995/zoomus | bf994aee5656c88a4f53e78fe8fb80c39fd737db | [
"Apache-2.0"
] | null | null | null | from datetime import datetime
import unittest
from mock import patch
from zoomus import components
def suite():
    """Define all the tests of the module."""
    # BUG FIX: unittest.makeSuite is deprecated since 3.11 and removed in
    # 3.13; the default loader is the behaviorally-equivalent replacement.
    suite = unittest.TestSuite()
    suite.addTest(
        unittest.defaultTestLoader.loadTestsFromTestCase(RegisterV1TestCase)
    )
    return suite
if __name__ == '__main__':
unittest.main()
| 31.248062 | 83 | 0.574051 | from datetime import datetime
import unittest
from mock import patch
from zoomus import components
def suite():
    """Define all the tests of the module."""
    # BUG FIX: unittest.makeSuite is deprecated since 3.11 and removed in
    # 3.13; the default loader is the behaviorally-equivalent replacement.
    suite = unittest.TestSuite()
    suite.addTest(
        unittest.defaultTestLoader.loadTestsFromTestCase(RegisterV1TestCase)
    )
    return suite
class RegisterV1TestCase(unittest.TestCase):
    """Tests for WebinarComponent.register (Zoom API v1)."""

    def setUp(self):
        # Component under test with dummy credentials; post_request is
        # patched in every test, so no network traffic happens.
        self.component = components.webinar.WebinarComponent(
            base_uri="http://foo.com",
            config={
                'api_key': 'KEY',
                'api_secret': 'SECRET'
            }
        )

    @patch.object(components.base.BaseComponent, 'post_request', return_value=True)
    def test_can_register(self, mock_post_request):
        """All required fields present -> POST /webinar/register."""
        self.component.register(
            id='ID',
            email='foo@bar.com',
            first_name="Foo",
            last_name="Bar")
        mock_post_request.assert_called_with(
            "/webinar/register",
            params={
                'id': 'ID',
                'email': 'foo@bar.com',
                'first_name': 'Foo',
                'last_name': 'Bar'
            }
        )

    # BUG FIX: assertRaisesRegexp was deprecated in Python 3.2 and removed
    # in 3.12; assertRaisesRegex is the drop-in replacement.
    def test_requires_id(self):
        with self.assertRaisesRegex(ValueError, "'id' must be set"):
            self.component.register()

    def test_requires_email(self):
        with self.assertRaisesRegex(ValueError, "'email' must be set"):
            self.component.register(id='ID')

    def test_requires_first_name(self):
        with self.assertRaisesRegex(ValueError, "'first_name' must be set"):
            self.component.register(id='ID', email='foo@bar.com')

    def test_requires_last_name(self):
        with self.assertRaisesRegex(ValueError, "'last_name' must be set"):
            self.component.register(
                id='ID', email='foo@bar.com', first_name='foo')

    @patch.object(components.base.BaseComponent, 'post_request', return_value=True)
    def test_start_time_gets_transformed(self, mock_post_request):
        """datetime start_time is serialized to an ISO-8601 Zulu string."""
        self.component.register(
            id='ID', email='foo@bar.com', first_name='foo', last_name='bar',
            start_time=datetime(1969, 1, 1)
        )
        mock_post_request.assert_called_with(
            "/webinar/register",
            params={
                'id': 'ID',
                'email': 'foo@bar.com',
                'first_name': 'foo',
                'last_name': 'bar',
                'start_time': '1969-01-01T00:00:00Z',
            }
        )
class RegisterV2TestCase(unittest.TestCase):
    """Tests for WebinarComponentV2.register (Zoom API v2)."""

    def setUp(self):
        # Component under test with dummy credentials; post_request is
        # patched in every test, so no network traffic happens.
        self.component = components.webinar.WebinarComponentV2(
            base_uri="http://foo.com",
            config={
                'api_key': 'KEY',
                'api_secret': 'SECRET'
            }
        )

    @patch.object(components.base.BaseComponent, 'post_request', return_value=True)
    def test_can_register(self, mock_post_request):
        """All required fields present -> POST /webinars/{id}/registrants."""
        self.component.register(
            id='ID',
            email='foo@bar.com',
            first_name="Foo",
            last_name="Bar")
        mock_post_request.assert_called_with(
            "/webinars/ID/registrants",
            params={
                'id': 'ID',
                'email': 'foo@bar.com',
                'first_name': 'Foo',
                'last_name': 'Bar'
            }
        )

    # BUG FIX: assertRaisesRegexp was deprecated in Python 3.2 and removed
    # in 3.12; assertRaisesRegex is the drop-in replacement.
    def test_requires_id(self):
        with self.assertRaisesRegex(ValueError, "'id' must be set"):
            self.component.register()

    def test_requires_email(self):
        with self.assertRaisesRegex(ValueError, "'email' must be set"):
            self.component.register(id='ID')

    def test_requires_first_name(self):
        with self.assertRaisesRegex(ValueError, "'first_name' must be set"):
            self.component.register(id='ID', email='foo@bar.com')

    def test_requires_last_name(self):
        with self.assertRaisesRegex(ValueError, "'last_name' must be set"):
            self.component.register(
                id='ID', email='foo@bar.com', first_name='foo')
if __name__ == '__main__':
unittest.main()
| 3,015 | 649 | 46 |
cd5586552672028e366947fc3fccefd5d49c0394 | 10,942 | py | Python | finctrl/__init__.py | aspyatkin/finctrl | df0b787065c7cff612a9bdfd0fed09880b73c571 | [
"MIT"
] | null | null | null | finctrl/__init__.py | aspyatkin/finctrl | df0b787065c7cff612a9bdfd0fed09880b73c571 | [
"MIT"
] | null | null | null | finctrl/__init__.py | aspyatkin/finctrl | df0b787065c7cff612a9bdfd0fed09880b73c571 | [
"MIT"
] | null | null | null | from peewee import (
SqliteDatabase,
Model,
CharField,
ForeignKeyField,
DateField,
DecimalField,
DateTimeField,
IntegerField,
TextField
)
import os
import click
from datetime import date, datetime, timedelta
from decimal import Decimal
from enum import IntEnum
db = SqliteDatabase(os.path.join(os.getcwd(), 'finance.db'))
@click.group()
@cli.command()
@click.argument('name')
@click.argument('code')
@click.argument('sign')
@cli.command()
@cli.command()
@click.argument('name')
@click.argument('currency_code')
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('name')
@cli.command()
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@click.argument('balance')
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('type', type=click.Choice([TransactionType.DEBIT.name, TransactionType.CREDIT.name, TransactionType.TRANSFER_OUT.name, TransactionType.TRANSFER_IN.name]))
@click.argument('amount')
@click.argument('comment')
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@click.argument('hour', type=click.INT, default=datetime.now().hour)
@click.argument('minute', type=click.INT, default=datetime.now().minute)
@click.argument('second', type=click.INT, default=datetime.now().second)
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@cli.command()
@click.argument('transaction_id', click.INT)
@cli.command()
@click.argument('account_id', click.INT)
@cli.command()
@click.argument('entry_id', click.INT)
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
if __name__ == '__main__':
cli()
| 34.087227 | 200 | 0.677847 | from peewee import (
SqliteDatabase,
Model,
CharField,
ForeignKeyField,
DateField,
DecimalField,
DateTimeField,
IntegerField,
TextField
)
import os
import click
from datetime import date, datetime, timedelta
from decimal import Decimal
from enum import IntEnum
db = SqliteDatabase(os.path.join(os.getcwd(), 'finance.db'))
class Currency(Model):
    """A currency: display name, short code (e.g. "USD") and print sign."""
    name = CharField()
    code = CharField()
    sign = CharField()
    class Meta:
        # All models persist to the module-level SQLite database.
        database = db
class Account(Model):
    """A money account denominated in a single currency."""
    name = CharField()
    currency = ForeignKeyField(Currency, related_name='accounts')
    class Meta:
        database = db
class AccountBalance(Model):
    """A snapshot of an account's balance on a given date."""
    account = ForeignKeyField(Account, related_name='balance_entries')
    date = DateField()
    balance = DecimalField(max_digits=10, decimal_places=2)
    class Meta:
        database = db
class TransactionType(IntEnum):
    """Kind of transaction; the raw int is stored in AccountTransaction.type.

    DEBIT/TRANSFER_OUT decrease the balance, CREDIT/TRANSFER_IN increase it
    (see update_account_balance_entries).
    """
    DEBIT = 1
    CREDIT = 2
    TRANSFER_OUT = 3
    TRANSFER_IN = 4
class AccountTransaction(Model):
    """A single movement of money on an account."""
    account = ForeignKeyField(Account, related_name='transactions')
    timestamp = DateTimeField()
    # Raw TransactionType value (a plain IntegerField, not an enum field).
    type = IntegerField()
    amount = DecimalField(max_digits=10, decimal_places=2)
    comment = TextField()
    class Meta:
        database = db
def init_db():
    """Create all tables if they do not exist yet (safe=True -> idempotent)."""
    db.create_tables([Currency, Account, AccountBalance, AccountTransaction], safe=True)
@click.group()
def cli():
    """Personal finance tracking commands."""
    pass
@cli.command()
@click.argument('name')
@click.argument('code')
@click.argument('sign')
def create_currency(name, code, sign):
    """Create a new Currency row and echo its id."""
    init_db()
    currency = Currency.create(
        name=name,
        code=code,
        sign=sign
    )
    # NOTE(review): Model.create() already persists the row, so save()
    # issues a redundant second write -- harmless but unnecessary.
    currency.save()
    click.echo('Created Currency instance #{0}'.format(currency.id))
@cli.command()
def list_currencies():
    """Print every currency as '#id "name" code sign'."""
    init_db()
    currencies = Currency.select()
    for currency in currencies:
        click.echo('#{0} "{1}" {2} {3}'.format(currency.id, currency.name, currency.code, currency.sign))
@cli.command()
@click.argument('name')
@click.argument('currency_code')
def create_account(name, currency_code):
    """Create an account in the currency identified by CURRENCY_CODE.

    Raises Currency.DoesNotExist if the code is unknown.
    """
    init_db()
    currency = Currency.get(Currency.code == currency_code)
    account = Account.create(
        name=name,
        currency=currency
    )
    # NOTE(review): Model.create() already persists; save() is redundant.
    account.save()
    click.echo('Created Account instance #{0}'.format(account.id))
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('name')
def rename_account(account_id, name):
    """Change the name of an existing account."""
    init_db()
    account = Account.get(Account.id == account_id)
    account.name = name
    account.save()
@cli.command()
def list_accounts():
    """Print every account as '#id "name" currency_code'."""
    init_db()
    accounts = Account.select()
    for account in accounts:
        click.echo('#{0} "{1}" {2}'.format(account.id, account.name, account.currency.code))
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@click.argument('balance')
def create_account_balance_entry(account_id, year, month, day, balance):
    """Record an account's balance on a date (defaults to today).

    NOTE(review): the date defaults are evaluated once at import time,
    so a long-running process would keep its start date.
    """
    init_db()
    account = Account.get(Account.id == account_id)
    balance_date = date(year, month, day)
    balance_decimal = Decimal(balance)
    balance_entry = AccountBalance(
        account=account,
        date=balance_date,
        balance=balance_decimal
    )
    balance_entry.save()
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
def list_account_balance_entries(year, month, day):
    """Print every balance entry recorded for the given date."""
    init_db()
    balance_date = date(year, month, day)
    account_balance_entries = AccountBalance.select().where(AccountBalance.date == balance_date)
    for entry in account_balance_entries:
        click.echo('#{0} "{1}" {2} {3} {4}'.format(entry.id, entry.account.name, entry.date, entry.balance, entry.account.currency.sign))
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
def update_account_balance_entries(year, month, day):
    """Derive each account's balance entry for the given date.

    For every account lacking an entry on the date, take the previous
    day's entry and apply the previous day's transactions to it.
    """
    init_db()
    date_cur = date(year, month, day)
    date_day_before = date_cur - timedelta(days=1)
    for account in Account.select():
        try:
            balance_cur = AccountBalance.select().where(AccountBalance.account == account).where(AccountBalance.date == date_cur).get()
            # Entry already exists for this date -- just report it.
            click.echo('#{0} "{1}" {2} {3} {4}'.format(balance_cur.id, balance_cur.account.name, balance_cur.date, balance_cur.balance, balance_cur.account.currency.sign))
        except AccountBalance.DoesNotExist:
            try:
                balance_day_before = AccountBalance.select().where(AccountBalance.account == account).where(AccountBalance.date == date_day_before).get()
                transactions = AccountTransaction.select().where(
                    AccountTransaction.account == account
                ).where(
                    AccountTransaction.timestamp.year == date_day_before.year
                ).where(
                    AccountTransaction.timestamp.month == date_day_before.month
                ).where(
                    AccountTransaction.timestamp.day == date_day_before.day
                )
                balance = balance_day_before.balance
                for transaction in transactions:
                    # DEBIT/TRANSFER_OUT subtract, CREDIT/TRANSFER_IN add;
                    # keep two decimal places after every step.
                    if transaction.type in (TransactionType.DEBIT, TransactionType.TRANSFER_OUT):
                        balance = (balance - transaction.amount).quantize(Decimal('.01'))
                    elif transaction.type in (TransactionType.CREDIT, TransactionType.TRANSFER_IN):
                        balance = (balance + transaction.amount).quantize(Decimal('.01'))
                balance_entry = AccountBalance(
                    account=account,
                    date=date_cur,
                    balance=balance
                )
                balance_entry.save()
            except AccountBalance.DoesNotExist:
                # No entry for the previous day either -- cannot derive.
                # NOTE(review): uses print, unlike click.echo elsewhere.
                print('Not OK')
@cli.command()
@click.argument('account_id', type=click.INT)
@click.argument('type', type=click.Choice([TransactionType.DEBIT.name, TransactionType.CREDIT.name, TransactionType.TRANSFER_OUT.name, TransactionType.TRANSFER_IN.name]))
@click.argument('amount')
@click.argument('comment')
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
@click.argument('hour', type=click.INT, default=datetime.now().hour)
@click.argument('minute', type=click.INT, default=datetime.now().minute)
@click.argument('second', type=click.INT, default=datetime.now().second)
def create_account_transaction(account_id, type, amount, comment, year, month, day, hour, minute, second):
    """Record a transaction on an account (timestamp defaults to now,
    captured once at import time)."""
    init_db()
    account = Account.get(Account.id == account_id)
    timestamp = datetime(year, month, day, hour, minute, second)
    amount_decimal = Decimal(amount)
    # Map the choice name (e.g. "DEBIT") back to the enum member.
    object_type = TransactionType[type]
    transaction = AccountTransaction(
        account=account,
        timestamp=timestamp,
        type=object_type.value,
        amount=amount_decimal,
        comment=comment
    )
    transaction.save()
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
def list_account_transactions(year, month, day):
    """Print every transaction recorded on the given date."""
    init_db()
    balance_date = date(year, month, day)
    account_transactions = AccountTransaction.select().where(
        AccountTransaction.timestamp.year == balance_date.year
    ).where(
        AccountTransaction.timestamp.month == balance_date.month
    ).where(
        AccountTransaction.timestamp.day == balance_date.day
    )
    for entry in account_transactions:
        click.echo('#{0} "{1}" {2} {3} {4} {5} "{6}"'.format(entry.id, entry.account.name, entry.timestamp, TransactionType(entry.type).name, entry.amount, entry.account.currency.sign, entry.comment))
@cli.command()
# BUG FIX: the type was passed positionally (click.argument('x', click.INT)),
# but click treats extra positional arguments as parameter declarations;
# it must be the `type=` keyword, as every other command here does.
@click.argument('transaction_id', type=click.INT)
def remove_account_transaction(transaction_id):
    """Delete the AccountTransaction with the given id."""
    init_db()
    transaction = AccountTransaction.get(AccountTransaction.id == transaction_id)
    transaction.delete_instance()
@cli.command()
# BUG FIX: type must be the `type=` keyword; passing click.INT positionally
# makes click treat it as an extra parameter declaration.
@click.argument('account_id', type=click.INT)
def remove_account(account_id):
    """Delete the Account with the given id."""
    init_db()
    account = Account.get(Account.id == account_id)
    account.delete_instance()
@cli.command()
# BUG FIX: type must be the `type=` keyword; passing click.INT positionally
# makes click treat it as an extra parameter declaration.
@click.argument('entry_id', type=click.INT)
def remove_account_balance_entry(entry_id):
    """Delete the AccountBalance entry with the given id."""
    init_db()
    transaction = AccountBalance.get(AccountBalance.id == entry_id)
    transaction.delete_instance()
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
def show_monthly_report(year, month):
    """Print total credit and debit per currency for the given month.

    Transfers are excluded from both totals.
    """
    init_db()
    account_transactions = AccountTransaction.select().where(
        AccountTransaction.timestamp.year == year
    ).where(
        AccountTransaction.timestamp.month == month
    )
    # Per-currency running totals, keyed by Currency instance.
    monthly_credit = {}
    monthly_debit = {}
    for entry in account_transactions:
        if entry.type == TransactionType.DEBIT:
            if entry.account.currency not in monthly_debit:
                monthly_debit[entry.account.currency] = Decimal('0.00')
            monthly_debit[entry.account.currency] += entry.amount
        elif entry.type == TransactionType.CREDIT:
            if entry.account.currency not in monthly_credit:
                monthly_credit[entry.account.currency] = Decimal('0.00')
            monthly_credit[entry.account.currency] += entry.amount
    for currency, amount in monthly_credit.items():
        click.echo('CREDIT: {0} {1}'.format(amount, currency.sign))
    for currency, amount in monthly_debit.items():
        click.echo('DEBIT: {0} {1}'.format(amount, currency.sign))
@cli.command()
@click.argument('year', type=click.INT, default=date.today().year)
@click.argument('month', type=click.INT, default=date.today().month)
@click.argument('day', type=click.INT, default=date.today().day)
def show_balance_report(year, month, day):
    """Print the summed balance per currency for the given date."""
    init_db()
    balance_date = date(year, month, day)
    account_balance_entries = AccountBalance.select().where(AccountBalance.date == balance_date)
    # Per-currency totals over all accounts, keyed by Currency instance.
    balance_map = {}
    for entry in account_balance_entries:
        if entry.account.currency not in balance_map:
            balance_map[entry.account.currency] = Decimal('0.00')
        balance_map[entry.account.currency] += entry.balance
    for currency, balance in balance_map.items():
        click.echo('{0} - {1} {2}'.format(currency.code, balance, currency.sign))
if __name__ == '__main__':
cli()
| 6,773 | 778 | 490 |
46ade8070326db43ba877e4374ad5f10191c390f | 2,429 | py | Python | 2021/2021-12-15.py | wac-/advent-of-code | 19022fe5660b30125cd99054565b70edbf3a06a0 | [
"BSD-3-Clause"
] | null | null | null | 2021/2021-12-15.py | wac-/advent-of-code | 19022fe5660b30125cd99054565b70edbf3a06a0 | [
"BSD-3-Clause"
] | null | null | null | 2021/2021-12-15.py | wac-/advent-of-code | 19022fe5660b30125cd99054565b70edbf3a06a0 | [
"BSD-3-Clause"
] | null | null | null | from queue import PriorityQueue
# Advent of Code 2021, day 15: find the lowest-total-risk path from the
# top-left to the bottom-right corner of a risk grid. Dijkstra-style
# search over a priority queue keyed by accumulated cost.
Coordinate = tuple[int, int]
map: list[list[int]] = []
PART_TWO = True
# For each position, we store the lowest cost path to get there.
lowest_cost: list[list[None | tuple[int, list[Coordinate]]]] = []
with open('2021-12-15.txt') as f:
    for line in (l.strip() for l in f):
        map_values = [int(x) for x in line]
        if PART_TWO:
            # Tile the row 4 more times to the right, +i risk per tile.
            for i in range(1,5):
                map_values += [(int(x)+i) for x in line]
        map.append(map_values)
        lowest_cost.append([None] * len(map_values))
if PART_TWO:
    # Expand map 4 times below.
    orig_map_len = len(map)
    for i in range(1,5):
        for y in range(orig_map_len):
            map.append([(x+i) for x in map[y]])
            lowest_cost.append([None] * len(map[0]))
    # Deal with overflows: At most 9+4, so just subtract 9 as needed.
    # (horizontal + vertical shift <= 8, so one subtraction suffices)
    for y in range(len(map)):
        for x in range(len(map[y])):
            if map[y][x] > 9:
                map[y][x] -= 9
# Priority queue always draws the current lowest cost path
work_queue: PriorityQueue[tuple[int,Coordinate, list[Coordinate]]] = PriorityQueue()
work_queue.put_nowait((0,(0,0),[(0,0)]))
NEIGHBORS = ((-1, 0), (1, 0), (0, 1), (0, -1))
max_y, max_x = len(map)-1, len(map[0])-1
while not work_queue.empty():
    cost, (x, y), path = work_queue.get_nowait()
    if lowest_cost[max_y][max_x] is not None:
        if lowest_cost[max_y][max_x][0] < cost:
            # Drain task if there is already a cheaper way to reach the end.
            work_queue.task_done()
            break
    # Stale queue entry: a cheaper route to (x, y) was already recorded.
    if lowest_cost[y][x] is not None and lowest_cost[y][x][0] < cost:
        work_queue.task_done()
        continue
    lowest_cost[y][x] = (cost, path)
    for dx, dy in NEIGHBORS:
        nx, ny = x+dx, y+dy
        # Skip out of bounds
        if min(nx, ny) < 0 or ny > max_y or nx > max_x:
            continue
        new_cost = cost + map[ny][nx]
        new_path = path + [(nx, ny)]
        # Skip unless we're getting there cheaper.
        if lowest_cost[ny][nx] is not None:
            if lowest_cost[ny][nx][0] <= new_cost:
                continue
        # NOT THREAD SAFE: Per cell threading.Lock on lowest_cost cells would fix.
        lowest_cost[ny][nx] = (new_cost, new_path)
        work_queue.put_nowait((new_cost, (nx, ny), new_path))
    work_queue.task_done()
print(lowest_cost[max_y][max_x])
print(lowest_cost[max_y][max_x][0])
| 32.386667 | 84 | 0.592425 | from queue import PriorityQueue
Coordinate = tuple[int, int]
map: list[list[int]] = []
PART_TWO = True
# For each position, we store the lowest cost path to get there.
lowest_cost: list[list[None | tuple[int, list[Coordinate]]]] = []
with open('2021-12-15.txt') as f:
for line in (l.strip() for l in f):
map_values = [int(x) for x in line]
if PART_TWO:
for i in range(1,5):
map_values += [(int(x)+i) for x in line]
map.append(map_values)
lowest_cost.append([None] * len(map_values))
if PART_TWO:
# Expand map 4 times below.
orig_map_len = len(map)
for i in range(1,5):
for y in range(orig_map_len):
map.append([(x+i) for x in map[y]])
lowest_cost.append([None] * len(map[0]))
# Deal with overflows: At most 9+4, so just subtract 9 as needed.
for y in range(len(map)):
for x in range(len(map[y])):
if map[y][x] > 9:
map[y][x] -= 9
# Priority queue always draws the current lowest cost path
work_queue: PriorityQueue[tuple[int,Coordinate, list[Coordinate]]] = PriorityQueue()
work_queue.put_nowait((0,(0,0),[(0,0)]))
NEIGHBORS = ((-1, 0), (1, 0), (0, 1), (0, -1))
max_y, max_x = len(map)-1, len(map[0])-1
while not work_queue.empty():
cost, (x, y), path = work_queue.get_nowait()
if lowest_cost[max_y][max_x] is not None:
if lowest_cost[max_y][max_x][0] < cost:
# Drain task if there is already a cheaper way to reach the end.
work_queue.task_done()
break
if lowest_cost[y][x] is not None and lowest_cost[y][x][0] < cost:
work_queue.task_done()
continue
lowest_cost[y][x] = (cost, path)
for dx, dy in NEIGHBORS:
nx, ny = x+dx, y+dy
# Skip out of bounds
if min(nx, ny) < 0 or ny > max_y or nx > max_x:
continue
new_cost = cost + map[ny][nx]
new_path = path + [(nx, ny)]
# Skip unless we're getting there cheaper.
if lowest_cost[ny][nx] is not None:
if lowest_cost[ny][nx][0] <= new_cost:
continue
# NOT THREAD SAFE: Per cell threading.Lock on lowest_cost cells would fix.
lowest_cost[ny][nx] = (new_cost, new_path)
work_queue.put_nowait((new_cost, (nx, ny), new_path))
work_queue.task_done()
print(lowest_cost[max_y][max_x])
print(lowest_cost[max_y][max_x][0])
| 0 | 0 | 0 |
46b39cd3db3a390bf009f581a466407f58a570f1 | 6,446 | py | Python | clusterlogs/training_corpus.py | micolocco/ClusterLog | d5cdf48288e943d77271f675958a535eb7dfe38d | [
"MIT"
] | 1 | 2021-02-16T17:23:26.000Z | 2021-02-16T17:23:26.000Z | clusterlogs/training_corpus.py | micolocco/ClusterLog | d5cdf48288e943d77271f675958a535eb7dfe38d | [
"MIT"
] | null | null | null | clusterlogs/training_corpus.py | micolocco/ClusterLog | d5cdf48288e943d77271f675958a535eb7dfe38d | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys
import getopt
from gensim.models import Word2Vec
import pprint
from time import time
from pyonmttok import Tokenizer
#from smart_open import open
import json
from pyspark.sql import DataFrame
from pyspark.sql.types import StructType, StructField, StringType,ArrayType
from pyspark.sql.functions import col,udf,struct,collect_list
from pyspark import SparkContext, StorageLevel
from pyspark.sql import SparkSession
import csv
import os
import re
import logging
import site
from pyspark.sql.functions import col, lit, regexp_replace, trim, lower, concat, count
import numpy as np
import pandas as pd
import nltk
import uuid
def spark_session(appName="log-parser"):
    """Create (or reuse) a SparkSession for the given application name.

    Bug fix: the previous version hard-coded ``appName="log-parser"`` in the
    SparkContext call, silently ignoring the caller-supplied ``appName``.

    :param appName: name of the Spark application.
    :return: a SparkSession built on the new SparkContext's configuration.
    """
    sc = SparkContext(appName=appName)
    return SparkSession.builder.config(conf=sc._conf).getOrCreate()
@udf(returnType=StringType())
class MyCorpus(object):
"""An interator that yields sentences (lists of str)."""
if __name__ == "__main__":
main(sys.argv[1:]) # get everything after the script name
| 36.625 | 175 | 0.607198 | #!/usr/bin/python
import sys
import getopt
from gensim.models import Word2Vec
import pprint
from time import time
from pyonmttok import Tokenizer
#from smart_open import open
import json
from pyspark.sql import DataFrame
from pyspark.sql.types import StructType, StructField, StringType,ArrayType
from pyspark.sql.functions import col,udf,struct,collect_list
from pyspark import SparkContext, StorageLevel
from pyspark.sql import SparkSession
import csv
import os
import re
import logging
import site
from pyspark.sql.functions import col, lit, regexp_replace, trim, lower, concat, count
import numpy as np
import pandas as pd
import nltk
import uuid
def spark_context(appname='cms', yarn=None, verbose=False, python_files=None):
    """Create a SparkContext, the main object used to communicate with Spark.

    Parameters
    ----------
    appname : str
        Name of the Spark application.
    yarn : optional
        Unused; kept for backward compatibility with existing callers.
    verbose : bool
        Unused; kept for backward compatibility with existing callers.
    python_files : list of str, optional
        Extra .py/.zip files to ship to executors (``pyFiles``). The default
        is ``None`` instead of a mutable ``[]`` to avoid the shared mutable
        default pitfall; only truthiness is used, so callers are unaffected.

    Returns
    -------
    SparkContext
    """
    if python_files:
        return SparkContext(appName=appname, pyFiles=python_files)
    return SparkContext(appName=appname)
def spark_session(appName="log-parser"):
    """Create (or reuse) a SparkSession for the given application name.

    Bug fix: the previous version hard-coded ``appName="log-parser"`` in the
    SparkContext call, silently ignoring the caller-supplied ``appName``.

    :param appName: name of the Spark application.
    :return: a SparkSession built on the new SparkContext's configuration.
    """
    sc = SparkContext(appName=appName)
    return SparkSession.builder.config(conf=sc._conf).getOrCreate()
@udf(returnType=StringType())
def clean_message(message):
    """Normalize an error message for corpus building.

    Strips URL-like tokens, any token containing digits, remaining digits and
    punctuation, collapses repeated spaces, then lowercases. Registered as a
    Spark UDF returning a string, so it can be applied to column expressions.
    """
    import re
    message = re.sub(r'\S+\.\S+', ' ', message) # any URL
    message = re.sub(r'([a-zA-Z_.|:;-]*\d+[a-zA-Z_.|:;-]*)+', ' ', message) # remove all substrings with digits
    message = re.sub(r'(\d+)', ' ', message) # remove all other digits
    message = re.sub(r'[^\w\s]', ' ', message) # removes all punctuation
    message = re.sub(r' +', r' ', message) # collapse runs of spaces
    message=message.lower()
    return message
def tokenize_message(message, tokenizer_type, spacer_annotate, preserve_placeholders,spacer_new):
    """Tokenize *message* with pyonmttok and return only the token list.

    ``Tokenizer.tokenize`` returns ``(tokens, features)``; the features
    element is discarded because callers only consume the tokens.
    """
    tok = Tokenizer(
        tokenizer_type,
        spacer_annotate=spacer_annotate,
        preserve_placeholders=preserve_placeholders,
        spacer_new=spacer_new,
    )
    return tok.tokenize(message)[0]
class uniqueMex(object):
    """Collect the distinct, cleaned FTS error messages for a set of days."""
    def __init__(self,spark,month,days):
        # spark: active SparkSession; month: path fragment like '2020/01/';
        # days: zero-padded day strings ('01', ...) -- empty means whole month.
        self.spark=spark
        self.hdir='hdfs:///project/monitoring/archive/fts/raw/complete'
        self.month=month
        self.days=days
    def fts_messages(self,verbose=False):
        """
        Parse fts HDFS records.

        Returns a tuple ``(df, bf_n, af_n)``: the deduplicated DataFrame of
        cleaned messages plus the message counts before and after cleaning.
        """
        #clean_mex_udf=udf(lambda row: clean_message(x) for x in row, StringType()) #user defined function to clean spark dataframe
        clean_mex_udf=udf(lambda x: clean_message(x), StringType())
        self.spark.udf.register('clean_mex_udf',clean_mex_udf)
        # Build one HDFS path for the whole month, or one per requested day.
        if len(self.days)==0:
            hpath=self.hdir+'/'+self.month
        else:
            hpath = [('%s/%s' % (self.hdir,self.month+iDate)) for iDate in self.days]
        # create new spark DataFrame; only data.t__error_message is read.
        schema = StructType([StructField('data', StructType([StructField('t__error_message', StringType(), nullable=True)]))])
        df=self.spark.read.json(hpath, schema)
        df=df.select(col('data.t__error_message').alias('error_message')).where('error_message <> ""')
        df.cache()
        bf_n=df.count()
        print('before cleaning %i messages'% bf_n)
        print('...cleaning messages')
        #df=df.withColumn('error_message', clean_mex_udf(struct(df['error_message']))).dropDuplicates()
        df=df.withColumn('error_message', clean_message(col('error_message'))).dropDuplicates()
        af_n=df.count()
        print('after cleaning %i different messages'% af_n)
        #df.show()
        return df,bf_n,af_n
class MyCorpus(object):
    """An iterator that yields sentences (lists of str).

    Wraps a Spark DataFrame with an ``error_message`` column. The messages
    are collected to the driver once at construction time and re-tokenized
    on every pass, since gensim iterates a corpus multiple times.
    """
    def __init__(self, inputDf):
        self.inputDf = inputDf
        # Pull every message into a single driver-side Python list.
        rows = self.inputDf.select(collect_list("error_message")).collect()
        self.list_err = rows[0][0]
    def __iter__(self):
        for message in self.list_err:
            yield tokenize_message(message, 'space', False, True, False)
def main(argv):
    """Train a Word2Vec model on cleaned FTS error messages.

    Command line: ``training_corpus.py <outputfile> <nDays>`` where
    ``outputfile`` is the path the gensim model is saved to and ``nDays``
    is how many days of January 2020 logs to train over. (Dead getopt
    parsing code that was commented out has been removed.)
    """
    spark = spark_session()
    outputfile = sys.argv[1]  # path where the trained model is saved
    nDays = int(sys.argv[2])  # number of days to train over
    # Zero-padded day-of-month strings matching the HDFS directory layout.
    days_vec = ['%02d' % d for d in range(1, 32)]
    days = [days_vec[i] for i in np.arange(0, nDays)]
    month = '2020/01/'
    # bf_n / af_n: message counts before and after cleaning/deduplication.
    fts, bf_n, af_n = uniqueMex(spark, month, days).fts_messages()
    tokenized = MyCorpus(fts)
    print('...starting training')
    try:
        start_time = time()
        model = Word2Vec(sentences=tokenized, compute_loss=True, size=300,
                         window=7, min_count=1, workers=4, iter=30)
        tot_time = time() - start_time
        print("--- %f seconds ---" % tot_time)
        loss = model.get_latest_training_loss()
        print('latest training loss:', loss)
        # Append this run's bookkeeping so successive runs can be compared.
        with open('training_parameters.csv', mode='a', newline='') as tFile:
            file_writer = csv.writer(tFile)
            file_writer.writerow([nDays, bf_n, af_n, loss, tot_time])
        model.save(outputfile)
        print('Training has finished. Model saved in file. Thanks for coming :)')
    except Exception as e:
        print('Training model error:', e)
# Script entry point: argv[1] = model output path, argv[2] = number of days.
if __name__ == "__main__":
    main(sys.argv[1:]) # get everything after the script name
| 3,737 | 1,369 | 182 |
16d7bc475bce333c6dadc5fa01d18582b63193f4 | 2,185 | py | Python | Z - Tool Box/x2john/vmx2john.py | dfirpaul/Active-Directory-Exploitation-Cheat-Sheet-1 | 1dcf54522e9d20711ff1114550dc2893ed3e9ed0 | [
"MIT"
] | 1,290 | 2020-05-28T21:24:43.000Z | 2022-03-31T16:38:43.000Z | Z - Tool Box/x2john/vmx2john.py | asim06/Active-Directory-Exploitation-Cheat-Sheet | 708f57c83aa99d80df22f4e50f21479a709fb359 | [
"MIT"
] | 1 | 2020-07-03T21:14:52.000Z | 2020-07-03T21:14:52.000Z | Z - Tool Box/x2john/vmx2john.py | asim06/Active-Directory-Exploitation-Cheat-Sheet | 708f57c83aa99d80df22f4e50f21479a709fb359 | [
"MIT"
] | 280 | 2020-05-29T17:28:38.000Z | 2022-03-31T13:54:15.000Z | #!/usr/bin/env python
# This software is Copyright (c) 2019 - Dhiru Kholia, Copyright (c) 2018 -
# axcheron, and it is hereby released under the MIT License.
#
# Key parts of this program are borrowed from the pyvmx-cracker project.
#
# See https://github.com/axcheron/pyvmx-cracker for details.
import os
import re
import sys
import base64
import argparse
from binascii import hexlify
PY3 = sys.version_info[0] == 3
if PY3:
from urllib.parse import unquote
else:
from urllib import unquote
if __name__ == "__main__":
    # Require at least one .vmx path; print usage to stderr otherwise.
    if len(sys.argv) < 2:
        sys.stderr.write("Usage: %s [.vmx files]\n" % sys.argv[0])
        sys.exit(-1)
    # Emit one crackable hash line per .vmx file given on the command line.
    for i in range(1, len(sys.argv)):
        process_file(sys.argv[i])
| 29.133333 | 96 | 0.62746 | #!/usr/bin/env python
# This software is Copyright (c) 2019 - Dhiru Kholia, Copyright (c) 2018 -
# axcheron, and it is hereby released under the MIT License.
#
# Key parts of this program are borrowed from the pyvmx-cracker project.
#
# See https://github.com/axcheron/pyvmx-cracker for details.
import os
import re
import sys
import base64
import argparse
from binascii import hexlify
PY3 = sys.version_info[0] == 3
if PY3:
from urllib.parse import unquote
else:
from urllib import unquote
def process_file(target):
    """Extract a john-crackable hash line from a VMware .vmx file.

    Reads *target*, locates the ``encryption.keySafe`` and ``displayName``
    lines, validates the KDF/cipher/MAC names, and writes
    ``<file>-<name>:$vmx$1$0$0$<rounds>$<salt-hex>$<blob-hex>`` to stdout.
    Unsupported or missing data is reported on stderr and the file skipped.
    """
    ks_re = r'.+phrase/(.*?)/pass2key=(.*?):cipher=(.*?):rounds=(.*?):salt=(.*?),(.*?),(.*?)\)'
    name = "Unknown"
    keysafe = None
    with open(target, "r") as f:
        for line in f:
            if 'encryption.keySafe' in line:
                keysafe = line
            if "displayName" in line:
                name = line.split(" = ")[1].rstrip().strip('"')
    # Bug fix: a file without a keySafe line used to crash in unquote(None).
    if keysafe is None:
        sys.stderr.write("No encryption.keySafe line found in %s\n" % target)
        return
    keysafe = unquote(keysafe)
    match = re.match(ks_re, keysafe)
    if not match:
        sys.stderr.write("Unsupported format of the encryption.keySafe line:\n")
        return
    iden = hexlify(base64.b64decode(match.group(1))).decode()
    password_hash = match.group(2)
    if password_hash != "PBKDF2-HMAC-SHA-1":
        sys.stderr.write("Unsupported password hashing algorithm (%s) found!\n" % password_hash)
        return
    password_cipher = match.group(3)
    if password_cipher != "AES-256":
        sys.stderr.write("Unsupported cipher (%s) found!\n" % password_cipher)
        return
    iterations = int(match.group(4))
    # Bug fix: hexlify() returns bytes; "%s"-formatting them embedded a
    # b'...' literal in the output hash under Python 3. Decode to str.
    salt = hexlify(base64.b64decode(unquote(match.group(5)))).decode()
    config_hash = match.group(6)
    if config_hash != "HMAC-SHA-1":
        sys.stderr.write("Unsupported hashing algorithm (%s) found!\n" % config_hash)
        return
    cipherdata = hexlify(base64.b64decode(match.group(7))).decode()
    sys.stdout.write("%s-%s:$vmx$1$0$0$%d$%s$%s\n" % (os.path.basename(target),
                                                      name, iterations, salt, cipherdata))
if __name__ == "__main__":
    # Require at least one .vmx path; print usage to stderr otherwise.
    if len(sys.argv) < 2:
        sys.stderr.write("Usage: %s [.vmx files]\n" % sys.argv[0])
        sys.exit(-1)
    # Emit one crackable hash line per .vmx file given on the command line.
    for i in range(1, len(sys.argv)):
        process_file(sys.argv[i])
| 1,442 | 0 | 23 |
49bd426fc80af0986b4a0f58db16c4dc8d4bd457 | 2,981 | py | Python | pytorch_image_classification/optim/__init__.py | doulemint/pytorch_image_classification | 3553295218b30775272027b8234bb8a2276af30f | [
"MIT"
] | 1 | 2021-08-25T03:07:48.000Z | 2021-08-25T03:07:48.000Z | pytorch_image_classification/optim/__init__.py | doulemint/pytorch_image_classification | 3553295218b30775272027b8234bb8a2276af30f | [
"MIT"
] | null | null | null | pytorch_image_classification/optim/__init__.py | doulemint/pytorch_image_classification | 3553295218b30775272027b8234bb8a2276af30f | [
"MIT"
] | null | null | null | import torch
from .adabound import AdaBound, AdaBoundW
from .lars import LARSOptimizer
| 41.402778 | 77 | 0.497149 | import torch
from .adabound import AdaBound, AdaBoundW
from .lars import LARSOptimizer
def get_param_list(config, model):
    """Build per-parameter optimizer groups for *model*.

    When ``config.train.no_weight_decay_on_bn`` is set, weight decay is
    applied only to parameters whose name contains ``'conv.weight'``;
    every other parameter gets a decay of 0. Otherwise a single group
    holding all parameters with the configured decay is returned.
    """
    decay = config.train.weight_decay
    if not config.train.no_weight_decay_on_bn:
        return [{
            'params': list(model.parameters()),
            'weight_decay': decay,
        }]
    return [
        {
            'params': tensor,
            'weight_decay': decay if 'conv.weight' in name else 0,
        }
        for name, tensor in model.named_parameters()
    ]
def create_optimizer(config, model, params=None):
    """Build the optimizer selected by ``config.train.optimizer``.

    :param config: experiment config; ``config.train`` holds shared settings
        (base_lr, momentum, ...) and ``config.optim.<name>`` the
        algorithm-specific ones.
    :param model: model whose parameters are to be optimized.
    :param params: optional pre-built parameter-group list; when ``None`` it
        is computed with :func:`get_param_list`.
    :return: a ``torch.optim.Optimizer`` instance.
    :raises ValueError: if ``config.train.optimizer`` is not recognized.
    """
    if params is None:
        params = get_param_list(config, model)
    if config.train.optimizer == 'sgd':
        optimizer = torch.optim.SGD(params,
                                    lr=config.train.base_lr,
                                    momentum=config.train.momentum,
                                    nesterov=config.train.nesterov)
    elif config.train.optimizer == 'adam':
        # Bug fix: `betas` was read from config.optim.adam.weight_decay,
        # which is not a (beta1, beta2) pair; use config.optim.adam.betas.
        optimizer = torch.optim.Adam(params,
                                     lr=config.train.base_lr,
                                     betas=config.optim.adam.betas)
    elif config.train.optimizer == 'admw':
        # NOTE(review): unlike the other branches this ignores `params` and
        # passes model.parameters() directly; preserved as-is -- confirm intent.
        optimizer = torch.optim.AdamW(model.parameters(),
                                      lr=config.train.base_lr,
                                      weight_decay=config.train.weight_decay)
    elif config.train.optimizer == 'amsgrad':
        optimizer = torch.optim.Adam(params,
                                     lr=config.train.base_lr,
                                     betas=config.optim.adam.betas,
                                     amsgrad=True)
    elif config.train.optimizer == 'adabound':
        optimizer = AdaBound(params,
                             lr=config.train.base_lr,
                             betas=config.optim.adabound.betas,
                             final_lr=config.optim.adabound.final_lr,
                             gamma=config.optim.adabound.gamma)
    elif config.train.optimizer == 'adaboundw':
        optimizer = AdaBoundW(params,
                              lr=config.train.base_lr,
                              betas=config.optim.adabound.betas,
                              final_lr=config.optim.adabound.final_lr,
                              gamma=config.optim.adabound.gamma)
    elif config.train.optimizer == 'lars':
        optimizer = LARSOptimizer(params,
                                  lr=config.train.base_lr,
                                  momentum=config.train.momentum,
                                  eps=config.optim.lars.eps,
                                  thresh=config.optim.lars.threshold)
    else:
        raise ValueError(
            'Unknown optimizer: {!r}'.format(config.train.optimizer))
    return optimizer
| 2,845 | 0 | 46 |
48ffb26072679d98b02cf4a876329ab8e109cd27 | 10,551 | py | Python | examples/ewallet_example.py | jlodonia/xendit-python | d1148f6d736fee17e22a1432d337a94538505595 | [
"MIT"
] | 10 | 2020-10-31T23:34:34.000Z | 2022-03-08T19:08:55.000Z | examples/ewallet_example.py | jlodonia/xendit-python | d1148f6d736fee17e22a1432d337a94538505595 | [
"MIT"
] | 22 | 2020-07-30T14:25:07.000Z | 2022-03-31T03:55:46.000Z | examples/ewallet_example.py | jlodonia/xendit-python | d1148f6d736fee17e22a1432d337a94538505595 | [
"MIT"
] | 11 | 2020-07-28T08:09:40.000Z | 2022-03-18T00:14:02.000Z | from print_running_function import print_running_function
import time
# Hackish method to import from another directory
# Useful while xendit-python isn't released yet to the public
import importlib.machinery
loader = importlib.machinery.SourceFileLoader("xendit", "../xendit/__init__.py")
xendit = loader.load_module("xendit")
| 33.709265 | 88 | 0.598711 | from print_running_function import print_running_function
import time
# Hackish method to import from another directory
# Useful while xendit-python isn't released yet to the public
import importlib.machinery
loader = importlib.machinery.SourceFileLoader("xendit", "../xendit/__init__.py")
xendit = loader.load_module("xendit")
class CreateOVOPayment:
    """Demo of creating an OVO e-wallet payment and printing the result."""
    @staticmethod
    def run(xendit_instance, external_id, amount, phone, **kwargs):
        """Call the OVO payment API and print either the result or the error."""
        try:
            result = xendit_instance.EWallet.create_ovo_payment(
                external_id=external_id,
                amount=amount,
                phone=phone,
            )
        except xendit.XenditError as err:
            print("Error status code:", err.status_code)
            print("Error message:", err)
        else:
            print(result)
    @staticmethod
    def example(xendit_instance):
        """Run the demo with a generated external id and sample amount/phone."""
        args = dict(
            external_id=f"ovo-ewallet-testing-id-{int(time.time())}",
            amount="80001",
            phone="08123123123",
        )
        print_running_function("xendit.EWallet.create_ovo_payment", args)
        CreateOVOPayment.run(xendit_instance, **args)
class CreateDANAPayment:
    """Demo of creating a DANA e-wallet payment and printing the result."""
    @staticmethod
    def run(xendit_instance, external_id, amount, callback_url, redirect_url, **kwargs):
        """Call the DANA payment API and print either the result or the error."""
        try:
            ewallet = xendit_instance.EWallet.create_dana_payment(
                external_id=external_id,
                amount=amount,
                callback_url=callback_url,
                redirect_url=redirect_url,
                **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo with a generated external id and sample URLs."""
        args = {
            "external_id": f"dana-ewallet-test-{int(time.time())}",
            "amount": "1001",
            "callback_url": "https://my-shop.com/callbacks",
            "redirect_url": "https://my-shop.com/home",
        }
        print_running_function("xendit.EWallet.create_dana_payment", args)
        CreateDANAPayment.run(xendit_instance, **args)
class CreateLinkAjaPayment:
    """Demo of creating a LinkAja e-wallet payment and printing the result."""
    @staticmethod
    def run(
        xendit_instance,
        external_id,
        phone,
        amount,
        items,
        callback_url,
        redirect_url,
        **kwargs,
    ):
        """Call the LinkAja payment API and print either the result or the error."""
        try:
            ewallet = xendit_instance.EWallet.create_linkaja_payment(
                external_id=external_id,
                phone=phone,
                amount=amount,
                items=items,
                callback_url=callback_url,
                redirect_url=redirect_url,
                **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo with one sample basket item.

        Bug fix: ``items`` was previously passed as ``{items}`` -- a set
        literal wrapping the list, which raises ``TypeError: unhashable
        type: 'list'`` at runtime. The API expects the plain list.
        """
        items = []
        item = xendit.EWallet.helper_create_linkaja_item(
            id="123123", name="Phone Case", price=100000, quantity=1
        )
        items.append(item)
        args = {
            "external_id": f"linkaja-ewallet-test-{int(time.time())}",
            "phone": "089911111111",
            "items": items,
            "amount": 300000,
            "callback_url": "https://my-shop.com/callbacks",
            "redirect_url": "https://xendit.co/",
        }
        print_running_function("xendit.EWallet.create_linkaja_payment", args)
        CreateLinkAjaPayment.run(xendit_instance, **args)
class GetOVOPaymentStatus:
    """Demo of querying the status of an OVO e-wallet payment."""
    @staticmethod
    def run(xendit_instance, external_id, ewallet_type, **kwargs):
        """Fetch and print the payment status, or print the API error."""
        try:
            ewallet = xendit_instance.EWallet.get_payment_status(
                external_id=external_id, ewallet_type=ewallet_type, **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo against a fixed OVO external id."""
        args = {
            "ewallet_type": xendit.EWalletType.OVO,
            "external_id": "ovo-ewallet-testing-id-1234",
        }
        print_running_function("xendit.EWallet.get_payment_status", args)
        GetOVOPaymentStatus.run(xendit_instance, **args)
class GetDANAPaymentStatus:
    """Demo of querying the status of a DANA e-wallet payment."""
    @staticmethod
    def run(xendit_instance, external_id, ewallet_type, **kwargs):
        """Fetch and print the payment status, or print the API error."""
        try:
            ewallet = xendit_instance.EWallet.get_payment_status(
                external_id=external_id, ewallet_type=ewallet_type, **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo against a fixed DANA external id."""
        args = {
            "ewallet_type": xendit.EWalletType.DANA,
            "external_id": "dana-ewallet-test-1234",
        }
        print_running_function("xendit.EWallet.get_payment_status", args)
        GetDANAPaymentStatus.run(xendit_instance, **args)
class GetLinkAjaPaymentStatus:
    """Demo of querying the status of a LinkAja e-wallet payment."""
    @staticmethod
    def run(xendit_instance, external_id, ewallet_type, **kwargs):
        """Fetch and print the payment status, or print the API error."""
        try:
            ewallet = xendit_instance.EWallet.get_payment_status(
                external_id=external_id, ewallet_type=ewallet_type, **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo against a fixed LinkAja external id."""
        args = {
            "ewallet_type": xendit.EWalletType.LINKAJA,
            "external_id": "linkaja-ewallet-test-123",
        }
        print_running_function("xendit.EWallet.get_payment_status", args)
        GetLinkAjaPaymentStatus.run(xendit_instance, **args)
class CreateEWalletCharge:
    """Demo of the newer unified e-wallet charge API (create + print)."""
    @staticmethod
    def run(
        xendit_instance,
        reference_id,
        currency,
        amount,
        checkout_method,
        channel_code=None,
        channel_properties=None,
        customer_id=None,
        basket=None,
        metadata=None,
        **kwargs,
    ):
        """Create an e-wallet charge and print either the result or the error."""
        try:
            ewallet_charge = xendit_instance.EWallet.create_ewallet_charge(
                reference_id=reference_id,
                currency=currency,
                amount=amount,
                checkout_method=checkout_method,
                channel_code=channel_code,
                channel_properties=channel_properties,
                customer_id=customer_id,
                basket=basket,
                metadata=metadata,
                **kwargs,
            )
            print(ewallet_charge)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo with one sample basket item and ShopeePay channel."""
        basket = []
        basket_item = xendit.EWallet.helper_create_basket_item(
            reference_id="basket-product-ref-id",
            name="product_name",
            category="mechanics",
            currency="IDR",
            price=50000,
            quantity=5,
            type="wht",
            sub_category="evr",
            metadata={
                "meta": "data"
            }
        )
        basket.append(basket_item)
        args = {
            "reference_id": "test-reference-id",
            "currency": "IDR",
            "amount": 1688,
            "checkout_method": "ONE_TIME_PAYMENT",
            "channel_code": "ID_SHOPEEPAY",
            "channel_properties": {
                "success_redirect_url": "https://yourwebsite.com/order/123",
            },
            "basket": basket,
            "metadata": {
                "meta2": "data2",
            },
        }
        print_running_function("xendit.EWallet.create_ewallet_charge", args)
        CreateEWalletCharge.run(xendit_instance, **args)
class GetEWalletChargeStatus:
    """Demo of querying the status of an e-wallet charge by charge id."""
    @staticmethod
    def run(xendit_instance, charge_id, **kwargs):
        """Fetch and print the charge status, or print the API error."""
        try:
            ewallet = xendit_instance.EWallet.get_ewallet_charge_status(
                charge_id=charge_id, **kwargs,
            )
            print(ewallet)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)
    @staticmethod
    def example(xendit_instance):
        """Run the demo against a fixed charge id."""
        args = {
            "charge_id": "ewc_f3925450-5c54-4777-98c1-fcf22b0d1e1c",
        }
        print_running_function("xendit.EWallet.get_ewallet_charge_status", args)
        GetEWalletChargeStatus.run(xendit_instance, **args)
def ask_ewallet_input():
    """Show the e-wallet demo menu and return the chosen action number.

    Re-prompts (printing the menu again) until the user types an integer,
    mirroring the original recursive retry behavior with a loop.
    """
    while True:
        print("Input the action that you want to use")
        print("0. Exit")
        print("1. Create OVO Payment")
        print("2. Create DANA Payment")
        print("3. Create LinkAja Payment")
        print("4. Get OVO Payment Status")
        print("5. Get DANA Payment Status")
        print("6. Get LinkAja Payment Status")
        print("7. Create E-Wallet Charge")
        print("8. Get E-Wallet Charge Status")
        try:
            return int(input())
        except ValueError:
            print("Invalid input. Please type a number")
def ewallet_example(xendit_instance):
    """Interactive loop dispatching menu choices to the example classes.

    Loops until the user selects 0; unknown numbers simply re-prompt.
    """
    demos = {
        1: ("Create OVO Payment", CreateOVOPayment),
        2: ("Create DANA Payment", CreateDANAPayment),
        3: ("Create LinkAja Payment", CreateLinkAjaPayment),
        4: ("Get Payment Status of OVO", GetOVOPaymentStatus),
        5: ("Get Payment Status of DANA", GetDANAPaymentStatus),
        6: ("Get Payment Status of LinkAja", GetLinkAjaPaymentStatus),
        7: ("Create E-Wallet Charge", CreateEWalletCharge),
        8: ("Get E-Wallet Charge Status", GetEWalletChargeStatus),
    }
    choice = ask_ewallet_input()
    while choice != 0:
        if choice in demos:
            title, demo = demos[choice]
            print(f"Running example of {title}")
            demo.example(xendit_instance)
        choice = ask_ewallet_input()
| 9,224 | 756 | 230 |
912ab713f612a5d3afa5649cae094ada2ef4d441 | 664 | py | Python | Django/e-commerce-website/store/urls.py | piaochung/blog | 3fc518c6b681e070e46dffaf64fae1086423a5e6 | [
"MIT"
] | null | null | null | Django/e-commerce-website/store/urls.py | piaochung/blog | 3fc518c6b681e070e46dffaf64fae1086423a5e6 | [
"MIT"
] | null | null | null | Django/e-commerce-website/store/urls.py | piaochung/blog | 3fc518c6b681e070e46dffaf64fae1086423a5e6 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
# URL routes for the store app. NOTE(review): the category/subcategory
# converters are <int:...> yet the parameter names say "slug" -- the views
# presumably look these up by integer pk; confirm the naming is intentional.
urlpatterns = [
    # Product list filtered by a top-level category.
    path('category/<int:category_slug>/',
         views.store, name='products_by_category'),
    # Product list filtered by category + subcategory.
    path('category/<int:category_slug>/<int:subcategory_slug>/',
         views.store, name='products_by_subcategory'),
    # Detail page for a single product (real slug converter for the product).
    path('category/<int:category_slug>/<int:subcategory_slug>/<slug:product_slug>/',
         views.product_detail, name='product_detail'),
    # Keyword search over products.
    path('search/', views.search, name='search'),
    # Review submission for a product.
    path('submit_review/<int:product_id>/',
         views.submit_review, name='submit_review'),
    # Brand landing page.
    path('brand_detail/<int:brand_id>/',
         views.brand_detail, name='brand_detail'),
]
| 33.2 | 84 | 0.680723 | from django.urls import path
from . import views
# URL routes for the store app. NOTE(review): the category/subcategory
# converters are <int:...> yet the parameter names say "slug" -- the views
# presumably look these up by integer pk; confirm the naming is intentional.
urlpatterns = [
    # Product list filtered by a top-level category.
    path('category/<int:category_slug>/',
         views.store, name='products_by_category'),
    # Product list filtered by category + subcategory.
    path('category/<int:category_slug>/<int:subcategory_slug>/',
         views.store, name='products_by_subcategory'),
    # Detail page for a single product (real slug converter for the product).
    path('category/<int:category_slug>/<int:subcategory_slug>/<slug:product_slug>/',
         views.product_detail, name='product_detail'),
    # Keyword search over products.
    path('search/', views.search, name='search'),
    # Review submission for a product.
    path('submit_review/<int:product_id>/',
         views.submit_review, name='submit_review'),
    # Brand landing page.
    path('brand_detail/<int:brand_id>/',
         views.brand_detail, name='brand_detail'),
]
| 0 | 0 | 0 |
6ad9b47cec7a5fd0dfb6b60edcd5a979be228713 | 1,620 | py | Python | project_automation/commands/typescript.py | Guigui14460/project-automation | 98f9b73be2000b0ecb07b1cca758693c29032947 | [
"Apache-2.0"
] | null | null | null | project_automation/commands/typescript.py | Guigui14460/project-automation | 98f9b73be2000b0ecb07b1cca758693c29032947 | [
"Apache-2.0"
] | 2 | 2021-01-17T16:04:03.000Z | 2021-08-13T13:00:49.000Z | project_automation/commands/typescript.py | Guigui14460/project-automation | 98f9b73be2000b0ecb07b1cca758693c29032947 | [
"Apache-2.0"
] | null | null | null | from typing import NoReturn
from .command_program import CommandProgram
from .utils import WindowsInstallationPackage, MacOSInstallationPackage, GNULinuxDistributionInstallationPackage
class TypescriptCommand(CommandProgram):
    """
    Command to verify that the ``tsc`` command is recognized by the operating
    system, and to install it automatically (if allowed) when it is not.
    """
    def __init__(self, allow_install: bool, update_package_manager: bool = True) -> NoReturn:
        """
        Constructor and initializer.
        Parameters
        ----------
        allow_install : bool
            True if you want to automatically install the required package, False otherwise
        update_package_manager : bool
            allows this program to automatically update and upgrade all packages installed in the system (via the package manager used)
        """
        # The npm fallback is identical on every platform.
        npm_install = "npm install -g typescript"
        super().__init__(
            "tsc --version",
            allow_install,
            WindowsInstallationPackage(
                choco_command="choco install typescript",
                standard_command=npm_install,
                update_package_manager=update_package_manager,
            ),
            MacOSInstallationPackage(
                standard_command=npm_install,
                brew_command="brew install typescript",
                update_package_manager=update_package_manager,
            ),
            GNULinuxDistributionInstallationPackage(
                standard_command=npm_install,
                update_package_manager=update_package_manager,
            ),
        )
| 40.5 | 135 | 0.680864 | from typing import NoReturn
from .command_program import CommandProgram
from .utils import WindowsInstallationPackage, MacOSInstallationPackage, GNULinuxDistributionInstallationPackage
class TypescriptCommand(CommandProgram):
    """
    Command to verify that the ``tsc`` command is recognized by the operating
    system, and to install it automatically (if allowed) when it is not.
    """
    def __init__(self, allow_install: bool, update_package_manager: bool = True) -> NoReturn:
        """
        Constructor and initializer.
        Parameters
        ----------
        allow_install : bool
            True if you want to automatically install the required package, False otherwise
        update_package_manager : bool
            allows this program to automatically update and upgrade all packages installed in the system (via the package manager used)
        """
        # The npm fallback is identical on every platform.
        npm_install = "npm install -g typescript"
        super().__init__(
            "tsc --version",
            allow_install,
            WindowsInstallationPackage(
                choco_command="choco install typescript",
                standard_command=npm_install,
                update_package_manager=update_package_manager,
            ),
            MacOSInstallationPackage(
                standard_command=npm_install,
                brew_command="brew install typescript",
                update_package_manager=update_package_manager,
            ),
            GNULinuxDistributionInstallationPackage(
                standard_command=npm_install,
                update_package_manager=update_package_manager,
            ),
        )
| 0 | 0 | 0 |
d7db3bf9e380d300103425108f9bd77a08df6a4a | 12,741 | py | Python | fastNLP/modules/decoder/CRF.py | YanLiqi/fastNLP | a8c45796158a3b8dbc81e22412bd6e4e4b97539a | [
"Apache-2.0"
] | 4 | 2019-01-19T13:58:10.000Z | 2019-01-19T15:07:48.000Z | fastNLP/modules/decoder/CRF.py | YanLiqi/fastNLP | a8c45796158a3b8dbc81e22412bd6e4e4b97539a | [
"Apache-2.0"
] | null | null | null | fastNLP/modules/decoder/CRF.py | YanLiqi/fastNLP | a8c45796158a3b8dbc81e22412bd6e4e4b97539a | [
"Apache-2.0"
] | null | null | null | import torch
from torch import nn
from fastNLP.modules.utils import initial_parameter
def allowed_transitions(id2label, encoding_type='bio'):
    """Enumerate the legal ``(from_tag_id, to_tag_id)`` transitions of a tagging scheme.

    :param dict id2label: maps a label index to a tag string, either a bare tag
        ("B", "M") or a tag-label pair joined by "-" ("B-NN", "M-NN"). Typically
        obtained from ``Vocabulary.get_id2word()``.
    :param str encoding_type: "bio" or "bmes" (case-insensitive).
    :return: list of ``(from_tag_id, to_tag_id)`` tuples. Two virtual tags are
        included: start (index ``len(id2label)``) and end (``len(id2label)+1``);
        e.g. for BIO, (start, B) and (start, O) are allowed but (start, I) is not.
    """
    num_tags = len(id2label)
    start_idx, end_idx = num_tags, num_tags + 1
    scheme = encoding_type.lower()
    entries = list(id2label.items()) + [(start_idx, 'start'), (end_idx, 'end')]
    transitions = []
    for src_id, src in entries:
        if src in ('<pad>', '<unk>'):
            continue
        src_tag, src_label = split_tag_label(src)
        for dst_id, dst in entries:
            if dst in ('<pad>', '<unk>'):
                continue
            dst_tag, dst_label = split_tag_label(dst)
            if is_transition_allowed(scheme, src_tag, src_label, dst_tag, dst_label):
                transitions.append((src_id, dst_id))
    return transitions
def is_transition_allowed(encoding_type, from_tag, from_label, to_tag, to_label):
    """Decide whether tag ``from_tag`` may be followed by tag ``to_tag``.

    :param str encoding_type: "bio" or "bmes" (case-insensitive).
    :param str from_tag: source tag, e.g. "B"/"M", or the virtual "start"/"end".
    :param str from_label: label of the source tag, e.g. "PER"/"LOC".
    :param str to_tag: target tag, possibly the virtual "start"/"end".
    :param str to_label: label of the target tag.
    :return bool: True if the transition is legal under the encoding scheme.
    :raises ValueError: on an unknown tag or encoding type.
    """
    # Nothing may enter the virtual start tag or leave the virtual end tag.
    if to_tag == 'start' or from_tag == 'end':
        return False
    scheme = encoding_type.lower()
    same_label = from_label == to_label
    if scheme == 'bio':
        # B/O may appear anywhere; I only continues a chunk of the same label.
        if from_tag == 'start':
            return to_tag in ('b', 'o')
        if from_tag in ('b', 'i'):
            return to_tag in ('end', 'b', 'o') or (to_tag == 'i' and same_label)
        if from_tag == 'o':
            return to_tag in ('end', 'b', 'o')
        raise ValueError("Unexpect tag {}. Expect only 'B', 'I', 'O'.".format(from_tag))
    if scheme == 'bmes':
        # B starts a chunk continued by M/E of the same label; S is a
        # single-token chunk; a sequence may only end after E or S.
        if from_tag == 'start':
            return to_tag in ('b', 's')
        if from_tag in ('b', 'm'):
            return to_tag in ('m', 'e') and same_label
        if from_tag in ('e', 's'):
            return to_tag in ('b', 's', 'end')
        raise ValueError("Unexpect tag type {}. Expect only 'B', 'M', 'E', 'S'.".format(from_tag))
    raise ValueError("Only support BIO, BMES encoding type, got {}.".format(encoding_type))
class ConditionalRandomField(nn.Module):
"""
:param int num_tags: 标签的数量。
:param bool include_start_end_trans: 是否包含起始tag
:param list allowed_transitions: ``List[Tuple[from_tag_id(int), to_tag_id(int)]]``. 允许的跃迁,可以通过allowed_transitions()得到。
如果为None,则所有跃迁均为合法
:param str initial_method:
"""
    def _normalizer_likelihood(self, logits, mask):
        """Computes the (batch_size,) denominator term for the log-likelihood, which is the
        sum of the likelihoods across all possible state sequences.
        :param logits:FloatTensor, max_len x batch_size x num_tags
        :param mask:ByteTensor, max_len x batch_size
        :return:FloatTensor, batch_size
        """
        seq_len, batch_size, n_tags = logits.size()
        # Forward algorithm in log space: alpha[b, t] holds the log-sum over
        # all partial paths that end in tag t at the current timestep.
        alpha = logits[0]
        if self.include_start_end_trans:
            alpha += self.start_scores.view(1, -1)
        for i in range(1, seq_len):
            emit_score = logits[i].view(batch_size, 1, n_tags)
            trans_score = self.trans_m.view(1, n_tags, n_tags)
            # tmp[b, s, t] = alpha[b, s] + trans(s -> t) + emit(t)
            tmp = alpha.view(batch_size, n_tags, 1) + emit_score + trans_score
            # Padded positions (mask == 0) carry the previous alpha forward.
            alpha = log_sum_exp(tmp, 1) * mask[i].view(batch_size, 1) + alpha * (1 - mask[i]).view(batch_size, 1)
        if self.include_start_end_trans:
            alpha += self.end_scores.view(1, -1)
        return log_sum_exp(alpha, 1)
    def _glod_score(self, logits, tags, mask):
        """
        Compute the score for the gold path.
        :param logits: FloatTensor, max_len x batch_size x num_tags
        :param tags: LongTensor, max_len x batch_size
        :param mask: ByteTensor, max_len x batch_size
        :return:FloatTensor, batch_size
        """
        seq_len, batch_size, _ = logits.size()
        batch_idx = torch.arange(batch_size, dtype=torch.long, device=logits.device)
        seq_idx = torch.arange(seq_len, dtype=torch.long, device=logits.device)
        # trans_score [L-1, B]: transition scores along the gold tag sequence,
        # zeroed at padded positions by the mask.
        trans_score = self.trans_m[tags[:seq_len-1], tags[1:]] * mask[1:, :]
        # emit_score [L, B]: per-step emission score of the gold tag.
        emit_score = logits[seq_idx.view(-1,1), batch_idx.view(1,-1), tags] * mask
        # score [L-1, B]: pairwise emission + transition contributions.
        score = trans_score + emit_score[:seq_len-1, :]
        score = score.sum(0) + emit_score[-1] * mask[-1]
        if self.include_start_end_trans:
            # Add start->first_tag and last_valid_tag->end bonus scores.
            st_scores = self.start_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[0]]
            last_idx = mask.long().sum(0) - 1
            ed_scores = self.end_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[last_idx, batch_idx]]
            score += st_scores + ed_scores
        # return [B,]
        return score
    def forward(self, feats, tags, mask):
        """
        Calculate the neg log likelihood

        NLL(b) = log Z(b) - score(gold path of b), one value per batch element.

        :param feats: FloatTensor, batch_size x max_len x num_tags, emission scores
        :param tags: LongTensor, batch_size x max_len, gold tag ids
        :param mask: ByteTensor batch_size x max_len, 1 at real tokens, 0 at padding
        :return: FloatTensor, batch_size
        """
        # Internally everything is time-major (max_len first); transpose once here.
        feats = feats.transpose(0, 1)
        tags = tags.transpose(0, 1).long()
        mask = mask.transpose(0, 1).float()
        all_path_score = self._normalizer_likelihood(feats, mask)
        gold_path_score = self._glod_score(feats, tags, mask)
        return all_path_score - gold_path_score
    def viterbi_decode(self, data, mask, get_score=False, unpad=False):
        """Given a feats matrix, return best decode path and best score.

        :param data: FloatTensor, batch_size x max_len x num_tags, emission scores
        :param mask: ByteTensor batch_size x max_len, 1 at real tokens, 0 at padding
        :param get_score: bool, whether to output the decode score.
        :param unpad: bool, whether to unpad the result.
            If False, a batch_size x max_len tensor is returned.
            If True, a List[List[int]] is returned, where each List[int] holds the
            labels of one sequence already truncated to that sample's true length.
        :return: if get_score is False, the decoded paths (shape depends on unpad);
            if get_score is True, a tuple (paths, List[float]) whose second element
            is the decoding score of each sequence.
        """
        batch_size, seq_len, n_tags = data.size()
        data = data.transpose(0, 1).data # L, B, H
        mask = mask.transpose(0, 1).data.float() # L, B
        # dp: vpath[i, b, t] = best predecessor tag for batch b at position i, tag t.
        vpath = data.new_zeros((seq_len, batch_size, n_tags), dtype=torch.long)
        vscore = data[0]
        # Merge the transition constraints (-1000 for forbidden moves) with the learned
        # transition scores; row n_tags is the virtual start state, column n_tags+1 the
        # virtual end state.
        transitions = self._constrain.data.clone()
        transitions[:n_tags, :n_tags] += self.trans_m.data
        if self.include_start_end_trans:
            transitions[n_tags, :n_tags] += self.start_scores.data
            transitions[:n_tags, n_tags+1] += self.end_scores.data
        vscore += transitions[n_tags, :n_tags]
        trans_score = transitions[:n_tags, :n_tags].view(1, n_tags, n_tags).data
        for i in range(1, seq_len):
            prev_score = vscore.view(batch_size, n_tags, 1)
            cur_score = data[i].view(batch_size, 1, n_tags)
            score = prev_score + trans_score + cur_score
            best_score, best_dst = score.max(1)
            vpath[i] = best_dst
            # Freeze vscore at padded positions so the final maximum reflects the
            # sequence's true length.
            vscore = best_score * mask[i].view(batch_size, 1) + vscore * (1 - mask[i]).view(batch_size, 1)
        vscore += transitions[:n_tags, n_tags+1].view(1, -1)
        # backtrace
        batch_idx = torch.arange(batch_size, dtype=torch.long, device=data.device)
        seq_idx = torch.arange(seq_len, dtype=torch.long, device=data.device)
        # lens: index of the last real position per sequence (length - 1).
        lens = (mask.long().sum(0) - 1)
        # idxes [L, B], batched idx from seq_len-1 to 0
        # (walks each sequence backwards from its own last real position).
        idxes = (lens.view(1,-1) - seq_idx.view(-1,1)) % seq_len
        ans = data.new_empty((seq_len, batch_size), dtype=torch.long)
        ans_score, last_tags = vscore.max(1)
        ans[idxes[0], batch_idx] = last_tags
        for i in range(seq_len - 1):
            last_tags = vpath[idxes[i], batch_idx, last_tags]
            ans[idxes[i+1], batch_idx] = last_tags
        ans = ans.transpose(0, 1)
        if unpad:
            paths = []
            # NOTE: the loop variable intentionally shadows seq_len with each
            # sample's own (length - 1).
            for idx, seq_len in enumerate(lens):
                paths.append(ans[idx, :seq_len+1].tolist())
        else:
            paths = ans
        if get_score:
            return paths, ans_score.tolist()
        return paths
| 42.188742 | 122 | 0.566204 | import torch
from torch import nn
from fastNLP.modules.utils import initial_parameter
def log_sum_exp(x, dim=-1):
    """Numerically stable log-sum-exp reduction of ``x`` along ``dim``.

    Shifts by the per-slice maximum before exponentiating so large scores do
    not overflow. The reduced dimension is squeezed out of the result.
    """
    shift, _ = torch.max(x, dim=dim, keepdim=True)
    summed = torch.exp(x - shift).sum(dim=dim, keepdim=True)
    return (shift + torch.log(summed)).squeeze(dim)
def seq_len_to_byte_mask(seq_lens):
    """Expand a vector of sequence lengths into a padding mask.

    :param seq_lens: LongTensor of shape (batch_size,)
    :return: boolean tensor of shape (batch_size, max_len); entry [b, i] is
        True exactly when position i is inside sequence b (i < seq_lens[b]).
    """
    n_seqs = seq_lens.size(0)
    longest = seq_lens.max()
    # One row of positions 0..longest-1 per sequence, on the same device.
    positions = torch.arange(longest).view(1, -1).repeat(n_seqs, 1).to(seq_lens.device)
    return positions.float().lt(seq_lens.float().view(-1, 1))
def allowed_transitions(id2label, encoding_type='bio'):
    """Enumerate the legal (from_tag_id, to_tag_id) pairs for a tag vocabulary.

    :param dict id2label: maps tag indices to string tags or tag-labels. A value
        may be a bare tag such as "B"/"M", or a tag-label pair such as "B-NN"
        where tag and label are separated by "-". Typically obtained from
        Vocabulary.get_id2word().
    :param encoding_type: str, "bio" and "bmes" are supported.
    :return: List[Tuple(int, int)] of (from_tag_id, to_tag_id). The result
        includes the virtual start/end states: e.g. in "BIO", B and O may open a
        sequence but I may not, so (start_idx, B_idx) and (start_idx, O_idx) are
        present while (start_idx, I_idx) is not.
        start_idx = len(id2label), end_idx = len(id2label) + 1.
    """
    num_real_tags = len(id2label)
    scheme = encoding_type.lower()
    # Real tags plus the two virtual boundary states.
    entries = list(id2label.items()) + [(num_real_tags, 'start'), (num_real_tags + 1, 'end')]

    def _split(label):
        # "B-NN" -> ("b", "nn"); "start"/"end" carry no entity label.
        label = label.lower()
        if label in ('start', 'end'):
            return label, ''
        return label[:1], label[2:]

    pairs = []
    for src_id, src_raw in entries:
        if src_raw in ('<pad>', '<unk>'):
            continue
        src_tag, src_label = _split(src_raw)
        for dst_id, dst_raw in entries:
            if dst_raw in ('<pad>', '<unk>'):
                continue
            dst_tag, dst_label = _split(dst_raw)
            if is_transition_allowed(scheme, src_tag, src_label, dst_tag, dst_label):
                pairs.append((src_id, dst_id))
    return pairs
def is_transition_allowed(encoding_type, from_tag, from_label, to_tag, to_label):
    """Decide whether one tag transition is legal under a tagging scheme.

    :param encoding_type: str, "bio" or "bmes" (case-insensitive).
    :param from_tag: str, lowercase source tag (e.g. "b", "i") or "start"/"end".
    :param from_label: str, entity label of the source tag (e.g. "per").
    :param to_tag: str, lowercase destination tag, same domain as ``from_tag``.
    :param to_label: str, entity label of the destination tag.
    :return: bool, True when the transition is allowed.
    :raises ValueError: on an unknown tag or an unsupported encoding type.
    """
    # Nothing may enter the virtual start state or leave the virtual end state.
    if from_tag == 'end' or to_tag == 'start':
        return False
    scheme = encoding_type.lower()
    if scheme == 'bio':
        # BIO rules: B/O may follow anything (and open a sequence); I may only
        # continue a B/I entity that carries the same label.
        if from_tag == 'start':
            return to_tag in ('b', 'o')
        if from_tag in ('b', 'i'):
            if to_tag in ['end', 'b', 'o']:
                return True
            return to_tag == 'i' and from_label == to_label
        if from_tag == 'o':
            return to_tag in ['end', 'b', 'o']
        raise ValueError("Unexpect tag {}. Expect only 'B', 'I', 'O'.".format(from_tag))
    if scheme == 'bmes':
        # BMES rules: entities are B(M*)E or S; M/E must keep the entity label,
        # and only E/S may end a sequence or start a new entity.
        if from_tag == 'start':
            return to_tag in ['b', 's']
        if from_tag in ('b', 'm'):
            return to_tag in ['m', 'e'] and from_label == to_label
        if from_tag in ('e', 's'):
            return to_tag in ['b', 's', 'end']
        raise ValueError("Unexpect tag type {}. Expect only 'B', 'M', 'E', 'S'.".format(from_tag))
    raise ValueError("Only support BIO, BMES encoding type, got {}.".format(encoding_type))
class ConditionalRandomField(nn.Module):
    """Linear-chain Conditional Random Field layer.

    :param int num_tags: number of tags.
    :param bool include_start_end_trans: whether to also learn transition scores
        from a virtual start state and into a virtual end state.
    :param list allowed_transitions: ``List[Tuple[from_tag_id(int), to_tag_id(int)]]``.
        The allowed transitions, as produced by ``allowed_transitions()``.
        If None, every transition is considered legal.
    :param str initial_method: parameter-initialization scheme forwarded to
        ``initial_parameter``.
    """
    def __init__(self, num_tags, include_start_end_trans=False, allowed_transitions=None, initial_method=None):
        super(ConditionalRandomField, self).__init__()
        self.include_start_end_trans = include_start_end_trans
        self.num_tags = num_tags
        # the meaning of entry in this matrix is (from_tag_id, to_tag_id) score
        self.trans_m = nn.Parameter(torch.randn(num_tags, num_tags))
        if self.include_start_end_trans:
            self.start_scores = nn.Parameter(torch.randn(num_tags))
            self.end_scores = nn.Parameter(torch.randn(num_tags))
        # Constraint matrix over num_tags + 2 states (the extra two being the
        # virtual start/end states): 0 for allowed transitions, -1000
        # (effectively -inf) for forbidden ones.
        if allowed_transitions is None:
            constrain = torch.zeros(num_tags + 2, num_tags + 2)
        else:
            constrain = torch.ones(num_tags + 2, num_tags + 2) * -1000
            for from_tag_id, to_tag_id in allowed_transitions:
                constrain[from_tag_id, to_tag_id] = 0
        # Registered as a Parameter so it follows the module across devices,
        # but it is never trained.
        self._constrain = nn.Parameter(constrain, requires_grad=False)
        # self.reset_parameter()
        initial_parameter(self, initial_method)
    def reset_parameter(self):
        # Re-initialize the learnable CRF parameters in place.
        nn.init.xavier_normal_(self.trans_m)
        if self.include_start_end_trans:
            nn.init.normal_(self.start_scores)
            nn.init.normal_(self.end_scores)
    def _normalizer_likelihood(self, logits, mask):
        """Computes the (batch_size,) denominator term for the log-likelihood, which is the
        sum of the likelihoods across all possible state sequences.

        This is the log partition function log(Z), computed with the forward
        algorithm in log space.

        :param logits: FloatTensor, max_len x batch_size x num_tags, emission scores
        :param mask: ByteTensor, max_len x batch_size, 1 at real tokens, 0 at padding
        :return: FloatTensor, batch_size
        """
        seq_len, batch_size, n_tags = logits.size()
        # alpha[b, t]: log-sum of the scores of all prefix paths ending in tag t.
        alpha = logits[0]
        if self.include_start_end_trans:
            alpha += self.start_scores.view(1, -1)
        for i in range(1, seq_len):
            emit_score = logits[i].view(batch_size, 1, n_tags)
            trans_score = self.trans_m.view(1, n_tags, n_tags)
            # tmp[b, s, t] = alpha[b, s] + trans(s -> t) + emit[b, t]
            tmp = alpha.view(batch_size, n_tags, 1) + emit_score + trans_score
            # Advance alpha only at real positions; keep it frozen at padding.
            alpha = log_sum_exp(tmp, 1) * mask[i].view(batch_size, 1) + alpha * (1 - mask[i]).view(batch_size, 1)
        if self.include_start_end_trans:
            alpha += self.end_scores.view(1, -1)
        return log_sum_exp(alpha, 1)
    def _glod_score(self, logits, tags, mask):
        """
        Compute the score for the gold path.

        (NOTE(review): "glod" looks like a historical typo for "gold"; the name
        is kept unchanged because ``forward`` calls it.)

        :param logits: FloatTensor, max_len x batch_size x num_tags, emission scores
        :param tags: LongTensor, max_len x batch_size, gold tag ids
        :param mask: ByteTensor, max_len x batch_size, 1 at real tokens, 0 at padding
        :return: FloatTensor, batch_size
        """
        seq_len, batch_size, _ = logits.size()
        batch_idx = torch.arange(batch_size, dtype=torch.long, device=logits.device)
        seq_idx = torch.arange(seq_len, dtype=torch.long, device=logits.device)
        # trans_score [L-1, B]: transition score between consecutive gold tags,
        # zeroed where the destination position is padding.
        trans_score = self.trans_m[tags[:seq_len-1], tags[1:]] * mask[1:, :]
        # emit_score [L, B]: emission score of the gold tag at each position (masked).
        emit_score = logits[seq_idx.view(-1,1), batch_idx.view(1,-1), tags] * mask
        # score [L-1, B]
        score = trans_score + emit_score[:seq_len-1, :]
        # Add the final position's emission separately (no outgoing transition).
        score = score.sum(0) + emit_score[-1] * mask[-1]
        if self.include_start_end_trans:
            st_scores = self.start_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[0]]
            # last_idx: index of the last non-padded position of each sequence.
            last_idx = mask.long().sum(0) - 1
            ed_scores = self.end_scores.view(1, -1).repeat(batch_size, 1)[batch_idx, tags[last_idx, batch_idx]]
            score += st_scores + ed_scores
        # return [B,]
        return score
    def forward(self, feats, tags, mask):
        """
        Calculate the neg log likelihood

        NLL(b) = log Z(b) - score(gold path of b), one value per batch element.

        :param feats: FloatTensor, batch_size x max_len x num_tags
        :param tags: LongTensor, batch_size x max_len
        :param mask: ByteTensor batch_size x max_len
        :return: FloatTensor, batch_size
        """
        # Internally everything is time-major (max_len first); transpose once here.
        feats = feats.transpose(0, 1)
        tags = tags.transpose(0, 1).long()
        mask = mask.transpose(0, 1).float()
        all_path_score = self._normalizer_likelihood(feats, mask)
        gold_path_score = self._glod_score(feats, tags, mask)
        return all_path_score - gold_path_score
    def viterbi_decode(self, data, mask, get_score=False, unpad=False):
        """Given a feats matrix, return best decode path and best score.

        :param data: FloatTensor, batch_size x max_len x num_tags
        :param mask: ByteTensor batch_size x max_len, 1 at real tokens, 0 at padding
        :param get_score: bool, whether to output the decode score.
        :param unpad: bool, whether to unpad the result.
            If False, a batch_size x max_len tensor is returned.
            If True, a List[List[int]] is returned, where each List[int] holds
            the labels of one sequence truncated to that sample's true length.
        :return: if get_score is False, the decoded paths (shape depends on
            unpad); if get_score is True, a tuple (paths, List[float]) whose
            second element is the decoding score of each sequence.
        """
        batch_size, seq_len, n_tags = data.size()
        data = data.transpose(0, 1).data # L, B, H
        mask = mask.transpose(0, 1).data.float() # L, B
        # dp: vpath[i, b, t] = best predecessor tag for batch b at position i, tag t.
        vpath = data.new_zeros((seq_len, batch_size, n_tags), dtype=torch.long)
        vscore = data[0]
        # Merge constraints (-1000 = forbidden) with learned transition scores;
        # row n_tags is the virtual start state, column n_tags+1 the end state.
        transitions = self._constrain.data.clone()
        transitions[:n_tags, :n_tags] += self.trans_m.data
        if self.include_start_end_trans:
            transitions[n_tags, :n_tags] += self.start_scores.data
            transitions[:n_tags, n_tags+1] += self.end_scores.data
        vscore += transitions[n_tags, :n_tags]
        trans_score = transitions[:n_tags, :n_tags].view(1, n_tags, n_tags).data
        for i in range(1, seq_len):
            prev_score = vscore.view(batch_size, n_tags, 1)
            cur_score = data[i].view(batch_size, 1, n_tags)
            score = prev_score + trans_score + cur_score
            best_score, best_dst = score.max(1)
            vpath[i] = best_dst
            # Freeze vscore at padded positions so the final maximum reflects
            # each sequence's true length.
            vscore = best_score * mask[i].view(batch_size, 1) + vscore * (1 - mask[i]).view(batch_size, 1)
        vscore += transitions[:n_tags, n_tags+1].view(1, -1)
        # backtrace
        batch_idx = torch.arange(batch_size, dtype=torch.long, device=data.device)
        seq_idx = torch.arange(seq_len, dtype=torch.long, device=data.device)
        # lens: index of the last real position per sequence (length - 1).
        lens = (mask.long().sum(0) - 1)
        # idxes [L, B], batched idx from seq_len-1 to 0
        # (walks each sequence backwards from its own last real position).
        idxes = (lens.view(1,-1) - seq_idx.view(-1,1)) % seq_len
        ans = data.new_empty((seq_len, batch_size), dtype=torch.long)
        ans_score, last_tags = vscore.max(1)
        ans[idxes[0], batch_idx] = last_tags
        for i in range(seq_len - 1):
            last_tags = vpath[idxes[i], batch_idx, last_tags]
            ans[idxes[i+1], batch_idx] = last_tags
        ans = ans.transpose(0, 1)
        if unpad:
            paths = []
            # NOTE: the loop variable intentionally shadows seq_len with each
            # sample's own (length - 1).
            for idx, seq_len in enumerate(lens):
                paths.append(ans[idx, :seq_len+1].tolist())
        else:
            paths = ans
        if get_score:
            return paths, ans_score.tolist()
        return paths
| 2,025 | 0 | 125 |
c62ff6985348eba85720ddafd2a35829d859a862 | 7,472 | py | Python | tests/benchmark.py | imartinezl/cpab | d18692528f5ae2028e2deaf38def1562e595660d | [
"MIT"
] | null | null | null | tests/benchmark.py | imartinezl/cpab | d18692528f5ae2028e2deaf38def1562e595660d | [
"MIT"
] | null | null | null | tests/benchmark.py | imartinezl/cpab | d18692528f5ae2028e2deaf38def1562e595660d | [
"MIT"
] | null | null | null | # %%
import sys
sys.path.insert(0, "..")
import time
import timeit
import numpy as np
import torch
import matplotlib.pyplot as plt
import cpab
import torch.autograd.profiler as profiler
import torch.utils.benchmark as benchmark
# %% SETUP
tess_size = 50
backend = "pytorch" # ["pytorch", "numpy"]
device = "gpu" # ["cpu", "gpu"]
zero_boundary = True
use_slow = False
outsize = 100
batch_size = 20
method = "closed_form"
T = cpab.Cpab(tess_size, backend, device, zero_boundary)
T.params.use_slow = use_slow
grid = T.uniform_meshgrid(outsize)
theta = T.sample_transformation(batch_size)
theta = T.identity(batch_size, epsilon=1.0)
# T.params.nSteps1 = 5
# T.params.nSteps2 = 5
grid_t = T.transform_grid(grid, theta, method)
# plt.plot(grid_t.cpu().T)
print(1)
# %% PYTORCH BENCHMARK
t0 = benchmark.Timer(
stmt="""
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
grid_t = T.transform_grid(grid, theta_grad, method)
loss = torch.norm(grid_t)
loss.backward()
""",
globals={"T": T, "grid": grid, "theta": theta, "method": method}
)
# t0.timeit(1)
t0.blocked_autorange(min_run_time=0.5)
# %% CPROFILE
import cProfile
cProfile.run(
"""
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
for i in range(1000):
grid_t = T.transform_grid(grid, theta_grad, method)
# loss = torch.norm(grid_t)
# loss.backward()
""",
sort="cumtime",
)
# %% YEP + PPROF
import yep
# torch.set_num_threads(1)
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
yep.start("profile.prof")
for i in range(100):
grid_t = T.transform_grid(grid, theta_grad, method)
# loss = torch.norm(grid_t)
# loss.backward()
yep.stop()
# %% TIMEIT
repetitions = 1000
n = 10
timing = timeit.Timer(
lambda: T.transform_grid(grid, theta),
# setup="gc.enable()"
).repeat(repetitions, n)
print("Time: ", np.mean(timing) / n, "+-", np.std(timing) / np.sqrt(n))
# %% PYTORCH PROFILER
with profiler.profile(with_stack=True, profile_memory=True) as prof:
T.transform_grid(grid, theta, method)
print(prof.key_averages().table(sort_by="self_cpu_time_total", row_limit=50))
# prof.export_chrome_trace("trace.json")
# %% snakeviz
# %prun -D program.prof T.transform_grid(grid, theta)
# %%
from itertools import product
results = []
num_threads_arr = [1] # [1, 2, 4]
backend_arr = ["pytorch"] # ["pytorch", "numpy"]
device_arr = ["cpu", "gpu"] # ["cpu", "gpu"]
method_arr = ["closed_form"] # ["closed_form", "numeric"]
use_slow_arr = [False] # [True, False]
zero_boundary_arr = [True] # [True, False]
tess_size_arr = [50]
outsize_arr = [1000]
batch_size_arr = [200]
for (
backend,
device,
method,
use_slow,
zero_boundary,
tess_size,
outsize,
batch_size,
) in product(
backend_arr,
device_arr,
method_arr,
use_slow_arr,
zero_boundary_arr,
tess_size_arr,
outsize_arr,
batch_size_arr,
):
# SETUP
T = cpab.Cpab(tess_size, backend, device, zero_boundary)
T.params.use_slow = use_slow
grid = T.uniform_meshgrid(outsize)
theta = T.identity(batch_size, epsilon=1)
label = "CPAB: backend, device, method, use_slow, zero_boundary, tess_size, outsize, batch_size"
# sub_label = f"[{backend}, {device}, {method}, {'slow' if use_slow else 'fast'}, {'zero_boundary' if zero_boundary else 'no_zero_boundary'}, {tess_size}, {outsize}, {batch_size}]"
sub_label = f"[{backend}, {device}, {method}, {use_slow}, {zero_boundary}, {tess_size}, {outsize}, {batch_size}]"
print(sub_label)
for num_threads in num_threads_arr:
repetitions = 1
# FORWARD
t0 = benchmark.Timer(
stmt=
"""
grid_t = T.transform_grid(grid, theta, method)
""",
globals={"T": T, "grid": grid, "theta": theta, "method": method},
num_threads=num_threads,
label=label,
sub_label=sub_label,
description="Forward",
)
# results.append(t0.timeit(repetitions))
results.append(t0.blocked_autorange(min_run_time=0.5))
# results.append(t0.adaptive_autorange())
# BACKWARD
t1 = benchmark.Timer(
stmt=
"""
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
grid_t = T.transform_grid(grid, theta_grad, method)
loss = torch.norm(grid_t)
loss.backward()
""",
globals={"T": T, "grid": grid, "theta": theta, "method": method},
num_threads=num_threads,
label=label,
sub_label=sub_label,
description="Backward",
)
# results.append(t1.timeit(repetitions))
results.append(t1.blocked_autorange(min_run_time=0.5))
# results.append(t1.adaptive_autorange())
# %%
compare = benchmark.Compare(results)
compare.trim_significant_figures()
compare.colorize()
compare.print()
# %% RESULTS TO LATEX
import pandas as pd
df = [
pd.DataFrame({
'experiment': t.as_row_name.replace('[', '').replace(']', ''),
'description': t.task_spec.description,
'threads': t.task_spec.num_threads,
'time': t.raw_times,
'time_mean': np.mean(t.raw_times),
'time_std': np.std(t.raw_times),
})
for t in results
]
df = pd.concat(df, ignore_index=True)
header = ['Backend', 'Device', 'Method', 'Speed', 'Boundary', 'Tess Size', 'Grid Size', 'Batch Size']
parameters = pd.DataFrame(df["experiment"].str.split(',', expand=True).values, columns=header)
a = pd.concat([parameters, df], axis=1).drop(columns=['experiment'])
a.to_latex(index=False, escape=False)
# %% RESULTS TO PLOT
import seaborn as sns
import pandas as pd
df = [
pd.DataFrame({
'experiment': t.as_row_name,
'description': t.task_spec.description,
'threads': t.task_spec.num_threads,
'time': t.raw_times})
for t in results
]
df = pd.concat(df, ignore_index=True)
df['experiment_id'] = df.groupby('experiment', sort=False).ngroup().apply(str)
n = pd.unique(df.experiment_id)
exps = pd.unique(df.experiment)
caption = '\n'.join([k + ": " + exps[int(k)] for k in n])
header = ['Backend', 'Device', 'Method', 'Speed', 'Boundary', 'Tess Size', 'Grid Size', 'Batch Size']
cell_text = [e.replace('[','').replace(']','').split(', ') for e in exps]
vlen = np.vectorize(len)
w = np.max(vlen(cell_text + [header]), axis=0)
# %%
import matplotlib
with sns.axes_style("whitegrid"):
g = sns.catplot(
x="time", y="experiment_id",
hue="threads", col="description",
data=df, kind="box", ci=None, sharex=True,
fliersize=2, linewidth=1, width=0.75)
sns.despine(top=False, right=False, left=False, bottom=False)
plt.xticks(np.logspace(-10,-1, num=10))
# plt.figtext(0, -0.1, caption, wrap=True,
# verticalalignment='top', horizontalalignment='left', fontsize=10)
table = plt.table(
cellText=cell_text,
rowLabels=n,
colLabels=header,
colWidths = w,
cellLoc='center',
loc='bottom',
# fontsize=50
bbox=[-1.0,-0.5, 1.2, 0.35]
)
table.auto_set_font_size(False)
table.set_fontsize(8)
# table.auto_set_column_width(n)
# table.scale(1, 1)
for ax in g.axes[0]:
ax.set_xscale('log')
ax.grid(axis="x", which="minor", ls="--", c='gray', alpha=0.2)
plt.savefig('example.png')
# %%
| 26.590747 | 184 | 0.632227 | # %%
# Benchmark / profiling notebook-style script for the cpab transform.
# Meant to be run cell-by-cell (each "# %%" is one cell); later cells depend
# on variables set by earlier ones.
import sys
sys.path.insert(0, "..")
import time
import timeit
import numpy as np
import torch
import matplotlib.pyplot as plt
import cpab
import torch.autograd.profiler as profiler
import torch.utils.benchmark as benchmark
# %% SETUP
# Build a Cpab transform and a batch of identity transformation parameters.
tess_size = 50
backend = "pytorch" # ["pytorch", "numpy"]
device = "gpu" # ["cpu", "gpu"]
zero_boundary = True
use_slow = False
outsize = 100
batch_size = 20
method = "closed_form"
T = cpab.Cpab(tess_size, backend, device, zero_boundary)
T.params.use_slow = use_slow
grid = T.uniform_meshgrid(outsize)
theta = T.sample_transformation(batch_size)
theta = T.identity(batch_size, epsilon=1.0)
# T.params.nSteps1 = 5
# T.params.nSteps2 = 5
grid_t = T.transform_grid(grid, theta, method)
# plt.plot(grid_t.cpu().T)
print(1)
# %% PYTORCH BENCHMARK
# Time one forward + backward pass with torch.utils.benchmark.
t0 = benchmark.Timer(
    stmt="""
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
grid_t = T.transform_grid(grid, theta_grad, method)
loss = torch.norm(grid_t)
loss.backward()
""",
    globals={"T": T, "grid": grid, "theta": theta, "method": method}
)
# t0.timeit(1)
t0.blocked_autorange(min_run_time=0.5)
# %% CPROFILE
# Function-level profile of repeated forward passes.
import cProfile
cProfile.run(
    """
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
for i in range(1000):
    grid_t = T.transform_grid(grid, theta_grad, method)
    # loss = torch.norm(grid_t)
    # loss.backward()
""",
    sort="cumtime",
)
# %% YEP + PPROF
# Native-code profile via yep (google-perftools); inspect with pprof.
import yep
# torch.set_num_threads(1)
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
yep.start("profile.prof")
for i in range(100):
    grid_t = T.transform_grid(grid, theta_grad, method)
    # loss = torch.norm(grid_t)
    # loss.backward()
yep.stop()
# %% TIMEIT
repetitions = 1000
n = 10
timing = timeit.Timer(
    lambda: T.transform_grid(grid, theta),
    # setup="gc.enable()"
).repeat(repetitions, n)
print("Time: ", np.mean(timing) / n, "+-", np.std(timing) / np.sqrt(n))
# %% PYTORCH PROFILER
with profiler.profile(with_stack=True, profile_memory=True) as prof:
    T.transform_grid(grid, theta, method)
print(prof.key_averages().table(sort_by="self_cpu_time_total", row_limit=50))
# prof.export_chrome_trace("trace.json")
# %% snakeviz
# %prun -D program.prof T.transform_grid(grid, theta)
# %%
# Parameter sweep: benchmark forward and backward for every configuration in
# the cartesian product of the *_arr lists below.
from itertools import product
results = []
num_threads_arr = [1] # [1, 2, 4]
backend_arr = ["pytorch"] # ["pytorch", "numpy"]
device_arr = ["cpu", "gpu"] # ["cpu", "gpu"]
method_arr = ["closed_form"] # ["closed_form", "numeric"]
use_slow_arr = [False] # [True, False]
zero_boundary_arr = [True] # [True, False]
tess_size_arr = [50]
outsize_arr = [1000]
batch_size_arr = [200]
for (
    backend,
    device,
    method,
    use_slow,
    zero_boundary,
    tess_size,
    outsize,
    batch_size,
) in product(
    backend_arr,
    device_arr,
    method_arr,
    use_slow_arr,
    zero_boundary_arr,
    tess_size_arr,
    outsize_arr,
    batch_size_arr,
):
    # SETUP
    T = cpab.Cpab(tess_size, backend, device, zero_boundary)
    T.params.use_slow = use_slow
    grid = T.uniform_meshgrid(outsize)
    theta = T.identity(batch_size, epsilon=1)
    label = "CPAB: backend, device, method, use_slow, zero_boundary, tess_size, outsize, batch_size"
    # sub_label = f"[{backend}, {device}, {method}, {'slow' if use_slow else 'fast'}, {'zero_boundary' if zero_boundary else 'no_zero_boundary'}, {tess_size}, {outsize}, {batch_size}]"
    sub_label = f"[{backend}, {device}, {method}, {use_slow}, {zero_boundary}, {tess_size}, {outsize}, {batch_size}]"
    print(sub_label)
    for num_threads in num_threads_arr:
        repetitions = 1
        # FORWARD
        t0 = benchmark.Timer(
            stmt=
            """
grid_t = T.transform_grid(grid, theta, method)
""",
            globals={"T": T, "grid": grid, "theta": theta, "method": method},
            num_threads=num_threads,
            label=label,
            sub_label=sub_label,
            description="Forward",
        )
        # results.append(t0.timeit(repetitions))
        results.append(t0.blocked_autorange(min_run_time=0.5))
        # results.append(t0.adaptive_autorange())
        # BACKWARD
        t1 = benchmark.Timer(
            stmt=
            """
theta_grad = torch.autograd.Variable(theta, requires_grad=True)
grid_t = T.transform_grid(grid, theta_grad, method)
loss = torch.norm(grid_t)
loss.backward()
""",
            globals={"T": T, "grid": grid, "theta": theta, "method": method},
            num_threads=num_threads,
            label=label,
            sub_label=sub_label,
            description="Backward",
        )
        # results.append(t1.timeit(repetitions))
        results.append(t1.blocked_autorange(min_run_time=0.5))
        # results.append(t1.adaptive_autorange())
# %%
compare = benchmark.Compare(results)
compare.trim_significant_figures()
compare.colorize()
compare.print()
# %% RESULTS TO LATEX
# Flatten the benchmark measurements into a DataFrame and emit a LaTeX table.
import pandas as pd
df = [
    pd.DataFrame({
        'experiment': t.as_row_name.replace('[', '').replace(']', ''),
        'description': t.task_spec.description,
        'threads': t.task_spec.num_threads,
        'time': t.raw_times,
        'time_mean': np.mean(t.raw_times),
        'time_std': np.std(t.raw_times),
    })
    for t in results
]
df = pd.concat(df, ignore_index=True)
header = ['Backend', 'Device', 'Method', 'Speed', 'Boundary', 'Tess Size', 'Grid Size', 'Batch Size']
parameters = pd.DataFrame(df["experiment"].str.split(',', expand=True).values, columns=header)
a = pd.concat([parameters, df], axis=1).drop(columns=['experiment'])
a.to_latex(index=False, escape=False)
# %% RESULTS TO PLOT
# Same measurements, reshaped for seaborn plotting.
import seaborn as sns
import pandas as pd
df = [
    pd.DataFrame({
        'experiment': t.as_row_name,
        'description': t.task_spec.description,
        'threads': t.task_spec.num_threads,
        'time': t.raw_times})
    for t in results
]
df = pd.concat(df, ignore_index=True)
df['experiment_id'] = df.groupby('experiment', sort=False).ngroup().apply(str)
n = pd.unique(df.experiment_id)
exps = pd.unique(df.experiment)
caption = '\n'.join([k + ": " + exps[int(k)] for k in n])
header = ['Backend', 'Device', 'Method', 'Speed', 'Boundary', 'Tess Size', 'Grid Size', 'Batch Size']
cell_text = [e.replace('[','').replace(']','').split(', ') for e in exps]
vlen = np.vectorize(len)
w = np.max(vlen(cell_text + [header]), axis=0)
# %%
# Box plot of timings per experiment, with a parameter table below the axes.
import matplotlib
with sns.axes_style("whitegrid"):
    g = sns.catplot(
        x="time", y="experiment_id",
        hue="threads", col="description",
        data=df, kind="box", ci=None, sharex=True,
        fliersize=2, linewidth=1, width=0.75)
    sns.despine(top=False, right=False, left=False, bottom=False)
    plt.xticks(np.logspace(-10,-1, num=10))
    # plt.figtext(0, -0.1, caption, wrap=True,
    # verticalalignment='top', horizontalalignment='left', fontsize=10)
    table = plt.table(
        cellText=cell_text,
        rowLabels=n,
        colLabels=header,
        colWidths = w,
        cellLoc='center',
        loc='bottom',
        # fontsize=50
        bbox=[-1.0,-0.5, 1.2, 0.35]
    )
    table.auto_set_font_size(False)
    table.set_fontsize(8)
    # table.auto_set_column_width(n)
    # table.scale(1, 1)
    for ax in g.axes[0]:
        ax.set_xscale('log')
        ax.grid(axis="x", which="minor", ls="--", c='gray', alpha=0.2)
    plt.savefig('example.png')
# %%
| 0 | 0 | 0 |
5f7fe7b5ac3f8175ed7329cfaae3957cb23f47d4 | 61,031 | py | Python | home/osmc/DLP/dlp_lightcrafter-1.0.19/dlp_lightcrafter/dpp2607.py | bietiekay/TI_LightCrafter_DLP2000_OSMC_RaspberryPi | 27f62f973f7d8c8cc3a49007599a7bf592d86289 | [
"BSD-2-Clause"
] | 3 | 2018-12-03T15:11:59.000Z | 2021-07-17T16:25:16.000Z | home/osmc/DLP/dlp_lightcrafter-1.0.19/dlp_lightcrafter/dpp2607.py | bietiekay/TI_LightCrafter_DLP2000_OSMC_RaspberryPi | 27f62f973f7d8c8cc3a49007599a7bf592d86289 | [
"BSD-2-Clause"
] | 1 | 2019-01-04T17:35:37.000Z | 2019-01-04T17:35:37.000Z | home/osmc/DLP/dlp_lightcrafter-1.0.19/dlp_lightcrafter/dpp2607.py | bietiekay/TI_LightCrafter_DLP2000_OSMC_RaspberryPi | 27f62f973f7d8c8cc3a49007599a7bf592d86289 | [
"BSD-2-Clause"
] | 1 | 2020-08-15T04:48:51.000Z | 2020-08-15T04:48:51.000Z | # -*- coding: windows-1252 -*-
# dpp2607.py
#
# sends commands to DPP2607 ASIC using I2C
#
# Copyright (C) 2017 Texas Instruments Incorporated - http://www.ti.com/
#
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Texas Instruments Incorporated nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
*** Note *** - this module is generated, changes will be lost!
Python Interface to DLP DPP2607
"""
import time
import struct
from enum import IntEnum
from logging import log, DEBUG
import i2c
COMPOUND_CMD_TIMEOUT = 2.0 # seconds
#####################################################
# Constants
#####################################################
# NOTE(review): this module is generated (see header); the numeric values below
# are raw DPP2607 command/parameter codes.  Verify individual values against the
# DLPC2607 documentation before changing anything -- edits here will be lost on
# regeneration.
X_0_TO_255_YCRCB = 0
X_16_TO_240_Y_112_TO_112_CRCB = 1
X_1_WHITE_AND_1_BLACK = 9
X_1_WHITE_AND_7_BLACK = 7
X_2_3_VGA_PORTRAIT = 4
X_3_2_VGA_LANDSCAPE = 5
X_4_2_2_YCR_CB_16_BIT = 8
X_4_2_2_YCR_CB_8_BIT = 9
X_90_DEGREE_ROTATION = 1
ACTIVE_HIGH = 1
ACTIVE_HIGH_PDM = 1
ACTIVE_HIGH_PULSE = 1
ACTIVE_LOW = 0
ACTIVE_LOW_PDM = 0
ACTIVE_LOW_PULSE = 0
ANSI_4X4_CHECKERBOARD = 0
BLACK = 0
BLUE = 4
BT_601 = 0
BT_656_I_F = 4
BT_709 = 1
COMPLETE = 1
CYAN = 6
DATA_SAMPLES_ON_FALLING_EDGE = 1
DATA_SAMPLES_ON_RISING_EDGE = 0
DIAGONAL_LINES = 10
DISABLED = 0
DLPC2601 = 130
DLPC2607 = 138
DSYS_PORTA_BIT_0 = 0
DSYS_PORTA_BIT_1 = 1
DSYS_PORTA_BIT_2 = 2
DSYS_PORTA_BIT_3 = 3
DSYS_PORTA_BIT_4 = 4
DSYS_PORTA_BIT_5 = 5
DSYS_PORTA_BIT_6 = 6
DSYS_PORTA_BIT_7 = 7
DSYS_PORTB_BIT_0 = 8
DSYS_PORTB_BIT_1 = 9
DSYS_PORTB_BIT_2 = 10
DSYS_PORTB_BIT_3 = 11
DSYS_PORTB_BIT_4 = 12
DSYS_PORTB_BIT_5 = 13
DSYS_PORTB_BIT_6 = 14
DSYS_PORTB_BIT_7 = 15
DSYS_PORTC_BIT_4 = 16
DSYS_PORTC_BIT_5 = 17
DSYS_PORTC_BIT_6 = 18
DSYS_PORTC_BIT_7 = 19
ENABLED = 1
ENABLED_ACTIVATES_CONTROL_BELOW = 1
ERROR_DETECTED = 0
EXTERNAL_VIDEO_PARALLEL_I_F = 0
FINE_CHECKERBOARD = 13
FLASH_BUSY = 1
GAMMA_CURVE_0 = 0
GAMMA_CURVE_1 = 1
GAMMA_CURVE_2 = 2
GAMMA_CURVE_3 = 3
GAMMA_CURVE_4 = 4
GAMMA_CURVE_5 = 5
GAMMA_CURVE_6 = 6
GREEN = 2
HORIZONTAL_GREY_RAMPS = 12
HORIZONTAL_LINES_1W_1B = 9
HORIZONTAL_LINES_1W_7B = 7
INITIALIZATION_COMPLETE = 0
INTERNAL_TEST_PATTERNS = 1
IN_PROGRESS = 0
MAGENTA = 5
NHD_LANDSCAPE = 27
NHD_PORTRAIT = 26
NOT_COMPLETE = 1
NO_ERRORS = 1
NO_ROTATION = 0
NO_TIMEOUTS = 0
NTSC_LANDSCAPE = 23
OFFSET__0 = 0
OFFSET__16 = 1
OPTICAL_TEST_IMAGE = 9
PAL_LANDSCAPE = 25
PARK_THE_DMD = 1
PIO_CYUSBI2C = 16
PIO_CYUSBSPI = 17
PIO_DEVASYS = 3
PIO_GENERICSERIAL = 7
PIO_MMKUSB = 9
PIO_SERIAL = 4
PIO_TESTER = 6
PIO_USB = 5
PIO_USBHID = 10
PIO_USBI2CPRO = 8
QVGA_LANDSCAPE = 1
QVGA_PORTRAIT = 0
QWVGA_LANDSCAPE = 3
QWVGA_PORTRAIT = 2
RED = 1
RGB565_16_BIT = 0
RGB565_8_BIT = 3
RGB666_16_BIT = 7
RGB666_18_BIT = 1
RGB666_8_BIT = 6
RGB888_16_BIT = 5
RGB888_24_BIT = 2
RGB888_8_BIT = 4
SEQUENCE_0 = 0
SEQUENCE_10 = 10
SEQUENCE_11 = 11
SEQUENCE_12 = 12
SEQUENCE_13 = 13
SEQUENCE_14 = 14
SEQUENCE_15 = 15
SEQUENCE_1 = 1
SEQUENCE_2 = 2
SEQUENCE_3 = 3
SEQUENCE_4 = 4
SEQUENCE_5 = 5
SEQUENCE_6 = 6
SEQUENCE_7 = 7
SEQUENCE_8 = 8
SEQUENCE_9 = 9
SET_AS_OFFSET_OFFSET__128 = 1
SET_AS_SIGNED_OFFSET__0 = 0
SOLID_BLACK = 1
SOLID_BLUE = 4
SOLID_GREEN = 3
SOLID_RED = 5
SOLID_WHITE = 2
SPLASH_IMAGE_0 = 0
SPLASH_IMAGE_1 = 1
SPLASH_IMAGE_2 = 2
SPLASH_IMAGE_3 = 3
SPLASH_SCREEN = 2
TIMEOUT_ERROR_HAS_OCCURRED = 1
UNPARK_THE_DMD = 0
VERTICAL_GREY_RAMPS = 11
VERTICAL_LINES_1W_1B = 8
VERTICAL_LINES_1W_7B = 6
VGA_LANDSCAPE = 7
VGA_PORTRAIT = 6
WHITE = 7
WVGA_720_LANDSCAPE = 9
WVGA_720_PORTRAIT = 8
WVGA_752_LANDSCAPE = 11
WVGA_752_PORTRAIT = 10
WVGA_800_LANDSCAPE = 13
WVGA_800_PORTRAIT = 12
WVGA_852_LANDSCAPE = 15
WVGA_852_PORTRAIT = 14
WVGA_853_LANDSCAPE = 17
WVGA_853_PORTRAIT = 16
WVGA_854_LANDSCAPE = 19
WVGA_854_OR_VGA_OUTPUT = 29
WVGA_854_PORTRAIT = 18
WVGA_864_LANDSCAPE = 21
WVGA_864_PORTRAIT = 20
YELLOW = 3
#####################################################
# Enumerations uses by function parameters
#####################################################
class DMDCurtainColor(IntEnum):
    """
    DMD Curtain Color
    """
    BLACK = 0x00
    RED = 0x01
    GREEN = 0x02
    BLUE = 0x04
    YELLOW = 0x03
    MAGENTA = 0x05
    CYAN = 0x06
    WHITE = 0x07
class TestPatternVLines(IntEnum):
    """
    Line Count
    """
    X_1_WHITE_AND_7_BLACK = 0x06
    X_1_WHITE_AND_1_BLACK = 0x08
class TestPatternHLines(IntEnum):
    """
    Line Count
    """
    X_1_WHITE_AND_7_BLACK = 0x07
    X_1_WHITE_AND_1_BLACK = 0x09
class PolarityPixelClock(IntEnum):
    """
    Pixel Clock Polarity
    """
    DATA_SAMPLES_ON_RISING_EDGE = 0x00
    DATA_SAMPLES_ON_FALLING_EDGE = 0x01
class DevLEDStatus(IntEnum):
    """
    LED Timeout Status
    """
    NO_TIMEOUTS = 0x00
    TIMEOUT_ERROR_HAS_OCCURRED = 0x01
class PixFormat(IntEnum):
    """
    Pixel Data Format
    """
    # Trailing underscores keep these member names distinct from the
    # module-level constants of the same spelling.
    RGB565_16_BIT_ = 0x00
    RGB666_18_BIT_ = 0x01
    RGB888_24_BIT_ = 0x02
    RGB565_8_BIT_ = 0x03
    RGB888_8_BIT_ = 0x04
    RGB888_16_BIT_ = 0x05
    RGB666_8_BIT_ = 0x06
    RGB666_16_BIT_ = 0x07
    X_4_2_2_YCR_CB_16_BIT_ = 0x08
    X_4_2_2_YCR_CB_8_BIT_ = 0x09
class DMDPARK(IntEnum):
    """
    DMD Park Control
    """
    UNPARK_THE_DMD = 0x00
    PARK_THE_DMD = 0x01
class Resolution(IntEnum):
    """
    Resolution
    """
    # Note: the value range is not contiguous (0x16, 0x18, 0x1C are unused).
    QVGA_PORTRAIT = 0x00
    QVGA_LANDSCAPE = 0x01
    QWVGA_PORTRAIT = 0x02
    QWVGA_LANDSCAPE = 0x03
    X_2_3_VGA_PORTRAIT = 0x04
    X_3_2_VGA_LANDSCAPE = 0x05
    VGA_PORTRAIT = 0x06
    VGA_LANDSCAPE = 0x07
    WVGA_720_PORTRAIT = 0x08
    WVGA_720_LANDSCAPE = 0x09
    WVGA_752_PORTRAIT = 0x0A
    WVGA_752_LANDSCAPE = 0x0B
    WVGA_800_PORTRAIT = 0x0C
    WVGA_800_LANDSCAPE = 0x0D
    WVGA_852_PORTRAIT = 0x0E
    WVGA_852_LANDSCAPE = 0x0F
    WVGA_853_PORTRAIT = 0x10
    WVGA_853_LANDSCAPE = 0x11
    WVGA_854_PORTRAIT = 0x12
    WVGA_854_LANDSCAPE = 0x13
    WVGA_864_PORTRAIT = 0x14
    WVGA_864_LANDSCAPE = 0x15
    NTSC_LANDSCAPE = 0x17
    PAL_LANDSCAPE = 0x19
    NHD_PORTRAIT = 0x1A
    NHD_LANDSCAPE = 0x1B
    WVGA_854_OR_VGA_OUTPUT = 0x1D
class CompoundStat(IntEnum):
    """
    LED Calibration State
    mDDR Built-In Self-Test State
    """
    COMPLETE = 0x00
    NOT_COMPLETE = 0x01
class TestPattern(IntEnum):
    """
    Current Pattern
    """
    ANSI_4X4_CHECKERBOARD = 0x00
    SOLID_BLACK = 0x01
    SOLID_WHITE = 0x02
    SOLID_GREEN = 0x03
    SOLID_BLUE = 0x04
    SOLID_RED = 0x05
    VERTICAL_LINES_1W_7B_ = 0x06
    HORIZONTAL_LINES_1W_7B_ = 0x07
    VERTICAL_LINES_1W_1B_ = 0x08
    HORIZONTAL_LINES_1W_1B_ = 0x09
    DIAGONAL_LINES = 0x0A
    VERTICAL_GREY_RAMPS = 0x0B
    HORIZONTAL_GREY_RAMPS = 0x0C
    FINE_CHECKERBOARD = 0x0D
class RotationSetting(IntEnum):
    """
    Rotation Setting
    """
    NO_ROTATION = 0x00
    X_90_DEGREE_ROTATION = 0x01
class PolarityDataEn(IntEnum):
    """
    DATAEN Signal Polarity
    """
    ACTIVE_LOW = 0x00
    ACTIVE_HIGH = 0x01
class TestPatternSolids(IntEnum):
    """
    Color
    """
    BLACK = 0x01
    WHITE = 0x02
    GREEN = 0x03
    BLUE = 0x04
    RED = 0x05
class SourceSel(IntEnum):
    """
    Input Source
    """
    EXTERNAL_VIDEO_PARALLEL_I_F_ = 0x00
    INTERNAL_TEST_PATTERNS = 0x01
    SPLASH_SCREEN = 0x02
    BT_656_I_F = 0x04
class DevID(IntEnum):
    """
    Device ID
    """
    DLPC2601 = 0x82
    DLPC2607 = 0x8A
class DevInitStatus(IntEnum):
    """
    Auto-Initialization Status
    """
    IN_PROGRESS = 0x00
    INITIALIZATION_COMPLETE = 0x01
class CompoundLooks(IntEnum):
    """
    Selected Looks Sequence
    """
    SEQUENCE_0 = 0x00
    SEQUENCE_1 = 0x01
    SEQUENCE_2 = 0x02
    SEQUENCE_3 = 0x03
    SEQUENCE_4 = 0x04
    SEQUENCE_5 = 0x05
    SEQUENCE_6 = 0x06
    SEQUENCE_7 = 0x07
    SEQUENCE_8 = 0x08
    SEQUENCE_9 = 0x09
    SEQUENCE_10 = 0x0a
    SEQUENCE_11 = 0x0b
    SEQUENCE_12 = 0x0c
    SEQUENCE_13 = 0x0d
    SEQUENCE_14 = 0x0e
    SEQUENCE_15 = 0x0f
class EnabledDisabled(IntEnum):
    """
    Blue LED State
    DMD Curtain Control
    DMD Long Side Flip
    DMD Short Side Flip
    Green LED State
    Red LED State
    """
    DISABLED = 0x00
    ENABLED = 0x01
class Polarity(IntEnum):
    """
    HSYNC Signal Polarity
    VSYNC Signal Polarity
    """
    ACTIVE_LOW_PULSE = 0x00
    ACTIVE_HIGH_PULSE = 0x01
class DevFlashStatus(IntEnum):
    """
    Flash Initialization Status
    """
    INITIALIZATION_COMPLETE = 0x00
    FLASH_BUSY = 0x01
class CompoundSplash(IntEnum):
    """
    Splash Screen Select
    """
    SPLASH_IMAGE_0 = 0x00
    SPLASH_IMAGE_1 = 0x01
    SPLASH_IMAGE_2 = 0x02
    SPLASH_IMAGE_3 = 0x03
    OPTICAL_TEST_IMAGE = 0x09
#####################################################
# Support functions
#####################################################
def DPP2607_Open(*args):
    """
    Open the I2C interface.

    Extra positional arguments are accepted for call-site compatibility
    and ignored.
    :rtype: None
    """
    log(DEBUG, "DPP2607_Open()")
    i2c.initialize()
def DPP2607_Close():
    """
    Close the I2C interface
    DPP2607_Close().
    :rtype: None
    """
    log(DEBUG, "DPP2607_Close()")
    i2c.terminate()
def DPP2607_GetIODebug():
    """
    Return the IO debugging status.

    The second element (log path) is always None in this implementation.
    :returns: enable, log_path
    :rtype: tuple[bool, str|None]
    """
    return i2c.get_debug(), None
def DPP2607_SetIODebug(enable, log_path=None):
    """
    Enable/disable logging IO to a log file. Log_path is ignored.
    :type enable: bool
    :type log_path: str, not used
    :rtype: None
    """
    log(DEBUG, "DPP2607_SetIODebug(%s, %s)", enable, log_path)
    i2c.set_debug(enable)
def DPP2607_GetSlaveAddr():
    """
    Get the I2C slave address (default: 0x36).
    :returns: slave_addr
    :rtype: int
    """
    return i2c.get_slave_address()
def DPP2607_SetSlaveAddr(slave_addr):
    """
    Set the I2C slave address (default: 0x36).

    Tears the link down and reinitializes it only when the requested
    address differs from the one currently in use.

    :type slave_addr: int
    :rtype: None
    """
    if slave_addr == i2c.get_slave_address():
        return  # already at the requested address; nothing to do
    log(DEBUG, "DPP2607_SetSlaveAddr(%s)", hex(slave_addr))
    i2c.terminate()
    i2c.initialize(slave_addr)
#####################################################
# ASIC Command Functions
#####################################################
def _read_cca_register(sub_address, log_format, mask=0x1ff):
    """
    Shared helper for the CCA register read commands.

    Issues a register read (opcode 0x15) for *sub_address*, unpacks the
    4-byte big-endian reply, masks it with *mask* (9 bits for coefficient
    registers, 1 bit for the enable flag) and logs it via *log_format*.

    :type sub_address: int
    :type log_format: str  # exact per-command log format string
    :type mask: int
    :returns: masked register value
    :rtype: int
    """
    i2c.write([0x15, sub_address])
    payload = i2c.read(4)
    value = struct.unpack(">I", str(bytearray(payload[0:4])))[0] & mask
    log(DEBUG, log_format, value)
    return value
def DPP2607_Read_CcaC1r1Coefficient():
    """Reads: CCA C1R1 Coefficient. :returns: ccac1r1 :rtype: int"""
    return _read_cca_register(0x5F, 'DPP2607_Read_CcaC1r1Coefficient: ccac1r1=%r')
def DPP2607_Read_CcaC1r2Coefficient():
    """Reads: CCA C1R2 Coefficient. :returns: ccac1r2 :rtype: int"""
    return _read_cca_register(0x60, 'DPP2607_Read_CcaC1r2Coefficient: ccac1r2=%r')
def DPP2607_Read_CcaC1r3Coefficient():
    """Reads: CCA C1R3 Coefficient. :returns: ccac1r3 :rtype: int"""
    return _read_cca_register(0x61, 'DPP2607_Read_CcaC1r3Coefficient: ccac1r3=%r')
def DPP2607_Read_CcaC2r1Coefficient():
    """Reads: CCA C2R1 Coefficient. :returns: ccac2r1 :rtype: int"""
    return _read_cca_register(0x62, 'DPP2607_Read_CcaC2r1Coefficient: ccac2r1=%r')
def DPP2607_Read_CcaC2r2Coefficient():
    """Reads: CCA C2R2 Coefficient. :returns: ccac2r2 :rtype: int"""
    return _read_cca_register(0x63, 'DPP2607_Read_CcaC2r2Coefficient: ccac2r2=%r')
def DPP2607_Read_CcaC2r3Coefficient():
    """Reads: CCA C2R3 Coefficient. :returns: ccac2r3 :rtype: int"""
    return _read_cca_register(0x64, 'DPP2607_Read_CcaC2r3Coefficient: ccac2r3=%r')
def DPP2607_Read_CcaC3r1Coefficient():
    """Reads: CCA C3R1 Coefficient. :returns: ccac3r1 :rtype: int"""
    return _read_cca_register(0x65, 'DPP2607_Read_CcaC3r1Coefficient: ccac3r1=%r')
def DPP2607_Read_CcaC3r2Coefficient():
    """Reads: CCA C3R2 Coefficient. :returns: ccac3r2 :rtype: int"""
    return _read_cca_register(0x66, 'DPP2607_Read_CcaC3r2Coefficient: ccac3r2=%r')
def DPP2607_Read_CcaC3r3Coefficient():
    """Reads: CCA C3R3 Coefficient. :returns: ccac3r3 :rtype: int"""
    return _read_cca_register(0x67, 'DPP2607_Read_CcaC3r3Coefficient: ccac3r3=%r')
def DPP2607_Read_CcaC4r1Coefficient():
    """Reads: CCA C4R1 Coefficient. :returns: ccac4r1 :rtype: int"""
    return _read_cca_register(0x68, 'DPP2607_Read_CcaC4r1Coefficient: ccac4r1=%r')
def DPP2607_Read_CcaC4r2Coefficient():
    """Reads: CCA C4R2 Coefficient. :returns: ccac4r2 :rtype: int"""
    return _read_cca_register(0x69, 'DPP2607_Read_CcaC4r2Coefficient: ccac4r2=%r')
def DPP2607_Read_CcaC4r3Coefficient():
    """Reads: CCA C4R3 Coefficient. :returns: ccac4r3 :rtype: int"""
    return _read_cca_register(0x6A, 'DPP2607_Read_CcaC4r3Coefficient: ccac4r3=%r')
def DPP2607_Read_CcaC5r1Coefficient():
    """Reads: CCA C5R1 Coefficient. :returns: ccac5r1 :rtype: int"""
    return _read_cca_register(0x6B, 'DPP2607_Read_CcaC5r1Coefficient: ccac5r1=%r')
def DPP2607_Read_CcaC5r2Coefficient():
    """Reads: CCA C5R2 Coefficient. :returns: ccac5r2 :rtype: int"""
    return _read_cca_register(0x6C, 'DPP2607_Read_CcaC5r2Coefficient: ccac5r2=%r')
def DPP2607_Read_CcaC5r3Coefficient():
    """Reads: CCA C5R3 Coefficient. :returns: ccac5r3 :rtype: int"""
    return _read_cca_register(0x6D, 'DPP2607_Read_CcaC5r3Coefficient: ccac5r3=%r')
def DPP2607_Read_CcaC6r1Coefficient():
    """Reads: CCA C6R1 Coefficient. :returns: ccac6r1 :rtype: int"""
    return _read_cca_register(0x6E, 'DPP2607_Read_CcaC6r1Coefficient: ccac6r1=%r')
def DPP2607_Read_CcaC6r2Coefficient():
    """Reads: CCA C6R2 Coefficient. :returns: ccac6r2 :rtype: int"""
    return _read_cca_register(0x6F, 'DPP2607_Read_CcaC6r2Coefficient: ccac6r2=%r')
def DPP2607_Read_CcaC6r3Coefficient():
    """Reads: CCA C6R3 Coefficient. :returns: ccac6r3 :rtype: int"""
    return _read_cca_register(0x70, 'DPP2607_Read_CcaC6r3Coefficient: ccac6r3=%r')
def DPP2607_Read_CcaC7r1Coefficient():
    """Reads: CCA C7R1 Coefficient. :returns: ccac7r1 :rtype: int"""
    return _read_cca_register(0x71, 'DPP2607_Read_CcaC7r1Coefficient: ccac7r1=%r')
def DPP2607_Read_CcaC7r2Coefficient():
    """Reads: CCA C7R2 Coefficient. :returns: ccac7r2 :rtype: int"""
    return _read_cca_register(0x72, 'DPP2607_Read_CcaC7r2Coefficient: ccac7r2=%r')
def DPP2607_Read_CcaC7r3Coefficient():
    """Reads: CCA C7R3 Coefficient. :returns: ccac7r3 :rtype: int"""
    return _read_cca_register(0x73, 'DPP2607_Read_CcaC7r3Coefficient: ccac7r3=%r')
def DPP2607_Read_CcaFunctionEnable():
    """Reads: CCA Function Enable. :returns: cca_enable (0/1) :rtype: int"""
    return _read_cca_register(0x5E, 'DPP2607_Read_CcaFunctionEnable: cca_enable=%r', mask=0x1)
def DPP2607_Read_CommunicationStatus():
    """
    Reads: Communication Status.

    :returns: compound_stat_inv_cmd, compound_stat_par_cmd,
        compound_stat_mem_rd, compound_stat_cmd_par, compound_stat_cmd_abt
    :rtype: tuple[int, int, int, int, int]
    """
    # Compound-command sequence: arm (0x3A), select report 0xC4 (0x38),
    # wait for completion, then read the result register (0x39).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC4])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    # The five status flags occupy bits 8..12 of the reply word.
    compound_stat_inv_cmd, compound_stat_par_cmd, compound_stat_mem_rd, \
        compound_stat_cmd_par, compound_stat_cmd_abt = \
        [(word >> bit) & 0x1 for bit in (8, 9, 10, 11, 12)]
    log(DEBUG, 'DPP2607_Read_CommunicationStatus: compound_stat_inv_cmd=%r, compound_stat_par_cmd=%r, compound_stat_mem_rd=%r, compound_stat_cmd_par=%r, compound_stat_cmd_abt=%r', compound_stat_inv_cmd, compound_stat_par_cmd, compound_stat_mem_rd, compound_stat_cmd_par, compound_stat_cmd_abt)
    return compound_stat_inv_cmd, compound_stat_par_cmd, compound_stat_mem_rd, compound_stat_cmd_par, compound_stat_cmd_abt
def _read_crop_register(sub_address, log_format):
    """
    Shared helper for the crop boundary reads.

    Issues a register read (opcode 0x15) for *sub_address* and masks the
    4-byte big-endian reply to 11 bits, logging it via *log_format*.

    :type sub_address: int
    :type log_format: str
    :rtype: int
    """
    i2c.write([0x15, sub_address])
    payload = i2c.read(4)
    value = struct.unpack(">I", str(bytearray(payload[0:4])))[0] & 0x7ff
    log(DEBUG, log_format, value)
    return value
def DPP2607_Read_CropFirstLine():
    """Reads: Crop - First Line. :returns: first_active_line :rtype: int"""
    return _read_crop_register(0x29, 'DPP2607_Read_CropFirstLine: first_active_line=%r')
def DPP2607_Read_CropFirstPixel():
    """Reads: Crop - First Pixel. :returns: first_active_pixel :rtype: int"""
    return _read_crop_register(0x2B, 'DPP2607_Read_CropFirstPixel: first_active_pixel=%r')
def DPP2607_Read_CropLastLine():
    """Reads: Crop - Last Line. :returns: last_active_line :rtype: int"""
    return _read_crop_register(0x2A, 'DPP2607_Read_CropLastLine: last_active_line=%r')
def DPP2607_Read_CropLastPixel():
    """Reads: Crop - Last Pixel. :returns: last_active_pixel :rtype: int"""
    return _read_crop_register(0x2C, 'DPP2607_Read_CropLastPixel: last_active_pixel=%r')
def DPP2607_Read_DeviceStatus():
    """
    Reads: Device Status.

    :returns: dev_id, dev_flash_status, dev_init_status, dev_led_status
    :rtype: tuple[DevID, DevFlashStatus, DevInitStatus, DevLEDStatus]
    """
    i2c.write([0x15, 0x03])
    raw = i2c.read(4)
    word = struct.unpack(">I", str(bytearray(raw[0:4])))[0]
    dev_id = DevID(word & 0xff)                            # bits 0-7
    dev_flash_status = DevFlashStatus((word >> 10) & 0x1)  # bit 10
    dev_init_status = DevInitStatus((word >> 11) & 0x1)    # bit 11
    dev_led_status = DevLEDStatus((word >> 12) & 0x1)      # bit 12
    log(DEBUG, 'DPP2607_Read_DeviceStatus: dev_id=%r, dev_flash_status=%r, dev_init_status=%r, dev_led_status=%r', dev_id, dev_flash_status, dev_init_status, dev_led_status)
    return dev_id, dev_flash_status, dev_init_status, dev_led_status
def DPP2607_Read_DisplayCurtainControl():
    """
    Reads: Display Curtain Control.

    :returns: dmd_curtain_ctl, dmd_curtain_color
    :rtype: tuple[EnabledDisabled, DMDCurtainColor]
    """
    i2c.write([0x15, 0xA6])
    raw = i2c.read(4)
    word = struct.unpack(">I", str(bytearray(raw[0:4])))[0]
    dmd_curtain_ctl = EnabledDisabled(word & 0xf)           # bits 0-3
    dmd_curtain_color = DMDCurtainColor((word >> 4) & 0xf)  # bits 4-7
    log(DEBUG, 'DPP2607_Read_DisplayCurtainControl: dmd_curtain_ctl=%r, dmd_curtain_color=%r', dmd_curtain_ctl, dmd_curtain_color)
    return dmd_curtain_ctl, dmd_curtain_color
def DPP2607_Read_DmdPark():
    """
    Reads: DMD PARK.

    :returns: dmdpark
    :rtype: DMDPARK
    """
    i2c.write([0x15, 0x2D])
    raw = i2c.read(4)
    word = struct.unpack(">I", str(bytearray(raw[0:4])))[0]
    dmdpark = DMDPARK(word & 0x1)  # bit 0: park state
    log(DEBUG, 'DPP2607_Read_DmdPark: dmdpark=%r', dmdpark)
    return dmdpark
def DPP2607_Read_EmbeddedSoftwareVersion():
    """
    Reads: Embedded Software Version.

    :returns: compound_icp_patch, compound_icp_minor, compound_icp_major
    :rtype: tuple[int, int, int]
    """
    # Compound-command sequence: arm (0x3A), select report 0x02 (0x38),
    # wait for completion, then read the result register (0x39).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0x02])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    compound_icp_patch = word & 0xffff        # bits 0-15
    compound_icp_minor = (word >> 16) & 0xff  # bits 16-23
    compound_icp_major = (word >> 24) & 0xff  # bits 24-31
    log(DEBUG, 'DPP2607_Read_EmbeddedSoftwareVersion: compound_icp_patch=%r, compound_icp_minor=%r, compound_icp_major=%r', compound_icp_patch, compound_icp_minor, compound_icp_major)
    return compound_icp_patch, compound_icp_minor, compound_icp_major
def _read_image_setting(sub_address, enum_cls, mask, log_format):
    """
    Shared helper for the image orientation / test-pattern reads.

    Issues a register read (opcode 0x15) for *sub_address*, masks the
    4-byte big-endian reply with *mask*, wraps it in *enum_cls* and logs
    it via *log_format*.

    :type sub_address: int
    :type enum_cls: type  # an IntEnum subclass
    :type mask: int
    :type log_format: str
    :returns: the decoded enum member
    """
    i2c.write([0x15, sub_address])
    payload = i2c.read(4)
    value = enum_cls(struct.unpack(">I", str(bytearray(payload[0:4])))[0] & mask)
    log(DEBUG, log_format, value)
    return value
def DPP2607_Read_ImageLongFlip():
    """Reads: Image Long Flip. :returns: flip_long :rtype: EnabledDisabled"""
    return _read_image_setting(0x0F, EnabledDisabled, 0x1, 'DPP2607_Read_ImageLongFlip: flip_long=%r')
def DPP2607_Read_ImageRotationSettings():
    """Reads: Image Rotation Settings. :returns: rotation_setting :rtype: RotationSetting"""
    return _read_image_setting(0x0E, RotationSetting, 0x1, 'DPP2607_Read_ImageRotationSettings: rotation_setting=%r')
def DPP2607_Read_ImageShortFlip():
    """Reads: Image Short Flip. :returns: flip_short :rtype: EnabledDisabled"""
    return _read_image_setting(0x10, EnabledDisabled, 0x1, 'DPP2607_Read_ImageShortFlip: flip_short=%r')
def DPP2607_Read_InternalTestPattern():
    """Reads: Internal Test Pattern. :returns: test_pattern :rtype: TestPattern"""
    return _read_image_setting(0x11, TestPattern, 0xf, 'DPP2607_Read_InternalTestPattern: test_pattern=%r')
def DPP2607_Read_InterruptStatus():
    """
    Reads: Interrupt Status.
    DPP2607_Read_InterruptStatus(DWORD &&IntSeqAbort, DWORD &&IntDMDResetOverrun, DWORD &&IntDMDBlockError, DWORD &&IntDMDIFOverrun, DWORD &&IntFormatBufOverflow, DWORD &&IntFormatStarvation, DWORD &&IntFlashFIFOErr, DWORD &&IntFlashDMAErr, DWORD &&IntFormatMultErr, DWORD &&IntFormatCmdErr, DWORD &&IntFormatQueueWarn, DWORD &&IntDDROverflowBP, DWORD &&IntDDROverflowFB, DWORD &&IntScalerLineErr, DWORD &&IntScalerPixerr, DWORD &&IntLEDTimeout).
    :returns: int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout
    :rtype: tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int]
    """
    # Register read (opcode 0x15) of sub-address 0x00, then unpack the
    # 4-byte big-endian reply into individual single-bit flags.
    i2c.write([0x15, 0x00])
    payload = i2c.read(4)
    value = struct.unpack(">I", str(bytearray(payload[0:4])))[0]
    # Bit layout is irregular: bits 6, 16 and 17 are not reported.
    int_seq_abort = (value >> 0) & 0x1
    int_dmd_reset_overrun = (value >> 1) & 0x1
    int_dmd_block_error = (value >> 2) & 0x1
    int_dmdif_overrun = (value >> 3) & 0x1
    int_format_buf_overflow = (value >> 4) & 0x1
    int_format_starvation = (value >> 5) & 0x1
    int_flash_fifo_err = (value >> 7) & 0x1
    int_flash_dma_err = (value >> 8) & 0x1
    int_format_mult_err = (value >> 9) & 0x1
    int_format_cmd_err = (value >> 10) & 0x1
    int_format_queue_warn = (value >> 11) & 0x1
    int_ddr_overflow_bp = (value >> 12) & 0x1
    int_ddr_overflow_fb = (value >> 13) & 0x1
    int_scaler_line_err = (value >> 14) & 0x1
    int_scaler_pixerr = (value >> 15) & 0x1
    int_led_timeout = (value >> 18) & 0x1
    log(DEBUG, 'DPP2607_Read_InterruptStatus: int_seq_abort=%r, int_dmd_reset_overrun=%r, int_dmd_block_error=%r, int_dmdif_overrun=%r, int_format_buf_overflow=%r, int_format_starvation=%r, int_flash_fifo_err=%r, int_flash_dma_err=%r, int_format_mult_err=%r, int_format_cmd_err=%r, int_format_queue_warn=%r, int_ddr_overflow_bp=%r, int_ddr_overflow_fb=%r, int_scaler_line_err=%r, int_scaler_pixerr=%r, int_led_timeout=%r', int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout)
    return int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout
def _read_led_current(sub_address, log_format):
    """
    Shared helper for the per-channel LED current reads.

    Issues a register read (opcode 0x15) for *sub_address* and masks the
    4-byte big-endian reply to the 11-bit PWM value, logging it via
    *log_format*.

    :type sub_address: int
    :type log_format: str
    :rtype: int
    """
    i2c.write([0x15, sub_address])
    payload = i2c.read(4)
    value = struct.unpack(">I", str(bytearray(payload[0:4])))[0] & 0x7ff
    log(DEBUG, log_format, value)
    return value
def DPP2607_Read_LedCurrentBlue():
    """Reads: LED Current - Blue. :returns: pwm_blu :rtype: int"""
    return _read_led_current(0x14, 'DPP2607_Read_LedCurrentBlue: pwm_blu=%r')
def DPP2607_Read_LedCurrentGreen():
    """Reads: LED Current - Green. :returns: pwm_grn :rtype: int"""
    return _read_led_current(0x13, 'DPP2607_Read_LedCurrentGreen: pwm_grn=%r')
def DPP2607_Read_LedCurrentRed():
    """Reads: LED Current - Red. :returns: pwm_red :rtype: int"""
    return _read_led_current(0x12, 'DPP2607_Read_LedCurrentRed: pwm_red=%r')
def DPP2607_Read_LedDriverEnable():
    """
    Reads: LED Driver Enable.

    :returns: led_enable_red, led_enable_grn, led_enable_blu
    :rtype: tuple[EnabledDisabled, EnabledDisabled, EnabledDisabled]
    """
    i2c.write([0x15, 0x16])
    raw = i2c.read(4)
    word = struct.unpack(">I", str(bytearray(raw[0:4])))[0]
    # One enable bit per channel: red=bit 0, green=bit 1, blue=bit 2.
    led_enable_red = EnabledDisabled(word & 0x1)
    led_enable_grn = EnabledDisabled((word >> 1) & 0x1)
    led_enable_blu = EnabledDisabled((word >> 2) & 0x1)
    log(DEBUG, 'DPP2607_Read_LedDriverEnable: led_enable_red=%r, led_enable_grn=%r, led_enable_blu=%r', led_enable_red, led_enable_grn, led_enable_blu)
    return led_enable_red, led_enable_grn, led_enable_blu
def DPP2607_Read_ParallelBusPolarityControl():
    """
    Reads: Parallel Bus Polarity Control.

    :returns: polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en
    :rtype: tuple[Polarity, Polarity, PolarityPixelClock, PolarityDataEn]
    """
    i2c.write([0x15, 0xAF])
    raw = i2c.read(4)
    word = struct.unpack(">I", str(bytearray(raw[0:4])))[0]
    polarity_hsync = Polarity((word >> 1) & 0x1)                  # bit 1
    polarity_vsync = Polarity((word >> 2) & 0x1)                  # bit 2
    polarity_pixel_clock = PolarityPixelClock((word >> 3) & 0x1)  # bit 3
    polarity_data_en = PolarityDataEn((word >> 4) & 0x1)          # bit 4
    log(DEBUG, 'DPP2607_Read_ParallelBusPolarityControl: polarity_hsync=%r, polarity_vsync=%r, polarity_pixel_clock=%r, polarity_data_en=%r', polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en)
    return polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en
def DPP2607_Read_SystemStatus():
    """
    Reads: System Status.

    :returns: compound_stat_init, compound_stat_flash, compound_stat_temp,
        compound_stat_pad, compound_stat_led, compound_stat_bist
    :rtype: tuple[int, int, int, int, CompoundStat, CompoundStat]
    """
    # Compound-command sequence: arm (0x3A), select report 0xC4 (0x38),
    # wait for completion, then read the result register (0x39).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC4])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    compound_stat_init = word & 0x1                       # bit 0
    compound_stat_flash = (word >> 1) & 0x1               # bit 1
    compound_stat_temp = (word >> 2) & 0x1                # bit 2
    compound_stat_pad = (word >> 3) & 0x1                 # bit 3
    compound_stat_led = CompoundStat((word >> 5) & 0x1)   # bit 5
    compound_stat_bist = CompoundStat((word >> 6) & 0x1)  # bit 6
    log(DEBUG, 'DPP2607_Read_SystemStatus: compound_stat_init=%r, compound_stat_flash=%r, compound_stat_temp=%r, compound_stat_pad=%r, compound_stat_led=%r, compound_stat_bist=%r', compound_stat_init, compound_stat_flash, compound_stat_temp, compound_stat_pad, compound_stat_led, compound_stat_bist)
    return compound_stat_init, compound_stat_flash, compound_stat_temp, compound_stat_pad, compound_stat_led, compound_stat_bist
def DPP2607_Read_SystemTemperature():
    """
    Reads: System Temperature.

    :returns: compound_temp (raw 32-bit register value)
    :rtype: int
    """
    # Compound-command sequence: arm (0x3A), select report 0xC5 (0x38),
    # wait for completion, then read the result register (0x39).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC5])
    _poll_complete()
    i2c.write([0x15, 0x39])
    payload = i2c.read(4)
    # struct.unpack(">I", ...) already yields the full unsigned 32-bit
    # value, so the previous `& 0xffffffffL` mask was a no-op; dropping it
    # also removes the Python-2-only `L` long literal, which is a
    # SyntaxError under Python 3.
    compound_temp = struct.unpack(">I", str(bytearray(payload[0:4])))[0]
    log(DEBUG, 'DPP2607_Read_SystemTemperature: compound_temp=%r', compound_temp)
    return compound_temp
def _read_video_setting(sub_address, enum_cls, mask, log_format):
    """
    Shared helper for the video configuration reads.

    Issues a register read (opcode 0x15) for *sub_address*, masks the
    4-byte big-endian reply with *mask*, wraps it in *enum_cls* and logs
    it via *log_format*.

    :type sub_address: int
    :type enum_cls: type  # an IntEnum subclass
    :type mask: int
    :type log_format: str
    :returns: the decoded enum member
    """
    i2c.write([0x15, sub_address])
    payload = i2c.read(4)
    value = enum_cls(struct.unpack(">I", str(bytearray(payload[0:4])))[0] & mask)
    log(DEBUG, log_format, value)
    return value
def DPP2607_Read_VideoPixelFormat():
    """Reads: Video Pixel Format. :returns: pix_format :rtype: PixFormat"""
    return _read_video_setting(0x0D, PixFormat, 0xf, 'DPP2607_Read_VideoPixelFormat: pix_format=%r')
def DPP2607_Read_VideoResolution():
    """Reads: Video Resolution. :returns: resolution :rtype: Resolution"""
    return _read_video_setting(0x0C, Resolution, 0x1f, 'DPP2607_Read_VideoResolution: resolution=%r')
def DPP2607_Read_VideoSourceSelection():
    """Reads: Video Source Selection. :returns: source_sel :rtype: SourceSel"""
    return _read_video_setting(0x0B, SourceSel, 0x7, 'DPP2607_Read_VideoSourceSelection: source_sel=%r')
def DPP2607_Write_CcaC1r1Coefficient(ccac1r1):
"""
Writes: CCA C1R1 Coefficient.
DPP2607_Write_CcaC1r1Coefficient(DWORD CCAC1R1).
:type ccac1r1: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC1r1Coefficient(%r)', ccac1r1)
payload = [0x5F]
payload.extend(list(bytearray(struct.pack(">I", ccac1r1 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC1r2Coefficient(ccac1r2):
"""
Writes: CCA C1R2 Coefficient.
DPP2607_Write_CcaC1r2Coefficient(DWORD CCAC1R2).
:type ccac1r2: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC1r2Coefficient(%r)', ccac1r2)
payload = [0x60]
payload.extend(list(bytearray(struct.pack(">I", ccac1r2 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC1r3Coefficient(ccac1r3):
"""
Writes: CCA C1R3 Coefficient.
DPP2607_Write_CcaC1r3Coefficient(DWORD CCAC1R3).
:type ccac1r3: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC1r3Coefficient(%r)', ccac1r3)
payload = [0x61]
payload.extend(list(bytearray(struct.pack(">I", ccac1r3 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC2r1Coefficient(ccac2r1):
"""
Writes: CCA C2R1 Coefficient.
DPP2607_Write_CcaC2r1Coefficient(DWORD CCAC2R1).
:type ccac2r1: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC2r1Coefficient(%r)', ccac2r1)
payload = [0x62]
payload.extend(list(bytearray(struct.pack(">I", ccac2r1 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC2r2Coefficient(ccac2r2):
"""
Writes: CCA C2R2 Coefficient.
DPP2607_Write_CcaC2r2Coefficient(DWORD CCAC2R2).
:type ccac2r2: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC2r2Coefficient(%r)', ccac2r2)
payload = [0x63]
payload.extend(list(bytearray(struct.pack(">I", ccac2r2 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC2r3Coefficient(ccac2r3):
"""
Writes: CCA C2R3 Coefficient.
DPP2607_Write_CcaC2r3Coefficient(DWORD CCAC2R3).
:type ccac2r3: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC2r3Coefficient(%r)', ccac2r3)
payload = [0x64]
payload.extend(list(bytearray(struct.pack(">I", ccac2r3 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC3r1Coefficient(ccac3r1):
"""
Writes: CCA C3R1 Coefficient.
DPP2607_Write_CcaC3r1Coefficient(DWORD CCAC3R1).
:type ccac3r1: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC3r1Coefficient(%r)', ccac3r1)
payload = [0x65]
payload.extend(list(bytearray(struct.pack(">I", ccac3r1 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC3r2Coefficient(ccac3r2):
"""
Writes: CCA C3R2 Coefficient.
DPP2607_Write_CcaC3r2Coefficient(DWORD CCAC3R2).
:type ccac3r2: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC3r2Coefficient(%r)', ccac3r2)
payload = [0x66]
payload.extend(list(bytearray(struct.pack(">I", ccac3r2 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC3r3Coefficient(ccac3r3):
"""
Writes: CCA C3R3 Coefficient.
DPP2607_Write_CcaC3r3Coefficient(DWORD CCAC3R3).
:type ccac3r3: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC3r3Coefficient(%r)', ccac3r3)
payload = [0x67]
payload.extend(list(bytearray(struct.pack(">I", ccac3r3 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC4r1Coefficient(ccac4r1):
"""
Writes: CCA C4R1 Coefficient.
DPP2607_Write_CcaC4r1Coefficient(DWORD CCAC4R1).
:type ccac4r1: int
:rtype: None
"""
log(DEBUG, 'DPP2607_Write_CcaC4r1Coefficient(%r)', ccac4r1)
payload = [0x68]
payload.extend(list(bytearray(struct.pack(">I", ccac4r1 & 0x1ff))))
i2c.write(payload)
def DPP2607_Write_CcaC4r2Coefficient(ccac4r2):
    """
    Write the CCA C4R2 coefficient register (opcode 0x69).

    :type ccac4r2: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC4r2Coefficient(%r)', ccac4r2)
    word = struct.pack(">I", ccac4r2 & 0x1ff)
    i2c.write([0x69] + list(bytearray(word)))
def DPP2607_Write_CcaC4r3Coefficient(ccac4r3):
    """
    Write the CCA C4R3 coefficient register (opcode 0x6A).

    :type ccac4r3: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC4r3Coefficient(%r)', ccac4r3)
    word = struct.pack(">I", ccac4r3 & 0x1ff)
    i2c.write([0x6A] + list(bytearray(word)))
def DPP2607_Write_CcaC5r1Coefficient(ccac5r1):
    """
    Write the CCA C5R1 coefficient register (opcode 0x6B).

    :type ccac5r1: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r1Coefficient(%r)', ccac5r1)
    word = struct.pack(">I", ccac5r1 & 0x1ff)
    i2c.write([0x6B] + list(bytearray(word)))
def DPP2607_Write_CcaC5r2Coefficient(ccac5r2):
    """
    Write the CCA C5R2 coefficient register (opcode 0x6C).

    :type ccac5r2: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r2Coefficient(%r)', ccac5r2)
    word = struct.pack(">I", ccac5r2 & 0x1ff)
    i2c.write([0x6C] + list(bytearray(word)))
def DPP2607_Write_CcaC5r3Coefficient(ccac5r3):
    """
    Write the CCA C5R3 coefficient register (opcode 0x6D).

    :type ccac5r3: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r3Coefficient(%r)', ccac5r3)
    word = struct.pack(">I", ccac5r3 & 0x1ff)
    i2c.write([0x6D] + list(bytearray(word)))
def DPP2607_Write_CcaC6r1Coefficient(ccac6r1):
    """
    Write the CCA C6R1 coefficient register (opcode 0x6E).

    :type ccac6r1: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r1Coefficient(%r)', ccac6r1)
    word = struct.pack(">I", ccac6r1 & 0x1ff)
    i2c.write([0x6E] + list(bytearray(word)))
def DPP2607_Write_CcaC6r2Coefficient(ccac6r2):
    """
    Write the CCA C6R2 coefficient register (opcode 0x6F).

    :type ccac6r2: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r2Coefficient(%r)', ccac6r2)
    word = struct.pack(">I", ccac6r2 & 0x1ff)
    i2c.write([0x6F] + list(bytearray(word)))
def DPP2607_Write_CcaC6r3Coefficient(ccac6r3):
    """
    Write the CCA C6R3 coefficient register (opcode 0x70).

    :type ccac6r3: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r3Coefficient(%r)', ccac6r3)
    word = struct.pack(">I", ccac6r3 & 0x1ff)
    i2c.write([0x70] + list(bytearray(word)))
def DPP2607_Write_CcaC7r1Coefficient(ccac7r1):
    """
    Write the CCA C7R1 coefficient register (opcode 0x71).

    :type ccac7r1: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r1Coefficient(%r)', ccac7r1)
    word = struct.pack(">I", ccac7r1 & 0x1ff)
    i2c.write([0x71] + list(bytearray(word)))
def DPP2607_Write_CcaC7r2Coefficient(ccac7r2):
    """
    Write the CCA C7R2 coefficient register (opcode 0x72).

    :type ccac7r2: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r2Coefficient(%r)', ccac7r2)
    word = struct.pack(">I", ccac7r2 & 0x1ff)
    i2c.write([0x72] + list(bytearray(word)))
def DPP2607_Write_CcaC7r3Coefficient(ccac7r3):
    """
    Write the CCA C7R3 coefficient register (opcode 0x73).

    :type ccac7r3: int (9-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r3Coefficient(%r)', ccac7r3)
    word = struct.pack(">I", ccac7r3 & 0x1ff)
    i2c.write([0x73] + list(bytearray(word)))
def DPP2607_Write_CcaFunctionEnable(cca_enable):
    """
    Enable or disable the CCA (color coordinate adjustment) function
    (opcode 0x5E).

    :type cca_enable: int (only bit 0 is used)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaFunctionEnable(%r)', cca_enable)
    word = struct.pack(">I", cca_enable & 0x1)
    i2c.write([0x5E] + list(bytearray(word)))
def DPP2607_Write_CheckerboardAnsiPattern():
    """
    Select the built-in ANSI 4x4 checkerboard test pattern (opcode 0x11,
    pattern value 13 = test_pattern_ansi).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CheckerboardAnsiPattern()', )
    # Big-endian 32-bit pattern select: 13 = test_pattern_ansi.
    i2c.write([0x11, 0, 0, 0, 13])
def DPP2607_Write_CropFirstLine(first_active_line):
    """
    Set the first active line of the crop window (opcode 0x29).

    :type first_active_line: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropFirstLine(%r)', first_active_line)
    word = struct.pack(">I", first_active_line & 0x7ff)
    i2c.write([0x29] + list(bytearray(word)))
def DPP2607_Write_CropFirstPixel(first_active_pixel):
    """
    Set the first active pixel of the crop window (opcode 0x2B).

    :type first_active_pixel: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropFirstPixel(%r)', first_active_pixel)
    word = struct.pack(">I", first_active_pixel & 0x7ff)
    i2c.write([0x2B] + list(bytearray(word)))
def DPP2607_Write_CropLastLine(last_active_line):
    """
    Set the last active line of the crop window (opcode 0x2A).

    :type last_active_line: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropLastLine(%r)', last_active_line)
    word = struct.pack(">I", last_active_line & 0x7ff)
    i2c.write([0x2A] + list(bytearray(word)))
def DPP2607_Write_CropLastPixel(last_active_pixel):
    """
    Set the last active pixel of the crop window (opcode 0x2C).

    :type last_active_pixel: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropLastPixel(%r)', last_active_pixel)
    word = struct.pack(">I", last_active_pixel & 0x7ff)
    i2c.write([0x2C] + list(bytearray(word)))
def DPP2607_Write_DiagonalLinesPattern():
    """
    Select the built-in diagonal-lines test pattern (opcode 0x11,
    pattern value 10 = test_pattern_d_lines).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DiagonalLinesPattern()', )
    # Big-endian 32-bit pattern select: 10 = test_pattern_d_lines.
    i2c.write([0x11, 0, 0, 0, 10])
def DPP2607_Write_DisplayCurtainControl(dmd_curtain_ctl, dmd_curtain_color):
    """
    Configure the display curtain (opcode 0xA6): enable/disable in the low
    nibble, curtain color in the next nibble.

    :type dmd_curtain_ctl: EnabledDisabled
    :type dmd_curtain_color: DMDCurtainColor
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DisplayCurtainControl(%r, %r)', dmd_curtain_ctl, dmd_curtain_color)
    word = (dmd_curtain_ctl & 0xf) | ((dmd_curtain_color & 0xf) << 4)
    i2c.write([0xA6] + list(bytearray(struct.pack(">I", word))))
def DPP2607_Write_DmdPark(dmdpark):
    """
    Park or unpark the DMD (opcode 0x2D).

    :type dmdpark: DMDPARK (only bit 0 is used)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DmdPark(%r)', dmdpark)
    word = struct.pack(">I", dmdpark & 0x1)
    i2c.write([0x2D] + list(bytearray(word)))
def DPP2607_Write_FineCheckerboardPattern():
    """
    Select the built-in fine checkerboard test pattern (opcode 0x11,
    pattern value 0 = test_pattern_fine_checker).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_FineCheckerboardPattern()', )
    # Big-endian 32-bit pattern select: 0 = test_pattern_fine_checker.
    i2c.write([0x11, 0, 0, 0, 0])
def DPP2607_Write_HorizontalGrayRampPattern():
    """
    Select the built-in horizontal gray ramp test pattern (opcode 0x11,
    pattern value 12 = test_pattern_gray_ramp_h).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_HorizontalGrayRampPattern()', )
    # Big-endian 32-bit pattern select: 12 = test_pattern_gray_ramp_h.
    i2c.write([0x11, 0, 0, 0, 12])
def DPP2607_Write_HorizontalLinesPattern(test_pattern_h_lines):
    """
    Select a horizontal-lines test pattern (opcode 0x11).

    :type test_pattern_h_lines: TestPatternHLines (4-bit pattern select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_HorizontalLinesPattern(%r)', test_pattern_h_lines)
    word = struct.pack(">I", test_pattern_h_lines & 0xf)
    i2c.write([0x11] + list(bytearray(word)))
def DPP2607_Write_ImageLongFlip(flip_long):
    """
    Enable or disable the image flip along the DMD long side (opcode 0x0F).

    :type flip_long: EnabledDisabled (only bit 0 is used)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageLongFlip(%r)', flip_long)
    word = struct.pack(">I", flip_long & 0x1)
    i2c.write([0x0F] + list(bytearray(word)))
def DPP2607_Write_ImageRotationSettings(rotation_setting):
    """
    Set the image rotation (opcode 0x0E): no rotation or 90 degrees.

    :type rotation_setting: RotationSetting (only bit 0 is used)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageRotationSettings(%r)', rotation_setting)
    word = struct.pack(">I", rotation_setting & 0x1)
    i2c.write([0x0E] + list(bytearray(word)))
def DPP2607_Write_ImageShortFlip(flip_short):
    """
    Enable or disable the image flip along the DMD short side (opcode 0x10).

    :type flip_short: EnabledDisabled (only bit 0 is used)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageShortFlip(%r)', flip_short)
    word = struct.pack(">I", flip_short & 0x1)
    i2c.write([0x10] + list(bytearray(word)))
def DPP2607_Write_InterruptStatus(int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout):
    """
    Write the Interrupt Status register (opcode 0x00), packing each one-bit
    flag into its fixed bit position.

    :type int_seq_abort: int
    :type int_dmd_reset_overrun: int
    :type int_dmd_block_error: int
    :type int_dmdif_overrun: int
    :type int_format_buf_overflow: int
    :type int_format_starvation: int
    :type int_flash_fifo_err: int
    :type int_flash_dma_err: int
    :type int_format_mult_err: int
    :type int_format_cmd_err: int
    :type int_format_queue_warn: int
    :type int_ddr_overflow_bp: int
    :type int_ddr_overflow_fb: int
    :type int_scaler_line_err: int
    :type int_scaler_pixerr: int
    :type int_led_timeout: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_InterruptStatus(%r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r)', int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout)
    # (flag, bit position) pairs; bit positions 6, 16 and 17 are unused.
    fields = (
        (int_seq_abort, 0),
        (int_dmd_reset_overrun, 1),
        (int_dmd_block_error, 2),
        (int_dmdif_overrun, 3),
        (int_format_buf_overflow, 4),
        (int_format_starvation, 5),
        (int_flash_fifo_err, 7),
        (int_flash_dma_err, 8),
        (int_format_mult_err, 9),
        (int_format_cmd_err, 10),
        (int_format_queue_warn, 11),
        (int_ddr_overflow_bp, 12),
        (int_ddr_overflow_fb, 13),
        (int_scaler_line_err, 14),
        (int_scaler_pixerr, 15),
        (int_led_timeout, 18),
    )
    word = 0
    for flag, shift in fields:
        word |= (flag & 0x1) << shift
    i2c.write([0x00] + list(bytearray(struct.pack(">I", word))))
def DPP2607_Write_LedCurrentBlue(pwm_blu):
    """
    Set the blue LED current PWM value (opcode 0x14).

    :type pwm_blu: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentBlue(%r)', pwm_blu)
    word = struct.pack(">I", pwm_blu & 0x7ff)
    i2c.write([0x14] + list(bytearray(word)))
def DPP2607_Write_LedCurrentGreen(pwm_grn):
    """
    Set the green LED current PWM value (opcode 0x13).

    :type pwm_grn: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentGreen(%r)', pwm_grn)
    word = struct.pack(">I", pwm_grn & 0x7ff)
    i2c.write([0x13] + list(bytearray(word)))
def DPP2607_Write_LedCurrentRed(pwm_red):
    """
    Set the red LED current PWM value (opcode 0x12).

    :type pwm_red: int (11-bit value; upper bits are masked off)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentRed(%r)', pwm_red)
    word = struct.pack(">I", pwm_red & 0x7ff)
    i2c.write([0x12] + list(bytearray(word)))
def DPP2607_Write_LedDriverEnable(led_enable_red, led_enable_grn, led_enable_blu):
    """
    Enable or disable the red, green and blue LED drivers (opcode 0x16).
    Bit 0 = red, bit 1 = green, bit 2 = blue.

    :type led_enable_red: EnabledDisabled
    :type led_enable_grn: EnabledDisabled
    :type led_enable_blu: EnabledDisabled
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedDriverEnable(%r, %r, %r)', led_enable_red, led_enable_grn, led_enable_blu)
    word = ((led_enable_red & 0x1)
            | ((led_enable_grn & 0x1) << 1)
            | ((led_enable_blu & 0x1) << 2))
    i2c.write([0x16] + list(bytearray(struct.pack(">I", word))))
def DPP2607_Write_ParallelBusPolarityControl(polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en):
    """
    Set the parallel video bus signal polarities (opcode 0xAF).
    Bit 1 = HSYNC, bit 2 = VSYNC, bit 3 = pixel clock, bit 4 = DATAEN
    (bit 0 is unused).

    :type polarity_hsync: Polarity
    :type polarity_vsync: Polarity
    :type polarity_pixel_clock: PolarityPixelClock
    :type polarity_data_en: PolarityDataEn
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ParallelBusPolarityControl(%r, %r, %r, %r)', polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en)
    word = (((polarity_hsync & 0x1) << 1)
            | ((polarity_vsync & 0x1) << 2)
            | ((polarity_pixel_clock & 0x1) << 3)
            | ((polarity_data_en & 0x1) << 4))
    i2c.write([0xAF] + list(bytearray(struct.pack(">I", word))))
def DPP2607_Write_PropagateLedCurrents(led_latch):
    """
    Latch previously written LED current values into effect (compound
    command 0xD3): write the argument to register 0x39, the argument count
    to 0x3A, trigger execution via 0x38, then poll until completion.

    :type led_latch: int
    :rtype: None
    :raises IOError: if the compound command does not complete in time
        (raised by _poll_complete).
    """
    log(DEBUG, 'DPP2607_Write_PropagateLedCurrents(%r)', led_latch)
    # Compound-command argument (register 0x39).
    i2c.write([0x39] + list(bytearray(struct.pack(">I", led_latch))))
    # Argument count (register 0x3A), then execute opcode 0xD3 (register 0x38).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xD3])
    _poll_complete()
def DPP2607_Write_SequenceSelect(compound_looks):
    """
    Select the active looks sequence (compound command 0xC1): write the
    sequence index to register 0x39, the argument count to 0x3A, trigger
    execution via 0x38, then poll until completion.

    :type compound_looks: CompoundLooks
    :rtype: None
    :raises IOError: if the compound command does not complete in time
        (raised by _poll_complete).
    """
    log(DEBUG, 'DPP2607_Write_SequenceSelect(%r)', compound_looks)
    # Compound-command argument (register 0x39).
    i2c.write([0x39] + list(bytearray(struct.pack(">I", compound_looks))))
    # Argument count (register 0x3A), then execute opcode 0xC1 (register 0x38).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC1])
    _poll_complete()
def DPP2607_Write_SetSplashScreen(compound_splash):
    """
    Select the splash screen image (compound command 0xBD): write the
    image index to register 0x39, the argument count to 0x3A, trigger
    execution via 0x38, then poll until completion.

    :type compound_splash: CompoundSplash
    :rtype: None
    :raises IOError: if the compound command does not complete in time
        (raised by _poll_complete).
    """
    log(DEBUG, 'DPP2607_Write_SetSplashScreen(%r)', compound_splash)
    # Compound-command argument (register 0x39).
    i2c.write([0x39] + list(bytearray(struct.pack(">I", compound_splash))))
    # Argument count (register 0x3A), then execute opcode 0xBD (register 0x38).
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xBD])
    _poll_complete()
def DPP2607_Write_SolidFieldPattern(test_pattern_solids):
    """
    Select a solid-field test pattern (opcode 0x11).

    :type test_pattern_solids: TestPatternSolids (4-bit pattern select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SolidFieldPattern(%r)', test_pattern_solids)
    word = struct.pack(">I", test_pattern_solids & 0xf)
    i2c.write([0x11] + list(bytearray(word)))
def DPP2607_Write_SystemReset():
    """
    Issue a device software reset (opcode 0x1F, dev_rst = 1).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SystemReset()', )
    # Big-endian 32-bit value 1 = dev_rst.
    i2c.write([0x1F, 0, 0, 0, 1])
def DPP2607_Write_VeritcalLinesPattern(test_pattern_v_lines):
    """
    Select a vertical-lines test pattern (opcode 0x11).

    NOTE(review): "Veritcal" is a typo in the generated API name; it is
    kept because callers depend on it.

    :type test_pattern_v_lines: TestPatternVLines (4-bit pattern select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VeritcalLinesPattern(%r)', test_pattern_v_lines)
    word = struct.pack(">I", test_pattern_v_lines & 0xf)
    i2c.write([0x11] + list(bytearray(word)))
def DPP2607_Write_VerticalGrayRampPattern():
    """
    Select the built-in vertical gray ramp test pattern (opcode 0x11,
    pattern value 11 = test_pattern_gray_ramp_v).

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VerticalGrayRampPattern()', )
    # Big-endian 32-bit pattern select: 11 = test_pattern_gray_ramp_v.
    i2c.write([0x11, 0, 0, 0, 11])
def DPP2607_Write_VideoPixelFormat(pix_format):
    """
    Set the input video pixel data format (opcode 0x0D).

    :type pix_format: PixFormat (4-bit format select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoPixelFormat(%r)', pix_format)
    word = struct.pack(">I", pix_format & 0xf)
    i2c.write([0x0D] + list(bytearray(word)))
def DPP2607_Write_VideoResolution(resolution):
    """
    Set the input video resolution (opcode 0x0C).

    :type resolution: Resolution (5-bit resolution select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoResolution(%r)', resolution)
    word = struct.pack(">I", resolution & 0x1f)
    i2c.write([0x0C] + list(bytearray(word)))
def DPP2607_Write_VideoSourceSelection(source_sel):
    """
    Select the video input source (opcode 0x0B): external parallel I/F,
    internal test patterns, splash screen or BT.656.

    :type source_sel: SourceSel (3-bit source select)
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoSourceSelection(%r)', source_sel)
    word = struct.pack(">I", source_sel & 0x7)
    i2c.write([0x0B] + list(bytearray(word)))
| 30.964485 | 750 | 0.69527 | # -*- coding: windows-1252 -*-
# dpp2607.py
#
# sends commands to DPP2607 ASIC using I2C
#
# Copyright (C) 2017 Texas Instruments Incorporated - http://www.ti.com/
#
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Texas Instruments Incorporated nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
*** Note *** - this module is generated, changes will be lost!
Python Interface to DLP DPP2607
"""
import time
import struct
from enum import IntEnum
from logging import log, DEBUG
import i2c
COMPOUND_CMD_TIMEOUT = 2.0 # seconds
#####################################################
# Constants
#####################################################
X_0_TO_255_YCRCB = 0
X_16_TO_240_Y_112_TO_112_CRCB = 1
X_1_WHITE_AND_1_BLACK = 9
X_1_WHITE_AND_7_BLACK = 7
X_2_3_VGA_PORTRAIT = 4
X_3_2_VGA_LANDSCAPE = 5
X_4_2_2_YCR_CB_16_BIT = 8
X_4_2_2_YCR_CB_8_BIT = 9
X_90_DEGREE_ROTATION = 1
ACTIVE_HIGH = 1
ACTIVE_HIGH_PDM = 1
ACTIVE_HIGH_PULSE = 1
ACTIVE_LOW = 0
ACTIVE_LOW_PDM = 0
ACTIVE_LOW_PULSE = 0
ANSI_4X4_CHECKERBOARD = 0
BLACK = 0
BLUE = 4
BT_601 = 0
BT_656_I_F = 4
BT_709 = 1
COMPLETE = 1
CYAN = 6
DATA_SAMPLES_ON_FALLING_EDGE = 1
DATA_SAMPLES_ON_RISING_EDGE = 0
DIAGONAL_LINES = 10
DISABLED = 0
DLPC2601 = 130
DLPC2607 = 138
DSYS_PORTA_BIT_0 = 0
DSYS_PORTA_BIT_1 = 1
DSYS_PORTA_BIT_2 = 2
DSYS_PORTA_BIT_3 = 3
DSYS_PORTA_BIT_4 = 4
DSYS_PORTA_BIT_5 = 5
DSYS_PORTA_BIT_6 = 6
DSYS_PORTA_BIT_7 = 7
DSYS_PORTB_BIT_0 = 8
DSYS_PORTB_BIT_1 = 9
DSYS_PORTB_BIT_2 = 10
DSYS_PORTB_BIT_3 = 11
DSYS_PORTB_BIT_4 = 12
DSYS_PORTB_BIT_5 = 13
DSYS_PORTB_BIT_6 = 14
DSYS_PORTB_BIT_7 = 15
DSYS_PORTC_BIT_4 = 16
DSYS_PORTC_BIT_5 = 17
DSYS_PORTC_BIT_6 = 18
DSYS_PORTC_BIT_7 = 19
ENABLED = 1
ENABLED_ACTIVATES_CONTROL_BELOW = 1
ERROR_DETECTED = 0
EXTERNAL_VIDEO_PARALLEL_I_F = 0
FINE_CHECKERBOARD = 13
FLASH_BUSY = 1
GAMMA_CURVE_0 = 0
GAMMA_CURVE_1 = 1
GAMMA_CURVE_2 = 2
GAMMA_CURVE_3 = 3
GAMMA_CURVE_4 = 4
GAMMA_CURVE_5 = 5
GAMMA_CURVE_6 = 6
GREEN = 2
HORIZONTAL_GREY_RAMPS = 12
HORIZONTAL_LINES_1W_1B = 9
HORIZONTAL_LINES_1W_7B = 7
INITIALIZATION_COMPLETE = 0
INTERNAL_TEST_PATTERNS = 1
IN_PROGRESS = 0
MAGENTA = 5
NHD_LANDSCAPE = 27
NHD_PORTRAIT = 26
NOT_COMPLETE = 1
NO_ERRORS = 1
NO_ROTATION = 0
NO_TIMEOUTS = 0
NTSC_LANDSCAPE = 23
OFFSET__0 = 0
OFFSET__16 = 1
OPTICAL_TEST_IMAGE = 9
PAL_LANDSCAPE = 25
PARK_THE_DMD = 1
PIO_CYUSBI2C = 16
PIO_CYUSBSPI = 17
PIO_DEVASYS = 3
PIO_GENERICSERIAL = 7
PIO_MMKUSB = 9
PIO_SERIAL = 4
PIO_TESTER = 6
PIO_USB = 5
PIO_USBHID = 10
PIO_USBI2CPRO = 8
QVGA_LANDSCAPE = 1
QVGA_PORTRAIT = 0
QWVGA_LANDSCAPE = 3
QWVGA_PORTRAIT = 2
RED = 1
RGB565_16_BIT = 0
RGB565_8_BIT = 3
RGB666_16_BIT = 7
RGB666_18_BIT = 1
RGB666_8_BIT = 6
RGB888_16_BIT = 5
RGB888_24_BIT = 2
RGB888_8_BIT = 4
SEQUENCE_0 = 0
SEQUENCE_10 = 10
SEQUENCE_11 = 11
SEQUENCE_12 = 12
SEQUENCE_13 = 13
SEQUENCE_14 = 14
SEQUENCE_15 = 15
SEQUENCE_1 = 1
SEQUENCE_2 = 2
SEQUENCE_3 = 3
SEQUENCE_4 = 4
SEQUENCE_5 = 5
SEQUENCE_6 = 6
SEQUENCE_7 = 7
SEQUENCE_8 = 8
SEQUENCE_9 = 9
SET_AS_OFFSET_OFFSET__128 = 1
SET_AS_SIGNED_OFFSET__0 = 0
SOLID_BLACK = 1
SOLID_BLUE = 4
SOLID_GREEN = 3
SOLID_RED = 5
SOLID_WHITE = 2
SPLASH_IMAGE_0 = 0
SPLASH_IMAGE_1 = 1
SPLASH_IMAGE_2 = 2
SPLASH_IMAGE_3 = 3
SPLASH_SCREEN = 2
TIMEOUT_ERROR_HAS_OCCURRED = 1
UNPARK_THE_DMD = 0
VERTICAL_GREY_RAMPS = 11
VERTICAL_LINES_1W_1B = 8
VERTICAL_LINES_1W_7B = 6
VGA_LANDSCAPE = 7
VGA_PORTRAIT = 6
WHITE = 7
WVGA_720_LANDSCAPE = 9
WVGA_720_PORTRAIT = 8
WVGA_752_LANDSCAPE = 11
WVGA_752_PORTRAIT = 10
WVGA_800_LANDSCAPE = 13
WVGA_800_PORTRAIT = 12
WVGA_852_LANDSCAPE = 15
WVGA_852_PORTRAIT = 14
WVGA_853_LANDSCAPE = 17
WVGA_853_PORTRAIT = 16
WVGA_854_LANDSCAPE = 19
WVGA_854_OR_VGA_OUTPUT = 29
WVGA_854_PORTRAIT = 18
WVGA_864_LANDSCAPE = 21
WVGA_864_PORTRAIT = 20
YELLOW = 3
#####################################################
# Enumerations uses by function parameters
#####################################################
class DMDCurtainColor(IntEnum):
"""
DMD Curtain Color
"""
BLACK = 0x00
RED = 0x01
GREEN = 0x02
BLUE = 0x04
YELLOW = 0x03
MAGENTA = 0x05
CYAN = 0x06
WHITE = 0x07
class TestPatternVLines(IntEnum):
"""
Line Count
"""
X_1_WHITE_AND_7_BLACK = 0x06
X_1_WHITE_AND_1_BLACK = 0x08
class TestPatternHLines(IntEnum):
"""
Line Count
"""
X_1_WHITE_AND_7_BLACK = 0x07
X_1_WHITE_AND_1_BLACK = 0x09
class PolarityPixelClock(IntEnum):
"""
Pixel Clock Polarity
"""
DATA_SAMPLES_ON_RISING_EDGE = 0x00
DATA_SAMPLES_ON_FALLING_EDGE = 0x01
class DevLEDStatus(IntEnum):
"""
LED Timeout Status
"""
NO_TIMEOUTS = 0x00
TIMEOUT_ERROR_HAS_OCCURRED = 0x01
class PixFormat(IntEnum):
"""
Pixel Data Format
"""
RGB565_16_BIT_ = 0x00
RGB666_18_BIT_ = 0x01
RGB888_24_BIT_ = 0x02
RGB565_8_BIT_ = 0x03
RGB888_8_BIT_ = 0x04
RGB888_16_BIT_ = 0x05
RGB666_8_BIT_ = 0x06
RGB666_16_BIT_ = 0x07
X_4_2_2_YCR_CB_16_BIT_ = 0x08
X_4_2_2_YCR_CB_8_BIT_ = 0x09
class DMDPARK(IntEnum):
"""
DMD Park Control
"""
UNPARK_THE_DMD = 0x00
PARK_THE_DMD = 0x01
class Resolution(IntEnum):
"""
Resolution
"""
QVGA_PORTRAIT = 0x00
QVGA_LANDSCAPE = 0x01
QWVGA_PORTRAIT = 0x02
QWVGA_LANDSCAPE = 0x03
X_2_3_VGA_PORTRAIT = 0x04
X_3_2_VGA_LANDSCAPE = 0x05
VGA_PORTRAIT = 0x06
VGA_LANDSCAPE = 0x07
WVGA_720_PORTRAIT = 0x08
WVGA_720_LANDSCAPE = 0x09
WVGA_752_PORTRAIT = 0x0A
WVGA_752_LANDSCAPE = 0x0B
WVGA_800_PORTRAIT = 0x0C
WVGA_800_LANDSCAPE = 0x0D
WVGA_852_PORTRAIT = 0x0E
WVGA_852_LANDSCAPE = 0x0F
WVGA_853_PORTRAIT = 0x10
WVGA_853_LANDSCAPE = 0x11
WVGA_854_PORTRAIT = 0x12
WVGA_854_LANDSCAPE = 0x13
WVGA_864_PORTRAIT = 0x14
WVGA_864_LANDSCAPE = 0x15
NTSC_LANDSCAPE = 0x17
PAL_LANDSCAPE = 0x19
NHD_PORTRAIT = 0x1A
NHD_LANDSCAPE = 0x1B
WVGA_854_OR_VGA_OUTPUT = 0x1D
class CompoundStat(IntEnum):
"""
LED Calibration State
mDDR Built-In Self-Test State
"""
COMPLETE = 0x00
NOT_COMPLETE = 0x01
class TestPattern(IntEnum):
"""
Current Pattern
"""
ANSI_4X4_CHECKERBOARD = 0x00
SOLID_BLACK = 0x01
SOLID_WHITE = 0x02
SOLID_GREEN = 0x03
SOLID_BLUE = 0x04
SOLID_RED = 0x05
VERTICAL_LINES_1W_7B_ = 0x06
HORIZONTAL_LINES_1W_7B_ = 0x07
VERTICAL_LINES_1W_1B_ = 0x08
HORIZONTAL_LINES_1W_1B_ = 0x09
DIAGONAL_LINES = 0x0A
VERTICAL_GREY_RAMPS = 0x0B
HORIZONTAL_GREY_RAMPS = 0x0C
FINE_CHECKERBOARD = 0x0D
class RotationSetting(IntEnum):
"""
Rotation Setting
"""
NO_ROTATION = 0x00
X_90_DEGREE_ROTATION = 0x01
class PolarityDataEn(IntEnum):
"""
DATAEN Signal Polarity
"""
ACTIVE_LOW = 0x00
ACTIVE_HIGH = 0x01
class TestPatternSolids(IntEnum):
"""
Color
"""
BLACK = 0x01
WHITE = 0x02
GREEN = 0x03
BLUE = 0x04
RED = 0x05
class SourceSel(IntEnum):
"""
Input Source
"""
EXTERNAL_VIDEO_PARALLEL_I_F_ = 0x00
INTERNAL_TEST_PATTERNS = 0x01
SPLASH_SCREEN = 0x02
BT_656_I_F = 0x04
class DevID(IntEnum):
"""
Device ID
"""
DLPC2601 = 0x82
DLPC2607 = 0x8A
class DevInitStatus(IntEnum):
"""
Auto-Initialization Status
"""
IN_PROGRESS = 0x00
INITIALIZATION_COMPLETE = 0x01
class CompoundLooks(IntEnum):
"""
Selected Looks Sequence
"""
SEQUENCE_0 = 0x00
SEQUENCE_1 = 0x01
SEQUENCE_2 = 0x02
SEQUENCE_3 = 0x03
SEQUENCE_4 = 0x04
SEQUENCE_5 = 0x05
SEQUENCE_6 = 0x06
SEQUENCE_7 = 0x07
SEQUENCE_8 = 0x08
SEQUENCE_9 = 0x09
SEQUENCE_10 = 0x0a
SEQUENCE_11 = 0x0b
SEQUENCE_12 = 0x0c
SEQUENCE_13 = 0x0d
SEQUENCE_14 = 0x0e
SEQUENCE_15 = 0x0f
class EnabledDisabled(IntEnum):
"""
Blue LED State
DMD Curtain Control
DMD Long Side Flip
DMD Short Side Flip
Green LED State
Red LED State
"""
DISABLED = 0x00
ENABLED = 0x01
class Polarity(IntEnum):
"""
HSYNC Signal Polarity
VSYNC Signal Polarity
"""
ACTIVE_LOW_PULSE = 0x00
ACTIVE_HIGH_PULSE = 0x01
class DevFlashStatus(IntEnum):
"""
Flash Initialization Status
"""
INITIALIZATION_COMPLETE = 0x00
FLASH_BUSY = 0x01
class CompoundSplash(IntEnum):
"""
Splash Screen Select
"""
SPLASH_IMAGE_0 = 0x00
SPLASH_IMAGE_1 = 0x01
SPLASH_IMAGE_2 = 0x02
SPLASH_IMAGE_3 = 0x03
OPTICAL_TEST_IMAGE = 0x09
#####################################################
# Support functions
#####################################################
def DPP2607_Open(*args):
"""
Open I2C interface.
"""
log(DEBUG, "DPP2607_Open()")
i2c.initialize()
def DPP2607_Close():
"""
Close I2C interface
DPP2607_Close().
:rtype: None
"""
log(DEBUG, "DPP2607_Close()")
i2c.terminate()
def DPP2607_GetIODebug():
"""
Return the IO debugging status.
:returns: enable, log_path
:rtype: tuple[bool, str|None]
"""
return i2c.get_debug(), None
def DPP2607_SetIODebug(enable, log_path=None):
"""
Enable/disable logging IO to a log file. Log_path is ignored.
:type enable: bool
:type log_path: str, not used
:rtype: None
"""
log(DEBUG, "DPP2607_SetIODebug(%s, %s)", enable, log_path)
i2c.set_debug(enable)
def DPP2607_GetSlaveAddr():
"""
Get the I2C slave address (default: 0x36).
:returns: slave_addr
:rtype: int
"""
return i2c.get_slave_address()
def DPP2607_SetSlaveAddr(slave_addr):
"""
Set the I2C slave address (default: 0x36).
:type slave_addr: int
:rtype: None
"""
if slave_addr != i2c.get_slave_address():
log(DEBUG, "DPP2607_SetSlaveAddr(%s)", hex(slave_addr))
i2c.terminate()
i2c.initialize(slave_addr)
def _poll_complete():
deadline = time.clock() + COMPOUND_CMD_TIMEOUT
while time.clock() <= deadline:
i2c.write([0x15, 0x3A])
status = i2c.read(4)
if status[3] == 0:
break # bit is zero - complete
else:
raise IOError(0, "Timeout waiting for DPP2607 Compound Command Completion")
#####################################################
# ASIC Command Functions
#####################################################
def DPP2607_Read_CcaC1r1Coefficient():
"""
Reads: CCA C1R1 Coefficient.
DPP2607_Read_CcaC1r1Coefficient(DWORD &&CCAC1R1).
:returns: ccac1r1
:rtype: int
"""
i2c.write([0x15, 0x5F])
payload = i2c.read(4)
ccac1r1 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC1r1Coefficient: ccac1r1=%r', ccac1r1)
return ccac1r1
def DPP2607_Read_CcaC1r2Coefficient():
"""
Reads: CCA C1R2 Coefficient.
DPP2607_Read_CcaC1r2Coefficient(DWORD &&CCAC1R2).
:returns: ccac1r2
:rtype: int
"""
i2c.write([0x15, 0x60])
payload = i2c.read(4)
ccac1r2 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC1r2Coefficient: ccac1r2=%r', ccac1r2)
return ccac1r2
def DPP2607_Read_CcaC1r3Coefficient():
"""
Reads: CCA C1R3 Coefficient.
DPP2607_Read_CcaC1r3Coefficient(DWORD &&CCAC1R3).
:returns: ccac1r3
:rtype: int
"""
i2c.write([0x15, 0x61])
payload = i2c.read(4)
ccac1r3 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC1r3Coefficient: ccac1r3=%r', ccac1r3)
return ccac1r3
def DPP2607_Read_CcaC2r1Coefficient():
"""
Reads: CCA C2R1 Coefficient.
DPP2607_Read_CcaC2r1Coefficient(DWORD &&CCAC2R1).
:returns: ccac2r1
:rtype: int
"""
i2c.write([0x15, 0x62])
payload = i2c.read(4)
ccac2r1 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC2r1Coefficient: ccac2r1=%r', ccac2r1)
return ccac2r1
def DPP2607_Read_CcaC2r2Coefficient():
"""
Reads: CCA C2R2 Coefficient.
DPP2607_Read_CcaC2r2Coefficient(DWORD &&CCAC2R2).
:returns: ccac2r2
:rtype: int
"""
i2c.write([0x15, 0x63])
payload = i2c.read(4)
ccac2r2 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC2r2Coefficient: ccac2r2=%r', ccac2r2)
return ccac2r2
def DPP2607_Read_CcaC2r3Coefficient():
"""
Reads: CCA C2R3 Coefficient.
DPP2607_Read_CcaC2r3Coefficient(DWORD &&CCAC2R3).
:returns: ccac2r3
:rtype: int
"""
i2c.write([0x15, 0x64])
payload = i2c.read(4)
ccac2r3 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC2r3Coefficient: ccac2r3=%r', ccac2r3)
return ccac2r3
def DPP2607_Read_CcaC3r1Coefficient():
"""
Reads: CCA C3R1 Coefficient.
DPP2607_Read_CcaC3r1Coefficient(DWORD &&CCAC3R1).
:returns: ccac3r1
:rtype: int
"""
i2c.write([0x15, 0x65])
payload = i2c.read(4)
ccac3r1 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC3r1Coefficient: ccac3r1=%r', ccac3r1)
return ccac3r1
def DPP2607_Read_CcaC3r2Coefficient():
"""
Reads: CCA C3R2 Coefficient.
DPP2607_Read_CcaC3r2Coefficient(DWORD &&CCAC3R2).
:returns: ccac3r2
:rtype: int
"""
i2c.write([0x15, 0x66])
payload = i2c.read(4)
ccac3r2 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC3r2Coefficient: ccac3r2=%r', ccac3r2)
return ccac3r2
def DPP2607_Read_CcaC3r3Coefficient():
"""
Reads: CCA C3R3 Coefficient.
DPP2607_Read_CcaC3r3Coefficient(DWORD &&CCAC3R3).
:returns: ccac3r3
:rtype: int
"""
i2c.write([0x15, 0x67])
payload = i2c.read(4)
ccac3r3 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC3r3Coefficient: ccac3r3=%r', ccac3r3)
return ccac3r3
def DPP2607_Read_CcaC4r1Coefficient():
"""
Reads: CCA C4R1 Coefficient.
DPP2607_Read_CcaC4r1Coefficient(DWORD &&CCAC4R1).
:returns: ccac4r1
:rtype: int
"""
i2c.write([0x15, 0x68])
payload = i2c.read(4)
ccac4r1 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC4r1Coefficient: ccac4r1=%r', ccac4r1)
return ccac4r1
def DPP2607_Read_CcaC4r2Coefficient():
"""
Reads: CCA C4R2 Coefficient.
DPP2607_Read_CcaC4r2Coefficient(DWORD &&CCAC4R2).
:returns: ccac4r2
:rtype: int
"""
i2c.write([0x15, 0x69])
payload = i2c.read(4)
ccac4r2 = (struct.unpack(">I", str(bytearray(payload[0:4])))[0] >> 0) & 0x1ff
log(DEBUG, 'DPP2607_Read_CcaC4r2Coefficient: ccac4r2=%r', ccac4r2)
return ccac4r2
def DPP2607_Read_CcaC4r3Coefficient():
    """Read the CCA C4R3 coefficient register (9-bit value).

    :returns: ccac4r3
    :rtype: int
    """
    i2c.write([0x15, 0x6A])  # select register 0x6A for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC4r3Coefficient: ccac4r3=%r', result)
    return result
def DPP2607_Read_CcaC5r1Coefficient():
    """Read the CCA C5R1 coefficient register (9-bit value).

    :returns: ccac5r1
    :rtype: int
    """
    i2c.write([0x15, 0x6B])  # select register 0x6B for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC5r1Coefficient: ccac5r1=%r', result)
    return result
def DPP2607_Read_CcaC5r2Coefficient():
    """Read the CCA C5R2 coefficient register (9-bit value).

    :returns: ccac5r2
    :rtype: int
    """
    i2c.write([0x15, 0x6C])  # select register 0x6C for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC5r2Coefficient: ccac5r2=%r', result)
    return result
def DPP2607_Read_CcaC5r3Coefficient():
    """Read the CCA C5R3 coefficient register (9-bit value).

    :returns: ccac5r3
    :rtype: int
    """
    i2c.write([0x15, 0x6D])  # select register 0x6D for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC5r3Coefficient: ccac5r3=%r', result)
    return result
def DPP2607_Read_CcaC6r1Coefficient():
    """Read the CCA C6R1 coefficient register (9-bit value).

    :returns: ccac6r1
    :rtype: int
    """
    i2c.write([0x15, 0x6E])  # select register 0x6E for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC6r1Coefficient: ccac6r1=%r', result)
    return result
def DPP2607_Read_CcaC6r2Coefficient():
    """Read the CCA C6R2 coefficient register (9-bit value).

    :returns: ccac6r2
    :rtype: int
    """
    i2c.write([0x15, 0x6F])  # select register 0x6F for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC6r2Coefficient: ccac6r2=%r', result)
    return result
def DPP2607_Read_CcaC6r3Coefficient():
    """Read the CCA C6R3 coefficient register (9-bit value).

    :returns: ccac6r3
    :rtype: int
    """
    i2c.write([0x15, 0x70])  # select register 0x70 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC6r3Coefficient: ccac6r3=%r', result)
    return result
def DPP2607_Read_CcaC7r1Coefficient():
    """Read the CCA C7R1 coefficient register (9-bit value).

    :returns: ccac7r1
    :rtype: int
    """
    i2c.write([0x15, 0x71])  # select register 0x71 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC7r1Coefficient: ccac7r1=%r', result)
    return result
def DPP2607_Read_CcaC7r2Coefficient():
    """Read the CCA C7R2 coefficient register (9-bit value).

    :returns: ccac7r2
    :rtype: int
    """
    i2c.write([0x15, 0x72])  # select register 0x72 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC7r2Coefficient: ccac7r2=%r', result)
    return result
def DPP2607_Read_CcaC7r3Coefficient():
    """Read the CCA C7R3 coefficient register (9-bit value).

    :returns: ccac7r3
    :rtype: int
    """
    i2c.write([0x15, 0x73])  # select register 0x73 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1ff
    log(DEBUG, 'DPP2607_Read_CcaC7r3Coefficient: ccac7r3=%r', result)
    return result
def DPP2607_Read_CcaFunctionEnable():
    """Read the CCA function enable flag (1-bit value).

    :returns: cca_enable
    :rtype: int
    """
    i2c.write([0x15, 0x5E])  # select register 0x5E for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x1
    log(DEBUG, 'DPP2607_Read_CcaFunctionEnable: cca_enable=%r', result)
    return result
def DPP2607_Read_CommunicationStatus():
    """Read the compound communication-status word and split out its flags.

    Issues the 0x3A/0x38 compound-command sequence, waits for completion,
    then reads register 0x39 and extracts one flag bit per status.

    :returns: compound_stat_inv_cmd, compound_stat_par_cmd,
        compound_stat_mem_rd, compound_stat_cmd_par, compound_stat_cmd_abt
    :rtype: tuple[int, int, int, int, int]
    """
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC4])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    # One flag per bit, in bit order 8..12.
    flags = tuple((word >> bit) & 0x1 for bit in (8, 9, 10, 11, 12))
    log(DEBUG, 'DPP2607_Read_CommunicationStatus: compound_stat_inv_cmd=%r, compound_stat_par_cmd=%r, compound_stat_mem_rd=%r, compound_stat_cmd_par=%r, compound_stat_cmd_abt=%r', *flags)
    return flags
def DPP2607_Read_CropFirstLine():
    """Read the crop first-active-line register (11-bit value).

    :returns: first_active_line
    :rtype: int
    """
    i2c.write([0x15, 0x29])  # select register 0x29 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_CropFirstLine: first_active_line=%r', result)
    return result
def DPP2607_Read_CropFirstPixel():
    """Read the crop first-active-pixel register (11-bit value).

    :returns: first_active_pixel
    :rtype: int
    """
    i2c.write([0x15, 0x2B])  # select register 0x2B for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_CropFirstPixel: first_active_pixel=%r', result)
    return result
def DPP2607_Read_CropLastLine():
    """Read the crop last-active-line register (11-bit value).

    :returns: last_active_line
    :rtype: int
    """
    i2c.write([0x15, 0x2A])  # select register 0x2A for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_CropLastLine: last_active_line=%r', result)
    return result
def DPP2607_Read_CropLastPixel():
    """Read the crop last-active-pixel register (11-bit value).

    :returns: last_active_pixel
    :rtype: int
    """
    i2c.write([0x15, 0x2C])  # select register 0x2C for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_CropLastPixel: last_active_pixel=%r', result)
    return result
def DPP2607_Read_DeviceStatus():
    """Read the device-status register and decode its four fields.

    :returns: dev_id, dev_flash_status, dev_init_status, dev_led_status
    :rtype: tuple[DevID, DevFlashStatus, DevInitStatus, DevLEDStatus]
    """
    i2c.write([0x15, 0x03])  # select register 0x03 for readback
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    dev_id = DevID(word & 0xff)                        # bits 0-7
    dev_flash_status = DevFlashStatus((word >> 10) & 0x1)
    dev_init_status = DevInitStatus((word >> 11) & 0x1)
    dev_led_status = DevLEDStatus((word >> 12) & 0x1)
    log(DEBUG, 'DPP2607_Read_DeviceStatus: dev_id=%r, dev_flash_status=%r, dev_init_status=%r, dev_led_status=%r', dev_id, dev_flash_status, dev_init_status, dev_led_status)
    return dev_id, dev_flash_status, dev_init_status, dev_led_status
def DPP2607_Read_DisplayCurtainControl():
    """Read the display-curtain control register and decode its fields.

    :returns: dmd_curtain_ctl, dmd_curtain_color
    :rtype: tuple[EnabledDisabled, DMDCurtainColor]
    """
    i2c.write([0x15, 0xA6])  # select register 0xA6 for readback
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    dmd_curtain_ctl = EnabledDisabled(word & 0xf)            # bits 0-3
    dmd_curtain_color = DMDCurtainColor((word >> 4) & 0xf)   # bits 4-7
    log(DEBUG, 'DPP2607_Read_DisplayCurtainControl: dmd_curtain_ctl=%r, dmd_curtain_color=%r', dmd_curtain_ctl, dmd_curtain_color)
    return dmd_curtain_ctl, dmd_curtain_color
def DPP2607_Read_DmdPark():
    """Read the DMD PARK register (1-bit value).

    :returns: dmdpark
    :rtype: DMDPARK
    """
    i2c.write([0x15, 0x2D])  # select register 0x2D for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = DMDPARK(raw & 0x1)
    log(DEBUG, 'DPP2607_Read_DmdPark: dmdpark=%r', result)
    return result
def DPP2607_Read_EmbeddedSoftwareVersion():
    """Read the embedded software version via the compound-command path.

    Issues the 0x3A/0x38 compound-command sequence, waits for completion,
    then reads register 0x39 and splits the word into patch/minor/major.

    :returns: compound_icp_patch, compound_icp_minor, compound_icp_major
    :rtype: tuple[int, int, int]
    """
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0x02])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    compound_icp_patch = word & 0xffff          # bits 0-15
    compound_icp_minor = (word >> 16) & 0xff    # bits 16-23
    compound_icp_major = (word >> 24) & 0xff    # bits 24-31
    log(DEBUG, 'DPP2607_Read_EmbeddedSoftwareVersion: compound_icp_patch=%r, compound_icp_minor=%r, compound_icp_major=%r', compound_icp_patch, compound_icp_minor, compound_icp_major)
    return compound_icp_patch, compound_icp_minor, compound_icp_major
def DPP2607_Read_ImageLongFlip():
    """Read the image long-axis flip setting (1-bit value).

    :returns: flip_long
    :rtype: EnabledDisabled
    """
    i2c.write([0x15, 0x0F])  # select register 0x0F for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = EnabledDisabled(raw & 0x1)
    log(DEBUG, 'DPP2607_Read_ImageLongFlip: flip_long=%r', result)
    return result
def DPP2607_Read_ImageRotationSettings():
    """Read the image rotation setting (1-bit value).

    :returns: rotation_setting
    :rtype: RotationSetting
    """
    i2c.write([0x15, 0x0E])  # select register 0x0E for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = RotationSetting(raw & 0x1)
    log(DEBUG, 'DPP2607_Read_ImageRotationSettings: rotation_setting=%r', result)
    return result
def DPP2607_Read_ImageShortFlip():
    """Read the image short-axis flip setting (1-bit value).

    :returns: flip_short
    :rtype: EnabledDisabled
    """
    i2c.write([0x15, 0x10])  # select register 0x10 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = EnabledDisabled(raw & 0x1)
    log(DEBUG, 'DPP2607_Read_ImageShortFlip: flip_short=%r', result)
    return result
def DPP2607_Read_InternalTestPattern():
    """Read the internal test-pattern selection (4-bit value).

    :returns: test_pattern
    :rtype: TestPattern
    """
    i2c.write([0x15, 0x11])  # select register 0x11 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = TestPattern(raw & 0xf)
    log(DEBUG, 'DPP2607_Read_InternalTestPattern: test_pattern=%r', result)
    return result
def DPP2607_Read_InterruptStatus():
    """Read the interrupt-status register and split out its 16 flag bits.

    :returns: int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error,
        int_dmdif_overrun, int_format_buf_overflow, int_format_starvation,
        int_flash_fifo_err, int_flash_dma_err, int_format_mult_err,
        int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp,
        int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr,
        int_led_timeout
    :rtype: tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int]
    """
    i2c.write([0x15, 0x00])  # select register 0x00 for readback
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    # One flag per bit; bits 6, 16 and 17 are not reported.
    flags = tuple((word >> bit) & 0x1
                  for bit in (0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18))
    log(DEBUG, 'DPP2607_Read_InterruptStatus: int_seq_abort=%r, int_dmd_reset_overrun=%r, int_dmd_block_error=%r, int_dmdif_overrun=%r, int_format_buf_overflow=%r, int_format_starvation=%r, int_flash_fifo_err=%r, int_flash_dma_err=%r, int_format_mult_err=%r, int_format_cmd_err=%r, int_format_queue_warn=%r, int_ddr_overflow_bp=%r, int_ddr_overflow_fb=%r, int_scaler_line_err=%r, int_scaler_pixerr=%r, int_led_timeout=%r', *flags)
    return flags
def DPP2607_Read_LedCurrentBlue():
    """Read the blue LED current PWM register (11-bit value).

    :returns: pwm_blu
    :rtype: int
    """
    i2c.write([0x15, 0x14])  # select register 0x14 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_LedCurrentBlue: pwm_blu=%r', result)
    return result
def DPP2607_Read_LedCurrentGreen():
    """Read the green LED current PWM register (11-bit value).

    :returns: pwm_grn
    :rtype: int
    """
    i2c.write([0x15, 0x13])  # select register 0x13 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_LedCurrentGreen: pwm_grn=%r', result)
    return result
def DPP2607_Read_LedCurrentRed():
    """Read the red LED current PWM register (11-bit value).

    :returns: pwm_red
    :rtype: int
    """
    i2c.write([0x15, 0x12])  # select register 0x12 for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = raw & 0x7ff
    log(DEBUG, 'DPP2607_Read_LedCurrentRed: pwm_red=%r', result)
    return result
def DPP2607_Read_LedDriverEnable():
    """Read the LED driver enable register and decode each channel.

    :returns: led_enable_red, led_enable_grn, led_enable_blu
    :rtype: tuple[EnabledDisabled, EnabledDisabled, EnabledDisabled]
    """
    i2c.write([0x15, 0x16])  # select register 0x16 for readback
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    # One enable bit per channel: red=bit0, green=bit1, blue=bit2.
    led_enable_red = EnabledDisabled(word & 0x1)
    led_enable_grn = EnabledDisabled((word >> 1) & 0x1)
    led_enable_blu = EnabledDisabled((word >> 2) & 0x1)
    log(DEBUG, 'DPP2607_Read_LedDriverEnable: led_enable_red=%r, led_enable_grn=%r, led_enable_blu=%r', led_enable_red, led_enable_grn, led_enable_blu)
    return led_enable_red, led_enable_grn, led_enable_blu
def DPP2607_Read_ParallelBusPolarityControl():
    """Read the parallel-bus polarity control register and decode its fields.

    :returns: polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en
    :rtype: tuple[Polarity, Polarity, PolarityPixelClock, PolarityDataEn]
    """
    i2c.write([0x15, 0xAF])  # select register 0xAF for readback
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    polarity_hsync = Polarity((word >> 1) & 0x1)
    polarity_vsync = Polarity((word >> 2) & 0x1)
    polarity_pixel_clock = PolarityPixelClock((word >> 3) & 0x1)
    polarity_data_en = PolarityDataEn((word >> 4) & 0x1)
    log(DEBUG, 'DPP2607_Read_ParallelBusPolarityControl: polarity_hsync=%r, polarity_vsync=%r, polarity_pixel_clock=%r, polarity_data_en=%r', polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en)
    return polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en
def DPP2607_Read_SystemStatus():
    """Read the compound system-status word and decode its fields.

    Issues the 0x3A/0x38 compound-command sequence, waits for completion,
    then reads register 0x39.

    :returns: compound_stat_init, compound_stat_flash, compound_stat_temp,
        compound_stat_pad, compound_stat_led, compound_stat_bist
    :rtype: tuple[int, int, int, int, CompoundStat, CompoundStat]
    """
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC4])
    _poll_complete()
    i2c.write([0x15, 0x39])
    word = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    compound_stat_init = word & 0x1
    compound_stat_flash = (word >> 1) & 0x1
    compound_stat_temp = (word >> 2) & 0x1
    compound_stat_pad = (word >> 3) & 0x1
    compound_stat_led = CompoundStat((word >> 5) & 0x1)
    compound_stat_bist = CompoundStat((word >> 6) & 0x1)
    log(DEBUG, 'DPP2607_Read_SystemStatus: compound_stat_init=%r, compound_stat_flash=%r, compound_stat_temp=%r, compound_stat_pad=%r, compound_stat_led=%r, compound_stat_bist=%r', compound_stat_init, compound_stat_flash, compound_stat_temp, compound_stat_pad, compound_stat_led, compound_stat_bist)
    return compound_stat_init, compound_stat_flash, compound_stat_temp, compound_stat_pad, compound_stat_led, compound_stat_bist
def DPP2607_Read_SystemTemperature():
    """Read the compound system-temperature word.

    Issues the 0x3A/0x38 compound-command sequence, waits for completion,
    then reads register 0x39 and returns the full 32-bit word.

    :returns: compound_temp
    :rtype: int
    """
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC5])
    _poll_complete()
    i2c.write([0x15, 0x39])
    payload = i2c.read(4)
    # Was `& 0xffffffffL` -- the trailing-L long literal is Python-2-only
    # syntax; the plain int is numerically identical on Py2 and also parses
    # on Py3.  The mask itself is a defensive no-op since ">I" already
    # yields an unsigned 32-bit value.
    compound_temp = struct.unpack(">I", str(bytearray(payload[0:4])))[0] & 0xffffffff
    log(DEBUG, 'DPP2607_Read_SystemTemperature: compound_temp=%r', compound_temp)
    return compound_temp
def DPP2607_Read_VideoPixelFormat():
    """Read the video pixel format register (4-bit value).

    :returns: pix_format
    :rtype: PixFormat
    """
    i2c.write([0x15, 0x0D])  # select register 0x0D for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = PixFormat(raw & 0xf)
    log(DEBUG, 'DPP2607_Read_VideoPixelFormat: pix_format=%r', result)
    return result
def DPP2607_Read_VideoResolution():
    """Read the video resolution register (5-bit value).

    :returns: resolution
    :rtype: Resolution
    """
    i2c.write([0x15, 0x0C])  # select register 0x0C for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = Resolution(raw & 0x1f)
    log(DEBUG, 'DPP2607_Read_VideoResolution: resolution=%r', result)
    return result
def DPP2607_Read_VideoSourceSelection():
    """Read the video source selection register (3-bit value).

    :returns: source_sel
    :rtype: SourceSel
    """
    i2c.write([0x15, 0x0B])  # select register 0x0B for readback
    raw = struct.unpack(">I", str(bytearray(i2c.read(4)[0:4])))[0]
    result = SourceSel(raw & 0x7)
    log(DEBUG, 'DPP2607_Read_VideoSourceSelection: source_sel=%r', result)
    return result
def DPP2607_Write_CcaC1r1Coefficient(ccac1r1):
    """Write the CCA C1R1 coefficient register (9-bit value).

    :type ccac1r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC1r1Coefficient(%r)', ccac1r1)
    # Opcode 0x5F followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x5F] + list(bytearray(struct.pack(">I", ccac1r1 & 0x1ff))))
def DPP2607_Write_CcaC1r2Coefficient(ccac1r2):
    """Write the CCA C1R2 coefficient register (9-bit value).

    :type ccac1r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC1r2Coefficient(%r)', ccac1r2)
    # Opcode 0x60 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x60] + list(bytearray(struct.pack(">I", ccac1r2 & 0x1ff))))
def DPP2607_Write_CcaC1r3Coefficient(ccac1r3):
    """Write the CCA C1R3 coefficient register (9-bit value).

    :type ccac1r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC1r3Coefficient(%r)', ccac1r3)
    # Opcode 0x61 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x61] + list(bytearray(struct.pack(">I", ccac1r3 & 0x1ff))))
def DPP2607_Write_CcaC2r1Coefficient(ccac2r1):
    """Write the CCA C2R1 coefficient register (9-bit value).

    :type ccac2r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC2r1Coefficient(%r)', ccac2r1)
    # Opcode 0x62 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x62] + list(bytearray(struct.pack(">I", ccac2r1 & 0x1ff))))
def DPP2607_Write_CcaC2r2Coefficient(ccac2r2):
    """Write the CCA C2R2 coefficient register (9-bit value).

    :type ccac2r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC2r2Coefficient(%r)', ccac2r2)
    # Opcode 0x63 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x63] + list(bytearray(struct.pack(">I", ccac2r2 & 0x1ff))))
def DPP2607_Write_CcaC2r3Coefficient(ccac2r3):
    """Write the CCA C2R3 coefficient register (9-bit value).

    :type ccac2r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC2r3Coefficient(%r)', ccac2r3)
    # Opcode 0x64 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x64] + list(bytearray(struct.pack(">I", ccac2r3 & 0x1ff))))
def DPP2607_Write_CcaC3r1Coefficient(ccac3r1):
    """Write the CCA C3R1 coefficient register (9-bit value).

    :type ccac3r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC3r1Coefficient(%r)', ccac3r1)
    # Opcode 0x65 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x65] + list(bytearray(struct.pack(">I", ccac3r1 & 0x1ff))))
def DPP2607_Write_CcaC3r2Coefficient(ccac3r2):
    """Write the CCA C3R2 coefficient register (9-bit value).

    :type ccac3r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC3r2Coefficient(%r)', ccac3r2)
    # Opcode 0x66 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x66] + list(bytearray(struct.pack(">I", ccac3r2 & 0x1ff))))
def DPP2607_Write_CcaC3r3Coefficient(ccac3r3):
    """Write the CCA C3R3 coefficient register (9-bit value).

    :type ccac3r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC3r3Coefficient(%r)', ccac3r3)
    # Opcode 0x67 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x67] + list(bytearray(struct.pack(">I", ccac3r3 & 0x1ff))))
def DPP2607_Write_CcaC4r1Coefficient(ccac4r1):
    """Write the CCA C4R1 coefficient register (9-bit value).

    :type ccac4r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC4r1Coefficient(%r)', ccac4r1)
    # Opcode 0x68 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x68] + list(bytearray(struct.pack(">I", ccac4r1 & 0x1ff))))
def DPP2607_Write_CcaC4r2Coefficient(ccac4r2):
    """Write the CCA C4R2 coefficient register (9-bit value).

    :type ccac4r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC4r2Coefficient(%r)', ccac4r2)
    # Opcode 0x69 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x69] + list(bytearray(struct.pack(">I", ccac4r2 & 0x1ff))))
def DPP2607_Write_CcaC4r3Coefficient(ccac4r3):
    """Write the CCA C4R3 coefficient register (9-bit value).

    :type ccac4r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC4r3Coefficient(%r)', ccac4r3)
    # Opcode 0x6A followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6A] + list(bytearray(struct.pack(">I", ccac4r3 & 0x1ff))))
def DPP2607_Write_CcaC5r1Coefficient(ccac5r1):
    """Write the CCA C5R1 coefficient register (9-bit value).

    :type ccac5r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r1Coefficient(%r)', ccac5r1)
    # Opcode 0x6B followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6B] + list(bytearray(struct.pack(">I", ccac5r1 & 0x1ff))))
def DPP2607_Write_CcaC5r2Coefficient(ccac5r2):
    """Write the CCA C5R2 coefficient register (9-bit value).

    :type ccac5r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r2Coefficient(%r)', ccac5r2)
    # Opcode 0x6C followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6C] + list(bytearray(struct.pack(">I", ccac5r2 & 0x1ff))))
def DPP2607_Write_CcaC5r3Coefficient(ccac5r3):
    """Write the CCA C5R3 coefficient register (9-bit value).

    :type ccac5r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC5r3Coefficient(%r)', ccac5r3)
    # Opcode 0x6D followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6D] + list(bytearray(struct.pack(">I", ccac5r3 & 0x1ff))))
def DPP2607_Write_CcaC6r1Coefficient(ccac6r1):
    """Write the CCA C6R1 coefficient register (9-bit value).

    :type ccac6r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r1Coefficient(%r)', ccac6r1)
    # Opcode 0x6E followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6E] + list(bytearray(struct.pack(">I", ccac6r1 & 0x1ff))))
def DPP2607_Write_CcaC6r2Coefficient(ccac6r2):
    """Write the CCA C6R2 coefficient register (9-bit value).

    :type ccac6r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r2Coefficient(%r)', ccac6r2)
    # Opcode 0x6F followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x6F] + list(bytearray(struct.pack(">I", ccac6r2 & 0x1ff))))
def DPP2607_Write_CcaC6r3Coefficient(ccac6r3):
    """Write the CCA C6R3 coefficient register (9-bit value).

    :type ccac6r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC6r3Coefficient(%r)', ccac6r3)
    # Opcode 0x70 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x70] + list(bytearray(struct.pack(">I", ccac6r3 & 0x1ff))))
def DPP2607_Write_CcaC7r1Coefficient(ccac7r1):
    """Write the CCA C7R1 coefficient register (9-bit value).

    :type ccac7r1: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r1Coefficient(%r)', ccac7r1)
    # Opcode 0x71 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x71] + list(bytearray(struct.pack(">I", ccac7r1 & 0x1ff))))
def DPP2607_Write_CcaC7r2Coefficient(ccac7r2):
    """Write the CCA C7R2 coefficient register (9-bit value).

    :type ccac7r2: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r2Coefficient(%r)', ccac7r2)
    # Opcode 0x72 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x72] + list(bytearray(struct.pack(">I", ccac7r2 & 0x1ff))))
def DPP2607_Write_CcaC7r3Coefficient(ccac7r3):
    """Write the CCA C7R3 coefficient register (9-bit value).

    :type ccac7r3: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaC7r3Coefficient(%r)', ccac7r3)
    # Opcode 0x73 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x73] + list(bytearray(struct.pack(">I", ccac7r3 & 0x1ff))))
def DPP2607_Write_CcaFunctionEnable(cca_enable):
    """Write the CCA function enable flag (1-bit value).

    :type cca_enable: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CcaFunctionEnable(%r)', cca_enable)
    # Opcode 0x5E followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x5E] + list(bytearray(struct.pack(">I", cca_enable & 0x1))))
def DPP2607_Write_CheckerboardAnsiPattern():
    """Select the checkerboard ANSI internal test pattern.

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CheckerboardAnsiPattern()')
    # Register 0x11 <- 13 (test_pattern_ansi), big-endian 32-bit word.
    i2c.write([0x11, 0, 0, 0, 13])
def DPP2607_Write_CropFirstLine(first_active_line):
    """Write the crop first-active-line register (11-bit value).

    :type first_active_line: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropFirstLine(%r)', first_active_line)
    # Opcode 0x29 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x29] + list(bytearray(struct.pack(">I", first_active_line & 0x7ff))))
def DPP2607_Write_CropFirstPixel(first_active_pixel):
    """Write the crop first-active-pixel register (11-bit value).

    :type first_active_pixel: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropFirstPixel(%r)', first_active_pixel)
    # Opcode 0x2B followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x2B] + list(bytearray(struct.pack(">I", first_active_pixel & 0x7ff))))
def DPP2607_Write_CropLastLine(last_active_line):
    """Write the crop last-active-line register (11-bit value).

    :type last_active_line: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropLastLine(%r)', last_active_line)
    # Opcode 0x2A followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x2A] + list(bytearray(struct.pack(">I", last_active_line & 0x7ff))))
def DPP2607_Write_CropLastPixel(last_active_pixel):
    """Write the crop last-active-pixel register (11-bit value).

    :type last_active_pixel: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_CropLastPixel(%r)', last_active_pixel)
    # Opcode 0x2C followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x2C] + list(bytearray(struct.pack(">I", last_active_pixel & 0x7ff))))
def DPP2607_Write_DiagonalLinesPattern():
    """Select the diagonal-lines internal test pattern.

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DiagonalLinesPattern()')
    # Register 0x11 <- 10 (test_pattern_d_lines), big-endian 32-bit word.
    i2c.write([0x11, 0, 0, 0, 10])
def DPP2607_Write_DisplayCurtainControl(dmd_curtain_ctl, dmd_curtain_color):
    """Write the display-curtain control register.

    :type dmd_curtain_ctl: EnabledDisabled
    :type dmd_curtain_color: DMDCurtainColor
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DisplayCurtainControl(%r, %r)', dmd_curtain_ctl, dmd_curtain_color)
    # Pack both 4-bit fields into one word: ctl in bits 0-3, color in bits 4-7.
    word = (dmd_curtain_ctl & 0xf) | ((dmd_curtain_color & 0xf) << 4)
    i2c.write([0xA6] + list(bytearray(struct.pack(">I", word))))
def DPP2607_Write_DmdPark(dmdpark):
    """Write the DMD PARK register (1-bit value).

    :type dmdpark: DMDPARK
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_DmdPark(%r)', dmdpark)
    # Opcode 0x2D followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x2D] + list(bytearray(struct.pack(">I", dmdpark & 0x1))))
def DPP2607_Write_FineCheckerboardPattern():
    """Select the fine-checkerboard internal test pattern.

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_FineCheckerboardPattern()')
    # Register 0x11 <- 0 (test_pattern_fine_checker), big-endian 32-bit word.
    i2c.write([0x11, 0, 0, 0, 0])
def DPP2607_Write_HorizontalGrayRampPattern():
    """Select the horizontal gray-ramp internal test pattern.

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_HorizontalGrayRampPattern()')
    # Register 0x11 <- 12 (test_pattern_gray_ramp_h), big-endian 32-bit word.
    i2c.write([0x11, 0, 0, 0, 12])
def DPP2607_Write_HorizontalLinesPattern(test_pattern_h_lines):
    """Select a horizontal-lines internal test pattern variant (4-bit value).

    :type test_pattern_h_lines: TestPatternHLines
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_HorizontalLinesPattern(%r)', test_pattern_h_lines)
    # Opcode 0x11 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x11] + list(bytearray(struct.pack(">I", test_pattern_h_lines & 0xf))))
def DPP2607_Write_ImageLongFlip(flip_long):
    """Write the image long-axis flip setting (1-bit value).

    :type flip_long: EnabledDisabled
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageLongFlip(%r)', flip_long)
    # Opcode 0x0F followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x0F] + list(bytearray(struct.pack(">I", flip_long & 0x1))))
def DPP2607_Write_ImageRotationSettings(rotation_setting):
    """Write the image rotation setting (1-bit value).

    :type rotation_setting: RotationSetting
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageRotationSettings(%r)', rotation_setting)
    # Opcode 0x0E followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x0E] + list(bytearray(struct.pack(">I", rotation_setting & 0x1))))
def DPP2607_Write_ImageShortFlip(flip_short):
    """Write the image short-axis flip setting (1-bit value).

    :type flip_short: EnabledDisabled
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ImageShortFlip(%r)', flip_short)
    # Opcode 0x10 followed by the masked value as a big-endian 32-bit word.
    i2c.write([0x10] + list(bytearray(struct.pack(">I", flip_short & 0x1))))
def DPP2607_Write_InterruptStatus(int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout):
    """Write the interrupt status word (one bit per interrupt flag).

    Equivalent C signature: DPP2607_Write_InterruptStatus(DWORD IntSeqAbort, DWORD IntDMDResetOverrun, DWORD IntDMDBlockError, DWORD IntDMDIFOverrun, DWORD IntFormatBufOverflow, DWORD IntFormatStarvation, DWORD IntFlashFIFOErr, DWORD IntFlashDMAErr, DWORD IntFormatMultErr, DWORD IntFormatCmdErr, DWORD IntFormatQueueWarn, DWORD IntDDROverflowBP, DWORD IntDDROverflowFB, DWORD IntScalerLineErr, DWORD IntScalerPixerr, DWORD IntLEDTimeout).

    All parameters are ints used as single-bit flags.

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_InterruptStatus(%r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r, %r)', int_seq_abort, int_dmd_reset_overrun, int_dmd_block_error, int_dmdif_overrun, int_format_buf_overflow, int_format_starvation, int_flash_fifo_err, int_flash_dma_err, int_format_mult_err, int_format_cmd_err, int_format_queue_warn, int_ddr_overflow_bp, int_ddr_overflow_fb, int_scaler_line_err, int_scaler_pixerr, int_led_timeout)
    # (flag, bit position) table; note the layout skips bits 6, 16 and 17.
    bit_layout = (
        (int_seq_abort, 0),
        (int_dmd_reset_overrun, 1),
        (int_dmd_block_error, 2),
        (int_dmdif_overrun, 3),
        (int_format_buf_overflow, 4),
        (int_format_starvation, 5),
        (int_flash_fifo_err, 7),
        (int_flash_dma_err, 8),
        (int_format_mult_err, 9),
        (int_format_cmd_err, 10),
        (int_format_queue_warn, 11),
        (int_ddr_overflow_bp, 12),
        (int_ddr_overflow_fb, 13),
        (int_scaler_line_err, 14),
        (int_scaler_pixerr, 15),
        (int_led_timeout, 18),
    )
    value = 0
    for flag, shift in bit_layout:
        value |= (flag & 0x1) << shift
    i2c.write([0x00] + list(bytearray(struct.pack(">I", value))))
def DPP2607_Write_LedCurrentBlue(pwm_blu):
    """Write the blue LED current (11-bit PWM value).

    Equivalent C signature: DPP2607_Write_LedCurrentBlue(DWORD PWMBlu).

    :type pwm_blu: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentBlue(%r)', pwm_blu)
    # 11-bit value packed as a big-endian 32-bit word after byte 0x14.
    word = struct.pack(">I", pwm_blu & 0x7ff)
    i2c.write([0x14] + list(bytearray(word)))
def DPP2607_Write_LedCurrentGreen(pwm_grn):
    """Write the green LED current (11-bit PWM value).

    Equivalent C signature: DPP2607_Write_LedCurrentGreen(DWORD PWMGrn).

    :type pwm_grn: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentGreen(%r)', pwm_grn)
    # 11-bit value packed as a big-endian 32-bit word after byte 0x13.
    word = struct.pack(">I", pwm_grn & 0x7ff)
    i2c.write([0x13] + list(bytearray(word)))
def DPP2607_Write_LedCurrentRed(pwm_red):
    """Write the red LED current (11-bit PWM value).

    Equivalent C signature: DPP2607_Write_LedCurrentRed(DWORD PWMRed).

    :type pwm_red: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedCurrentRed(%r)', pwm_red)
    # 11-bit value packed as a big-endian 32-bit word after byte 0x12.
    word = struct.pack(">I", pwm_red & 0x7ff)
    i2c.write([0x12] + list(bytearray(word)))
def DPP2607_Write_LedDriverEnable(led_enable_red, led_enable_grn, led_enable_blu):
    """Enable/disable the red, green and blue LED drivers.

    Equivalent C signature: DPP2607_Write_LedDriverEnable(DWORD LEDEnableRed, DWORD LEDEnableGrn, DWORD LEDEnableBlu).

    :type led_enable_red: EnabledDisabled
    :type led_enable_grn: EnabledDisabled
    :type led_enable_blu: EnabledDisabled
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_LedDriverEnable(%r, %r, %r)', led_enable_red, led_enable_grn, led_enable_blu)
    # One enable bit per colour: red=bit0, green=bit1, blue=bit2.
    value = 0
    for flag, shift in ((led_enable_red, 0), (led_enable_grn, 1), (led_enable_blu, 2)):
        value |= (flag & 0x1) << shift
    i2c.write([0x16] + list(bytearray(struct.pack(">I", value))))
def DPP2607_Write_ParallelBusPolarityControl(polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en):
    """Write the parallel-bus polarity control bits.

    Equivalent C signature: DPP2607_Write_ParallelBusPolarityControl(DWORD PolarityHSYNC, DWORD PolarityVSYNC, DWORD PolarityPixelClock, DWORD PolarityDataEn).

    :type polarity_hsync: Polarity
    :type polarity_vsync: Polarity
    :type polarity_pixel_clock: PolarityPixelClock
    :type polarity_data_en: PolarityDataEn
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_ParallelBusPolarityControl(%r, %r, %r, %r)', polarity_hsync, polarity_vsync, polarity_pixel_clock, polarity_data_en)
    # HSYNC=bit1, VSYNC=bit2, pixel clock=bit3, data enable=bit4 (bit0 unused).
    value = 0
    for flag, shift in ((polarity_hsync, 1), (polarity_vsync, 2),
                        (polarity_pixel_clock, 3), (polarity_data_en, 4)):
        value |= (flag & 0x1) << shift
    i2c.write([0xAF] + list(bytearray(struct.pack(">I", value))))
def DPP2607_Write_PropagateLedCurrents(led_latch):
    """Write the LED-current latch value and trigger its propagation.

    Equivalent C signature: DPP2607_Write_PropagateLedCurrents(DWORD LEDLatch).

    :type led_latch: int
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_PropagateLedCurrents(%r)', led_latch)
    i2c.write([0x39] + list(bytearray(struct.pack(">I", led_latch))))
    # Follow-up control writes (0x3A=1, then 0x38=0xD3), then wait for
    # the device to report completion.
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xD3])
    _poll_complete()
def DPP2607_Write_SequenceSelect(compound_looks):
    """Write the sequence (compound looks) selection and apply it.

    Equivalent C signature: DPP2607_Write_SequenceSelect(DWORD CompoundLooks).

    :type compound_looks: CompoundLooks
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SequenceSelect(%r)', compound_looks)
    i2c.write([0x39] + list(bytearray(struct.pack(">I", compound_looks))))
    # Follow-up control writes (0x3A=1, then 0x38=0xC1), then wait for
    # the device to report completion.
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xC1])
    _poll_complete()
def DPP2607_Write_SetSplashScreen(compound_splash):
    """Write the splash-screen selection and apply it.

    Equivalent C signature: DPP2607_Write_SetSplashScreen(DWORD CompoundSplash).

    :type compound_splash: CompoundSplash
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SetSplashScreen(%r)', compound_splash)
    i2c.write([0x39] + list(bytearray(struct.pack(">I", compound_splash))))
    # Follow-up control writes (0x3A=1, then 0x38=0xBD), then wait for
    # the device to report completion.
    i2c.write([0x3A, 0x00, 0x00, 0x00, 0x01])
    i2c.write([0x38, 0x00, 0x00, 0x00, 0xBD])
    _poll_complete()
def DPP2607_Write_SolidFieldPattern(test_pattern_solids):
    """Select a solid-field test pattern.

    Equivalent C signature: DPP2607_Write_SolidFieldPattern(DWORD TestPatternSolids).

    :type test_pattern_solids: TestPatternSolids
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SolidFieldPattern(%r)', test_pattern_solids)
    # 4-bit pattern code packed as a big-endian 32-bit word after byte 0x11.
    word = struct.pack(">I", test_pattern_solids & 0xf)
    i2c.write([0x11] + list(bytearray(word)))
def DPP2607_Write_SystemReset():
    """Issue a device (system) reset.

    Equivalent C signature: DPP2607_Write_SystemReset().

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_SystemReset()')
    # First byte 0x1F followed by the fixed big-endian value 1 (dev_rst).
    i2c.write([0x1F, 0, 0, 0, 1])
def DPP2607_Write_VeritcalLinesPattern(test_pattern_v_lines):
    """Select the Vertical Lines test pattern.

    Equivalent C signature: DPP2607_Write_VeritcalLinesPattern(DWORD TestPatternVLines).
    (Function name keeps the original "Veritcal" spelling for API compatibility.)

    :type test_pattern_v_lines: TestPatternVLines
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VeritcalLinesPattern(%r)', test_pattern_v_lines)
    # 4-bit pattern code packed as a big-endian 32-bit word after byte 0x11.
    word = struct.pack(">I", test_pattern_v_lines & 0xf)
    i2c.write([0x11] + list(bytearray(word)))
def DPP2607_Write_VerticalGrayRampPattern():
    """Select the Vertical Gray Ramp test pattern.

    Equivalent C signature: DPP2607_Write_VerticalGrayRampPattern().

    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VerticalGrayRampPattern()')
    # First byte 0x11 followed by the fixed big-endian code 11
    # (test_pattern_gray_ramp_v).
    i2c.write([0x11, 0, 0, 0, 11])
def DPP2607_Write_VideoPixelFormat(pix_format):
    """Write the video pixel format selection.

    Equivalent C signature: DPP2607_Write_VideoPixelFormat(DWORD PixFormat).

    :type pix_format: PixFormat
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoPixelFormat(%r)', pix_format)
    # 4-bit format code packed as a big-endian 32-bit word after byte 0x0D.
    word = struct.pack(">I", pix_format & 0xf)
    i2c.write([0x0D] + list(bytearray(word)))
def DPP2607_Write_VideoResolution(resolution):
    """Write the video resolution selection.

    Equivalent C signature: DPP2607_Write_VideoResolution(DWORD Resolution).

    :type resolution: Resolution
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoResolution(%r)', resolution)
    # 5-bit resolution code packed as a big-endian 32-bit word after byte 0x0C.
    word = struct.pack(">I", resolution & 0x1f)
    i2c.write([0x0C] + list(bytearray(word)))
def DPP2607_Write_VideoSourceSelection(source_sel):
    """Write the video input source selection.

    Equivalent C signature: DPP2607_Write_VideoSourceSelection(DWORD SourceSel).

    :type source_sel: SourceSel
    :rtype: None
    """
    log(DEBUG, 'DPP2607_Write_VideoSourceSelection(%r)', source_sel)
    # 3-bit source code packed as a big-endian 32-bit word after byte 0x0B.
    word = struct.pack(">I", source_sel & 0x7)
    i2c.write([0x0B] + list(bytearray(word)))
| 313 | 0 | 23 |
bb36154f56427888e11879eca6ee7e98539750ca | 5,089 | py | Python | tests/test_cli.py | gercograndia/aws-iam-tester | 555a868e8ccd44b81da18b94dbfaf2219d42eab5 | [
"MIT"
] | 15 | 2020-03-27T22:55:14.000Z | 2022-02-12T04:22:41.000Z | tests/test_cli.py | gercograndia/aws-iam-tester | 555a868e8ccd44b81da18b94dbfaf2219d42eab5 | [
"MIT"
] | 3 | 2020-07-31T19:41:33.000Z | 2021-06-30T07:16:08.000Z | tests/test_cli.py | gercograndia/aws-iam-tester | 555a868e8ccd44b81da18b94dbfaf2219d42eab5 | [
"MIT"
] | 6 | 2020-11-24T20:44:44.000Z | 2021-06-03T02:56:23.000Z | """
Tests for the command module
"""
import os
import subprocess
import pytest
import pathlib
from pyassert import assert_that
# from aws-iam-tester import cli
script_path = pathlib.Path(__file__).parent.absolute()
# Keep this method last to avoid disrupting other methods
| 35.340278 | 157 | 0.647671 | """
Tests for the command module
"""
import os
import subprocess
import pytest
import pathlib
from pyassert import assert_that
# from aws-iam-tester import cli
script_path = pathlib.Path(__file__).parent.absolute()
def run_command(command_list, do_assert=True, show_debug=False):
    """Run *command_list* in a subprocess and return (returncode, stdout, stderr).

    stdout/stderr are returned as str() of the raw bytes. When *do_assert*
    is true, the call itself asserts the command did not hard-fail.
    """
    proc = subprocess.Popen(
        command_list,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = proc.communicate()
    if show_debug:
        print(f"Return code: {proc.returncode}")
        print(f"Stdout: {out}")
        print(f"Stderr: {err}")
    if do_assert:
        # Exit code 0 or 1 is acceptable here; callers check details themselves.
        assert_that(proc.returncode).is_less_than(2)
        assert_that(err).is_not_none()
    return proc.returncode, str(out), str(err)
def test_help():
    """--help prints the usage text."""
    _, stdout, _ = run_command(['aws-iam-tester', '--help'])
    assert_that(stdout).contains('Usage')
def test_version():
    """--version prints the version string."""
    _, stdout, _ = run_command(['aws-iam-tester', '--version'])
    assert_that(stdout).contains('version')
def test_test_runs():
    """A bounded number of runs against the sample config completes cleanly."""
    run_command(
        ['aws-iam-tester', '--config-file', f'{script_path}/config.yml', '--number-of-runs', '10'],
    )
def test_dry_run():
    """An account dry run against the sample config completes cleanly."""
    run_command(
        ['aws-iam-tester', 'account', '--config-file', f'{script_path}/config.yml', '--dry-run'],
    )
def test_no_system_roles():
    """Running with --no-system-roles completes cleanly."""
    run_command(
        ['aws-iam-tester', 'account', '--config-file', f'{script_path}/config.yml', '--no-system-roles'],
    )
def test_full_run():
    """A full run writes its findings to the requested output directory."""
    output_dir = '/tmp/iam_tester_results'
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'account', '--config-file', f'{script_path}/config.yml', '--write-to-file', '--output-location', output_dir],
    )
    assert_that(stdout).contains("are written to")
    assert_that(output_dir).is_a_directory()
def test_full_run_with_global_limit():
    """A full run with a global limit_to either reports findings or none."""
    output_dir = '/tmp/iam_tester_results'
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'account', '--config-file', f'{script_path}/config_with_global_limit_to.yml', '--write-to-file', '--output-location', output_dir],
    )
    assert_that(stdout).matches(r"(^(.)*are written to(.)*$)|(^(.)*No findings found(.)*$)")
    assert_that(output_dir).is_a_directory()
def test_check_user_action():
    """Checking a specific user/action pair reports that action."""
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-u', 'ggrandia', '-a', 'glue:DeleteTable'],
    )
    assert_that(stdout).matches(r"(^(.)*Action(.)*glue:DeleteTable(.)*$)")
def test_check_user_action_with_json():
    """The user/action check with -j emits JSON-style output."""
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-u', 'ggrandia', '-a', 'glue:DeleteTable', '-j'],
    )
    assert_that(stdout).matches(r"(^(.)*[(.)*](.)*$)")
def test_access_check():
    """An account-wide access check prints a summary."""
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-a', 'glue:DeleteTable'],
    )
    assert_that(stdout).matches(r"(^(.)*Summary:(.)*$)")
def test_access_check_with_json():
    """An account-wide access check with -j emits JSON-style output."""
    _, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-a', 'glue:DeleteTable', '-j'],
    )
    assert_that(stdout).matches(r"(^(.)*[(.)*](.)*$)")
def test_check_invalid_user_action():
    """A non-existent user is reported and the CLI exits with code 2."""
    returncode, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-u', 'non_existent_user', '-a', 'glue:DeleteTable'],
        do_assert=False,
    )
    assert_that(stdout).matches(r"(^(.)*Could not find entity(.)*$)")
    assert_that(returncode).is_equal_to(2)
assert_that(returncode).is_equal_to(2)
def test_check_role_action():
returncode, stdout, stderr = run_command(
['aws-iam-tester', 'access', '-r', 'api2s3_worker_role', '-a', 'glue:DeleteTable'],
)
assert_that(stdout).matches(r"(^(.)*Action(.)*glue:DeleteTable(.)*$)")
def test_check_invalid_role_action():
    """A non-existent role is reported and the CLI exits with code 2.

    Bug fix: this test was previously named test_check_invalid_user_action,
    duplicating the user variant defined above; under pytest the later
    definition shadowed the earlier one, so only one of the two ever ran.
    """
    returncode, stdout, stderr = run_command(
        ['aws-iam-tester', 'access', '-r', 'non_existent_role', '-a', 'glue:DeleteTable'],
        do_assert=False,
    )
    assert_that(stdout).matches(r"(^(.)*Could not find entity(.)*$)")
    assert_that(returncode).is_equal_to(2)
def test_check_both_user_and_role_action():
    """Passing both a user and a role is rejected with exit code 2."""
    returncode, stdout, _ = run_command(
        ['aws-iam-tester', 'access', '-u', 'whatever', '-r', 'whatever', '-a', 'glue:DeleteTable'],
        do_assert=False,
    )
    assert_that(stdout).matches(r"(^(.)*Pass in user or role, not both(.)*$)")
    assert_that(returncode).is_equal_to(2)
# Keep this method last to avoid disrupting other methods
def test_without_aws_creds():
    """With bogus AWS credentials the CLI must fail with InvalidClientTokenId.

    Bug fixes:
    - The secret-key variable boto3 reads is AWS_SECRET_ACCESS_KEY; the
      previous name SECRET_AWS_ACCESS_KEY was never consulted by the SDK.
    - Cleanup now runs in a finally block, so a failing run cannot leak the
      fake credentials into later tests (the old bare `del` also raised
      KeyError if the variable was somehow absent).
    """
    os.environ["AWS_ACCESS_KEY_ID"] = "whatever"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "whatever"
    try:
        returncode, stdout, stderr = run_command(
            command_list=['aws-iam-tester', 'account', '--config-file', f'{script_path}/config.yml', '--dry-run'],
            do_assert=False,
        )
    finally:
        os.environ.pop("AWS_ACCESS_KEY_ID", None)
        os.environ.pop("AWS_SECRET_ACCESS_KEY", None)
    assert_that(returncode).is_equal_to(2)
    assert_that(stdout).matches(r"(^(.)*InvalidClientTokenId(.)*$)")
| 4,422 | 0 | 390 |
bb2d7ee630e8e32809d8407270d93c95fb6f1e7b | 7,645 | py | Python | request-patch-review.py | hyunsik/incubator-tajo | fbc358c2030e78f6616b286cf0736afaa8a583f8 | [
"BSD-3-Clause"
] | 19 | 2015-01-22T14:46:11.000Z | 2021-11-10T16:06:36.000Z | request-patch-review.py | gruter/tajo-cdh | 7bd3efdc253fe7bffb42881065183e94a8a0a2ee | [
"BSD-3-Clause"
] | null | null | null | request-patch-review.py | gruter/tajo-cdh | 7bd3efdc253fe7bffb42881065183e94a8a0a2ee | [
"BSD-3-Clause"
] | 19 | 2015-01-11T20:04:49.000Z | 2021-11-10T16:06:26.000Z | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
import os
import time
import datetime
import tempfile
from jira.client import JIRA
def main():
''' main(), shut up, pylint '''
popt = argparse.ArgumentParser(description='Tajo patch review tool')
popt.add_argument('-b', '--branch', action='store', dest='branch', required=True, help='Tracking branch to create diff against')
popt.add_argument('-j', '--jira', action='store', dest='jira', required=True, help='JIRA corresponding to the reviewboard')
popt.add_argument('-skip-rb', '--skip-reviewboard', action='store_true', dest='skip_reviewboard', required=False, help='Skip a review request to reviewboard.')
popt.add_argument('-s', '--summary', action='store', dest='summary', required=False, help='Summary for the reviewboard')
popt.add_argument('-d', '--description', action='store', dest='description', required=False, help='Description for reviewboard')
popt.add_argument('-c', '--change-description', action='store', dest='change_description', required=False, help='Description of what changed in this revision of the review request when updating an existing request')
popt.add_argument('-pa', '--patch-available', action='store_true', dest='patch_available', required=False, help='Transite the JIRA status to Patch Available. If its status is already Patch Available, it updates the status of the JIRA issue by transiting its status to Open and Patch Available sequentially.')
popt.add_argument('-r', '--rb', action='store', dest='reviewboard', required=False, help='Review board that needs to be updated')
popt.add_argument('-t', '--testing-done', action='store', dest='testing', required=False, help='Text for the Testing Done section of the reviewboard')
popt.add_argument('-db', '--debug', action='store_true', required=False, help='Enable debug mode')
opt = popt.parse_args()
# the patch name is determined here.
patch_file=tempfile.gettempdir() + "/" + opt.jira + ".patch"
if opt.reviewboard:
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d_%H:%M:%S')
patch_file=tempfile.gettempdir() + "/" + opt.jira + '_' + st + '.patch'
# first check if rebase is needed
git_branch_hash="git rev-parse " + opt.branch
p_now=os.popen(git_branch_hash)
branch_now=p_now.read()
p_now.close()
git_common_ancestor="git merge-base " + opt.branch + " HEAD"
p_then=os.popen(git_common_ancestor)
branch_then=p_then.read()
p_then.close()
# get remote and branch name
remote_name=opt.branch.split("/")[0]
branch_name=opt.branch.split("/")[1]
if branch_now != branch_then:
print 'ERROR: Your current working branch is from an older version of ' + opt.branch + '. Please rebase first by using git pull --rebase'
sys.exit(1)
git_configure_reviewboard="git config reviewboard.url https://reviews.apache.org"
print "Configuring reviewboard url to https://reviews.apache.org"
p=os.popen(git_configure_reviewboard)
p.close()
# update the specified remote branch
git_remote_update="git fetch " + remote_name
print "Updating your remote branche " + opt.branch + " to pull the latest changes"
p=os.popen(git_remote_update)
p.close()
# get jira and issue instance
jira=get_jira()
issue = jira.issue(opt.jira)
if not opt.skip_reviewboard:
rb_command="post-review --publish --tracking-branch " + opt.branch + " --target-groups=Tajo --branch=" + branch_name + " --bugs-closed=" + opt.jira
if opt.reviewboard:
rb_command=rb_command + " -r " + opt.reviewboard
summary=issue.key + ": " + issue.fields.summary # default summary is 'TAJO-{NUM}: {JIRA TITLE}'
if opt.summary: # if a summary is given, this field is added or updated
summary=opt.summary
if not opt.reviewboard: # if a review request is created
rb_command=rb_command + " --summary '" + summary + "'"
description=issue.fields.description
if opt.description: # if a descriptin is give, this field is added
description = opt.description
if opt.reviewboard and opt.change_description:
rb_command=rb_command + " --change-description '" + opt.change_description + "'"
if not opt.reviewboard: # if a review request is created
rb_command=rb_command + " --description '" + description + "'"
if opt.testing:
rb_command=rb_command + " --testing-done=" + opt.testing
if opt.debug:
rb_command=rb_command + " --debug"
print rb_command
p=os.popen(rb_command)
rb_url=""
for line in p:
print line
if line.startswith('http'):
rb_url = line
elif line.startswith("There don't seem to be any diffs"):
print 'ERROR: Your reviewboard was not created/updated since there was no diff to upload. The reasons that can cause this issue are 1) Your diff is not checked into your local branch. Please check in the diff to the local branch and retry 2) You are not specifying the local branch name as part of the --branch option. Please specify the remote branch name obtained from git branch -r'
p.close()
sys.exit(1)
elif line.startswith("Your review request still exists, but the diff is not attached") and not opt.debug:
print 'ERROR: Your reviewboard was not created/updated. Please run the script with the --debug option to troubleshoot the problem'
p.close()
sys.exit(1)
p.close()
if opt.debug:
print 'rb url=',rb_url
git_command="git diff --no-prefix " + opt.branch + " > " + patch_file
if opt.debug:
print git_command
p=os.popen(git_command)
p.close()
print 'Creating diff against', opt.branch, 'and uploading patch to ',opt.jira
attachment=open(patch_file)
jira.add_attachment(issue,attachment)
attachment.close()
# Add comment about a request to reviewboard and its url.
if not opt.skip_reviewboard:
comment="Created a review request against branch " + branch_name + " in reviewboard "
if opt.reviewboard:
comment="Updated the review request against branch " + branch_name + " in reviewboard "
comment = comment + "\n" + rb_url
jira.add_comment(opt.jira, comment)
# Transition the jira status to Patch Available
if opt.patch_available:
if issue.fields.status.id == '10002': # If the jira status is already Patch Available (id - 10002)
jira.transition_issue(issue, '731') # Cancel (id - 731) the uploaded patch
issue = jira.issue(opt.jira)
jira.transition_issue(issue, '10002')
if __name__ == '__main__':
sys.exit(main())
| 46.615854 | 393 | 0.708306 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
import os
import time
import datetime
import tempfile
from jira.client import JIRA
def get_jira():
    """Build an authenticated JIRA client for issues.apache.org.

    Credentials are read from ~/.jira.ini, which must contain
    'user=...' and 'password=...' lines.
    """
    options = {
        'server': 'https://issues.apache.org/jira'
    }
    home = os.getenv('HOME').rstrip('/')
    credentials = dict(line.strip().split('=') for line in open(home + '/.jira.ini'))
    return JIRA(options, basic_auth=(credentials['user'], credentials['password']))
def main():
    '''Create/refresh a patch for a Tajo JIRA: optionally post or update a
    ReviewBoard request, attach the git diff to the JIRA issue, comment with
    the review URL, and optionally transition the issue to Patch Available.
    Exits non-zero on rebase or ReviewBoard failures. (Python 2 script.)'''
    popt = argparse.ArgumentParser(description='Tajo patch review tool')
    popt.add_argument('-b', '--branch', action='store', dest='branch', required=True, help='Tracking branch to create diff against')
    popt.add_argument('-j', '--jira', action='store', dest='jira', required=True, help='JIRA corresponding to the reviewboard')
    popt.add_argument('-skip-rb', '--skip-reviewboard', action='store_true', dest='skip_reviewboard', required=False, help='Skip a review request to reviewboard.')
    popt.add_argument('-s', '--summary', action='store', dest='summary', required=False, help='Summary for the reviewboard')
    popt.add_argument('-d', '--description', action='store', dest='description', required=False, help='Description for reviewboard')
    popt.add_argument('-c', '--change-description', action='store', dest='change_description', required=False, help='Description of what changed in this revision of the review request when updating an existing request')
    popt.add_argument('-pa', '--patch-available', action='store_true', dest='patch_available', required=False, help='Transite the JIRA status to Patch Available. If its status is already Patch Available, it updates the status of the JIRA issue by transiting its status to Open and Patch Available sequentially.')
    popt.add_argument('-r', '--rb', action='store', dest='reviewboard', required=False, help='Review board that needs to be updated')
    popt.add_argument('-t', '--testing-done', action='store', dest='testing', required=False, help='Text for the Testing Done section of the reviewboard')
    popt.add_argument('-db', '--debug', action='store_true', required=False, help='Enable debug mode')
    opt = popt.parse_args()
    # The patch name is determined here: JIRA key alone for a new review,
    # JIRA key plus timestamp when updating an existing reviewboard request.
    patch_file=tempfile.gettempdir() + "/" + opt.jira + ".patch"
    if opt.reviewboard:
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d_%H:%M:%S')
        patch_file=tempfile.gettempdir() + "/" + opt.jira + '_' + st + '.patch'
    # First check if rebase is needed: compare the tracking branch tip with
    # the merge base of HEAD; if they differ, HEAD is behind the branch.
    git_branch_hash="git rev-parse " + opt.branch
    p_now=os.popen(git_branch_hash)
    branch_now=p_now.read()
    p_now.close()
    git_common_ancestor="git merge-base " + opt.branch + " HEAD"
    p_then=os.popen(git_common_ancestor)
    branch_then=p_then.read()
    p_then.close()
    # Split 'remote/branch' (e.g. 'origin/master') into its two parts.
    remote_name=opt.branch.split("/")[0]
    branch_name=opt.branch.split("/")[1]
    if branch_now != branch_then:
        print 'ERROR: Your current working branch is from an older version of ' + opt.branch + '. Please rebase first by using git pull --rebase'
        sys.exit(1)
    git_configure_reviewboard="git config reviewboard.url https://reviews.apache.org"
    print "Configuring reviewboard url to https://reviews.apache.org"
    p=os.popen(git_configure_reviewboard)
    p.close()
    # Update the specified remote so the diff is taken against its latest state.
    git_remote_update="git fetch " + remote_name
    print "Updating your remote branche " + opt.branch + " to pull the latest changes"
    p=os.popen(git_remote_update)
    p.close()
    # Get the JIRA client and the issue instance for opt.jira.
    jira=get_jira()
    issue = jira.issue(opt.jira)
    if not opt.skip_reviewboard:
        # Build the post-review command line; summary/description default to
        # the JIRA's own fields and are only sent when creating a new request.
        rb_command="post-review --publish --tracking-branch " + opt.branch + " --target-groups=Tajo --branch=" + branch_name + " --bugs-closed=" + opt.jira
        if opt.reviewboard:
            rb_command=rb_command + " -r " + opt.reviewboard
        summary=issue.key + ": " + issue.fields.summary # default summary is 'TAJO-{NUM}: {JIRA TITLE}'
        if opt.summary: # if a summary is given, this field is added or updated
            summary=opt.summary
        if not opt.reviewboard: # if a review request is created
            rb_command=rb_command + " --summary '" + summary + "'"
        description=issue.fields.description
        if opt.description: # if a descriptin is give, this field is added
            description = opt.description
        if opt.reviewboard and opt.change_description:
            rb_command=rb_command + " --change-description '" + opt.change_description + "'"
        if not opt.reviewboard: # if a review request is created
            rb_command=rb_command + " --description '" + description + "'"
        if opt.testing:
            rb_command=rb_command + " --testing-done=" + opt.testing
        if opt.debug:
            rb_command=rb_command + " --debug"
        print rb_command
        p=os.popen(rb_command)
        rb_url=""
        # Scan post-review's output for the request URL; known failure
        # messages abort the script with exit code 1.
        for line in p:
            print line
            if line.startswith('http'):
                rb_url = line
            elif line.startswith("There don't seem to be any diffs"):
                print 'ERROR: Your reviewboard was not created/updated since there was no diff to upload. The reasons that can cause this issue are 1) Your diff is not checked into your local branch. Please check in the diff to the local branch and retry 2) You are not specifying the local branch name as part of the --branch option. Please specify the remote branch name obtained from git branch -r'
                p.close()
                sys.exit(1)
            elif line.startswith("Your review request still exists, but the diff is not attached") and not opt.debug:
                print 'ERROR: Your reviewboard was not created/updated. Please run the script with the --debug option to troubleshoot the problem'
                p.close()
                sys.exit(1)
        p.close()
        if opt.debug:
            print 'rb url=',rb_url
    # Write the diff to patch_file and attach it to the JIRA issue.
    git_command="git diff --no-prefix " + opt.branch + " > " + patch_file
    if opt.debug:
        print git_command
    p=os.popen(git_command)
    p.close()
    print 'Creating diff against', opt.branch, 'and uploading patch to ',opt.jira
    attachment=open(patch_file)
    jira.add_attachment(issue,attachment)
    attachment.close()
    # Add comment about a request to reviewboard and its url.
    if not opt.skip_reviewboard:
        comment="Created a review request against branch " + branch_name + " in reviewboard "
        if opt.reviewboard:
            comment="Updated the review request against branch " + branch_name + " in reviewboard "
        comment = comment + "\n" + rb_url
        jira.add_comment(opt.jira, comment)
    # Transition the jira status to Patch Available
    if opt.patch_available:
        if issue.fields.status.id == '10002': # If the jira status is already Patch Available (id - 10002)
            jira.transition_issue(issue, '731') # Cancel (id - 731) the uploaded patch
            issue = jira.issue(opt.jira)
        jira.transition_issue(issue, '10002')
if __name__ == '__main__':
sys.exit(main())
| 322 | 0 | 24 |
c15a73a25b6c01e22dddf80e2f1876a8c274118a | 11,613 | py | Python | babygraphics.py | kaiicheng/United-States-Population-Name-Dashboard | 9019538fcb58c7e97a3dc67d3b27cb8ad180e448 | [
"MIT"
] | null | null | null | babygraphics.py | kaiicheng/United-States-Population-Name-Dashboard | 9019538fcb58c7e97a3dc67d3b27cb8ad180e448 | [
"MIT"
] | null | null | null | babygraphics.py | kaiicheng/United-States-Population-Name-Dashboard | 9019538fcb58c7e97a3dc67d3b27cb8ad180e448 | [
"MIT"
] | null | null | null | """
SC101 Baby Names Project
Adapted from Nick Parlante's Baby Names assignment by
Jerry Liao.
YOUR DESCRIPTION HERE
"""
import tkinter
import babynames
import babygraphicsgui as gui
FILENAMES = [
'data/full/baby-1900.txt', 'data/full/baby-1910.txt',
'data/full/baby-1920.txt', 'data/full/baby-1930.txt',
'data/full/baby-1940.txt', 'data/full/baby-1950.txt',
'data/full/baby-1960.txt', 'data/full/baby-1970.txt',
'data/full/baby-1980.txt', 'data/full/baby-1990.txt',
'data/full/baby-2000.txt', 'data/full/baby-2010.txt'
]
CANVAS_WIDTH = 1000
CANVAS_HEIGHT = 600
YEARS = [1900, 1910, 1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000, 2010]
GRAPH_MARGIN_SIZE = 20
COLORS = ['red', 'purple', 'green', 'blue']
TEXT_DX = 2
LINE_WIDTH = 2
MAX_RANK = 1000
def get_x_coordinate(width, year_index):
    """
    Given the width of the canvas and a year from the YEARS list,
    returns the x coordinate of the vertical line associated with
    that year.

    Note: despite its name, year_index is the year itself (e.g. 1900),
    not a list index -- every caller passes YEARS[i]. The docstring
    previously claimed it was an index.

    Input:
        width (int): The width of the canvas
        year_index (int): A year appearing in the YEARS list
    Returns:
        x_coordinate (float): The x coordinate of the vertical line
                              associated with the specified year.
    """
    # Width of one decade column; derived from len(YEARS) and YEARS[0]
    # instead of the magic numbers 12 and 1900 (behavior unchanged).
    column_width = (width - GRAPH_MARGIN_SIZE * 2) // len(YEARS)
    decades_from_start = (year_index - YEARS[0]) / 10
    return GRAPH_MARGIN_SIZE + decades_from_start * column_width
def draw_fixed_lines(canvas):
    """
    Erases everything on the canvas, then redraws the static background:
    the top and bottom border lines plus one labeled vertical line per
    year in YEARS.

    Input:
        canvas (Tkinter Canvas): The canvas on which we are drawing.
    Returns:
        This function does not return any value.
    """
    canvas.delete('all')  # start from a blank canvas
    # Top and bottom horizontal border lines.
    canvas.create_line(GRAPH_MARGIN_SIZE, GRAPH_MARGIN_SIZE,
                       CANVAS_WIDTH - GRAPH_MARGIN_SIZE, GRAPH_MARGIN_SIZE)
    canvas.create_line(GRAPH_MARGIN_SIZE, CANVAS_HEIGHT - GRAPH_MARGIN_SIZE,
                       CANVAS_WIDTH - GRAPH_MARGIN_SIZE, CANVAS_HEIGHT - GRAPH_MARGIN_SIZE)
    # One vertical line per year.
    for year in YEARS:
        x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=year))
        canvas.create_line(x, 0, x, CANVAS_HEIGHT)
    # Year labels along the bottom margin.
    for year in YEARS:
        x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=year))
        canvas.create_text(x, CANVAS_HEIGHT - GRAPH_MARGIN_SIZE,
                           text=year, anchor=tkinter.NW, font='times 10')
def draw_names(canvas, name_data, lookup_names):
"""
Given a dict of baby name data and a list of name, plots
the historical trend of those names onto the canvas.
Input:
canvas (Tkinter Canvas): The canvas on which we are drawing.
name_data (dict): Dictionary holding baby name data
lookup_names (List[str]): A list of names whose data you want to plot
Returns:
This function does not return any value.
"""
draw_fixed_lines(canvas) # draw the fixed background grid
# Write your code below this line
#################################
y_begin = int(GRAPH_MARGIN_SIZE)
average = (CANVAS_HEIGHT - GRAPH_MARGIN_SIZE*2)/1000
for i in range(len(lookup_names)):
for j in range(len(YEARS)-1): # len(YEARS)-1
print("-----------------------")
print(len(YEARS))
ls = list(name_data[lookup_names[i]])
print(ls)
# Create a switch. When the rank of year isn't recorded, the switch will be changed into True.
missed = False
# Create a switch. When the rank of the next year isn't recorded, the switch will be changed into True.
next_missed = False
# Change switch into True, if the rank of the year isn't recorded.
if str(YEARS[j]) in ls:
print('yes, with record') # PROCESSED!
else:
print('no, data missed!')
missed = True
# Change switch into True, if the rank of the next year isn't recorded.
if str(YEARS[j+1]) in ls:
print('yes, next year with record') # PROCESSED!
else:
print('no, next year data missed!')
next_missed = True
# Adjust color of the line.
color_num = i
if color_num > len(COLORS)-1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
if missed == True: # The data of first year is missed.
print('###################')
print('This is missed-if')
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
print('x: ')
print(x)
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS) - 1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
print('x_next: ')
print(x_next) # 100
if next_missed is True: # Data in the first year is missed, and that of next year is missed.
# Create a line on the canvas.
canvas.create_line(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, x_next, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' * ')
print(name_and_rank)
canvas.create_text(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, text=name_and_rank, anchor=tkinter.SW, font='times 10', fill=color)
else: # Data in the first year is missed, but that of next year isn't missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS) - 1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the next year, and then compute y coordinate of the next year.
rank_of_next_year = 0
if j == (len(YEARS) - 1):
pass
else:
rank_of_next_year = int(name_data[lookup_names[i]][str(YEARS[j + 1])])
# Y coordinate of the next year.
y_next = int(y_begin + average * rank_of_next_year)
# Create the line on the canvas.
canvas.create_line(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, x_next, y_next, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' * ')
canvas.create_text(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, text=name_and_rank, anchor=tkinter.SW, font='times 10', fill=color)
else: # Data in the first year isn't missed.
if next_missed is True: # Data in the first year isn't missed, but that of next year is missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS)-1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the year, and then compute the y coordinate of the year.
rank_of_year = int(name_data[lookup_names[i]][str(YEARS[j])])
# Y coordinate of the year.
y = int(y_begin + average * rank_of_year)
# Adjust color of the line.
color_num = i
if color_num > len(COLORS) - 1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
# Add the line to the canvas.
canvas.create_line(x, y, x_next, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' ' + name_data[lookup_names[i]][str(YEARS[j])])
canvas.create_text(x+TEXT_DX, y, text=name_and_rank, anchor=tkinter.NW, font='times 10', fill=color)
else: # Data in the first year isn't missed, and that of next year isn't missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS)-1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the year, and then compute the y coordinate of the year.
rank_of_year = int(name_data[lookup_names[i]][str(YEARS[j])])
# Y coordinate of the year.
y = int(y_begin + average * rank_of_year)
# Count the rank of the next year, and then compute y coordinate of the next year.
rank_of_next_year = 0
if j == (len(YEARS)-1):
pass
else:
rank_of_next_year = int(name_data[lookup_names[i]][str(YEARS[j+1])])
# Y coordinate of the next year.
y_next = int(y_begin + average * rank_of_next_year)
# Adjust color of the line.
color_num = i
if color_num > len(COLORS) - 1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
# Add the line to the canvas.
canvas.create_line(x, y, x_next, y_next, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' ' + name_data[lookup_names[i]][str(YEARS[j])])
canvas.create_text(x+TEXT_DX, y, text=name_and_rank, anchor=tkinter.NW, font='times 10', fill=color)
# main() code is provided, feel free to read through it but DO NOT MODIFY
if __name__ == '__main__':
main()
| 40.463415 | 145 | 0.563765 | """
SC101 Baby Names Project
Adapted from Nick Parlante's Baby Names assignment by
Jerry Liao.
YOUR DESCRIPTION HERE
"""
import tkinter
import babynames
import babygraphicsgui as gui
FILENAMES = [
'data/full/baby-1900.txt', 'data/full/baby-1910.txt',
'data/full/baby-1920.txt', 'data/full/baby-1930.txt',
'data/full/baby-1940.txt', 'data/full/baby-1950.txt',
'data/full/baby-1960.txt', 'data/full/baby-1970.txt',
'data/full/baby-1980.txt', 'data/full/baby-1990.txt',
'data/full/baby-2000.txt', 'data/full/baby-2010.txt'
]
CANVAS_WIDTH = 1000
CANVAS_HEIGHT = 600
YEARS = [1900, 1910, 1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000, 2010]
GRAPH_MARGIN_SIZE = 20
COLORS = ['red', 'purple', 'green', 'blue']
TEXT_DX = 2
LINE_WIDTH = 2
MAX_RANK = 1000
def get_x_coordinate(width, year_index):
"""
Given the width of the canvas and the index of the current year
in the YEARS list, returns the x coordinate of the vertical
line associated with that year.
Input:
width (int): The width of the canvas
year_index (int): The index of the current year in the YEARS list
Returns:
x_coordinate (int): The x coordinate of the vertical line associated
with the specified year.
"""
average = (width - GRAPH_MARGIN_SIZE*2) // 12
x_coordinate = GRAPH_MARGIN_SIZE + ((year_index - 1900)/10) * average
return x_coordinate
def draw_fixed_lines(canvas):
"""
Erases all existing information on the given canvas and then
draws the fixed background lines on it.
Input:
canvas (Tkinter Canvas): The canvas on which we are drawing.
Returns:
This function does not return any value.
"""
canvas.delete('all') # delete all existing lines from the canvas
# Write your code below this line
#################################
# Create two horizontal lines to the canvas.
canvas.create_line(GRAPH_MARGIN_SIZE, GRAPH_MARGIN_SIZE, CANVAS_WIDTH - GRAPH_MARGIN_SIZE, GRAPH_MARGIN_SIZE)
canvas.create_line(GRAPH_MARGIN_SIZE, CANVAS_HEIGHT - GRAPH_MARGIN_SIZE, CANVAS_WIDTH - GRAPH_MARGIN_SIZE,
CANVAS_HEIGHT - GRAPH_MARGIN_SIZE)
# Add vertical lines to the canvas.
for i in range(len(YEARS)): # len(YEARS)-1
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[i]))
canvas.create_line(x, 0, x, CANVAS_HEIGHT)
# Add years caption to the canvas.
for i in range(len(YEARS)): # len(YEARS)-1
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[i]))
canvas.create_text(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, text=YEARS[i], anchor=tkinter.NW, font='times 10')
def draw_names(canvas, name_data, lookup_names):
"""
Given a dict of baby name data and a list of name, plots
the historical trend of those names onto the canvas.
Input:
canvas (Tkinter Canvas): The canvas on which we are drawing.
name_data (dict): Dictionary holding baby name data
lookup_names (List[str]): A list of names whose data you want to plot
Returns:
This function does not return any value.
"""
draw_fixed_lines(canvas) # draw the fixed background grid
# Write your code below this line
#################################
y_begin = int(GRAPH_MARGIN_SIZE)
average = (CANVAS_HEIGHT - GRAPH_MARGIN_SIZE*2)/1000
for i in range(len(lookup_names)):
for j in range(len(YEARS)-1): # len(YEARS)-1
print("-----------------------")
print(len(YEARS))
ls = list(name_data[lookup_names[i]])
print(ls)
# Create a switch. When the rank of year isn't recorded, the switch will be changed into True.
missed = False
# Create a switch. When the rank of the next year isn't recorded, the switch will be changed into True.
next_missed = False
# Change switch into True, if the rank of the year isn't recorded.
if str(YEARS[j]) in ls:
print('yes, with record') # PROCESSED!
else:
print('no, data missed!')
missed = True
# Change switch into True, if the rank of the next year isn't recorded.
if str(YEARS[j+1]) in ls:
print('yes, next year with record') # PROCESSED!
else:
print('no, next year data missed!')
next_missed = True
# Adjust color of the line.
color_num = i
if color_num > len(COLORS)-1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
if missed == True: # The data of first year is missed.
print('###################')
print('This is missed-if')
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
print('x: ')
print(x)
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS) - 1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
print('x_next: ')
print(x_next) # 100
if next_missed is True: # Data in the first year is missed, and that of next year is missed.
# Create a line on the canvas.
canvas.create_line(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, x_next, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' * ')
print(name_and_rank)
canvas.create_text(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, text=name_and_rank, anchor=tkinter.SW, font='times 10', fill=color)
else: # Data in the first year is missed, but that of next year isn't missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS) - 1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the next year, and then compute y coordinate of the next year.
rank_of_next_year = 0
if j == (len(YEARS) - 1):
pass
else:
rank_of_next_year = int(name_data[lookup_names[i]][str(YEARS[j + 1])])
# Y coordinate of the next year.
y_next = int(y_begin + average * rank_of_next_year)
# Create the line on the canvas.
canvas.create_line(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, x_next, y_next, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' * ')
canvas.create_text(x, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, text=name_and_rank, anchor=tkinter.SW, font='times 10', fill=color)
else: # Data in the first year isn't missed.
if next_missed is True: # Data in the first year isn't missed, but that of next year is missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS)-1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the year, and then compute the y coordinate of the year.
rank_of_year = int(name_data[lookup_names[i]][str(YEARS[j])])
# Y coordinate of the year.
y = int(y_begin + average * rank_of_year)
# Adjust color of the line.
color_num = i
if color_num > len(COLORS) - 1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
# Add the line to the canvas.
canvas.create_line(x, y, x_next, CANVAS_HEIGHT-GRAPH_MARGIN_SIZE, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' ' + name_data[lookup_names[i]][str(YEARS[j])])
canvas.create_text(x+TEXT_DX, y, text=name_and_rank, anchor=tkinter.NW, font='times 10', fill=color)
else: # Data in the first year isn't missed, and that of next year isn't missed.
# X coordinate of the year.
x = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j]))
# X coordinate of the next year.
x_next = 0
if j == (len(YEARS)-1):
pass
else:
x_next = int(get_x_coordinate(width=CANVAS_WIDTH, year_index=YEARS[j + 1]))
# Count the rank of the year, and then compute the y coordinate of the year.
rank_of_year = int(name_data[lookup_names[i]][str(YEARS[j])])
# Y coordinate of the year.
y = int(y_begin + average * rank_of_year)
# Count the rank of the next year, and then compute y coordinate of the next year.
rank_of_next_year = 0
if j == (len(YEARS)-1):
pass
else:
rank_of_next_year = int(name_data[lookup_names[i]][str(YEARS[j+1])])
# Y coordinate of the next year.
y_next = int(y_begin + average * rank_of_next_year)
# Adjust color of the line.
color_num = i
if color_num > len(COLORS) - 1:
color_num = color_num % len(COLORS)
color = COLORS[color_num]
# Add the line to the canvas.
canvas.create_line(x, y, x_next, y_next, width=LINE_WIDTH, fill=color)
# Add name and rank to the canvas.
name_and_rank = str(lookup_names[i] + ' ' + name_data[lookup_names[i]][str(YEARS[j])])
canvas.create_text(x+TEXT_DX, y, text=name_and_rank, anchor=tkinter.NW, font='times 10', fill=color)
# main() code is provided, feel free to read through it but DO NOT MODIFY
def main():
# Load data
name_data = babynames.read_files(FILENAMES)
# Create the window and the canvas
top = tkinter.Tk()
top.wm_title('Baby Names')
canvas = gui.make_gui(top, CANVAS_WIDTH, CANVAS_HEIGHT, name_data, draw_names, babynames.search_names)
# Call draw_fixed_lines() once at startup so we have the lines
# even before the user types anything.
draw_fixed_lines(canvas)
# This line starts the graphical loop that is responsible for
# processing user interactions and plotting data
top.mainloop()
if __name__ == '__main__':
main()
| 534 | 0 | 22 |
a88035b4dc8f36658950702134aeccfd01bff7ba | 301 | py | Python | helpers.py | Silve1ra/casting-agency-api | 075ac2d465972b84a95fbd3e0eb0823ec33a867c | [
"MIT"
] | null | null | null | helpers.py | Silve1ra/casting-agency-api | 075ac2d465972b84a95fbd3e0eb0823ec33a867c | [
"MIT"
] | null | null | null | helpers.py | Silve1ra/casting-agency-api | 075ac2d465972b84a95fbd3e0eb0823ec33a867c | [
"MIT"
] | null | null | null | ITEMS_PER_PAGE = 10
| 23.153846 | 52 | 0.684385 | ITEMS_PER_PAGE = 10
def paginate_items(request, selection):
page = request.args.get('page', 1, type=int)
start = (page - 1) * ITEMS_PER_PAGE
end = start + ITEMS_PER_PAGE
items = [item.serialize() for item in selection]
current_items = items[start:end]
return current_items
| 257 | 0 | 23 |
55e865bd9941f60900fd8edb25559a00824f8688 | 557 | py | Python | Path_finding_visualization using bfs/bfs.py | arymandeshwal/Path-Finding-Visualization | 9f79eb2b9cc9cf2e1515582a1bc71f5b4a657437 | [
"MIT"
] | 1 | 2020-06-30T15:13:14.000Z | 2020-06-30T15:13:14.000Z | Path_finding_visualization using bfs/bfs.py | arymandeshwal/Path-Finding-Visualization | 9f79eb2b9cc9cf2e1515582a1bc71f5b4a657437 | [
"MIT"
] | null | null | null | Path_finding_visualization using bfs/bfs.py | arymandeshwal/Path-Finding-Visualization | 9f79eb2b9cc9cf2e1515582a1bc71f5b4a657437 | [
"MIT"
] | null | null | null | import collections
wall, goal = 1, 3
width_g, height_g = 28, 28
| 26.52381 | 107 | 0.479354 | import collections
wall, goal = 1, 3
width_g, height_g = 28, 28
def bfs_algo(grid,start):
queue = collections.deque([[start]])
seen = set([start])
while queue:
path = queue.popleft()
x, y = path[-1]
if grid[x][y] == goal:
return path
for x2, y2 in ((x+1,y), (x-1,y), (x,y+1), (x,y-1)):
if 0 <= x2 < width_g and 0 <= y2 < height_g and grid[x2][y2] != wall and (x2, y2) not in seen:
queue.append(path + [(x2, y2)])
seen.add((x2, y2))
| 457 | 0 | 25 |
7c708ec1bf56a9b05c8cb0427ab9fe67a29e9919 | 17,436 | py | Python | public/Python27/Lib/distutils/command/sdist.py | NingrumFadillah/cekmutasi | 1fccb6cafb874c2a80ece9b71d7c682fd44dbd48 | [
"MIT"
] | 1 | 2020-11-26T18:53:46.000Z | 2020-11-26T18:53:46.000Z | public/Python27/Lib/distutils/command/sdist.py | NingrumFadillah/cekmutasi | 1fccb6cafb874c2a80ece9b71d7c682fd44dbd48 | [
"MIT"
] | null | null | null | public/Python27/Lib/distutils/command/sdist.py | NingrumFadillah/cekmutasi | 1fccb6cafb874c2a80ece9b71d7c682fd44dbd48 | [
"MIT"
] | 3 | 2017-04-07T12:02:22.000Z | 2020-03-23T12:11:55.000Z | """distutils.command.sdist
Implements the Distutils 'sdist' command (create a source distribution)."""
__revision__ = "$Id: sdist.py 81261 2010-05-17 10:54:43Z tarek.ziade $"
import os
import string
import sys
from glob import glob
from warnings import warn
from distutils.core import Command
from distutils import dir_util, dep_util, file_util, archive_util
from distutils.text_file import TextFile
from distutils.errors import (DistutilsPlatformError, DistutilsOptionError,
DistutilsTemplateError)
from distutils.filelist import FileList
from distutils import log
from distutils.util import convert_path
def show_formats():
"""Print all possible values for the 'formats' option (used by
the "--help-formats" command-line option).
"""
from distutils.fancy_getopt import FancyGetopt
from distutils.archive_util import ARCHIVE_FORMATS
formats = []
for format in ARCHIVE_FORMATS.keys():
formats.append(("formats=" + format, None,
ARCHIVE_FORMATS[format][2]))
formats.sort()
FancyGetopt(formats).print_help(
"List of available source distribution formats:")
| 38.832962 | 78 | 0.582014 | """distutils.command.sdist
Implements the Distutils 'sdist' command (create a source distribution)."""
__revision__ = "$Id: sdist.py 81261 2010-05-17 10:54:43Z tarek.ziade $"
import os
import string
import sys
from glob import glob
from warnings import warn
from distutils.core import Command
from distutils import dir_util, dep_util, file_util, archive_util
from distutils.text_file import TextFile
from distutils.errors import (DistutilsPlatformError, DistutilsOptionError,
DistutilsTemplateError)
from distutils.filelist import FileList
from distutils import log
from distutils.util import convert_path
def show_formats():
"""Print all possible values for the 'formats' option (used by
the "--help-formats" command-line option).
"""
from distutils.fancy_getopt import FancyGetopt
from distutils.archive_util import ARCHIVE_FORMATS
formats = []
for format in ARCHIVE_FORMATS.keys():
formats.append(("formats=" + format, None,
ARCHIVE_FORMATS[format][2]))
formats.sort()
FancyGetopt(formats).print_help(
"List of available source distribution formats:")
class sdist(Command):
description = "create a source distribution (tarball, zip file, etc.)"
def checking_metadata(self):
"""Callable used for the check sub-command.
Placed here so user_options can view it"""
return self.metadata_check
user_options = [
('template=', 't',
"name of manifest template file [default: MANIFEST.in]"),
('manifest=', 'm',
"name of manifest file [default: MANIFEST]"),
('use-defaults', None,
"include the default file set in the manifest "
"[default; disable with --no-defaults]"),
('no-defaults', None,
"don't include the default file set"),
('prune', None,
"specifically exclude files/directories that should not be "
"distributed (build tree, RCS/CVS dirs, etc.) "
"[default; disable with --no-prune]"),
('no-prune', None,
"don't automatically exclude anything"),
('manifest-only', 'o',
"just regenerate the manifest and then stop "
"(implies --force-manifest)"),
('force-manifest', 'f',
"forcibly regenerate the manifest and carry on as usual. "
"Deprecated: now the manifest is always regenerated."),
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
('medata-check', None,
"Ensure that all required elements of meta-data "
"are supplied. Warn if any missing. [default]"),
('owner=', 'u',
"Owner name used when creating a tar file [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file [default: current group]"),
]
boolean_options = ['use-defaults', 'prune',
'manifest-only', 'force-manifest',
'keep-temp', 'metadata-check']
help_options = [
('help-formats', None,
"list available distribution formats", show_formats),
]
negative_opt = {'no-defaults': 'use-defaults',
'no-prune': 'prune' }
default_format = {'posix': 'gztar',
'nt': 'zip' }
sub_commands = [('check', checking_metadata)]
def initialize_options(self):
# 'template' and 'manifest' are, respectively, the names of
# the manifest template and manifest file.
self.template = None
self.manifest = None
# 'use_defaults': if true, we will include the default file set
# in the manifest
self.use_defaults = 1
self.prune = 1
self.manifest_only = 0
self.force_manifest = 0
self.formats = None
self.keep_temp = 0
self.dist_dir = None
self.archive_files = None
self.metadata_check = 1
self.owner = None
self.group = None
def finalize_options(self):
if self.manifest is None:
self.manifest = "MANIFEST"
if self.template is None:
self.template = "MANIFEST.in"
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise DistutilsPlatformError, \
"don't know how to create source distributions " + \
"on platform %s" % os.name
bad_format = archive_util.check_archive_formats(self.formats)
if bad_format:
raise DistutilsOptionError, \
"unknown archive format '%s'" % bad_format
if self.dist_dir is None:
self.dist_dir = "dist"
def run(self):
# 'filelist' contains the list of files that will make up the
# manifest
self.filelist = FileList()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# Do whatever it takes to get the list of files to process
# (process the manifest template, read an existing manifest,
# whatever). File list is accumulated in 'self.filelist'.
self.get_file_list()
# If user just wanted us to regenerate the manifest, stop now.
if self.manifest_only:
return
# Otherwise, go ahead and create the source distribution tarball,
# or zipfile, or whatever.
self.make_distribution()
def check_metadata(self):
"""Deprecated API."""
warn("distutils.command.sdist.check_metadata is deprecated, \
use the check command instead", PendingDeprecationWarning)
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.run()
def get_file_list(self):
"""Figure out the list of files to include in the source
distribution, and put it in 'self.filelist'. This might involve
reading the manifest template (and writing the manifest), or just
reading the manifest, or just using the default file set -- it all
depends on the user's options.
"""
# new behavior:
# the file list is recalculated everytime because
# even if MANIFEST.in or setup.py are not changed
# the user might have added some files in the tree that
# need to be included.
#
# This makes --force the default and only behavior.
template_exists = os.path.isfile(self.template)
if not template_exists:
self.warn(("manifest template '%s' does not exist " +
"(using default file list)") %
self.template)
self.filelist.findall()
if self.use_defaults:
self.add_defaults()
if template_exists:
self.read_template()
if self.prune:
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest()
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
standards = [('README', 'README.txt'), self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = 0
for fn in alts:
if os.path.exists(fn):
got_it = 1
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
string.join(alts, ', '))
else:
if os.path.exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = filter(os.path.isfile, glob(pattern))
if files:
self.filelist.extend(files)
# build_py is used to get:
# - python modules
# - files defined in package_data
build_py = self.get_finalized_command('build_py')
# getting python files
if self.distribution.has_pure_modules():
self.filelist.extend(build_py.get_source_files())
# getting package_data files
# (computed in build_py.data_files by build_py.finalize_options)
for pkg, src_dir, build_dir, filenames in build_py.data_files:
for filename in filenames:
self.filelist.append(os.path.join(src_dir, filename))
# getting distribution.data_files
if self.distribution.has_data_files():
for item in self.distribution.data_files:
if isinstance(item, str): # plain file
item = convert_path(item)
if os.path.isfile(item):
self.filelist.append(item)
else: # a (dirname, filenames) tuple
dirname, filenames = item
for f in filenames:
f = convert_path(f)
if os.path.isfile(f):
self.filelist.append(f)
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
def read_template(self):
"""Read and parse manifest template file named by self.template.
(usually "MANIFEST.in") The parsing and processing is done by
'self.filelist', which updates itself accordingly.
"""
log.info("reading manifest template '%s'", self.template)
template = TextFile(self.template,
strip_comments=1,
skip_blanks=1,
join_lines=1,
lstrip_ws=1,
rstrip_ws=1,
collapse_join=1)
while 1:
line = template.readline()
if line is None: # end of file
break
try:
self.filelist.process_template_line(line)
except DistutilsTemplateError, msg:
self.warn("%s, line %d: %s" % (template.filename,
template.current_line,
msg))
def prune_file_list(self):
"""Prune off branches that might slip into the file list as created
by 'read_template()', but really don't belong there:
* the build tree (typically "build")
* the release tree itself (only an issue if we ran "sdist"
previously with --keep-temp, or it aborted)
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
"""
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
# pruning out vcs directories
# both separators are used under win32
if sys.platform == 'win32':
seps = r'/|\\'
else:
seps = '/'
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
'_darcs']
vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
named by 'self.manifest'.
"""
self.execute(file_util.write_file,
(self.manifest, self.filelist.files),
"writing manifest file '%s'" % self.manifest)
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest)
while 1:
line = manifest.readline()
if line == '': # end of file
break
if line[-1] == '\n':
line = line[0:-1]
self.filelist.append(line)
manifest.close()
def make_release_tree(self, base_dir, files):
"""Create the directory tree that will become the source
distribution archive. All directories implied by the filenames in
'files' are created under 'base_dir', and then we hard link or copy
(if hard linking is unavailable) those files into place.
Essentially, this duplicates the developer's source tree, but in a
directory named after the distribution, containing only the files
to be distributed.
"""
# Create all the directories under 'base_dir' necessary to
# put 'files' there; the 'mkpath()' is just so we don't die
# if the manifest happens to be empty.
self.mkpath(base_dir)
dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
# And walk over the list of files, either making a hard link (if
# os.link exists) to each one that doesn't already exist in its
# corresponding location under 'base_dir', or copying each file
# that's out-of-date in 'base_dir'. (Usually, all files will be
# out-of-date, because by default we blow away 'base_dir' when
# we're done making the distribution archives.)
if hasattr(os, 'link'): # can make hard links on this system
link = 'hard'
msg = "making hard links in %s..." % base_dir
else: # nope, have to copy
link = None
msg = "copying files to %s..." % base_dir
if not files:
log.warn("no files to distribute -- empty manifest?")
else:
log.info(msg)
for file in files:
if not os.path.isfile(file):
log.warn("'%s' not a regular file -- skipping" % file)
else:
dest = os.path.join(base_dir, file)
self.copy_file(file, dest, link=link)
self.distribution.metadata.write_pkg_info(base_dir)
def make_distribution(self):
"""Create the source distribution(s). First, we create the release
tree with 'make_release_tree()'; then, we create all required
archive files (according to 'self.formats') from the release tree.
Finally, we clean up by blowing away the release tree (unless
'self.keep_temp' is true). The list of archive files created is
stored so it can be retrieved later by 'get_archive_files()'.
"""
# Don't warn about missing meta-data here -- should be (and is!)
# done elsewhere.
base_dir = self.distribution.get_fullname()
base_name = os.path.join(self.dist_dir, base_dir)
self.make_release_tree(base_dir, self.filelist.files)
archive_files = [] # remember names of files we create
# tar archive must be created last to avoid overwrite and remove
if 'tar' in self.formats:
self.formats.append(self.formats.pop(self.formats.index('tar')))
for fmt in self.formats:
file = self.make_archive(base_name, fmt, base_dir=base_dir,
owner=self.owner, group=self.group)
archive_files.append(file)
self.distribution.dist_files.append(('sdist', '', file))
self.archive_files = archive_files
if not self.keep_temp:
dir_util.remove_tree(base_dir, dry_run=self.dry_run)
def get_archive_files(self):
"""Return the list of archive files created when the command
was run, or None if the command hasn't run yet.
"""
return self.archive_files
| 2,115 | 14,132 | 23 |
5294b56b586f084d81b078c81b47f955787e0b01 | 136 | py | Python | h5py/tests/types/__init__.py | qsnake/h5py | 45e77c3798032de2f740414a9e014fbca8c0ac18 | [
"BSD-3-Clause"
] | null | null | null | h5py/tests/types/__init__.py | qsnake/h5py | 45e77c3798032de2f740414a9e014fbca8c0ac18 | [
"BSD-3-Clause"
] | null | null | null | h5py/tests/types/__init__.py | qsnake/h5py | 45e77c3798032de2f740414a9e014fbca8c0ac18 | [
"BSD-3-Clause"
] | 8 | 2018-07-05T22:16:08.000Z | 2021-08-19T06:07:45.000Z |
"""
Type and data-conversion test package.
Tests the following:
1) HDF5 to NumPy type mapping
2) Data conversion
"""
| 13.6 | 42 | 0.639706 |
"""
Type and data-conversion test package.
Tests the following:
1) HDF5 to NumPy type mapping
2) Data conversion
"""
| 0 | 0 | 0 |
7b4436910fc762d59fcccf6a3d6b95104887537f | 123 | py | Python | Code/Python2.7/10one-n.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 1 | 2019-11-21T15:56:03.000Z | 2019-11-21T15:56:03.000Z | Code/Python2.7/10one-n.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 12 | 2019-11-21T21:00:57.000Z | 2022-02-27T01:46:56.000Z | Code/Python2.7/10one-n.py | nicholasz2510/General | e2783cad4da7f9b50c952c2b91ef311d22b1d56f | [
"MIT"
] | 1 | 2019-11-21T20:49:18.000Z | 2019-11-21T20:49:18.000Z | n = int(raw_input("What would you like to use for n? "))
answer = 0
for x in range(1, n+1):
answer += x
print answer
| 15.375 | 56 | 0.626016 | n = int(raw_input("What would you like to use for n? "))
# Accumulate the sum 1 + 2 + ... + n (closed form would be n*(n+1)//2).
answer = 0
for x in range(1, n+1):
    answer += x
# Python 2 print statement: show the computed total.
print answer
| 0 | 0 | 0 |
273ad74acb2645b250bf948830ca59ebaea78cb2 | 2,770 | py | Python | picture.py | Jacksonlakehhs/Picture | 6a9ceefe659c17034b9a0b8c9e984d1a38fc3dd1 | [
"MIT"
] | null | null | null | picture.py | Jacksonlakehhs/Picture | 6a9ceefe659c17034b9a0b8c9e984d1a38fc3dd1 | [
"MIT"
] | null | null | null | picture.py | Jacksonlakehhs/Picture | 6a9ceefe659c17034b9a0b8c9e984d1a38fc3dd1 | [
"MIT"
] | null | null | null | """
picture.py
Author: Jackson Lake
Credit: HHS page Github Tutorial
Assignment:
Use the ggame library to "paint" a graphical picture of something (e.g. a house, a face or landscape).
Use at least:
1. Three different Color objects.
2. Ten different Sprite objects.
3. One (or more) RectangleAsset objects.
4. One (or more) CircleAsset objects.
5. One (or more) EllipseAsset objects.
6. One (or more) PolygonAsset objects.
See:
https://github.com/HHS-IntroProgramming/Standards-and-Syllabus/wiki/Displaying-Graphics
for general information on how to use ggame.
See:
http://brythonserver.github.io/ggame/
for detailed information on ggame.
"""
from ggame import App, Color, LineStyle, Sprite, RectangleAsset, CircleAsset, EllipseAsset, PolygonAsset
# add your code here \/ \/ \/
from ggame import App, Color, LineStyle, Sprite
from ggame import RectangleAsset, CircleAsset, EllipseAsset, PolygonAsset
# --- Palette: solid fills plus three translucent reds used for the rings.
red = Color(0xff0000, 1.0)
green = Color(0x00ff00, 1.0)
blue = Color(0x0000ff, 1.0)
black = Color(0x000000, 1.0)
grey = Color(0xCDC0B0, 1.0)
firebrick1 = Color(0xFF3030, 1.0)
purple = Color(0xBF3EFF, 1.0)
gold = Color(0xFFD700, 1.0)
fade1 = Color(0xff0000, 0.6)  # alpha fades outward: 0.6 -> 0.4 -> 0.2
fade2 = Color(0xff0000, 0.4)
fade3 = Color(0xff0000, 0.2)
white = Color(0xF8F8FF, 1.0)
violet = Color(0xd147c5, 1.0)
# --- Outline styles shared by the assets below.
thinline = LineStyle(1, black)
thinner = LineStyle(.4, red)
# --- Shape assets for the figure (sizes only; positions are set per Sprite).
head = RectangleAsset(120, 100, thinline, grey)
neck = RectangleAsset(40, 28, thinline, grey)
body = RectangleAsset(200, 200, thinline, grey)
leg1 = RectangleAsset(45, 90, thinline, grey)
leg2 = RectangleAsset(45, 90, thinline, grey)
eye1 = CircleAsset(15, thinline, firebrick1)
eye2 = CircleAsset(15, thinline, firebrick1)
shoulder1 = CircleAsset(20, thinline, grey)
shoulder2 = CircleAsset(20, thinline, grey)
arm1 = RectangleAsset(100, 40, thinline, grey)
arm2 = RectangleAsset(100, 40, thinline, grey)
antenna = EllipseAsset(5, 40, thinline, purple)
mouth = EllipseAsset(30, 8, thinline, gold)
lip = RectangleAsset(59, 1, thinline, black)
# wave1..3: growing translucent circles, presumably signal rings by the antenna.
wave1 = CircleAsset(10, thinner, fade1)
wave2 = CircleAsset(25, thinner, fade2)
wave3 = CircleAsset(42, thinner, fade3)
emblem = CircleAsset(37, thinline)  # outline only -- no fill argument given
design = PolygonAsset([(0,0), (20, 50), (40,0)], thinline, violet)
# --- Place each asset on screen at fixed coordinates.
Sprite(antenna, (485, 65))
Sprite(head, (432, 100))
Sprite(neck, (470, 200))
Sprite(body, (400, 228))
Sprite(leg1, (400, 428))
Sprite(leg2, (555, 428))
Sprite(eye1, (440, 115))
Sprite(eye2, (510, 115))
Sprite(arm1, (600, 228))
Sprite(arm2, (300, 228))
Sprite(shoulder1, (580, 228))
Sprite(shoulder2, (380, 228))
Sprite(mouth, (460, 165))
Sprite(lip, (460, 173))
Sprite(wave1, (480, 60))
Sprite(wave2, (465, 43))
Sprite(wave3, (447, 26))
Sprite(emblem, (465, 260))
Sprite(design, (480, 275))
# add your code here /\ /\ /\
myapp = App()
myapp.run()  # hand control to ggame's event loop; draws everything above
| 28.556701 | 104 | 0.720939 | """
picture.py
Author: Jackson Lake
Credit: HHS page Github Tutorial
Assignment:
Use the ggame library to "paint" a graphical picture of something (e.g. a house, a face or landscape).
Use at least:
1. Three different Color objects.
2. Ten different Sprite objects.
3. One (or more) RectangleAsset objects.
4. One (or more) CircleAsset objects.
5. One (or more) EllipseAsset objects.
6. One (or more) PolygonAsset objects.
See:
https://github.com/HHS-IntroProgramming/Standards-and-Syllabus/wiki/Displaying-Graphics
for general information on how to use ggame.
See:
http://brythonserver.github.io/ggame/
for detailed information on ggame.
"""
from ggame import App, Color, LineStyle, Sprite, RectangleAsset, CircleAsset, EllipseAsset, PolygonAsset
# add your code here \/ \/ \/
from ggame import App, Color, LineStyle, Sprite
from ggame import RectangleAsset, CircleAsset, EllipseAsset, PolygonAsset
red = Color(0xff0000, 1.0)
green = Color(0x00ff00, 1.0)
blue = Color(0x0000ff, 1.0)
black = Color(0x000000, 1.0)
grey = Color(0xCDC0B0, 1.0)
firebrick1 = Color(0xFF3030, 1.0)
purple = Color(0xBF3EFF, 1.0)
gold = Color(0xFFD700, 1.0)
fade1 = Color(0xff0000, 0.6)
fade2 = Color(0xff0000, 0.4)
fade3 = Color(0xff0000, 0.2)
white = Color(0xF8F8FF, 1.0)
violet = Color(0xd147c5, 1.0)
thinline = LineStyle(1, black)
thinner = LineStyle(.4, red)
head = RectangleAsset(120, 100, thinline, grey)
neck = RectangleAsset(40, 28, thinline, grey)
body = RectangleAsset(200, 200, thinline, grey)
leg1 = RectangleAsset(45, 90, thinline, grey)
leg2 = RectangleAsset(45, 90, thinline, grey)
eye1 = CircleAsset(15, thinline, firebrick1)
eye2 = CircleAsset(15, thinline, firebrick1)
shoulder1 = CircleAsset(20, thinline, grey)
shoulder2 = CircleAsset(20, thinline, grey)
arm1 = RectangleAsset(100, 40, thinline, grey)
arm2 = RectangleAsset(100, 40, thinline, grey)
antenna = EllipseAsset(5, 40, thinline, purple)
mouth = EllipseAsset(30, 8, thinline, gold)
lip = RectangleAsset(59, 1, thinline, black)
wave1 = CircleAsset(10, thinner, fade1)
wave2 = CircleAsset(25, thinner, fade2)
wave3 = CircleAsset(42, thinner, fade3)
emblem = CircleAsset(37, thinline)
design = PolygonAsset([(0,0), (20, 50), (40,0)], thinline, violet)
Sprite(antenna, (485, 65))
Sprite(head, (432, 100))
Sprite(neck, (470, 200))
Sprite(body, (400, 228))
Sprite(leg1, (400, 428))
Sprite(leg2, (555, 428))
Sprite(eye1, (440, 115))
Sprite(eye2, (510, 115))
Sprite(arm1, (600, 228))
Sprite(arm2, (300, 228))
Sprite(shoulder1, (580, 228))
Sprite(shoulder2, (380, 228))
Sprite(mouth, (460, 165))
Sprite(lip, (460, 173))
Sprite(wave1, (480, 60))
Sprite(wave2, (465, 43))
Sprite(wave3, (447, 26))
Sprite(emblem, (465, 260))
Sprite(design, (480, 275))
# add your code here /\ /\ /\
myapp = App()
myapp.run()
| 0 | 0 | 0 |
ed66809bed2e3426e2ba9136f081886e839ddd02 | 928 | py | Python | project/migrations/0012_auto_20190505_1442.py | abhishekm47/restaurant-django | 56993f2269e27c9b932b5f172cdf4db1e95292aa | [
"MIT"
] | null | null | null | project/migrations/0012_auto_20190505_1442.py | abhishekm47/restaurant-django | 56993f2269e27c9b932b5f172cdf4db1e95292aa | [
"MIT"
] | null | null | null | project/migrations/0012_auto_20190505_1442.py | abhishekm47/restaurant-django | 56993f2269e27c9b932b5f172cdf4db1e95292aa | [
"MIT"
] | null | null | null | # Generated by Django 2.0 on 2019-05-05 09:12
import datetime
from django.db import migrations, models
| 30.933333 | 141 | 0.59375 | # Generated by Django 2.0 on 2019-05-05 09:12
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: redefines MenuItem.status, MenuItem.time and
    # Reservation.status with explicit choice sets / defaults.
    dependencies = [
        ('project', '0011_auto_20190425_1037'),
    ]
    operations = [
        migrations.AlterField(
            model_name='menuitem',
            name='status',
            field=models.CharField(choices=[('available', 'available'), ('unavailable', 'unavailable')], default='available', max_length=22),
        ),
        migrations.AlterField(
            model_name='menuitem',
            name='time',
            # NOTE(review): the default is the fixed timestamp captured when
            # 'makemigrations' ran -- it is NOT "now()" at runtime.
            field=models.TimeField(default=datetime.datetime(2019, 5, 5, 9, 12, 53, 461417)),
        ),
        migrations.AlterField(
            model_name='reservation',
            name='status',
            field=models.CharField(choices=[('pending', 'pending'), ('confirmed', 'confirmed')], default='pending', max_length=22),
        ),
    ]
| 0 | 800 | 23 |
918812d7ca13841d10ea04118a8e43bbf43c95de | 391 | py | Python | src/genie/libs/parser/iosxe/tests/ShowIpDhcpDatabase/cli/equal/golden1_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowIpDhcpDatabase/cli/equal/golden1_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowIpDhcpDatabase/cli/equal/golden1_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"url": {
"ftp://user:password@172.16.4.253/router-dhcp": {
"read": "Dec 01 1997 12:01 AM",
"written": "Never",
"status": "Last read succeeded. Bindings have been loaded in RAM.",
"delay_in_secs": 300,
"timeout_in_secs": 300,
"failures": 0,
"successes": 1,
}
}
}
| 26.066667 | 79 | 0.475703 | expected_output = {
"url": {
"ftp://user:password@172.16.4.253/router-dhcp": {
"read": "Dec 01 1997 12:01 AM",
"written": "Never",
"status": "Last read succeeded. Bindings have been loaded in RAM.",
"delay_in_secs": 300,
"timeout_in_secs": 300,
"failures": 0,
"successes": 1,
}
}
}
| 0 | 0 | 0 |
042f2035facd42e8d86e6aafc24339ffb88e9749 | 3,656 | py | Python | src/genie/libs/parser/iosxe/tests/ShowVlan/cli/equal/golden_output_vlan_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowVlan/cli/equal/golden_output_vlan_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowVlan/cli/equal/golden_output_vlan_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"vlans": {
"1": {
"vlan_id": "1",
"name": "default",
"state": "active",
"shutdown": False,
"mtu": 1500,
"said": 100001,
"trans1": 0,
"trans2": 0,
"type": "enet",
"interfaces": [
"GigabitEthernet1/0/1",
"GigabitEthernet1/0/2",
"GigabitEthernet1/0/3",
"GigabitEthernet1/0/5",
"GigabitEthernet1/0/6",
"GigabitEthernet1/0/12",
"GigabitEthernet1/0/13",
"GigabitEthernet1/0/14",
"GigabitEthernet1/0/15",
"GigabitEthernet1/0/16",
"GigabitEthernet1/0/17",
"GigabitEthernet1/0/18",
"GigabitEthernet1/0/19",
"GigabitEthernet1/0/20",
"GigabitEthernet1/0/21",
"GigabitEthernet1/0/22",
],
},
"2": {
"vlan_id": "2",
"name": "VLAN_0002",
"state": "active",
"shutdown": False,
"private_vlan": {"primary": True, "association": ["301", "302"]},
},
"301": {
"private_vlan": {
"primary": False,
"type": "community",
"ports": ["FastEthernet5/3", "FastEthernet5/25"],
}
},
"302": {"private_vlan": {"primary": False, "type": "community"}},
"10": {"private_vlan": {"primary": False, "type": "community"}},
"20": {
"vlan_id": "20",
"name": "VLAN-0020",
"shutdown": False,
"state": "active",
"remote_span_vlan": True,
"private_vlan": {"primary": True, "association": ["105"]},
},
"21": {"remote_span_vlan": True},
"24": {"remote_span_vlan": True},
"25": {"remote_span_vlan": True},
"26": {"remote_span_vlan": True},
"27": {"remote_span_vlan": True},
"105": {"private_vlan": {"primary": False, "type": "isolated"}},
"100": {
"vlan_id": "100",
"name": "V100",
"state": "suspend",
"shutdown": False,
"mtu": 1500,
"said": 100100,
"trans1": 0,
"trans2": 0,
"type": "enet",
"private_vlan": {"primary": True, "association": ["151"]},
},
"151": {"private_vlan": {"primary": False, "type": "non-operational"}},
"202": {"private_vlan": {"primary": False, "type": "community"}},
"303": {"private_vlan": {"primary": False, "type": "community"}},
"101": {
"vlan_id": "101",
"shutdown": False,
"name": "VLAN-0101",
"state": "active",
"mtu": 1500,
"said": 100101,
"trans1": 0,
"trans2": 0,
"type": "enet",
"private_vlan": {"primary": True, "association": ["402"]},
},
"402": {"private_vlan": {"primary": False, "type": "non-operational"}},
"102": {
"vlan_id": "102",
"shutdown": False,
"name": "VLAN_0102",
"state": "active",
"remote_span_vlan": True,
},
"103": {
"vlan_id": "103",
"shutdown": False,
"name": "VLAN-0103",
"state": "unsupport",
},
"104": {
"vlan_id": "104",
"name": "VLAN_0104",
"state": "shutdown",
"shutdown": True,
},
}
}
| 32.936937 | 79 | 0.408643 | expected_output = {
"vlans": {
"1": {
"vlan_id": "1",
"name": "default",
"state": "active",
"shutdown": False,
"mtu": 1500,
"said": 100001,
"trans1": 0,
"trans2": 0,
"type": "enet",
"interfaces": [
"GigabitEthernet1/0/1",
"GigabitEthernet1/0/2",
"GigabitEthernet1/0/3",
"GigabitEthernet1/0/5",
"GigabitEthernet1/0/6",
"GigabitEthernet1/0/12",
"GigabitEthernet1/0/13",
"GigabitEthernet1/0/14",
"GigabitEthernet1/0/15",
"GigabitEthernet1/0/16",
"GigabitEthernet1/0/17",
"GigabitEthernet1/0/18",
"GigabitEthernet1/0/19",
"GigabitEthernet1/0/20",
"GigabitEthernet1/0/21",
"GigabitEthernet1/0/22",
],
},
"2": {
"vlan_id": "2",
"name": "VLAN_0002",
"state": "active",
"shutdown": False,
"private_vlan": {"primary": True, "association": ["301", "302"]},
},
"301": {
"private_vlan": {
"primary": False,
"type": "community",
"ports": ["FastEthernet5/3", "FastEthernet5/25"],
}
},
"302": {"private_vlan": {"primary": False, "type": "community"}},
"10": {"private_vlan": {"primary": False, "type": "community"}},
"20": {
"vlan_id": "20",
"name": "VLAN-0020",
"shutdown": False,
"state": "active",
"remote_span_vlan": True,
"private_vlan": {"primary": True, "association": ["105"]},
},
"21": {"remote_span_vlan": True},
"24": {"remote_span_vlan": True},
"25": {"remote_span_vlan": True},
"26": {"remote_span_vlan": True},
"27": {"remote_span_vlan": True},
"105": {"private_vlan": {"primary": False, "type": "isolated"}},
"100": {
"vlan_id": "100",
"name": "V100",
"state": "suspend",
"shutdown": False,
"mtu": 1500,
"said": 100100,
"trans1": 0,
"trans2": 0,
"type": "enet",
"private_vlan": {"primary": True, "association": ["151"]},
},
"151": {"private_vlan": {"primary": False, "type": "non-operational"}},
"202": {"private_vlan": {"primary": False, "type": "community"}},
"303": {"private_vlan": {"primary": False, "type": "community"}},
"101": {
"vlan_id": "101",
"shutdown": False,
"name": "VLAN-0101",
"state": "active",
"mtu": 1500,
"said": 100101,
"trans1": 0,
"trans2": 0,
"type": "enet",
"private_vlan": {"primary": True, "association": ["402"]},
},
"402": {"private_vlan": {"primary": False, "type": "non-operational"}},
"102": {
"vlan_id": "102",
"shutdown": False,
"name": "VLAN_0102",
"state": "active",
"remote_span_vlan": True,
},
"103": {
"vlan_id": "103",
"shutdown": False,
"name": "VLAN-0103",
"state": "unsupport",
},
"104": {
"vlan_id": "104",
"name": "VLAN_0104",
"state": "shutdown",
"shutdown": True,
},
}
}
| 0 | 0 | 0 |
3ca6332215ff1cc553d4ed9016eacd95b07d1418 | 646 | py | Python | tests/test_reducers.py | jhnnsrs/xarray-multiscale | cb4e08bc21db9cfaae5aa096683c91d40acd79c0 | [
"BSD-3-Clause"
] | null | null | null | tests/test_reducers.py | jhnnsrs/xarray-multiscale | cb4e08bc21db9cfaae5aa096683c91d40acd79c0 | [
"BSD-3-Clause"
] | null | null | null | tests/test_reducers.py | jhnnsrs/xarray-multiscale | cb4e08bc21db9cfaae5aa096683c91d40acd79c0 | [
"BSD-3-Clause"
] | null | null | null | from xarray_multiscale.reducers import windowed_mean, windowed_mode
import numpy as np
| 32.3 | 67 | 0.650155 | from xarray_multiscale.reducers import windowed_mean, windowed_mode
import numpy as np
def test_windowed_mode():
    # 1-D case: reduce a length-16 vector with non-overlapping windows of 4;
    # expected values are the window modes (ties resolved by the implementation).
    data = np.arange(16) % 3 + np.arange(16) % 2
    answer = np.array([2, 0, 1, 2])
    results = windowed_mode(data, (4,))
    assert np.array_equal(results, answer)
    # 2-D case: reduce a 4x4 array with non-overlapping 2x2 windows.
    data = np.arange(16).reshape(4,4) % 3
    answer = np.array([[1,0],[0,2]])
    results = windowed_mode(data, (2,2))
    assert np.array_equal(results, answer)
def test_windowed_mean():
    # Columns alternate 0/1, so every non-overlapping 2x2 window contains two
    # zeros and two ones and must average to exactly 0.5.
    data = np.arange(16).reshape(4,4) % 2
    answer = np.array([[0.5, 0.5],[0.5, 0.5]])
    results = windowed_mean(data, (2,2))
    assert np.array_equal(results, answer)
296a16133fba9a356c755d1fd44a8d92f0ae6e1f | 8,210 | py | Python | scraping.py | seeeturtle/forest-watcher | e63d05ba35b66c7d5de2e6edb4d1562c4513dfff | [
"MIT"
] | null | null | null | scraping.py | seeeturtle/forest-watcher | e63d05ba35b66c7d5de2e6edb4d1562c4513dfff | [
"MIT"
] | null | null | null | scraping.py | seeeturtle/forest-watcher | e63d05ba35b66c7d5de2e6edb4d1562c4513dfff | [
"MIT"
] | null | null | null | import functools
import json
import operator
import os
from collections import Counter, namedtuple
from multiprocessing import Pool, cpu_count
from pprint import pprint
import requests
import xlrd
import xmltodict
from tqdm import tqdm
_alb_fields = [
"title",
"link",
"author",
"pubDate",
"description",
"creator",
"isbn",
"isbn13",
"itemId",
"priceSales",
"priceStandard",
"stockStatus",
"mileage",
"cover",
"categoryId",
"categoryName",
"publisher",
"customerReviewRank",
"salesPoint",
"first_category",
"second_category",
]
AladinBook = namedtuple(
"AladinBook",
_alb_fields,
)
# ['version', 'title', 'link', 'pubDate', 'imageUrl', 'totalResults', 'startIndex', 'itemsPerPage', 'query', 'searchCategoryId', 'searchCategoryName', 'item']
# print(repr(aladin_from_isbn13('')))
# print(aladin_from_isbn13(0)["item"][0].keys())
# pprint(AladinBook(**aladin_from_isbn13(0)["item"][0]))
CATEGORIES = aladin_categories() # {CID: (CNAME, 1thCID, 2thCID)}
# print(len(CATEGORIES))
LibraryBook = namedtuple(
"LibraryBook",
[
"no",
"ranking",
"bookname",
"authors",
"publisher",
"publication_year",
"isbn13",
"addition_symbol",
"vol",
"class_no",
"loan_count",
"bookImageURL",
],
)
PAGE_SIZE = 100
# def library_high_school(n):
# # 가장 인기많은 순서대로 n개를 가져온다.
# params = {
# "authKey": "API KEY",
# "from_age": 17,
# "to_age": 19,
# "format": "json",
# }
# res = []
# page_num = 1
# cont = True
# while cont:
# params["pageNo"] = page_num
# r = requests.get("http://data4library.kr/api/loanItemSrch", params=params)
# try:
# ds = r.json()["response"]["docs"]
# res.extend(ds)
# except:
# cont = False
# if len(res) >= n:
# cont = False
# page_num += 1
# print(r.json()["response"]["resultNum"])
# return [LibraryBook(**d["doc"]) for d in res[:n]]
# 동시성 사용 버전의 알라딘
# def library_to_aladin(lbs):
# with Pool(cpu_count()) as p:
# chuncksize = int(len(lbs)/cpu_count() + 0.5) # 반올림
# it = p.imap(aladin_from_isbn13, [lb.isbn13 for lb in lbs], chuncksize)
# r = []
# for x in it:
# if isinstance(x, AladinBook):
# r.append(x)
# return r
LIST_SIZE = 50 # 리스트 요청 시 페이지당 아이ㅣ템의 개수
# QUERY TYPES : ["ItemNewAll", "ItemNewSpecial"]
| 23.25779 | 158 | 0.568088 | import functools
import json
import operator
import os
from collections import Counter, namedtuple
from multiprocessing import Pool, cpu_count
from pprint import pprint
import requests
import xlrd
import xmltodict
from tqdm import tqdm
_alb_fields = [
"title",
"link",
"author",
"pubDate",
"description",
"creator",
"isbn",
"isbn13",
"itemId",
"priceSales",
"priceStandard",
"stockStatus",
"mileage",
"cover",
"categoryId",
"categoryName",
"publisher",
"customerReviewRank",
"salesPoint",
"first_category",
"second_category",
]
AladinBook = namedtuple(
"AladinBook",
_alb_fields,
)
def new_alb(**kwargs):
    """Build an AladinBook from keyword fields.

    Every field missing from *kwargs* defaults to None; first_category and
    second_category are resolved from the CATEGORIES table via categoryId.
    """
    fields = dict.fromkeys(_alb_fields)
    fields.update(kwargs)
    _cname, first_cid, second_cid = CATEGORIES[fields["categoryId"]]
    fields["first_category"] = first_cid
    fields["second_category"] = second_cid
    return AladinBook(**fields)
def new_alb_from_xml(item):
    """Normalize a raw xmltodict item dict (in place) into an AladinBook.

    Renames '@itemId' to 'itemId', drops keys that are not AladinBook
    fields, coerces all-digit strings to int, and fills in the category
    columns from CATEGORIES before delegating to new_alb().
    """
    item["itemId"] = item.pop("@itemId")
    for extra in set(item) - set(_alb_fields):
        del item[extra]
    for key, value in item.items():
        if isinstance(value, str) and value.isdigit():
            item[key] = int(value)
    _cname, first_cid, second_cid = CATEGORIES[item["categoryId"]]
    item["first_category"] = first_cid
    item["second_category"] = second_cid
    return new_alb(**item)
# ['version', 'title', 'link', 'pubDate', 'imageUrl', 'totalResults', 'startIndex', 'itemsPerPage', 'query', 'searchCategoryId', 'searchCategoryName', 'item']
def aladin_from_isbn13(isbn13):
    """Look up a single book on the Aladin TTB API by ISBN-13.

    Returns an AladinBook on success, or None when the API answered with a
    structured error document for this ISBN. Any other parse failure dumps
    the raw response to personal/error-xml.xml and re-raises.

    Fixes vs. the original: the bare except referenced ``x`` even when
    ``xmltodict.parse`` itself failed (NameError shadowing the real error),
    and success/None were returned through an unbound-name try/except trick.
    """
    params = {
        "TTBKey": os.environ.get("ALADIN_API_KEY"),
        "ItemId": isbn13,
        "ItemIdType": "ISBN13",
        "Output": "XML",
    }
    r = requests.get("http://www.aladin.co.kr/ttb/api/ItemLookUp.aspx", params=params)
    x = None
    try:
        x = xmltodict.parse(r.text)
        return new_alb_from_xml(x["object"]["item"])
    except Exception:
        if x is not None and "error" in x:
            # The API responded, but with an error payload instead of an item.
            print(f"\nisbn13: {isbn13}\nerror:\n{x}\n")
            return None
        # Unparseable/unexpected payload: keep it around for debugging.
        with open("personal/error-xml.xml", "w") as f:
            f.write(r.text)
        raise
# print(repr(aladin_from_isbn13('')))
# print(aladin_from_isbn13(0)["item"][0].keys())
# pprint(AladinBook(**aladin_from_isbn13(0)["item"][0]))
def aladin_categories():
    """Load the Aladin category table from the bundled XLS dump.

    Returns {CID: (CNAME, 1thCID, 2thCID)} -- see the CATEGORIES comment.
    """
    # NOTE(review): xlrd.open_workbook() does not expand '~'; this literal
    # path only works if a './~/download' directory exists -- confirm intent.
    wb = xlrd.open_workbook("~/download/aladin_Category_CID_20200626.xls")
    ws = wb.sheet_by_index(0)
    c = {}
    for r in range(ws.nrows):
        row = list(ws.row_values(r))
        # column 0 is the CID; columns 1, 3, 4 feed the value tuple
        c.update({int(row[0]): (row[1], row[3], row[4])})
    return c
CATEGORIES = aladin_categories() # {CID: (CNAME, 1thCID, 2thCID)}
# print(len(CATEGORIES))
LibraryBook = namedtuple(
"LibraryBook",
[
"no",
"ranking",
"bookname",
"authors",
"publisher",
"publication_year",
"isbn13",
"addition_symbol",
"vol",
"class_no",
"loan_count",
"bookImageURL",
],
)
PAGE_SIZE = 100
def a_library_high_school(page):
    """Fetch one page of the data4library loan ranking (ages 17-19).

    Returns a list of LibraryBook, or [] when the response has no docs.
    """
    params = {
        "authKey": os.environ.get("LIBRARY_API_KEY"),
        "from_age": 17,
        "to_age": 19,
        "format": "json",
        "pageNo": page,
        "pageSize": PAGE_SIZE,
    }
    r = requests.get("http://data4library.kr/api/loanItemSrch", params=params)
    try:
        ds = r.json()["response"]["docs"]
    # NOTE(review): bare except silently swallows auth/JSON errors as well.
    except:
        return []
    return [LibraryBook(**d["doc"]) for d in ds]
def library_high_school(n):
    """Return the *n* most-borrowed books for readers aged 17-19.

    Results are fetched PAGE_SIZE per request; when several pages are
    needed the requests are fanned out over a process pool.
    (Fix: removed a leftover debug ``print(whole_requests)``.)
    """
    if n <= PAGE_SIZE:
        # Single page: calling directly beats spinning up a pool.
        return a_library_high_school(1)[:n]
    # Ceiling division: number of pages needed to cover n results.
    whole_requests = int(n / PAGE_SIZE) + (n % PAGE_SIZE > 0)
    with Pool(cpu_count()) as p:
        res = p.imap(a_library_high_school, tqdm(range(1, whole_requests + 1)))
        # reduce() must run inside the pool context because imap is lazy.
        res = functools.reduce(operator.concat, res)[:n]
    return res
# def library_high_school(n):
# # 가장 인기많은 순서대로 n개를 가져온다.
# params = {
# "authKey": "API KEY",
# "from_age": 17,
# "to_age": 19,
# "format": "json",
# }
# res = []
# page_num = 1
# cont = True
# while cont:
# params["pageNo"] = page_num
# r = requests.get("http://data4library.kr/api/loanItemSrch", params=params)
# try:
# ds = r.json()["response"]["docs"]
# res.extend(ds)
# except:
# cont = False
# if len(res) >= n:
# cont = False
# page_num += 1
# print(r.json()["response"]["resultNum"])
# return [LibraryBook(**d["doc"]) for d in res[:n]]
def library_to_aladin(lbs):
    """Resolve library loan records into AladinBook entries via ISBN-13.

    Failed lookups yield None and are dropped, so the result may hold
    fewer entries than *lbs*.  TODO: consider a concurrent variant.
    """
    books = []
    for lb in tqdm(lbs):
        book = aladin_from_isbn13(lb.isbn13)
        if isinstance(book, AladinBook):
            books.append(book)
    return books
# 동시성 사용 버전의 알라딘
# def library_to_aladin(lbs):
# with Pool(cpu_count()) as p:
# chuncksize = int(len(lbs)/cpu_count() + 0.5) # 반올림
# it = p.imap(aladin_from_isbn13, [lb.isbn13 for lb in lbs], chuncksize)
# r = []
# for x in it:
# if isinstance(x, AladinBook):
# r.append(x)
# return r
def most_popular_category(albs):
    """Tally category tuples across books, most frequent first.

    Each book contributes (first_category, second_category, categoryId)
    taken from the CATEGORIES table; unknown category ids are reported to
    stdout and skipped.  Returns Counter.most_common() output.
    """
    tallies = Counter()
    for b in albs:
        cat = CATEGORIES.get(b.categoryId)
        if cat is None:
            print(f"error aladin book:\n{b}\nerror category:{b.categoryId}\n")
            continue
        tallies[cat[1:] + (b.categoryId,)] += 1
    return tallies.most_common()
LIST_SIZE = 50 # 리스트 요청 시 페이지당 아이ㅣ템의 개수
# QUERY TYPES : ["ItemNewAll", "ItemNewSpecial"]
def albs_list(category_id, qtype):
    """Fetch every item of an Aladin list query for one category.

    *qtype* is an Aladin API QueryType (e.g. "ItemNewAll", "ItemNewSpecial");
    the exact number of items returned is up to the API.

    Fixes vs. the original: ``p.imap(a_albs_list, pages)`` dropped the two
    required leading arguments (TypeError per page) -- bound them with
    functools.partial; ``total=len(whole_requests)`` called len() on an int;
    and the reduce of the lazy imap iterator now runs inside the pool context.
    """
    n = total_albs_list(category_id, qtype)
    if n <= LIST_SIZE:
        return a_albs_list(category_id, qtype, 1)
    # Ceiling division: number of LIST_SIZE pages needed to cover n items.
    whole_requests = int(n / LIST_SIZE) + (n % LIST_SIZE > 0)
    # Bind the fixed arguments so the pool only supplies the page number.
    fetch_page = functools.partial(a_albs_list, category_id, qtype)
    with Pool(cpu_count()) as p:
        res = tqdm(
            p.imap(fetch_page, range(1, whole_requests + 1)), total=whole_requests
        )
        res = functools.reduce(operator.concat, res)
    return res
def total_albs_list(category_id, qtype):
    """Return the total result count for an Aladin category list query.

    Fix vs. the original: the debug dump of the raw response sat *after*
    ``raise`` and was unreachable dead code; it now runs before re-raising.
    """
    params = {
        "TTBKey": os.environ.get("ALADIN_API_KEY"),
        "QueryType": qtype,
        "Version": "20131101",
        "SearchTarget": "Book",
        "Start": 1,
        "MaxResults": LIST_SIZE,
        "CategoryId": category_id,
        "Output": "XML",
    }
    res = requests.get("http://www.aladin.co.kr/ttb/api/ItemList.aspx", params=params)
    try:
        x = xmltodict.parse(res.text)
        total = int(x["object"]["totalResults"])
    except Exception:
        # Keep the raw payload around for debugging, then fail loudly.
        with open("personal/error-xml.xml", "w") as f:
            f.write(res.text)
        raise
    return total
def a_albs_list(category_id, qtype, page):
    """Fetch one page (up to LIST_SIZE items) of an Aladin category list.

    Returns a list of AladinBook, or [] when the response cannot be parsed
    or carries no items.  A failure inside new_alb_from_xml still
    propagates, as before.

    Fix vs. the original: the per-item handler was ``raise`` followed by a
    dead ``pass`` -- a no-op try/except that has been removed.
    """
    params = {
        "TTBKey": os.environ.get("ALADIN_API_KEY"),
        "QueryType": qtype,
        "Version": "20131101",
        "SearchTarget": "Book",
        "Start": page,
        "MaxResults": LIST_SIZE,
        "CategoryId": category_id,
        "Output": "XML",
        "Cover": "Big",  # request 200px-wide cover images
    }
    res = requests.get("http://www.aladin.co.kr/ttb/api/ItemList.aspx", params=params)
    try:
        x = xmltodict.parse(res.text)
        items = x["object"]["item"]
    except Exception:
        return []
    # NOTE(review): xmltodict yields a single dict (not a list) when a page
    # holds exactly one <item>; this loop assumes a list -- confirm with the
    # API, or parse with force_list=("item",).
    return [new_alb_from_xml(i) for i in items]
def main():
    """Ad-hoc entry point: connect to the dev MongoDB database.

    The scrape-and-insert pipeline below is currently commented out; only
    the connection is established.
    """
    from pymongo import MongoClient
    client = MongoClient(os.environ.get("MONGO_CLIENT"))
    db = client.forest_watcher_dev
    # lbs = library_high_school(500)
    # albs = library_to_aladin(lbs)
    # cs = most_popular_category(albs)
    # categories = db.categories
    # categories.insert_many(
    #     {"first_category": x[0], "second_category": x[1], "category_id": x[2]}
    #     for x in cs
    # )
    # items = db.items
    # items.insert_many()
| 5,712 | 0 | 276 |
ec98f45f0212d5f968676401cc893d2382082466 | 1,122 | py | Python | tests/test_parse_site.py | e7andy/simple-podcast-dl | b3353b981a6999a01abd6781c161a16d4d3dd898 | [
"MIT"
] | 48 | 2018-10-18T20:21:34.000Z | 2021-10-06T02:30:37.000Z | tests/test_parse_site.py | e7andy/simple-podcast-dl | b3353b981a6999a01abd6781c161a16d4d3dd898 | [
"MIT"
] | 22 | 2018-10-05T20:21:21.000Z | 2021-04-23T07:05:35.000Z | tests/test_parse_site.py | e7andy/simple-podcast-dl | b3353b981a6999a01abd6781c161a16d4d3dd898 | [
"MIT"
] | 2 | 2020-12-06T09:32:31.000Z | 2021-04-23T06:57:59.000Z | from podcast_dl.site_parser import parse_site, InvalidSite
import pytest
@pytest.mark.parametrize(
"site,name",
(
("http://talkpython.fm", "talkpython"),
("https://talkpython.fm", "talkpython"),
("http://pythonbytes.fm", "pythonbytes"),
("https://pythonbytes.fm", "pythonbytes"),
("https://talkpython.fm/episodes/rss", "talkpython"),
("https://changelog.com/podcast/", "changelog"),
("talkpython", "talkpython"),
("pythonbytes", "pythonbytes"),
("talkpython.fm", "talkpython"),
("www.talkpython.fm", "talkpython"),
("https://www.podcastinit.com/feed/mp3/", "podcastinit"),
("www.podcastinit.com/feed/mp3/", "podcastinit"),
),
)
| 33 | 98 | 0.647059 | from podcast_dl.site_parser import parse_site, InvalidSite
import pytest
@pytest.mark.parametrize(
"site,name",
(
("http://talkpython.fm", "talkpython"),
("https://talkpython.fm", "talkpython"),
("http://pythonbytes.fm", "pythonbytes"),
("https://pythonbytes.fm", "pythonbytes"),
("https://talkpython.fm/episodes/rss", "talkpython"),
("https://changelog.com/podcast/", "changelog"),
("talkpython", "talkpython"),
("pythonbytes", "pythonbytes"),
("talkpython.fm", "talkpython"),
("www.talkpython.fm", "talkpython"),
("https://www.podcastinit.com/feed/mp3/", "podcastinit"),
("www.podcastinit.com/feed/mp3/", "podcastinit"),
),
)
def test_parse_site(site, name):
assert parse_site(site).name == name
def test_parse_site_episode_url_still_returns_site_name():
    # A deep episode URL should still resolve to the owning site's name.
    url = "https://www.podcastinit.com/managing-application-secrets-with-brian-kelly-episode-181/"
    assert parse_site(url).name == "podcastinit"
def test_invalid_sites():
    # Unknown hosts must raise InvalidSite rather than guess a name.
    with pytest.raises(InvalidSite):
        parse_site("not_supported")
| 314 | 0 | 68 |
b9c712a7e566365d9ae6762ca6930fb2575830aa | 2,755 | py | Python | star_navi_backend/star_navi_backend/utils.py | Four-Velocity/star_navi_test | 9765d23442b31ffbb2148c3ffab7c3b6b30214ae | [
"Unlicense"
] | null | null | null | star_navi_backend/star_navi_backend/utils.py | Four-Velocity/star_navi_test | 9765d23442b31ffbb2148c3ffab7c3b6b30214ae | [
"Unlicense"
] | null | null | null | star_navi_backend/star_navi_backend/utils.py | Four-Velocity/star_navi_test | 9765d23442b31ffbb2148c3ffab7c3b6b30214ae | [
"Unlicense"
] | null | null | null | import os
from time import sleep
import requests as r
import yaml
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_yaml(tp: str) -> dict:
"""
Convert YAML to python dict.
Open dev_settings if exist.
Other way open prod_settings.
:param tp: category of settings which will be converted
:return: dict with data
"""
try:
settings = os.path.join(BASE_DIR, "dev_settings.yml")
with open(settings, 'r') as s:
data = yaml.load(s, Loader=yaml.Loader)[tp]
except FileNotFoundError:
settings = os.path.join(BASE_DIR, "prod_settings.yml")
with open(settings, 'r') as s:
data = yaml.load(s, Loader=yaml.Loader)[tp]
return data
def hard_get(data: dict, set_name: str):
"""
Get settings value from a dict,
Use when the setting required.
:param data: dict with data
:param set_name: setting name
:return: setting value
:raise: ValueError if value does not exist
"""
try:
value = data[set_name]
return value
except KeyError:
raise ValueError(f"Provide value for {set_name.upper()}")
def soft_get(data: dict, set_name: str, tp: type):
"""
Get setting value from a dict, or set it by default,
Use when setting *not* required.
:param data: dict with data
:param set_name: setting name
:param tp: value type
:return: setting value
"""
try:
value = data[set_name]
if type(value) != tp:
value = default(set_name)
except KeyError:
value = default(set_name)
return value
API_SLEEP = soft_get(get_yaml('generator'), 'api_sleep', float)
ADORABLE_AVATAR = hard_get(get_yaml('project'), 'adorable_avatar')
def generate_adorable_avatar(username: str) -> str:
"""
Generate user Adorable_avatar using email, and save it.
Generally any string can be used
:param username: user username
:return: avatar uri
"""
if ADORABLE_AVATAR:
response = r.request('GET', rf'https://api.adorable.io/avatars/150/{username}')
sleep(API_SLEEP)
avatar = os.path.join(BASE_DIR, 'media', 'avatars', f'{username}.png')
with open(avatar, 'wb') as img:
img.write(response.content)
else:
avatar = None
return avatar
| 28.697917 | 87 | 0.62323 | import os
from time import sleep
import requests as r
import yaml
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_yaml(tp: str) -> dict:
    """
    Load one settings category from the project's YAML configuration.
    dev_settings.yml takes precedence; prod_settings.yml is the fallback.
    :param tp: name of the settings category to extract
    :return: dict with that category's values
    """
    dev_path = os.path.join(BASE_DIR, "dev_settings.yml")
    prod_path = os.path.join(BASE_DIR, "prod_settings.yml")
    try:
        stream = open(dev_path, 'r')
    except FileNotFoundError:
        stream = open(prod_path, 'r')
    with stream as s:
        return yaml.load(s, Loader=yaml.Loader)[tp]
def hard_get(data: dict, set_name: str):
"""
Get settings value from a dict,
Use when the setting required.
:param data: dict with data
:param set_name: setting name
:return: setting value
:raise: ValueError if value does not exist
"""
try:
value = data[set_name]
return value
except KeyError:
raise ValueError(f"Provide value for {set_name.upper()}")
def soft_get(data: dict, set_name: str, tp: type):
"""
Get setting value from a dict, or set it by default,
Use when setting *not* required.
:param data: dict with data
:param set_name: setting name
:param tp: value type
:return: setting value
"""
def default(val):
defaults = dict(
api_sleep=0.07,
end_datetime='now',
start_datetime='-2M',
max_post_length=1024,
image_generation=True,
images_chance=0.333,
)
print(f"{val.upper()} value is being set from defaults!\n"
"There is no such value in settings.yml, or it's incorrect")
return defaults[val]
try:
value = data[set_name]
if type(value) != tp:
value = default(set_name)
except KeyError:
value = default(set_name)
return value
API_SLEEP = soft_get(get_yaml('generator'), 'api_sleep', float)
ADORABLE_AVATAR = hard_get(get_yaml('project'), 'adorable_avatar')
def generate_adorable_avatar(username: str) -> str:
"""
Generate user Adorable_avatar using email, and save it.
Generally any string can be used
:param username: user username
:return: avatar uri
"""
if ADORABLE_AVATAR:
response = r.request('GET', rf'https://api.adorable.io/avatars/150/{username}')
sleep(API_SLEEP)
avatar = os.path.join(BASE_DIR, 'media', 'avatars', f'{username}.png')
with open(avatar, 'wb') as img:
img.write(response.content)
else:
avatar = None
return avatar
| 398 | 0 | 26 |
e3a294384d04d671ba0596a94bb32835d7554db2 | 4,221 | py | Python | fgclustering/forest_guided_clustering.py | HelmholtzAI-Consultants-Munich/forest_guided_clustering | 3704047497d343417eee2a2f67dc3be201854b76 | [
"MIT"
] | 8 | 2021-11-04T11:44:18.000Z | 2022-02-02T13:38:55.000Z | fgclustering/forest_guided_clustering.py | HelmholtzAI-Consultants-Munich/forest_guided_clustering | 3704047497d343417eee2a2f67dc3be201854b76 | [
"MIT"
] | 4 | 2021-10-19T16:44:10.000Z | 2021-11-08T14:33:35.000Z | fgclustering/forest_guided_clustering.py | HelmholtzAI-Consultants-Munich/forest_guided_clustering | 3704047497d343417eee2a2f67dc3be201854b76 | [
"MIT"
] | 2 | 2021-09-13T08:26:59.000Z | 2022-02-02T09:14:35.000Z | ############################################
# imports
############################################
import fgclustering.utils as utils
import fgclustering.optimizer as optimizer
import fgclustering.plotting as plotting
############################################
# Forest-guided Clustering
############################################
def fgclustering(output, data, target_column, model,
max_K = 6, number_of_clusters = None, max_iter_clustering = 500,
bootstraps_JI = 300, discart_value_JI = 0.6,
bootstraps_p_value = 10000, thr_pvalue = 0.05, random_state = 42):
'''Run forest-guided clustering algirthm for Random Forest Classifier or Regressor. The optimal number of clusters
for a k-medoids clustering is computed, based on the distance matrix computed from the Random Forest proximity matrix.
Features are ranked and filtered based on statistical tests (ANOVA for continuous features, chi square for categorical features).
Feature distribution per cluster is shown in a heatmap and boxplots. Feature importance is plotted to show
the importance of each feature for each cluster, measured by variance and impurity of the feature within the cluster,
i.e. the higher the feature importance, the lower the feature variance/impurity within the cluster.
:param output: Filename to save plot.
:type output: str
:param data: Input data with feature matrix.
If target_column is a string it has to be a column in the data.
:type data: pandas.DataFrame
:param target_column: Name of target column or target values as numpy array.
:type target_column: str or numpy.ndarray
:param model: Trained Random Forest model.
:type model: sklearn.ensemble
:param max_K: Maximum number of clusters for cluster score computation, defaults to 6
:type max_K: int, optional
:param number_of_clusters: Number of clusters for the k-medoids clustering.
Leave None if number of clusters should be optimized, defaults to None
:type number_of_clusters: int, optional
:param max_iter_clustering: Number of iterations for k-medoids clustering, defaults to 500
:type max_iter_clustering: int, optional
:param bootstraps_JI: Number of bootstraps to compute the Jaccard Index, defaults to 300
:type bootstraps_JI: int, optional
:param discart_value_JI: Minimum Jaccard Index for cluster stability, defaults to 0.6
:type discart_value_JI: float, optional
:param bootstraps_p_value: Number of bootstraps to compute the p-value of feature importance, defaults to 10000
:type bootstraps_p_value: int, optional
:param thr_pvalue: P-value threshold for feature filtering, defaults to 0.05
:type thr_pvalue: float, optional
:param random_state: Seed number for random state, defaults to 42
:type random_state: int, optional
:return: Optimal number of clusters.
:rtype: int
'''
# check if random forest is regressor or classifier
is_regressor = 'RandomForestRegressor' in str(type(model))
is_classifier = 'RandomForestClassifier' in str(type(model))
if is_regressor is True:
method = "regression"
print("Interpreting RandomForestRegressor")
elif is_classifier is True:
method = "classifier"
print("Interpreting RandomForestClassifier")
else:
raise ValueError(f'Do not recognize {str(type(model))}. Can only work with sklearn RandomForestRegressor or RandomForestClassifier.')
if type(target_column)==str:
y = data.loc[:,target_column]
X = data.drop(columns=[target_column])
else:
y = target_column
X = data
distanceMatrix = 1 - utils.proximityMatrix(model, X.to_numpy())
if number_of_clusters is None:
k = optimizer.optimizeK(distanceMatrix, y.to_numpy(), max_K, bootstraps_JI, max_iter_clustering, discart_value_JI, method, random_state)
else:
k = number_of_clusters
print(f"Visualizing forest guided clustering for {k} clusters")
plotting.plot_forest_guided_clustering(output, X, y, method, distanceMatrix, k, thr_pvalue, bootstraps_p_value, random_state)
return k
| 49.658824 | 144 | 0.697702 | ############################################
# imports
############################################
import fgclustering.utils as utils
import fgclustering.optimizer as optimizer
import fgclustering.plotting as plotting
############################################
# Forest-guided Clustering
############################################
def fgclustering(output, data, target_column, model,
max_K = 6, number_of_clusters = None, max_iter_clustering = 500,
bootstraps_JI = 300, discart_value_JI = 0.6,
bootstraps_p_value = 10000, thr_pvalue = 0.05, random_state = 42):
'''Run forest-guided clustering algirthm for Random Forest Classifier or Regressor. The optimal number of clusters
for a k-medoids clustering is computed, based on the distance matrix computed from the Random Forest proximity matrix.
Features are ranked and filtered based on statistical tests (ANOVA for continuous features, chi square for categorical features).
Feature distribution per cluster is shown in a heatmap and boxplots. Feature importance is plotted to show
the importance of each feature for each cluster, measured by variance and impurity of the feature within the cluster,
i.e. the higher the feature importance, the lower the feature variance/impurity within the cluster.
:param output: Filename to save plot.
:type output: str
:param data: Input data with feature matrix.
If target_column is a string it has to be a column in the data.
:type data: pandas.DataFrame
:param target_column: Name of target column or target values as numpy array.
:type target_column: str or numpy.ndarray
:param model: Trained Random Forest model.
:type model: sklearn.ensemble
:param max_K: Maximum number of clusters for cluster score computation, defaults to 6
:type max_K: int, optional
:param number_of_clusters: Number of clusters for the k-medoids clustering.
Leave None if number of clusters should be optimized, defaults to None
:type number_of_clusters: int, optional
:param max_iter_clustering: Number of iterations for k-medoids clustering, defaults to 500
:type max_iter_clustering: int, optional
:param bootstraps_JI: Number of bootstraps to compute the Jaccard Index, defaults to 300
:type bootstraps_JI: int, optional
:param discart_value_JI: Minimum Jaccard Index for cluster stability, defaults to 0.6
:type discart_value_JI: float, optional
:param bootstraps_p_value: Number of bootstraps to compute the p-value of feature importance, defaults to 10000
:type bootstraps_p_value: int, optional
:param thr_pvalue: P-value threshold for feature filtering, defaults to 0.05
:type thr_pvalue: float, optional
:param random_state: Seed number for random state, defaults to 42
:type random_state: int, optional
:return: Optimal number of clusters.
:rtype: int
'''
# check if random forest is regressor or classifier
is_regressor = 'RandomForestRegressor' in str(type(model))
is_classifier = 'RandomForestClassifier' in str(type(model))
if is_regressor is True:
method = "regression"
print("Interpreting RandomForestRegressor")
elif is_classifier is True:
method = "classifier"
print("Interpreting RandomForestClassifier")
else:
raise ValueError(f'Do not recognize {str(type(model))}. Can only work with sklearn RandomForestRegressor or RandomForestClassifier.')
if type(target_column)==str:
y = data.loc[:,target_column]
X = data.drop(columns=[target_column])
else:
y = target_column
X = data
distanceMatrix = 1 - utils.proximityMatrix(model, X.to_numpy())
if number_of_clusters is None:
k = optimizer.optimizeK(distanceMatrix, y.to_numpy(), max_K, bootstraps_JI, max_iter_clustering, discart_value_JI, method, random_state)
else:
k = number_of_clusters
print(f"Visualizing forest guided clustering for {k} clusters")
plotting.plot_forest_guided_clustering(output, X, y, method, distanceMatrix, k, thr_pvalue, bootstraps_p_value, random_state)
return k
| 0 | 0 | 0 |
f4fd8478d10d669db73ec0483ef8530b8fdb54c7 | 851 | py | Python | cdhweb/pages/context_processors.py | bwhicks/cdh-web | d6002dc1933a4d6e97f5459aafc9ab92cb1f8050 | [
"Apache-2.0"
] | 1 | 2017-11-21T16:02:33.000Z | 2017-11-21T16:02:33.000Z | cdhweb/pages/context_processors.py | bwhicks/cdh-web | d6002dc1933a4d6e97f5459aafc9ab92cb1f8050 | [
"Apache-2.0"
] | 367 | 2017-08-14T16:05:41.000Z | 2021-11-03T15:29:18.000Z | cdhweb/pages/context_processors.py | bwhicks/cdh-web | d6002dc1933a4d6e97f5459aafc9ab92cb1f8050 | [
"Apache-2.0"
] | 5 | 2017-09-08T21:08:49.000Z | 2020-10-02T04:39:37.000Z | from cdhweb.pages.forms import SiteSearchForm
from cdhweb.pages.models import PageIntro
def page_intro(request):
"""Template context processor: if there is a PageIntro snippet
for this page, add it to the context for display."""
# wagtail stores link url without leading and trailing slashes,
# but requests to django view urls include them; strip them off to match
# NOTE: page intro modification time is NOT taken into account
# when generating Last-Modified headers and returning 304 Not Modified
page_intro = PageIntro.objects.filter(
page__link_url=request.path.strip("/")
).first()
if page_intro:
return {"page_intro": page_intro}
return {}
def site_search(request):
"""Template context processor: adds site search form to context."""
return {"site_search": SiteSearchForm()}
| 35.458333 | 76 | 0.723854 | from cdhweb.pages.forms import SiteSearchForm
from cdhweb.pages.models import PageIntro
def page_intro(request):
"""Template context processor: if there is a PageIntro snippet
for this page, add it to the context for display."""
# wagtail stores link url without leading and trailing slashes,
# but requests to django view urls include them; strip them off to match
# NOTE: page intro modification time is NOT taken into account
# when generating Last-Modified headers and returning 304 Not Modified
page_intro = PageIntro.objects.filter(
page__link_url=request.path.strip("/")
).first()
if page_intro:
return {"page_intro": page_intro}
return {}
def site_search(request):
"""Template context processor: adds site search form to context."""
return {"site_search": SiteSearchForm()}
| 0 | 0 | 0 |
f20337c08370d886c3e94d9bb4e8f75308d8c4f7 | 399 | py | Python | dangerfile.py | teastburn/danger-python | c943131b01558a6867218c7d91a4fa7a5ab3ec71 | [
"MIT"
] | 66 | 2020-01-23T15:31:34.000Z | 2022-03-05T07:51:09.000Z | dangerfile.py | teastburn/danger-python | c943131b01558a6867218c7d91a4fa7a5ab3ec71 | [
"MIT"
] | 7 | 2020-01-23T16:37:03.000Z | 2021-04-12T17:25:55.000Z | dangerfile.py | teastburn/danger-python | c943131b01558a6867218c7d91a4fa7a5ab3ec71 | [
"MIT"
] | 14 | 2020-01-23T15:17:39.000Z | 2022-03-26T16:44:24.000Z | touched_files = danger.git.modified_files + danger.git.created_files
has_source_changes = any(map(lambda f: f.startswith("danger_python"), touched_files))
has_changelog_entry = "CHANGELOG.md" in touched_files
is_trivial = "#trivial" in danger.github.pr.title
if has_source_changes and not has_changelog_entry and not is_trivial:
warn("Please, add a CHANGELOG.md entry for non-trivial changes")
| 49.875 | 85 | 0.802005 | touched_files = danger.git.modified_files + danger.git.created_files
has_source_changes = any(map(lambda f: f.startswith("danger_python"), touched_files))
has_changelog_entry = "CHANGELOG.md" in touched_files
is_trivial = "#trivial" in danger.github.pr.title
if has_source_changes and not has_changelog_entry and not is_trivial:
warn("Please, add a CHANGELOG.md entry for non-trivial changes")
| 0 | 0 | 0 |
83d5dcc0767d5e64221fcf3d14b33d0c2a0a7d9a | 12,519 | py | Python | vplexapi-7.0.0/vplexapi/models/engine.py | dell/python-vplex | 02c5df5e7f9ed61a13a2838f21ca6467a25dd392 | [
"Apache-2.0"
] | 3 | 2020-12-01T11:22:13.000Z | 2021-02-16T17:38:42.000Z | vplexapi-7.0.0/vplexapi/models/engine.py | dell/python-vplex | 02c5df5e7f9ed61a13a2838f21ca6467a25dd392 | [
"Apache-2.0"
] | null | null | null | vplexapi-7.0.0/vplexapi/models/engine.py | dell/python-vplex | 02c5df5e7f9ed61a13a2838f21ca6467a25dd392 | [
"Apache-2.0"
] | 3 | 2021-01-01T21:07:55.000Z | 2021-02-20T07:07:40.000Z | # coding: utf-8
"""
VPlex REST API
A definition for the next-gen VPlex API # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Engine(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cluster_ip_seed': 'str',
'enclosure_id': 'str',
'engine_id': 'str',
'engine_family': 'str',
'health_indications': 'list[str]',
'health_state': 'str',
'marker_led': 'str',
'operational_status': 'str',
'part_number': 'str',
'revision_number': 'str',
'serial_number': 'str',
'top_level_assembly': 'str',
'wwn_seed': 'str',
'name': 'str'
}
attribute_map = {
'cluster_ip_seed': 'cluster_ip_seed',
'enclosure_id': 'enclosure_id',
'engine_id': 'engine_id',
'engine_family': 'engine_family',
'health_indications': 'health_indications',
'health_state': 'health_state',
'marker_led': 'marker_led',
'operational_status': 'operational_status',
'part_number': 'part_number',
'revision_number': 'revision_number',
'serial_number': 'serial_number',
'top_level_assembly': 'top_level_assembly',
'wwn_seed': 'wwn_seed',
'name': 'name'
}
def __init__(self, cluster_ip_seed=None, enclosure_id=None, engine_id=None, engine_family=None, health_indications=None, health_state=None, marker_led=None, operational_status=None, part_number=None, revision_number=None, serial_number=None, top_level_assembly=None, wwn_seed=None, name=None): # noqa: E501
"""Engine - a model defined in Swagger""" # noqa: E501
self._cluster_ip_seed = None
self._enclosure_id = None
self._engine_id = None
self._engine_family = None
self._health_indications = None
self._health_state = None
self._marker_led = None
self._operational_status = None
self._part_number = None
self._revision_number = None
self._serial_number = None
self._top_level_assembly = None
self._wwn_seed = None
self._name = None
self.discriminator = None
if cluster_ip_seed is not None:
self.cluster_ip_seed = cluster_ip_seed
if enclosure_id is not None:
self.enclosure_id = enclosure_id
if engine_id is not None:
self.engine_id = engine_id
if engine_family is not None:
self.engine_family = engine_family
if health_indications is not None:
self.health_indications = health_indications
if health_state is not None:
self.health_state = health_state
if marker_led is not None:
self.marker_led = marker_led
if operational_status is not None:
self.operational_status = operational_status
if part_number is not None:
self.part_number = part_number
if revision_number is not None:
self.revision_number = revision_number
if serial_number is not None:
self.serial_number = serial_number
if top_level_assembly is not None:
self.top_level_assembly = top_level_assembly
if wwn_seed is not None:
self.wwn_seed = wwn_seed
if name is not None:
self.name = name
@property
def cluster_ip_seed(self):
"""Gets the cluster_ip_seed of this Engine. # noqa: E501
:return: The cluster_ip_seed of this Engine. # noqa: E501
:rtype: str
"""
return self._cluster_ip_seed
@cluster_ip_seed.setter
def cluster_ip_seed(self, cluster_ip_seed):
"""Sets the cluster_ip_seed of this Engine.
:param cluster_ip_seed: The cluster_ip_seed of this Engine. # noqa: E501
:type: str
"""
self._cluster_ip_seed = cluster_ip_seed
@property
def enclosure_id(self):
"""Gets the enclosure_id of this Engine. # noqa: E501
:return: The enclosure_id of this Engine. # noqa: E501
:rtype: str
"""
return self._enclosure_id
@enclosure_id.setter
def enclosure_id(self, enclosure_id):
"""Sets the enclosure_id of this Engine.
:param enclosure_id: The enclosure_id of this Engine. # noqa: E501
:type: str
"""
self._enclosure_id = enclosure_id
@property
def engine_id(self):
"""Gets the engine_id of this Engine. # noqa: E501
:return: The engine_id of this Engine. # noqa: E501
:rtype: str
"""
return self._engine_id
@engine_id.setter
def engine_id(self, engine_id):
"""Sets the engine_id of this Engine.
:param engine_id: The engine_id of this Engine. # noqa: E501
:type: str
"""
self._engine_id = engine_id
@property
def engine_family(self):
"""Gets the engine_family of this Engine. # noqa: E501
:return: The engine_family of this Engine. # noqa: E501
:rtype: str
"""
return self._engine_family
@engine_family.setter
def engine_family(self, engine_family):
"""Sets the engine_family of this Engine.
:param engine_family: The engine_family of this Engine. # noqa: E501
:type: str
"""
self._engine_family = engine_family
@property
def health_indications(self):
"""Gets the health_indications of this Engine. # noqa: E501
:return: The health_indications of this Engine. # noqa: E501
:rtype: list[str]
"""
return self._health_indications
@health_indications.setter
def health_indications(self, health_indications):
"""Sets the health_indications of this Engine.
:param health_indications: The health_indications of this Engine. # noqa: E501
:type: list[str]
"""
self._health_indications = health_indications
@property
def health_state(self):
"""Gets the health_state of this Engine. # noqa: E501
:return: The health_state of this Engine. # noqa: E501
:rtype: str
"""
return self._health_state
@health_state.setter
def health_state(self, health_state):
"""Sets the health_state of this Engine.
:param health_state: The health_state of this Engine. # noqa: E501
:type: str
"""
self._health_state = health_state
@property
def marker_led(self):
"""Gets the marker_led of this Engine. # noqa: E501
:return: The marker_led of this Engine. # noqa: E501
:rtype: str
"""
return self._marker_led
@marker_led.setter
def marker_led(self, marker_led):
"""Sets the marker_led of this Engine.
:param marker_led: The marker_led of this Engine. # noqa: E501
:type: str
"""
self._marker_led = marker_led
@property
def operational_status(self):
"""Gets the operational_status of this Engine. # noqa: E501
:return: The operational_status of this Engine. # noqa: E501
:rtype: str
"""
return self._operational_status
@operational_status.setter
def operational_status(self, operational_status):
"""Sets the operational_status of this Engine.
:param operational_status: The operational_status of this Engine. # noqa: E501
:type: str
"""
self._operational_status = operational_status
@property
def part_number(self):
"""Gets the part_number of this Engine. # noqa: E501
:return: The part_number of this Engine. # noqa: E501
:rtype: str
"""
return self._part_number
@part_number.setter
def part_number(self, part_number):
"""Sets the part_number of this Engine.
:param part_number: The part_number of this Engine. # noqa: E501
:type: str
"""
self._part_number = part_number
@property
def revision_number(self):
"""Gets the revision_number of this Engine. # noqa: E501
:return: The revision_number of this Engine. # noqa: E501
:rtype: str
"""
return self._revision_number
@revision_number.setter
def revision_number(self, revision_number):
"""Sets the revision_number of this Engine.
:param revision_number: The revision_number of this Engine. # noqa: E501
:type: str
"""
self._revision_number = revision_number
@property
def serial_number(self):
"""Gets the serial_number of this Engine. # noqa: E501
:return: The serial_number of this Engine. # noqa: E501
:rtype: str
"""
return self._serial_number
@serial_number.setter
def serial_number(self, serial_number):
"""Sets the serial_number of this Engine.
:param serial_number: The serial_number of this Engine. # noqa: E501
:type: str
"""
self._serial_number = serial_number
@property
def top_level_assembly(self):
"""Gets the top_level_assembly of this Engine. # noqa: E501
:return: The top_level_assembly of this Engine. # noqa: E501
:rtype: str
"""
return self._top_level_assembly
@top_level_assembly.setter
def top_level_assembly(self, top_level_assembly):
"""Sets the top_level_assembly of this Engine.
:param top_level_assembly: The top_level_assembly of this Engine. # noqa: E501
:type: str
"""
self._top_level_assembly = top_level_assembly
@property
def wwn_seed(self):
"""Gets the wwn_seed of this Engine. # noqa: E501
:return: The wwn_seed of this Engine. # noqa: E501
:rtype: str
"""
return self._wwn_seed
@wwn_seed.setter
def wwn_seed(self, wwn_seed):
"""Sets the wwn_seed of this Engine.
:param wwn_seed: The wwn_seed of this Engine. # noqa: E501
:type: str
"""
self._wwn_seed = wwn_seed
@property
def name(self):
"""Gets the name of this Engine. # noqa: E501
:return: The name of this Engine. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Engine.
:param name: The name of this Engine. # noqa: E501
:type: str
"""
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Engine, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Engine):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.57489 | 311 | 0.599728 | # coding: utf-8
"""
VPlex REST API
A definition for the next-gen VPlex API # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Engine(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cluster_ip_seed': 'str',
'enclosure_id': 'str',
'engine_id': 'str',
'engine_family': 'str',
'health_indications': 'list[str]',
'health_state': 'str',
'marker_led': 'str',
'operational_status': 'str',
'part_number': 'str',
'revision_number': 'str',
'serial_number': 'str',
'top_level_assembly': 'str',
'wwn_seed': 'str',
'name': 'str'
}
attribute_map = {
'cluster_ip_seed': 'cluster_ip_seed',
'enclosure_id': 'enclosure_id',
'engine_id': 'engine_id',
'engine_family': 'engine_family',
'health_indications': 'health_indications',
'health_state': 'health_state',
'marker_led': 'marker_led',
'operational_status': 'operational_status',
'part_number': 'part_number',
'revision_number': 'revision_number',
'serial_number': 'serial_number',
'top_level_assembly': 'top_level_assembly',
'wwn_seed': 'wwn_seed',
'name': 'name'
}
def __init__(self, cluster_ip_seed=None, enclosure_id=None, engine_id=None, engine_family=None, health_indications=None, health_state=None, marker_led=None, operational_status=None, part_number=None, revision_number=None, serial_number=None, top_level_assembly=None, wwn_seed=None, name=None): # noqa: E501
"""Engine - a model defined in Swagger""" # noqa: E501
self._cluster_ip_seed = None
self._enclosure_id = None
self._engine_id = None
self._engine_family = None
self._health_indications = None
self._health_state = None
self._marker_led = None
self._operational_status = None
self._part_number = None
self._revision_number = None
self._serial_number = None
self._top_level_assembly = None
self._wwn_seed = None
self._name = None
self.discriminator = None
if cluster_ip_seed is not None:
self.cluster_ip_seed = cluster_ip_seed
if enclosure_id is not None:
self.enclosure_id = enclosure_id
if engine_id is not None:
self.engine_id = engine_id
if engine_family is not None:
self.engine_family = engine_family
if health_indications is not None:
self.health_indications = health_indications
if health_state is not None:
self.health_state = health_state
if marker_led is not None:
self.marker_led = marker_led
if operational_status is not None:
self.operational_status = operational_status
if part_number is not None:
self.part_number = part_number
if revision_number is not None:
self.revision_number = revision_number
if serial_number is not None:
self.serial_number = serial_number
if top_level_assembly is not None:
self.top_level_assembly = top_level_assembly
if wwn_seed is not None:
self.wwn_seed = wwn_seed
if name is not None:
self.name = name
@property
def cluster_ip_seed(self):
"""Gets the cluster_ip_seed of this Engine. # noqa: E501
:return: The cluster_ip_seed of this Engine. # noqa: E501
:rtype: str
"""
return self._cluster_ip_seed
@cluster_ip_seed.setter
def cluster_ip_seed(self, cluster_ip_seed):
"""Sets the cluster_ip_seed of this Engine.
:param cluster_ip_seed: The cluster_ip_seed of this Engine. # noqa: E501
:type: str
"""
self._cluster_ip_seed = cluster_ip_seed
@property
def enclosure_id(self):
"""Gets the enclosure_id of this Engine. # noqa: E501
:return: The enclosure_id of this Engine. # noqa: E501
:rtype: str
"""
return self._enclosure_id
@enclosure_id.setter
def enclosure_id(self, enclosure_id):
"""Sets the enclosure_id of this Engine.
:param enclosure_id: The enclosure_id of this Engine. # noqa: E501
:type: str
"""
self._enclosure_id = enclosure_id
@property
def engine_id(self):
"""Gets the engine_id of this Engine. # noqa: E501
:return: The engine_id of this Engine. # noqa: E501
:rtype: str
"""
return self._engine_id
@engine_id.setter
def engine_id(self, engine_id):
"""Sets the engine_id of this Engine.
:param engine_id: The engine_id of this Engine. # noqa: E501
:type: str
"""
self._engine_id = engine_id
@property
def engine_family(self):
"""Gets the engine_family of this Engine. # noqa: E501
:return: The engine_family of this Engine. # noqa: E501
:rtype: str
"""
return self._engine_family
@engine_family.setter
def engine_family(self, engine_family):
"""Sets the engine_family of this Engine.
:param engine_family: The engine_family of this Engine. # noqa: E501
:type: str
"""
self._engine_family = engine_family
@property
def health_indications(self):
"""Gets the health_indications of this Engine. # noqa: E501
:return: The health_indications of this Engine. # noqa: E501
:rtype: list[str]
"""
return self._health_indications
@health_indications.setter
def health_indications(self, health_indications):
"""Sets the health_indications of this Engine.
:param health_indications: The health_indications of this Engine. # noqa: E501
:type: list[str]
"""
self._health_indications = health_indications
@property
def health_state(self):
"""Gets the health_state of this Engine. # noqa: E501
:return: The health_state of this Engine. # noqa: E501
:rtype: str
"""
return self._health_state
@health_state.setter
def health_state(self, health_state):
"""Sets the health_state of this Engine.
:param health_state: The health_state of this Engine. # noqa: E501
:type: str
"""
self._health_state = health_state
@property
def marker_led(self):
"""Gets the marker_led of this Engine. # noqa: E501
:return: The marker_led of this Engine. # noqa: E501
:rtype: str
"""
return self._marker_led
@marker_led.setter
def marker_led(self, marker_led):
"""Sets the marker_led of this Engine.
:param marker_led: The marker_led of this Engine. # noqa: E501
:type: str
"""
self._marker_led = marker_led
@property
def operational_status(self):
"""Gets the operational_status of this Engine. # noqa: E501
:return: The operational_status of this Engine. # noqa: E501
:rtype: str
"""
return self._operational_status
@operational_status.setter
def operational_status(self, operational_status):
"""Sets the operational_status of this Engine.
:param operational_status: The operational_status of this Engine. # noqa: E501
:type: str
"""
self._operational_status = operational_status
@property
def part_number(self):
"""Gets the part_number of this Engine. # noqa: E501
:return: The part_number of this Engine. # noqa: E501
:rtype: str
"""
return self._part_number
@part_number.setter
def part_number(self, part_number):
"""Sets the part_number of this Engine.
:param part_number: The part_number of this Engine. # noqa: E501
:type: str
"""
self._part_number = part_number
@property
def revision_number(self):
"""Gets the revision_number of this Engine. # noqa: E501
:return: The revision_number of this Engine. # noqa: E501
:rtype: str
"""
return self._revision_number
@revision_number.setter
def revision_number(self, revision_number):
"""Sets the revision_number of this Engine.
:param revision_number: The revision_number of this Engine. # noqa: E501
:type: str
"""
self._revision_number = revision_number
@property
def serial_number(self):
"""Gets the serial_number of this Engine. # noqa: E501
:return: The serial_number of this Engine. # noqa: E501
:rtype: str
"""
return self._serial_number
@serial_number.setter
def serial_number(self, serial_number):
"""Sets the serial_number of this Engine.
:param serial_number: The serial_number of this Engine. # noqa: E501
:type: str
"""
self._serial_number = serial_number
@property
def top_level_assembly(self):
"""Gets the top_level_assembly of this Engine. # noqa: E501
:return: The top_level_assembly of this Engine. # noqa: E501
:rtype: str
"""
return self._top_level_assembly
    @top_level_assembly.setter
    def top_level_assembly(self, top_level_assembly):
        """Set the top_level_assembly of this Engine.

        :param top_level_assembly: new value (str)
        """
        self._top_level_assembly = top_level_assembly
    @property
    def wwn_seed(self):
        """str: The wwn_seed of this Engine (generated getter)."""
        return self._wwn_seed
    @wwn_seed.setter
    def wwn_seed(self, wwn_seed):
        """Set the wwn_seed of this Engine.

        :param wwn_seed: new value (str)
        """
        self._wwn_seed = wwn_seed
    @property
    def name(self):
        """str: The name of this Engine (generated getter)."""
        return self._name
    @name.setter
    def name(self, name):
        """Set the name of this Engine.

        :param name: new value (str)
        """
        self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Engine, dict):
for key, value in self.items():
result[key] = value
return result
    def to_str(self):
        """Return the pretty-printed string form of :meth:`to_dict`."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`; delegates to :meth:`to_str`."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Engine):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Return True when the objects differ (inverse of ``__eq__``)."""
        return not self == other
| 0 | 0 | 0 |
07299342f9f59edd736455fe5cf8f959f8772d0f | 616 | py | Python | python/dynamic-programming/count-number-of-ways-to-cover-a-distance.py | fossabot/a-grim-loth | a6c8d549289a39ec981c1e0d0c754bb2708dfff9 | [
"MIT"
] | 4 | 2021-06-26T17:18:47.000Z | 2022-02-02T15:02:27.000Z | python/dynamic-programming/count-number-of-ways-to-cover-a-distance.py | fossabot/a-grim-loth | a6c8d549289a39ec981c1e0d0c754bb2708dfff9 | [
"MIT"
] | 8 | 2021-06-29T07:00:32.000Z | 2021-12-01T11:26:22.000Z | python/dynamic-programming/count-number-of-ways-to-cover-a-distance.py | fossabot/a-grim-loth | a6c8d549289a39ec981c1e0d0c754bb2708dfff9 | [
"MIT"
] | 3 | 2021-07-14T14:42:08.000Z | 2021-12-07T19:36:53.000Z | # A Dynamic Programming based on Python3
# program to count number of ways to
# cover a distance with 1, 2 and 3 steps
# driver program
dist = 4
print(printCountDP(dist))
| 21.241379 | 61 | 0.594156 | # A Dynamic Programming based on Python3
# program to count number of ways to
# cover a distance with 1, 2 and 3 steps
def printCountDP(dist):
    """Count the ways to cover `dist` using steps of size 1, 2 and 3.

    Bottom-up DP in O(dist) time and space; ways[i] is the number of
    step sequences that sum exactly to i (ways[0] == 1, the empty walk).
    """
    ways = [0] * (dist + 1)
    ways[0] = 1
    for i in range(1, dist + 1):
        # A walk reaching i ends with a final step of 1, 2 or 3.
        ways[i] = ways[i - 1]
        if i >= 2:
            ways[i] += ways[i - 2]
        if i >= 3:
            ways[i] += ways[i - 3]
    return ways[dist]
# driver program: prints the number of ways to cover a distance of 4
# with steps of 1, 2 and 3 (expected output: 7)
dist = 4
print(printCountDP(dist))
| 419 | 0 | 23 |
f6a467785c54771ef85fa13b6cb39f8aa6f71d70 | 1,425 | py | Python | app/bot/main.py | ignatiy/zvonar-api | f471d5691e59b4d0d94877a4649069f782fd14f8 | [
"MIT"
] | 3 | 2021-04-28T04:52:27.000Z | 2021-08-30T10:10:10.000Z | app/bot/main.py | ignatiy/zvonar-api | f471d5691e59b4d0d94877a4649069f782fd14f8 | [
"MIT"
] | null | null | null | app/bot/main.py | ignatiy/zvonar-api | f471d5691e59b4d0d94877a4649069f782fd14f8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import config
import sys, traceback
from datetime import datetime
from telegram import Bot, Update, User, Message
from telegram.ext import CommandHandler, Updater, MessageHandler, CallbackContext, Filters
from telegram.utils.request import Request
from telegram import KeyboardButton, ReplyKeyboardMarkup, ReplyKeyboardRemove
if __name__ == '__main__':
main() | 29.6875 | 111 | 0.748772 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import config
import sys, traceback
from datetime import datetime
from telegram import Bot, Update, User, Message
from telegram.ext import CommandHandler, Updater, MessageHandler, CallbackContext, Filters
from telegram.utils.request import Request
from telegram import KeyboardButton, ReplyKeyboardMarkup, ReplyKeyboardRemove
def main():
    """Start the Telegram bot and poll for updates until interrupted.

    Builds a `telegram.Bot` from `config` (token and optional proxy base
    URL), registers the command/message handlers, then blocks in
    `start_polling()` / `idle()`.
    """
    try:
        # Short timeouts keep the bot responsive when Telegram is slow.
        request = Request(
            connect_timeout=0.5,
            read_timeout=1.0
        )
        bot = Bot(
            request=request,
            token=config.token,
            base_url=config.proxy  # proxy in case Telegram is blocked; set the proxy URL in config
        )
        updater = Updater(
            bot=bot,
            use_context=True
        )
        response = updater.bot.get_me()
        print('*' * 30)
        print('Start telegram: ' + response.username + '\nID: ' + str(response.id) + '')
        print('*' * 30)
        dispatcher = updater.dispatcher
        dispatcher.add_handler(CommandHandler("start", start))
        dispatcher.add_handler(CommandHandler("update", update))
        dispatcher.add_handler(MessageHandler(Filters.text, echoMessage))
        # dispatcher.add_handler(MessageHandler(Filters.status_update.new_chat_members, addNewUserOnChatMember))
        # dispatcher.add_handler(MessageHandler(Filters.status_update.left_chat_member, delUserOnChatMember))
        updater.start_polling()
        updater.idle()
        print('\nFinish telegram\n')
    except Exception:
        # Was `print(e)`, which hid the stack trace; the module already
        # imports `traceback`, so emit the full traceback for debugging.
        traceback.print_exc()
if __name__ == '__main__':
    # Run the bot only when executed as a script (not on import).
    main()
d9b6984afb12d9fb4c8ffcaf41467860bbfab712 | 13,677 | py | Python | NACA-foil-body-fitted/blockmeshdict.py | kaijunhuang1994/blockMeshDict-Python | 1d640551980df6c66af057aad78d142e0cbae95e | [
"MIT"
] | null | null | null | NACA-foil-body-fitted/blockmeshdict.py | kaijunhuang1994/blockMeshDict-Python | 1d640551980df6c66af057aad78d142e0cbae95e | [
"MIT"
] | null | null | null | NACA-foil-body-fitted/blockmeshdict.py | kaijunhuang1994/blockMeshDict-Python | 1d640551980df6c66af057aad78d142e0cbae95e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from __future__ import division, print_function
import argparse
import numpy as np
from numpy import linspace, zeros, ones, sin, cos, arctan, pi
import os
def gen_blockmeshdict(foil="0012"):
    """
    Write a `blockMeshDict` for a NACA foil at specified angle of attack.

    `foil` is the 4-digit NACA designation as a string (e.g. "0012").
    NOTE(review): no angle-of-attack rotation is applied anywhere below,
    so the foil is actually meshed at zero incidence — confirm intent.
    The dictionary is written to ./blockMeshDict in the working directory.
    """
    # Foil geometry
    c = 1.0                          # Geometric chord length
    NACA = [int(d) for d in foil]    # NACA 4-digit designation
    # Mesh dimensions
    scale = 1            # Scaling factor
    W = 0.5              # *Half* depth of foil (z-direction)
    D = 1.2              # Length of downstream section
    scalingX= 1.3        # A scaling factor in the x-direction, which is used to make the boundary similar to the leading edge of the airfoil
    scalingY= 2          # A scaling factor in the y-direction, which is used to make the boundary similar to the leading edge of the airfoil
    # Mesh resolution parameters
    Ni = 400             # Number of interpolation points along the foil
    # Nx = 200           # Number of mesh cells along the foil
    Nleading = 40        # Number of mesh cells along the leading foil
    Ntrailing = 40       # Number of mesh cells along the trailing foil
    ND = 20              # Number of cells in the downstream direction
    NT = 10              # Number of cells the transverse direction
    NW = 1               # Number of cells in the z-direction (along the foil axis)
    # Expansion rates
    ExpTransverse = 1    # Expansion rate in transverse direction
    ExpDownstream = 1    # Expansion rate in the downstream direction
    ExpLeading = 1       # Expansion rate in the leading foil
    ExpTrailing = 1      # Expansion rate in the trailing foil
    # ------------------------- END OF MESH PARAMETER REGION --------------------- #
    # Create a vector with x-coordinates, camber and thickness
    # (cosine spacing clusters the interpolation points at the edges).
    beta = linspace(0, pi, Ni)
    x = c*(0.5*(1 - cos(beta)))
    y_c = zeros(len(x))
    y_t = zeros(len(x))
    theta = zeros(len(x))
    # Values of m, p and t
    m = NACA[0]/100
    p = NACA[1]/10
    t = (NACA[2]*10 + NACA[3])/100
    # Calculate thickness
    # The upper expression will give the airfoil a finite thickness at the trailing
    # edge, witch might cause trouble. The lower expression is corrected to give
    # zero thickness at the trailing edge, but the foil is strictly speaking no
    # longer a proper NACA airfoil.
    #
    # See http://turbmodels.larc.nasa.gov/naca4412sep_val.html
    #     http://en.wikipedia.org/wiki/NACA_airfoil
    #y_t = (t*c/0.2) * (0.2969*(x/c)**0.5 - 0.1260*(x/c) - 0.3516*(x/c)**2 + 0.2843*(x/c)**3 - 0.1015*(x/c)**4)
    y_t = (t*c/0.2)*(0.2969*(x/c)**0.5 - 0.1260*(x/c) - 0.3516*(x/c)**2 \
        + 0.2843*(x/c)**3 - 0.1036*(x/c)**4)
    if p > 0:
        # Calculate camber
        y_c += (m*x/p**2)*(2*p - x/c)*(x < p*c)
        y_c += (m*(c-x)/(1 - p)**2)*(1 + x/c - 2*p)*(x >= p*c)
        # Calculate theta-value
        theta += arctan((m/p**2) * (2*p - 2*x/c))*(x < p*c)
        theta += arctan((m/(1 - p)**2) * (-2*x/c + 2*p))*(x >= p*c)
    # Calculate coordinates of upper surface
    Xu = x - y_t*sin(theta)
    Yu = y_c + y_t*cos(theta)
    # Calculate coordinates of lower surface
    Xl = x + y_t*sin(theta)
    Yl = y_c - y_t*cos(theta)
    # Converts a one-dimensional array to a column vector (The data type: np.matrix)
    # In order to be able to execute successfully functions (np.concatenate)
    Xu = np.matrix([Xu]).transpose()
    Yu = np.matrix([Yu]).transpose()
    Xl = np.matrix([Xl]).transpose()
    Yl = np.matrix([Yl]).transpose()
    if p > 0:
        # Find index i of max. camber
        C_max_idx = np.where(y_c == max(y_c))[0][0]
    else:
        # Otherwise use location of max. thickness
        C_max_idx = np.where(y_t == max(y_t))[0][0]
    # Spline point lists for the block edges; ptsN on +z, mirrored copy on -z.
    # Edge 4-5 and 16-17
    pts1 = np.concatenate([Xu[1:C_max_idx],
                           Yu[1:C_max_idx],
                           W*ones(np.shape(Xu[1:C_max_idx]))], axis=1)
    pts5 = np.concatenate([pts1[:, 0], pts1[:, 1], -pts1[:, 2]], axis=1)
    # Edge 5-7 and 17-19
    pts2 = np.concatenate([Xu[C_max_idx + 1:Ni - 1],
                           Yu[C_max_idx + 1:Ni - 1],
                           W*ones(np.shape(Xu[C_max_idx + 1:Ni - 1]))], axis=1)
    pts6 = np.concatenate([pts2[:, 0], pts2[:, 1], -pts2[:, 2]], axis=1)
    # Edge 4-6 and 16-18
    pts3 = np.concatenate([Xl[1:C_max_idx],
                           Yl[1:C_max_idx],
                           W*ones(np.shape(Xl[1:C_max_idx]))], axis=1)
    pts7 = np.concatenate([pts3[:, 0], pts3[:, 1], -pts3[:, 2]], axis=1)
    # Edge 6-7 and 18-19
    pts4 = np.concatenate([Xl[C_max_idx + 1:Ni - 1],
                           Yl[C_max_idx + 1:Ni - 1],
                           W*ones(np.shape(Xl[C_max_idx + 1:Ni - 1]))], axis=1)
    pts8 = np.concatenate([pts4[:, 0], pts4[:, 1], -pts4[:, 2]], axis=1)
    # Edge 0-1 and 12-13
    # A scaling factor is used to make the boundary similar to the leading edge of the airfoil
    pts9_x = pts1[:,0] * scalingX
    pts9_x = pts9_x - (pts9_x[-1] - pts1[:,0][-1])
    pts9_y = pts1[:,1] * scalingY
    pts9 = np.concatenate([pts9_x,
                           pts9_y,
                           W*ones(np.shape(pts9_x))], axis=1)
    pts11 = np.concatenate([pts9[:, 0], pts9[:, 1], -pts9[:, 2]], axis=1)
    # Edge 0-9 and 12-21
    # A scaling factor is used to make the boundary similar to the leading edge of the airfoil
    pts10_x = pts3[:,0] * scalingX
    pts10_x = pts10_x - (pts10_x[-1] - pts3[:,0][-1])
    pts10_y = pts3[:,1] * scalingY
    pts10 = np.concatenate([pts10_x,
                            pts10_y,
                            W*ones(np.shape(pts10_x))], axis=1)
    pts12 = np.concatenate([pts10[:, 0], pts10[:, 1], -pts10[:, 2]], axis=1)
    # Move point of mesh "nose"
    NoseX = pts9_x[0]
    NoseY = pts9_y[0]
    # Calculate the location of the vertices on the positive y-axis and put them in a matrix
    vertices = zeros((12, 3))
    vertices[0, :] = [NoseX[0], NoseY[0], W]
    vertices[1, :] = [Xu[C_max_idx], pts9_y[-1] , W]
    vertices[2, :] = [Xu[-1], pts9_y[-1] , W]
    vertices[3, :] = [D, pts9_y[-1] , W]
    vertices[4, :] = [Xu[0], Yu[0], W]
    vertices[5, :] = [Xu[C_max_idx], Yu[C_max_idx], W]
    vertices[6, :] = [Xl[C_max_idx], Yl[C_max_idx], W]
    vertices[7, :] = [Xu[-1], Yu[-1], W]
    vertices[8, :] = [D, Yu[-1], W]
    vertices[9, :] = [Xl[C_max_idx], pts10_y[-1], W]
    vertices[10, :] = [Xu[-1], pts10_y[-1], W]
    vertices[11, :] = [D, pts10_y[-1], W]
    # Create vertices for other side (negative z-axis)
    vertices2 = vertices.copy()
    vertices2[:, 2] *= -1
    vertices = np.vstack((vertices, vertices2))
    # Open file
    f = open("blockMeshDict", "w")
    # Write file (header, vertices, blocks, spline edges, boundary patches)
    f.write("/*--------------------------------*- C++ -*----------------------------------*\\ \n")
    f.write("| ========= | | \n")
    f.write("| \\\\ / F ield | OpenFOAM: The Open Source CFD Toolbox | \n")
    f.write("| \\\\ / O peration | Version: 3.0.x | \n")
    f.write("| \\\\ / A nd | Web: www.OpenFOAM.com | \n")
    f.write("| \\\\/ M anipulation | | \n")
    f.write("\\*---------------------------------------------------------------------------*/ \n")
    f.write("FoamFile \n")
    f.write("{ \n")
    f.write(" version 2.0; \n")
    f.write(" format ascii; \n")
    f.write(" class dictionary; \n")
    f.write(" object blockMeshDict; \n")
    f.write("} \n")
    f.write("// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // \n")
    f.write("\n")
    f.write("convertToMeters %f; \n" % scale)
    f.write("\n")
    f.write("vertices \n")
    f.write("( \n")
    for vertex in vertices:
        f.write(" (%f %f %f)\n" % tuple(vertex))
    f.write("); \n")
    f.write("\n")
    f.write("blocks \n")
    f.write("( \n")
    f.write(" hex (16 17 13 12 4 5 1 0) (%i %i %i) simpleGrading (%f %f 1) \n" % (Nleading, NT, NW, ExpLeading, ExpTransverse))
    f.write(" hex (17 19 14 13 5 7 2 1) (%i %i %i) simpleGrading (%f %f 1) \n" % (Ntrailing, NT, NW, ExpTrailing, ExpTransverse))
    f.write(" hex (19 20 15 14 7 8 3 2) (%i %i %i) simpleGrading (%f %f 1) \n" % (ND, NT, NW, ExpDownstream, ExpTransverse))
    f.write(" hex (4 6 9 0 16 18 21 12) (%i %i %i) simpleGrading (%f %f 1) \n" % (Nleading, NT, NW, ExpLeading, ExpTransverse))
    f.write(" hex (6 7 10 9 18 19 22 21) (%i %i %i) simpleGrading (%f %f 1) \n" % (Ntrailing, NT, NW, ExpTrailing, ExpTransverse))
    f.write(" hex (7 8 11 10 19 20 23 22) (%i %i %i) simpleGrading (%f %f 1) \n" % (ND, NT, NW, ExpDownstream, ExpTransverse))
    f.write("); \n")
    f.write("\n")
    f.write("edges \n")
    f.write("( \n")
    f.write(" spline 4 5 \n")
    f.write(" ( \n")
    for pt in np.array(pts1):
        f.write(" (%f %f %f) \n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 5 7 \n")
    f.write(" ( \n")
    for pt in np.array(pts2):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 4 6 \n")
    f.write(" ( \n")
    for pt in np.array(pts3):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 6 7 \n")
    f.write(" ( \n")
    for pt in np.array(pts4):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 16 17 \n")
    f.write(" ( \n")
    for pt in np.array(pts5):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 17 19 \n")
    f.write(" ( \n")
    for pt in np.array(pts6):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 16 18 \n")
    f.write(" ( \n")
    for pt in np.array(pts7):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 18 19 \n")
    f.write(" ( \n")
    for pt in np.array(pts8):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 0 1 \n")
    f.write(" ( \n")
    for pt in np.array(pts9):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 12 13 \n")
    f.write(" ( \n")
    for pt in np.array(pts11):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 0 9 \n")
    f.write(" ( \n")
    for pt in np.array(pts10):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 12 21 \n")
    f.write(" ( \n")
    for pt in np.array(pts12):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write("); \n")
    f.write("\n")
    f.write("boundary \n")
    f.write("( \n")
    f.write(" inlet \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (1 0 12 13) \n")
    f.write(" (0 9 21 12) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" outlet \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (11 8 20 23) \n")
    f.write(" (8 3 15 20) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" topAndBottom \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (3 2 14 15) \n")
    f.write(" (2 1 13 14) \n")
    f.write(" (9 10 22 21) \n")
    f.write(" (10 11 23 22) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" airfoil \n")
    f.write(" { \n")
    f.write(" type wall; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (5 4 16 17) \n")
    f.write(" (7 5 17 19) \n")
    f.write(" (4 6 18 16) \n")
    f.write(" (6 7 19 18) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("); \n")
    f.write(" \n")
    f.write("mergePatchPairs \n")
    f.write("( \n")
    f.write("); \n")
    f.write(" \n")
    f.write("// ************************************************************************* // \n")
    # Close file
    f.close()
if __name__ == "__main__":
    # CLI entry point, e.g. `python blockmeshdict.py 0012`.
    # The description previously read "Plotting results" -- a copy-paste
    # leftover; this script generates a blockMeshDict, so say so.
    parser = argparse.ArgumentParser(
        description="Generate a blockMeshDict for a NACA 4-digit foil")
    parser.add_argument("foil", help="NACA foil digits")
    args = parser.parse_args()
    print("Generating blockMeshDict for a NACA {}".format(args.foil))
    gen_blockmeshdict(args.foil)
| 36.569519 | 141 | 0.455655 | #!/usr/bin/env python
from __future__ import division, print_function
import argparse
import numpy as np
from numpy import linspace, zeros, ones, sin, cos, arctan, pi
import os
def gen_blockmeshdict(foil="0012"):
    """
    Write a `blockMeshDict` for a NACA foil at specified angle of attack.

    `foil` is the 4-digit NACA designation as a string (e.g. "0012").
    NOTE(review): no angle-of-attack rotation is applied anywhere below,
    so the foil is actually meshed at zero incidence — confirm intent.
    The dictionary is written to ./blockMeshDict in the working directory.
    """
    # Foil geometry
    c = 1.0                          # Geometric chord length
    NACA = [int(d) for d in foil]    # NACA 4-digit designation
    # Mesh dimensions
    scale = 1            # Scaling factor
    W = 0.5              # *Half* depth of foil (z-direction)
    D = 1.2              # Length of downstream section
    scalingX= 1.3        # A scaling factor in the x-direction, which is used to make the boundary similar to the leading edge of the airfoil
    scalingY= 2          # A scaling factor in the y-direction, which is used to make the boundary similar to the leading edge of the airfoil
    # Mesh resolution parameters
    Ni = 400             # Number of interpolation points along the foil
    # Nx = 200           # Number of mesh cells along the foil
    Nleading = 40        # Number of mesh cells along the leading foil
    Ntrailing = 40       # Number of mesh cells along the trailing foil
    ND = 20              # Number of cells in the downstream direction
    NT = 10              # Number of cells the transverse direction
    NW = 1               # Number of cells in the z-direction (along the foil axis)
    # Expansion rates
    ExpTransverse = 1    # Expansion rate in transverse direction
    ExpDownstream = 1    # Expansion rate in the downstream direction
    ExpLeading = 1       # Expansion rate in the leading foil
    ExpTrailing = 1      # Expansion rate in the trailing foil
    # ------------------------- END OF MESH PARAMETER REGION --------------------- #
    # Create a vector with x-coordinates, camber and thickness
    # (cosine spacing clusters the interpolation points at the edges).
    beta = linspace(0, pi, Ni)
    x = c*(0.5*(1 - cos(beta)))
    y_c = zeros(len(x))
    y_t = zeros(len(x))
    theta = zeros(len(x))
    # Values of m, p and t
    m = NACA[0]/100
    p = NACA[1]/10
    t = (NACA[2]*10 + NACA[3])/100
    # Calculate thickness
    # The upper expression will give the airfoil a finite thickness at the trailing
    # edge, witch might cause trouble. The lower expression is corrected to give
    # zero thickness at the trailing edge, but the foil is strictly speaking no
    # longer a proper NACA airfoil.
    #
    # See http://turbmodels.larc.nasa.gov/naca4412sep_val.html
    #     http://en.wikipedia.org/wiki/NACA_airfoil
    #y_t = (t*c/0.2) * (0.2969*(x/c)**0.5 - 0.1260*(x/c) - 0.3516*(x/c)**2 + 0.2843*(x/c)**3 - 0.1015*(x/c)**4)
    y_t = (t*c/0.2)*(0.2969*(x/c)**0.5 - 0.1260*(x/c) - 0.3516*(x/c)**2 \
        + 0.2843*(x/c)**3 - 0.1036*(x/c)**4)
    if p > 0:
        # Calculate camber
        y_c += (m*x/p**2)*(2*p - x/c)*(x < p*c)
        y_c += (m*(c-x)/(1 - p)**2)*(1 + x/c - 2*p)*(x >= p*c)
        # Calculate theta-value
        theta += arctan((m/p**2) * (2*p - 2*x/c))*(x < p*c)
        theta += arctan((m/(1 - p)**2) * (-2*x/c + 2*p))*(x >= p*c)
    # Calculate coordinates of upper surface
    Xu = x - y_t*sin(theta)
    Yu = y_c + y_t*cos(theta)
    # Calculate coordinates of lower surface
    Xl = x + y_t*sin(theta)
    Yl = y_c - y_t*cos(theta)
    # Converts a one-dimensional array to a column vector (The data type: np.matrix)
    # In order to be able to execute successfully functions (np.concatenate)
    Xu = np.matrix([Xu]).transpose()
    Yu = np.matrix([Yu]).transpose()
    Xl = np.matrix([Xl]).transpose()
    Yl = np.matrix([Yl]).transpose()
    if p > 0:
        # Find index i of max. camber
        C_max_idx = np.where(y_c == max(y_c))[0][0]
    else:
        # Otherwise use location of max. thickness
        C_max_idx = np.where(y_t == max(y_t))[0][0]
    # Spline point lists for the block edges; ptsN on +z, mirrored copy on -z.
    # Edge 4-5 and 16-17
    pts1 = np.concatenate([Xu[1:C_max_idx],
                           Yu[1:C_max_idx],
                           W*ones(np.shape(Xu[1:C_max_idx]))], axis=1)
    pts5 = np.concatenate([pts1[:, 0], pts1[:, 1], -pts1[:, 2]], axis=1)
    # Edge 5-7 and 17-19
    pts2 = np.concatenate([Xu[C_max_idx + 1:Ni - 1],
                           Yu[C_max_idx + 1:Ni - 1],
                           W*ones(np.shape(Xu[C_max_idx + 1:Ni - 1]))], axis=1)
    pts6 = np.concatenate([pts2[:, 0], pts2[:, 1], -pts2[:, 2]], axis=1)
    # Edge 4-6 and 16-18
    pts3 = np.concatenate([Xl[1:C_max_idx],
                           Yl[1:C_max_idx],
                           W*ones(np.shape(Xl[1:C_max_idx]))], axis=1)
    pts7 = np.concatenate([pts3[:, 0], pts3[:, 1], -pts3[:, 2]], axis=1)
    # Edge 6-7 and 18-19
    pts4 = np.concatenate([Xl[C_max_idx + 1:Ni - 1],
                           Yl[C_max_idx + 1:Ni - 1],
                           W*ones(np.shape(Xl[C_max_idx + 1:Ni - 1]))], axis=1)
    pts8 = np.concatenate([pts4[:, 0], pts4[:, 1], -pts4[:, 2]], axis=1)
    # Edge 0-1 and 12-13
    # A scaling factor is used to make the boundary similar to the leading edge of the airfoil
    pts9_x = pts1[:,0] * scalingX
    pts9_x = pts9_x - (pts9_x[-1] - pts1[:,0][-1])
    pts9_y = pts1[:,1] * scalingY
    pts9 = np.concatenate([pts9_x,
                           pts9_y,
                           W*ones(np.shape(pts9_x))], axis=1)
    pts11 = np.concatenate([pts9[:, 0], pts9[:, 1], -pts9[:, 2]], axis=1)
    # Edge 0-9 and 12-21
    # A scaling factor is used to make the boundary similar to the leading edge of the airfoil
    pts10_x = pts3[:,0] * scalingX
    pts10_x = pts10_x - (pts10_x[-1] - pts3[:,0][-1])
    pts10_y = pts3[:,1] * scalingY
    pts10 = np.concatenate([pts10_x,
                            pts10_y,
                            W*ones(np.shape(pts10_x))], axis=1)
    pts12 = np.concatenate([pts10[:, 0], pts10[:, 1], -pts10[:, 2]], axis=1)
    # Move point of mesh "nose"
    NoseX = pts9_x[0]
    NoseY = pts9_y[0]
    # Calculate the location of the vertices on the positive y-axis and put them in a matrix
    vertices = zeros((12, 3))
    vertices[0, :] = [NoseX[0], NoseY[0], W]
    vertices[1, :] = [Xu[C_max_idx], pts9_y[-1] , W]
    vertices[2, :] = [Xu[-1], pts9_y[-1] , W]
    vertices[3, :] = [D, pts9_y[-1] , W]
    vertices[4, :] = [Xu[0], Yu[0], W]
    vertices[5, :] = [Xu[C_max_idx], Yu[C_max_idx], W]
    vertices[6, :] = [Xl[C_max_idx], Yl[C_max_idx], W]
    vertices[7, :] = [Xu[-1], Yu[-1], W]
    vertices[8, :] = [D, Yu[-1], W]
    vertices[9, :] = [Xl[C_max_idx], pts10_y[-1], W]
    vertices[10, :] = [Xu[-1], pts10_y[-1], W]
    vertices[11, :] = [D, pts10_y[-1], W]
    # Create vertices for other side (negative z-axis)
    vertices2 = vertices.copy()
    vertices2[:, 2] *= -1
    vertices = np.vstack((vertices, vertices2))
    # Open file
    f = open("blockMeshDict", "w")
    # Write file (header, vertices, blocks, spline edges, boundary patches)
    f.write("/*--------------------------------*- C++ -*----------------------------------*\\ \n")
    f.write("| ========= | | \n")
    f.write("| \\\\ / F ield | OpenFOAM: The Open Source CFD Toolbox | \n")
    f.write("| \\\\ / O peration | Version: 3.0.x | \n")
    f.write("| \\\\ / A nd | Web: www.OpenFOAM.com | \n")
    f.write("| \\\\/ M anipulation | | \n")
    f.write("\\*---------------------------------------------------------------------------*/ \n")
    f.write("FoamFile \n")
    f.write("{ \n")
    f.write(" version 2.0; \n")
    f.write(" format ascii; \n")
    f.write(" class dictionary; \n")
    f.write(" object blockMeshDict; \n")
    f.write("} \n")
    f.write("// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // \n")
    f.write("\n")
    f.write("convertToMeters %f; \n" % scale)
    f.write("\n")
    f.write("vertices \n")
    f.write("( \n")
    for vertex in vertices:
        f.write(" (%f %f %f)\n" % tuple(vertex))
    f.write("); \n")
    f.write("\n")
    f.write("blocks \n")
    f.write("( \n")
    f.write(" hex (16 17 13 12 4 5 1 0) (%i %i %i) simpleGrading (%f %f 1) \n" % (Nleading, NT, NW, ExpLeading, ExpTransverse))
    f.write(" hex (17 19 14 13 5 7 2 1) (%i %i %i) simpleGrading (%f %f 1) \n" % (Ntrailing, NT, NW, ExpTrailing, ExpTransverse))
    f.write(" hex (19 20 15 14 7 8 3 2) (%i %i %i) simpleGrading (%f %f 1) \n" % (ND, NT, NW, ExpDownstream, ExpTransverse))
    f.write(" hex (4 6 9 0 16 18 21 12) (%i %i %i) simpleGrading (%f %f 1) \n" % (Nleading, NT, NW, ExpLeading, ExpTransverse))
    f.write(" hex (6 7 10 9 18 19 22 21) (%i %i %i) simpleGrading (%f %f 1) \n" % (Ntrailing, NT, NW, ExpTrailing, ExpTransverse))
    f.write(" hex (7 8 11 10 19 20 23 22) (%i %i %i) simpleGrading (%f %f 1) \n" % (ND, NT, NW, ExpDownstream, ExpTransverse))
    f.write("); \n")
    f.write("\n")
    f.write("edges \n")
    f.write("( \n")
    f.write(" spline 4 5 \n")
    f.write(" ( \n")
    for pt in np.array(pts1):
        f.write(" (%f %f %f) \n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 5 7 \n")
    f.write(" ( \n")
    for pt in np.array(pts2):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 4 6 \n")
    f.write(" ( \n")
    for pt in np.array(pts3):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 6 7 \n")
    f.write(" ( \n")
    for pt in np.array(pts4):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 16 17 \n")
    f.write(" ( \n")
    for pt in np.array(pts5):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 17 19 \n")
    f.write(" ( \n")
    for pt in np.array(pts6):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 16 18 \n")
    f.write(" ( \n")
    for pt in np.array(pts7):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 18 19 \n")
    f.write(" ( \n")
    for pt in np.array(pts8):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 0 1 \n")
    f.write(" ( \n")
    for pt in np.array(pts9):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 12 13 \n")
    f.write(" ( \n")
    for pt in np.array(pts11):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 0 9 \n")
    f.write(" ( \n")
    for pt in np.array(pts10):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write(" spline 12 21 \n")
    f.write(" ( \n")
    for pt in np.array(pts12):
        f.write(" (%f %f %f)\n" % tuple(pt))
    f.write(" ) \n")
    f.write("); \n")
    f.write("\n")
    f.write("boundary \n")
    f.write("( \n")
    f.write(" inlet \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (1 0 12 13) \n")
    f.write(" (0 9 21 12) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" outlet \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (11 8 20 23) \n")
    f.write(" (8 3 15 20) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" topAndBottom \n")
    f.write(" { \n")
    f.write(" type patch; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (3 2 14 15) \n")
    f.write(" (2 1 13 14) \n")
    f.write(" (9 10 22 21) \n")
    f.write(" (10 11 23 22) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("\n")
    f.write(" airfoil \n")
    f.write(" { \n")
    f.write(" type wall; \n")
    f.write(" faces \n")
    f.write(" ( \n")
    f.write(" (5 4 16 17) \n")
    f.write(" (7 5 17 19) \n")
    f.write(" (4 6 18 16) \n")
    f.write(" (6 7 19 18) \n")
    f.write(" ); \n")
    f.write(" } \n")
    f.write("); \n")
    f.write(" \n")
    f.write("mergePatchPairs \n")
    f.write("( \n")
    f.write("); \n")
    f.write(" \n")
    f.write("// ************************************************************************* // \n")
    # Close file
    f.close()
if __name__ == "__main__":
    # CLI entry point, e.g. `python blockmeshdict.py 0012`.
    # The description previously read "Plotting results" -- a copy-paste
    # leftover; this script generates a blockMeshDict, so say so.
    parser = argparse.ArgumentParser(
        description="Generate a blockMeshDict for a NACA 4-digit foil")
    parser.add_argument("foil", help="NACA foil digits")
    args = parser.parse_args()
    print("Generating blockMeshDict for a NACA {}".format(args.foil))
    gen_blockmeshdict(args.foil)
| 0 | 0 | 0 |
d399b88a18aecdddd170a1ff0ae5152520149d95 | 470 | py | Python | pandayoda/common/serializer.py | wguanicedew/panda-yoda | df0860363818a219dd5bc2bfcda2659e598bd6bd | [
"Apache-2.0"
] | null | null | null | pandayoda/common/serializer.py | wguanicedew/panda-yoda | df0860363818a219dd5bc2bfcda2659e598bd6bd | [
"Apache-2.0"
] | null | null | null | pandayoda/common/serializer.py | wguanicedew/panda-yoda | df0860363818a219dd5bc2bfcda2659e598bd6bd | [
"Apache-2.0"
] | null | null | null | import json,logging
logger = logging.getLogger(__name__)
| 21.363636 | 67 | 0.659574 | import json,logging
logger = logging.getLogger(__name__)
def serialize(msg, pretty_print=False):
    """Serialize *msg* to a JSON string.

    :param msg: a JSON-serializable object
    :param pretty_print: when True, indent by 2 and sort keys
    :raises: re-raises any serialization failure after logging it
    """
    try:
        if pretty_print:
            return json.dumps(msg, indent=2, sort_keys=True)
        return json.dumps(msg)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not caught and logged here.
        logger.exception('failed to serialize the message: %s', msg)
        raise
def deserialize(msg):
    """Parse a JSON document into a Python object.

    :param msg: JSON text (str or bytes)
    :raises: re-raises any parse failure after logging it
    """
    try:
        return json.loads(msg)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not caught and logged here.
        logger.exception('failed to deserialize the message: %s', msg)
        raise
| 365 | 0 | 46 |
4f8befee59e57e0c07b138ebb1f32b92934c4723 | 980 | py | Python | Hard/Median_Of_Two_Sorted_Arrays.py | dianjiaogit/LeetCode_Python_solution | 390693c839d1be8802c21ea81062443b6d5ea36f | [
"MIT"
] | null | null | null | Hard/Median_Of_Two_Sorted_Arrays.py | dianjiaogit/LeetCode_Python_solution | 390693c839d1be8802c21ea81062443b6d5ea36f | [
"MIT"
] | null | null | null | Hard/Median_Of_Two_Sorted_Arrays.py | dianjiaogit/LeetCode_Python_solution | 390693c839d1be8802c21ea81062443b6d5ea36f | [
"MIT"
] | null | null | null | # There are two sorted arrays nums1 and nums2 of size m and n respectively.
# Find the median of the two sorted arrays. The overall run time complexity should be O(log (m+n)).
# Example 1:
# nums1 = [1, 3]
# nums2 = [2]
# The median is 2.0
# Example 2:
# nums1 = [1, 2]
# nums2 = [3, 4]
# The median is (2 + 3)/2 = 2.5
| 24.5 | 99 | 0.523469 | # There are two sorted arrays nums1 and nums2 of size m and n respectively.
# Find the median of the two sorted arrays. The overall run time complexity should be O(log (m+n)).
# Example 1:
# nums1 = [1, 3]
# nums2 = [2]
# The median is 2.0
# Example 2:
# nums1 = [1, 2]
# nums2 = [3, 4]
# The median is (2 + 3)/2 = 2.5
class Solution:
    def findMedianSortedArrays(self, nums1, nums2):
        """Return the median of the two sorted arrays.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: float
        """
        combined = merge(nums1, nums2)
        size = len(combined)
        mid = size // 2
        if size % 2 == 1:
            return combined[mid]
        return (combined[mid] + combined[mid - 1]) / 2
def merge(nums1, nums2):
    """Merge two sorted lists into one sorted list.

    Iterative two-pointer merge, O(m + n) time. The previous recursive
    version copied list slices on every call (O(n^2) overall) and hit
    Python's recursion limit on long inputs; results are identical.
    """
    merged = []
    i = j = 0
    while i < len(nums1) and j < len(nums2):
        if nums1[i] <= nums2[j]:
            merged.append(nums1[i])
            i += 1
        else:
            merged.append(nums2[j])
            j += 1
    # At most one of these extends with anything.
    merged.extend(nums1[i:])
    merged.extend(nums2[j:])
    return merged
cb3441198ad038e40a02ae3b24dbdfc064616efb | 2,428 | py | Python | src/judge.py | Gomango999/codebreaker | 407c4ac7a69c8db52cc7d2a57034cdda243c9134 | [
"MIT"
] | 1 | 2021-12-11T01:43:27.000Z | 2021-12-11T01:43:27.000Z | src/judge.py | Gomango999/codebreaker | 407c4ac7a69c8db52cc7d2a57034cdda243c9134 | [
"MIT"
] | null | null | null | src/judge.py | Gomango999/codebreaker | 407c4ac7a69c8db52cc7d2a57034cdda243c9134 | [
"MIT"
] | 1 | 2021-12-15T07:04:29.000Z | 2021-12-15T07:04:29.000Z | import execute
import random
import traceback
# input sanity checker: prints 1 if valid
# correct checker: prints 1 if valid
# Returns score, status_message
| 36.238806 | 133 | 0.653213 | import execute
import random
import traceback
# input sanity checker: prints 1 if valid
# correct checker: prints 1 if valid
# Returns score, status_message
async def run_judge(problem, user_input, user_output):
    """Judge a user's proposed (input, output) pair against a problem.

    Returns (score, status_message):
      10 -> the input breaks the broken solution (TLE / RE / WA)
      -1 -> the user's own output is wrong for their input
      -2 -> the input does not break the broken solution
       0 -> insane input or internal judging error

    BUGFIX: the original placed `box.cleanup()` *after* `return` in the
    TLE/RE handlers (unreachable) and skipped cleanup on several other
    return paths, leaking the sandbox. Cleanup is now guaranteed exactly
    once via try/finally. Bare `except:` clauses were also narrowed to
    `except Exception:`.
    """
    box = execute.Box()
    try:
        # 1) Sanity-check the proposed input (checker prints "1" when valid).
        sanity_exe = box.prepfile('sanity', problem.sanity_exe)
        try:
            result = await box.run_command_async(sanity_exe, timeout=10, input=user_input)
            if result.strip() != "1":
                return (0, "Input is insane.")
        except Exception:
            return (0, "[internal error: sanity checker failed]")

        broken_exe = box.prepfile('broken.exe', problem.broken_exe)
        correct_exe = box.prepfile('correct.exe', problem.correct_exe)

        # 2) Run the intentionally-broken solution on the input.
        try:
            broken_output = await box.run_command_async(broken_exe, timeout=1, input=user_input)
            broken_output = broken_output.strip()
        except execute.TimeoutExpired:
            return (10, "Code broken! (TLE)")
        except execute.NonZeroReturnCode:
            return (10, "Code broken! (RE)")

        # 3) Run the judges' correct solution for the reference output.
        try:
            correct_output = await box.run_command_async(correct_exe, input=user_input)
            correct_output = correct_output.strip()
        except Exception:
            return (0, "[internal error: judges' solution failed]")

        user_input_file = box.prepfile('user.in', user_input)
        user_output_file = box.prepfile('user.out', user_output)
        broken_output_file = box.prepfile('broken.out', broken_output)
        correct_output_file = box.prepfile('correct.out', correct_output)
        checker_exe = box.prepfile('checker', problem.checker_exe)

        # 4) The user's own output must be correct for their input.
        try:
            result = await box.run_command_async("%s %s %s %s" % (
                checker_exe, user_output_file, correct_output_file, user_input_file))
            print(result)
            if result.strip() != "100":
                return (-1, "Wrong answer for proposed input.")
        except Exception:
            return (0, "[internal error: checker broke when marking user output]")

        # 5) The broken solution's output must be judged wrong.
        try:
            result = await box.run_command_async("%s %s %s %s" % (
                checker_exe, broken_output_file, correct_output_file, user_input_file))
            if result.strip() == "100":
                return (-2, "Input does not break code.")
        except Exception:
            return (0, "[internal error: checker broke when marking broken output]")

        return (10, "Code broken! (WA)")
    finally:
        # Exactly one cleanup on every return path.
        box.cleanup()
| 2,247 | 0 | 22 |
062605923e1edf65555afdc8134ca5a4fdc006fb | 1,432 | py | Python | Comparisoner.py | 1970938138/LevelUper | 2cbe9d61bdd0bc6d37a5e1fe8d2d9601132135f9 | [
"MIT"
] | 1 | 2020-08-08T19:32:43.000Z | 2020-08-08T19:32:43.000Z | Comparisoner.py | 1970938138/LevelUper | 2cbe9d61bdd0bc6d37a5e1fe8d2d9601132135f9 | [
"MIT"
] | null | null | null | Comparisoner.py | 1970938138/LevelUper | 2cbe9d61bdd0bc6d37a5e1fe8d2d9601132135f9 | [
"MIT"
] | null | null | null | #/usr/bin/python3/
#coding=utf-8
#================ 简介 ===================
# 脚本: 伪·红石比较器
# 作者: 北方重工NK1
# 时间: 2017年12月10日 13:37:11
# 描述: 匹配元素_作业帮
#================ 简介 ===================
import re
Checking_Points1=r'<dt>考点:</dt>([\s\S]*?)</dd>'
Checking_Points2=r'.+?\[(.*?)\].+?'
Checking_Points_biology=r'<dd>([\s\S]*)\\n'
QQmsg=r'http://www.zybang.com/question/rcswebview/'
print("The comparisoner has been launched.")
#/usr/bin/python3/
#coding=utf-8
#================ Info ===================
# Script: pseudo "redstone comparator" (element matcher)
# Author: 北方重工NK1
# Date:   2017-12-10 13:37:11
# Purpose: match elements from zybang.com (作业帮) question pages
#================ Info ===================
import re
# Two-stage pipeline for the "exam points" section of a question page: first
# grab the <dt>...</dt>...</dd> section, then pull the bracketed items.
Checking_Points1=r'<dt>考点:</dt>([\s\S]*?)</dd>'
Checking_Points2=r'.+?\[(.*?)\].+?'
# Biology pages embed the points in a <dd> block ended by a literal "\n".
Checking_Points_biology=r'<dd>([\s\S]*)\\n'
# URL prefix of zybang question pages shared through QQ messages.
QQmsg=r'http://www.zybang.com/question/rcswebview/'
print("The comparisoner has been launched.")
def match(target,html):
    """Extract data from zybang.com question HTML.

    Args:
        target: "Checking_Points" to extract the exam-point list, or "QQ" to
            test whether *html* starts with a zybang question URL.
        html: the page HTML (or the URL text for the "QQ" target).

    Returns:
        For "Checking_Points": a list of points, a stripped biology point
        string, or None when nothing was found (a notice is printed then).
        For "QQ": True/False. Any other target yields None.
    """
    if target=="Checking_Points":
        section=re.findall(Checking_Points1,html)
        if section:
            points=re.findall(Checking_Points2,str(section))
            if points:
                return points
            # Biology pages store the points differently; retry on the same
            # section (the original recomputed an identical findall here).
            bio=re.findall(Checking_Points_biology,str(section))
            if bio:
                return str(bio[0]).strip('\\n').strip(' ')
        # Consolidated failure branch — the original duplicated this exact
        # message in two places.
        print("没有找到考点,可能是该题目没有提供考点信息.如果你确定题目提供了考点信息,请联系原作者,并向其发送该题目的网址.","\n")
        return None
    elif target=="QQ":
        return bool(re.match(QQmsg,html))
    return None
49dacf4bde624efc469d0f8d61220b29c7647828 | 1,199 | py | Python | 001. Add Two Numbers/solution.py | corkiwang1122/LeetCode | 39b1680b58173e6ec23a475605c3450ce8f78a81 | [
"MIT"
] | 3,690 | 2015-01-03T03:40:23.000Z | 2022-03-31T08:10:19.000Z | 001. Add Two Numbers/solution.py | Windfall94/LeetCode | 1756256d7e619164076bbf358c8f7ca68cd8bd79 | [
"MIT"
] | 21 | 2015-01-25T16:39:43.000Z | 2021-02-26T05:28:22.000Z | 001. Add Two Numbers/solution.py | Windfall94/LeetCode | 1756256d7e619164076bbf358c8f7ca68cd8bd79 | [
"MIT"
] | 1,290 | 2015-01-09T01:28:20.000Z | 2022-03-28T12:20:39.000Z | #!python3
# Definition for singly-linked list.
if __name__ == "__main__":
l1 = ListNode(2)
l1.next = ListNode(4)
l1.next.next = ListNode(3)
l2 = ListNode(5)
l2.next = ListNode(6)
l2.next.next = ListNode(4)
lsum = ListNode(7)
lsum.next = ListNode(0)
lsum.next.next = ListNode(8)
print(compareLinkedList(Solution().addTwoNumbers(l1, l2), lsum))
| 21.8 | 68 | 0.506255 | #!python3
# Definition for singly-linked list.
class ListNode:
    """A single node of a singly linked list."""
    def __init__(self, x):
        # Hold the payload; the successor is attached later by the caller.
        self.val = x
        self.next = None
class Solution:
    """Elementary-school addition over digit-reversed linked lists."""
    def addTwoNumbers(self, l1, l2):
        """Add two non-negative integers stored least-significant digit first.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        dummy = ListNode(0)
        tail = dummy
        carry = 0
        # Keep consuming digits while either list has nodes left or a carry
        # remains to be emitted.
        while l1 is not None or l2 is not None or carry:
            if l1 is not None:
                carry += l1.val
                l1 = l1.next
            if l2 is not None:
                carry += l2.val
                l2 = l2.next
            carry, digit = divmod(carry, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        return dummy.next
def compareLinkedList(l1, l2):
    """Return True when both linked lists hold the same sequence of values."""
    a, b = l1, l2
    while a is not None or b is not None:
        # Different lengths, or differing values at this position -> unequal.
        if a is None or b is None:
            return False
        if a.val != b.val:
            return False
        a, b = a.next, b.next
    return True
if __name__ == "__main__":
    # Smoke test: 342 + 465 = 807, digits stored least-significant first.
    def _from_digits(digits):
        # Build a chain like 2 -> 4 -> 3 from the list [2, 4, 3].
        head = ListNode(digits[0])
        tail = head
        for d in digits[1:]:
            tail.next = ListNode(d)
            tail = tail.next
        return head
    l1 = _from_digits([2, 4, 3])
    l2 = _from_digits([5, 6, 4])
    lsum = _from_digits([7, 0, 8])
    print(compareLinkedList(Solution().addTwoNumbers(l1, l2), lsum))
| 207 | 509 | 94 |
864473efccfc1e8c9a8b5879266e118d1d0fc8b1 | 1,165 | py | Python | SAMBALoader/Transports/Transport.py | abcminiuser/sam-ba-loader | 3154cb1cbc3c45d30438fafa8955a4c6e423c9d8 | [
"MIT"
] | 7 | 2016-03-21T18:31:17.000Z | 2021-02-17T18:55:33.000Z | SAMBALoader/Transports/Transport.py | abcminiuser/sam-ba-loader | 3154cb1cbc3c45d30438fafa8955a4c6e423c9d8 | [
"MIT"
] | 11 | 2016-03-20T20:33:58.000Z | 2016-12-04T20:53:18.000Z | SAMBALoader/Transports/Transport.py | abcminiuser/sam-ba-loader | 3154cb1cbc3c45d30438fafa8955a4c6e423c9d8 | [
"MIT"
] | 4 | 2016-03-21T18:31:31.000Z | 2020-06-24T14:33:58.000Z | #
# Open Source SAM-BA Programmer
# Copyright (C) Dean Camera, 2016.
#
# dean [at] fourwalledcubicle [dot] com
# www.fourwalledcubicle.com
#
#
# Released under a MIT license, see LICENCE.txt.
import abc
import logging
class TimeoutError(Exception):
"""Exception thrown when a read operation times out while waiting for more
data.
"""
pass
class TransportBase(object):
"""Base class for SAM-BA transports. Derived instances should override all
methods listed here.
"""
__metaclass__ = abc.ABCMeta
LOG = logging.getLogger(__name__)
@abc.abstractmethod
def read(self, length):
"""Reads a given number of bytes from the transport.
Args:
length : Number of bytes to read. If `None`, a full line will be
read until a terminator is reached.
Returns:
Byte array of the received data.
"""
pass
@abc.abstractmethod
def write(self, data):
"""Writes a given number of bytes to the transport.
Args:
data : Bytes to write.
"""
pass
| 20.803571 | 80 | 0.593133 | #
# Open Source SAM-BA Programmer
# Copyright (C) Dean Camera, 2016.
#
# dean [at] fourwalledcubicle [dot] com
# www.fourwalledcubicle.com
#
#
# Released under a MIT license, see LICENCE.txt.
import abc
import logging
class TimeoutError(Exception):
"""Exception thrown when a read operation times out while waiting for more
data.
"""
pass
class TransportBase(object):
"""Base class for SAM-BA transports. Derived instances should override all
methods listed here.
"""
__metaclass__ = abc.ABCMeta
LOG = logging.getLogger(__name__)
@abc.abstractmethod
def read(self, length):
"""Reads a given number of bytes from the transport.
Args:
length : Number of bytes to read. If `None`, a full line will be
read until a terminator is reached.
Returns:
Byte array of the received data.
"""
pass
@abc.abstractmethod
def write(self, data):
"""Writes a given number of bytes to the transport.
Args:
data : Bytes to write.
"""
pass
| 0 | 0 | 0 |
a4c463bce1cc1ff7b6828d441bb881f6bbe7c1d7 | 2,624 | py | Python | drafts/rebuild_frame.py | BrunoSanchez/capsule_N1 | a5ee3b74afc27de1a954ae2f9f96c278a4723226 | [
"BSD-3-Clause"
] | 12 | 2017-04-13T06:49:42.000Z | 2019-11-19T09:27:43.000Z | drafts/rebuild_frame.py | BrunoSanchez/capsule_N1 | a5ee3b74afc27de1a954ae2f9f96c278a4723226 | [
"BSD-3-Clause"
] | 56 | 2017-09-05T16:00:57.000Z | 2020-11-20T18:02:58.000Z | drafts/rebuild_frame.py | BrunoSanchez/capsule_N1 | a5ee3b74afc27de1a954ae2f9f96c278a4723226 | [
"BSD-3-Clause"
] | 5 | 2017-10-08T16:55:40.000Z | 2020-09-22T14:04:53.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# rebuild_frame.py
#
# Copyright 2016 Bruno S <bruno@oac.unc.edu.ar>
#
# This file is part of ProperImage (https://github.com/toros-astro/ProperImage)
# License: BSD-3-Clause
# Full Text: https://github.com/toros-astro/ProperImage/blob/master/LICENSE.txt
#
import os
import shlex
import subprocess
import sys
import numpy as np
import matplotlib.pyplot as plt
import sep
from astropy.io import fits
from properimage import simtools
from properimage import propercoadd as pc
from properimage import utils
# =============================================================================
# PSF measure test by propercoadd
# =============================================================================
N = 512
test_dir = os.path.abspath('./test/test_images/rebuild_psf2')
frame = utils.sim_varpsf(400, test_dir, SN=5.)
with pc.SingleImage(frame) as sim:
a_fields, psf_basis = sim.get_variable_psf()
utils.plot_afields(a_fields, frame.shape, os.path.join(test_dir, 'a_fields.png'))
utils.plot_psfbasis(psf_basis, os.path.join(test_dir, 'psf_basis.png'), nbook=False)
plt.imshow(np.log10(frame), interpolation='none')
#plt.plot(cat['sources']['x'], cat['sources']['y'], '.k')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'test_frame.png'))
plt.close()
cat = sep.extract(frame - sep.Background(frame),
thresh=4.5*sep.Background(frame).globalrms)
xy = [(int(row['y']), int(row['x'])) for row in cat]
weights = 100000. * cat['flux']/max(cat['flux'])
m = simtools.delta_point(N*2, center=False, xy=xy)#, weights=weights)
x, y = sim.get_afield_domain() # np.mgrid[:frame.shape[0], :frame.shape[1]]
rebuild = np.zeros_like(frame)
for i in range(len(psf_basis)):
psf = psf_basis[i]
a = a_fields[i]
rebuild += a(x, y) * simtools.convol_gal_psf_fft(m, psf)
rebuild += 1000.
plt.imshow(np.log10(rebuild), interpolation='none')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'frame_rebuild.png'))
plt.close()
f = fits.PrimaryHDU(frame)
f.writeto(os.path.join(test_dir, 'test_frame.fits'), overwrite=True)
r = fits.PrimaryHDU(rebuild)
r.writeto(os.path.join(test_dir, 'frame_rebuild.fits'), overwrite=True)
scale = np.vdot(frame.flatten(), rebuild.flatten())
scale = scale/np.vdot(rebuild.flatten(), rebuild.flatten())
diff = frame - scale*rebuild
plt.imshow(np.log10(diff), interpolation='none')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'diff.png'))
plt.close()
diff = fits.PrimaryHDU(diff)
diff.writeto(os.path.join(test_dir, 'diff.fits'), overwrite=True)
| 28.835165 | 84 | 0.676829 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# rebuild_frame.py
#
# Copyright 2016 Bruno S <bruno@oac.unc.edu.ar>
#
# This file is part of ProperImage (https://github.com/toros-astro/ProperImage)
# License: BSD-3-Clause
# Full Text: https://github.com/toros-astro/ProperImage/blob/master/LICENSE.txt
#
import os
import shlex
import subprocess
import sys
import numpy as np
import matplotlib.pyplot as plt
import sep
from astropy.io import fits
from properimage import simtools
from properimage import propercoadd as pc
from properimage import utils
# =============================================================================
# PSF measure test by propercoadd
# =============================================================================
N = 512
test_dir = os.path.abspath('./test/test_images/rebuild_psf2')
frame = utils.sim_varpsf(400, test_dir, SN=5.)
with pc.SingleImage(frame) as sim:
a_fields, psf_basis = sim.get_variable_psf()
utils.plot_afields(a_fields, frame.shape, os.path.join(test_dir, 'a_fields.png'))
utils.plot_psfbasis(psf_basis, os.path.join(test_dir, 'psf_basis.png'), nbook=False)
plt.imshow(np.log10(frame), interpolation='none')
#plt.plot(cat['sources']['x'], cat['sources']['y'], '.k')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'test_frame.png'))
plt.close()
cat = sep.extract(frame - sep.Background(frame),
thresh=4.5*sep.Background(frame).globalrms)
xy = [(int(row['y']), int(row['x'])) for row in cat]
weights = 100000. * cat['flux']/max(cat['flux'])
m = simtools.delta_point(N*2, center=False, xy=xy)#, weights=weights)
x, y = sim.get_afield_domain() # np.mgrid[:frame.shape[0], :frame.shape[1]]
rebuild = np.zeros_like(frame)
for i in range(len(psf_basis)):
psf = psf_basis[i]
a = a_fields[i]
rebuild += a(x, y) * simtools.convol_gal_psf_fft(m, psf)
rebuild += 1000.
plt.imshow(np.log10(rebuild), interpolation='none')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'frame_rebuild.png'))
plt.close()
f = fits.PrimaryHDU(frame)
f.writeto(os.path.join(test_dir, 'test_frame.fits'), overwrite=True)
r = fits.PrimaryHDU(rebuild)
r.writeto(os.path.join(test_dir, 'frame_rebuild.fits'), overwrite=True)
scale = np.vdot(frame.flatten(), rebuild.flatten())
scale = scale/np.vdot(rebuild.flatten(), rebuild.flatten())
diff = frame - scale*rebuild
plt.imshow(np.log10(diff), interpolation='none')
plt.colorbar()
plt.tight_layout()
plt.savefig(os.path.join(test_dir, 'diff.png'))
plt.close()
diff = fits.PrimaryHDU(diff)
diff.writeto(os.path.join(test_dir, 'diff.fits'), overwrite=True)
| 0 | 0 | 0 |
8a8e45c6b2044b3b506b00abde61b8b723a2cc7e | 1,035 | py | Python | dataservices/migrations/0016_internetusage_country.py | uktrade/directory-api | 45a9024a7ecc2842895201cbb51420ba9e57a168 | [
"MIT"
] | 2 | 2017-06-02T09:09:08.000Z | 2021-01-18T10:26:53.000Z | dataservices/migrations/0016_internetusage_country.py | uktrade/directory-api | 45a9024a7ecc2842895201cbb51420ba9e57a168 | [
"MIT"
] | 629 | 2016-10-10T09:35:52.000Z | 2022-03-25T15:04:04.000Z | dataservices/migrations/0016_internetusage_country.py | uktrade/directory-api | 45a9024a7ecc2842895201cbb51420ba9e57a168 | [
"MIT"
] | 5 | 2017-06-22T10:02:22.000Z | 2022-03-14T17:55:21.000Z | # Generated by Django 2.2.13 on 2021-01-07 14:53
from django.db import migrations, models
import django.db.models.deletion
| 29.571429 | 120 | 0.681159 | # Generated by Django 2.2.13 on 2021-01-07 14:53
from django.db import migrations, models
import django.db.models.deletion
def update_countries(apps, schema_editor):
    """Populate ``InternetUsage.country`` from each row's ``country_code``."""
    # Use the historical (migration-state) models rather than direct imports,
    # which could expose a newer schema than this migration expects.
    internet_usage = apps.get_model('dataservices', 'internetusage')
    country_model = apps.get_model('dataservices', 'Country')
    for usage in internet_usage.objects.all():
        usage.country = country_model.objects.filter(iso3=usage.country_code).first()
        usage.save()
def backward(apps, schema_editor):
    """Reverse step for RunPython: intentionally a no-op."""
class Migration(migrations.Migration):
    """Add ``InternetUsage.country`` and backfill it via ``update_countries``."""
    dependencies = [
        ('dataservices', '0015_ciafactbook_country'),
    ]
    operations = [
        # Nullable FK; deleting a Country sets the reference back to NULL.
        migrations.AddField(
            model_name='internetusage',
            name='country',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dataservices.Country'),
        ),
        # Data migration filling the new column (no-op on reverse).
        migrations.RunPython(update_countries, backward),
    ]
| 427 | 412 | 69 |
3dcd550d074b7ec61777a55b7db45c0eded56dde | 4,060 | py | Python | applications/ibm_notebook/ibm_notebook.py | iostackproject/zoe-applications | 94a1cfee2a45afcf7375af1f01502717ede3f9d7 | [
"Apache-2.0"
] | null | null | null | applications/ibm_notebook/ibm_notebook.py | iostackproject/zoe-applications | 94a1cfee2a45afcf7375af1f01502717ede3f9d7 | [
"Apache-2.0"
] | null | null | null | applications/ibm_notebook/ibm_notebook.py | iostackproject/zoe-applications | 94a1cfee2a45afcf7375af1f01502717ede3f9d7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2016, Francesco Pace
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
sys.path.append('../..')
import frameworks.spark.spark as spark_framework
import frameworks.spark.spark_jupyter as spark_jupyter
#################################
# Zoe Application customization #
#################################
APP_NAME = 'ibm-notebook'
SPARK_MASTER_MEMORY_LIMIT = 512 * (1024 ** 2) # 512MB
SPARK_WORKER_MEMORY_LIMIT = 12 * (1024 ** 3) # 12GB
NOTEBOOK_MEMORY_LIMIT = 4 * (1024 ** 3) # 4GB, contains also the Spark client
SPARK_WORKER_CORES = 6
SPARK_WORKER_COUNT = 2
DOCKER_REGISTRY = '172.17.131.201:5000' # Set to None to use images from the Docker Hub
SPARK_MASTER_IMAGE = 'iostackrepo/spark-master-ibm'
SPARK_WORKER_IMAGE = 'iostackrepo/spark-worker-ibm'
NOTEBOOK_IMAGE = 'iostackrepo/spark-jupyter-notebook-ibm'
#####################
# END CUSTOMIZATION #
#####################
if __name__ == "__main__":
app_dict = create_app(app_name=APP_NAME, notebook_memory_limit=NOTEBOOK_MEMORY_LIMIT,
spark_master_memory_limit=SPARK_MASTER_MEMORY_LIMIT,
spark_worker_memory_limit=SPARK_WORKER_MEMORY_LIMIT,
spark_worker_cores=SPARK_WORKER_CORES, spark_worker_count=SPARK_WORKER_COUNT,
docker_registry=DOCKER_REGISTRY, spark_master_image=SPARK_MASTER_IMAGE,
spark_worker_image=SPARK_WORKER_IMAGE, notebook_image=NOTEBOOK_IMAGE)
json.dump(app_dict, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| 43.655914 | 128 | 0.678079 | #!/usr/bin/env python
# Copyright (c) 2016, Francesco Pace
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
sys.path.append('../..')
import frameworks.spark.spark as spark_framework
import frameworks.spark.spark_jupyter as spark_jupyter
#################################
# Zoe Application customization #
#################################
APP_NAME = 'ibm-notebook'
SPARK_MASTER_MEMORY_LIMIT = 512 * (1024 ** 2) # 512MB
SPARK_WORKER_MEMORY_LIMIT = 12 * (1024 ** 3) # 12GB
NOTEBOOK_MEMORY_LIMIT = 4 * (1024 ** 3) # 4GB, contains also the Spark client
SPARK_WORKER_CORES = 6
SPARK_WORKER_COUNT = 2
DOCKER_REGISTRY = '172.17.131.201:5000' # Set to None to use images from the Docker Hub
SPARK_MASTER_IMAGE = 'iostackrepo/spark-master-ibm'
SPARK_WORKER_IMAGE = 'iostackrepo/spark-worker-ibm'
NOTEBOOK_IMAGE = 'iostackrepo/spark-jupyter-notebook-ibm'
#####################
# END CUSTOMIZATION #
#####################
def spark_jupyter_notebook_ibm_app(name,
                                   notebook_mem_limit, master_mem_limit, worker_mem_limit, worker_cores,
                                   worker_count,
                                   master_image, worker_image, notebook_image):
    """Assemble the Zoe application description (version 2) for a Spark
    cluster with an attached Jupyter notebook service."""
    master = spark_framework.spark_master_service(int(master_mem_limit), master_image)
    workers = spark_framework.spark_worker_service(int(worker_count), int(worker_mem_limit),
                                                   int(worker_cores), worker_image)
    notebook = spark_jupyter.spark_jupyter_notebook_service(int(notebook_mem_limit),
                                                            int(worker_mem_limit), notebook_image)
    return {
        'name': name,
        'version': 2,
        'will_end': False,
        'priority': 512,
        'requires_binary': False,
        'services': [
            master,
            workers,
            notebook,
        ]
    }
def create_app(app_name=APP_NAME, notebook_memory_limit=NOTEBOOK_MEMORY_LIMIT,
               spark_master_memory_limit=SPARK_MASTER_MEMORY_LIMIT, spark_worker_memory_limit=SPARK_WORKER_MEMORY_LIMIT,
               spark_worker_cores=SPARK_WORKER_CORES, spark_worker_count=SPARK_WORKER_COUNT,
               docker_registry=DOCKER_REGISTRY, spark_master_image=SPARK_MASTER_IMAGE,
               spark_worker_image=SPARK_WORKER_IMAGE, notebook_image=NOTEBOOK_IMAGE):
    """Build the Zoe application dict, defaulting every knob to the module's
    customization constants."""
    # Qualify image names with the private registry when one is configured;
    # None means the images come from the Docker Hub unprefixed.
    if docker_registry is not None:
        prefix = docker_registry + '/'
        spark_master_image = prefix + spark_master_image
        spark_worker_image = prefix + spark_worker_image
        notebook_image = prefix + notebook_image
    return spark_jupyter_notebook_ibm_app(app_name, notebook_memory_limit, spark_master_memory_limit,
                                          spark_worker_memory_limit, spark_worker_cores, spark_worker_count,
                                          spark_master_image, spark_worker_image, notebook_image)
if __name__ == "__main__":
    # Emit the application description as pretty-printed JSON on stdout.
    description = create_app(app_name=APP_NAME, notebook_memory_limit=NOTEBOOK_MEMORY_LIMIT,
                             spark_master_memory_limit=SPARK_MASTER_MEMORY_LIMIT,
                             spark_worker_memory_limit=SPARK_WORKER_MEMORY_LIMIT,
                             spark_worker_cores=SPARK_WORKER_CORES, spark_worker_count=SPARK_WORKER_COUNT,
                             docker_registry=DOCKER_REGISTRY, spark_master_image=SPARK_MASTER_IMAGE,
                             spark_worker_image=SPARK_WORKER_IMAGE, notebook_image=NOTEBOOK_IMAGE)
    json.dump(description, sys.stdout, sort_keys=True, indent=4)
    sys.stdout.write('\n')
| 1,905 | 0 | 46 |
d265bb95061be140a2fbade4620ead7c273e06d0 | 6,600 | py | Python | nornir_salt/plugins/processors/SaltEventProcessor.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 5 | 2021-01-22T09:34:55.000Z | 2021-12-22T08:12:34.000Z | nornir_salt/plugins/processors/SaltEventProcessor.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 2 | 2022-01-27T14:46:40.000Z | 2022-02-28T16:59:01.000Z | nornir_salt/plugins/processors/SaltEventProcessor.py | dmulyalin/nornir-salt | 184002995515dddc802b578400370c2219e94957 | [
"MIT"
] | 1 | 2021-01-10T04:37:08.000Z | 2021-01-10T04:37:08.000Z | """
SaltEventProcessor Plugin
#########################
Processor plugin to emit events on task execution progress,
used by Nornir Proxy Runner modules to track tasks flow.
SaltEventProcessor does not work outside of SaltStack environment.
SaltEventProcessor reference
============================
.. autofunction:: nornir_salt.plugins.processors.SaltEventProcessor.SaltEventProcessor
"""
import logging
import time
from nornir.core.inventory import Host
from nornir.core.task import AggregatedResult, MultiResult, Task
log = logging.getLogger(__name__)
try:
# starting with salt 3003 need to use loader_context to reconstruct
# __salt__ dunder within treads:
# details: https://github.com/saltstack/salt/issues/59962
try:
from salt.loader_context import loader_context
except ImportError:
# after salt 3004 api was updated - https://github.com/saltstack/salt/pull/60595
from salt.loader.context import loader_context
HAS_LOADER_CONTEXT = True
except ImportError:
HAS_LOADER_CONTEXT = False
class SaltEventProcessor:
"""
SaltEventProcessor can emit event on SaltStack Event bus about task execution progress.
:param __salt__: (obj) __salt__ dunder object
:param loader: (obj) salt loader, required to use __salt__ dunder within threads
for SaltStack version above 3003.
:param loader_context: (obj) salt loader context
:param proxy_id: (str) Proxy Minion ID to form event tags
:param tftr: (str) timestamp formatter string, default is "%d-%b-%Y %H:%M:%S"
:param identity: (dict) task identity dictionary of uuid4, jid, function_name keys
"""
def _emit_event(self, tag, data):
"""
Helper function to emit event on SaltStack Event BUS.
:param tag: (str) event tag string
:param data: (any) event data content
"""
if HAS_LOADER_CONTEXT and self.loader is not None:
with loader_context(self.loader):
self.__salt__["event.send"](tag=tag, data=data)
else:
self.__salt__["event.send"](tag=tag, data=data)
def _timestamp(self):
"""
Helper function to produce event data timestamp.
"""
return time.strftime(self.tftr)
| 35.869565 | 91 | 0.591212 | """
SaltEventProcessor Plugin
#########################
Processor plugin to emit events on task execution progress,
used by Nornir Proxy Runner modules to track tasks flow.
SaltEventProcessor does not work outside of SaltStack environment.
SaltEventProcessor reference
============================
.. autofunction:: nornir_salt.plugins.processors.SaltEventProcessor.SaltEventProcessor
"""
import logging
import time
from nornir.core.inventory import Host
from nornir.core.task import AggregatedResult, MultiResult, Task
log = logging.getLogger(__name__)
try:
# starting with salt 3003 need to use loader_context to reconstruct
# __salt__ dunder within treads:
# details: https://github.com/saltstack/salt/issues/59962
try:
from salt.loader_context import loader_context
except ImportError:
# after salt 3004 api was updated - https://github.com/saltstack/salt/pull/60595
from salt.loader.context import loader_context
HAS_LOADER_CONTEXT = True
except ImportError:
HAS_LOADER_CONTEXT = False
class SaltEventProcessor:
    """
    SaltEventProcessor can emit event on SaltStack Event bus about task execution progress.
    :param __salt__: (obj) __salt__ dunder object
    :param loader: (obj) salt loader, required to use __salt__ dunder within threads
        for SaltStack version above 3003.
    :param loader_context: (obj) salt loader context
    :param proxy_id: (str) Proxy Minion ID to form event tags
    :param tftr: (str) timestamp formatter string, default is "%d-%b-%Y %H:%M:%S"
    :param identity: (dict) task identity dictionary of uuid4, jid, function_name keys
    """

    def __init__(self, __salt__, loader, proxy_id, identity, tftr="%d-%b-%Y %H:%M:%S"):
        self.__salt__ = __salt__
        self.loader = loader
        self.proxy_id = proxy_id
        self.tftr = tftr
        self.jid = identity["jid"]
        self.function = identity["function_name"]

    def _emit_event(self, tag, data):
        """
        Helper function to emit event on SaltStack Event BUS.
        :param tag: (str) event tag string
        :param data: (any) event data content
        """
        # Check ``self.loader`` first — the loader-context machinery is only
        # relevant when a loader object was actually supplied.
        if self.loader is not None and HAS_LOADER_CONTEXT:
            with loader_context(self.loader):
                self.__salt__["event.send"](tag=tag, data=data)
        else:
            self.__salt__["event.send"](tag=tag, data=data)

    def _timestamp(self):
        """
        Helper function to produce event data timestamp.
        """
        return time.strftime(self.tftr)

    def _send_task_event(self, task, event, task_type, host=None, status="RUNNING"):
        """
        Build and emit one task-progress event. Consolidates the tag/payload
        formatting that was previously duplicated across all six hook methods.
        :param task: (obj) Nornir Task; reads ``task.name`` and, for task-level
            events, ``task.nornir.inventory.hosts``
        :param event: (str) "started" or "completed"
        :param task_type: (str) "task", "task_instance" or "subtask"
        :param host: (obj) Nornir Host for per-host events, None for task-level
        :param status: (str) "RUNNING", "PASSED" or "FAILED"
        """
        tag_segment = "subtask" if task_type == "subtask" else "task"
        parts = ["nornir-proxy", self.jid, self.proxy_id]
        if host is not None:
            parts.append(host.name)
        parts += [tag_segment, event, task.name]
        tag = "/".join(str(part) for part in parts)
        data = {
            "timestamp": self._timestamp(),
            "task_name": task.name,
            "jid": self.jid,
            "proxy_id": self.proxy_id,
            "task_event": event,
            "task_type": task_type,
            "status": status,
            "function": self.function,
        }
        if host is None:
            # Task-level events report all hosts targeted by this task run.
            data["hosts"] = list(task.nornir.inventory.hosts.keys())
        else:
            data["host"] = host.name
        self._emit_event(tag, data)

    def task_started(self, task: "Task") -> None:
        self._send_task_event(task, "started", "task")

    def task_completed(self, task: "Task", result: "AggregatedResult") -> None:
        # NOTE: status derives from ``task.results.failed`` (as in the
        # original implementation), not from the ``result`` argument.
        self._send_task_event(
            task, "completed", "task",
            status="FAILED" if task.results.failed else "PASSED",
        )

    def task_instance_started(self, task: "Task", host: "Host") -> None:
        self._send_task_event(task, "started", "task_instance", host=host)

    def task_instance_completed(
        self, task: "Task", host: "Host", result: "MultiResult"
    ) -> None:
        self._send_task_event(
            task, "completed", "task_instance", host=host,
            status="FAILED" if task.results.failed else "PASSED",
        )

    def subtask_instance_started(self, task: "Task", host: "Host") -> None:
        self._send_task_event(task, "started", "subtask", host=host)

    def subtask_instance_completed(
        self, task: "Task", host: "Host", result: "MultiResult"
    ) -> None:
        self._send_task_event(
            task, "completed", "subtask", host=host,
            status="FAILED" if task.results.failed else "PASSED",
        )
| 4,155 | 0 | 189 |
3716bae826c7763b2b5a647b070f06460590a0f5 | 371 | py | Python | umb/__about__.py | fadhiilrachman/ReverseUMB | 19cab7e50471a8002354d91ebb9a97e3f51f1906 | [
"MIT"
] | 1 | 2019-02-19T11:15:31.000Z | 2019-02-19T11:15:31.000Z | umb/__about__.py | fadhiilrachman/UMBPrivateAPI | 19cab7e50471a8002354d91ebb9a97e3f51f1906 | [
"MIT"
] | null | null | null | umb/__about__.py | fadhiilrachman/UMBPrivateAPI | 19cab7e50471a8002354d91ebb9a97e3f51f1906 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__copyright__ = 'Copyright 2018 by Fadhiil Rachman'
__version__ = '1.0.1'
__license__ = 'MIT'
__author__ = 'Fadhiil Rachman'
__author_email__ = 'fadhiilrachman@gmail.com'
__url__ = 'https://github.com/fadhiilrachman/UMBPrivateAPI'
__all__ = (
'__version__'
) | 28.538462 | 71 | 0.657682 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__copyright__ = 'Copyright 2018 by Fadhiil Rachman'
__version__ = '1.0.1'
__license__ = 'MIT'
__author__ = 'Fadhiil Rachman'
__author_email__ = 'fadhiilrachman@gmail.com'
__url__ = 'https://github.com/fadhiilrachman/UMBPrivateAPI'
# Bug fix: the original ``__all__ = ('__version__')`` was a plain string
# (missing trailing comma), which makes ``from umb import *`` iterate the
# string character by character and fail; a one-element tuple is intended.
__all__ = (
    '__version__',
)
3194320ee57bad0e6e337a87b5b963094566107d | 2,517 | py | Python | util/fuzz/dbcopy.py | umarcor/prjtrellis | 9b3db7ba9a02e7d2f49c52ce062d5b22e320004c | [
"MIT"
] | 93 | 2020-06-26T10:01:31.000Z | 2022-03-30T03:01:23.000Z | util/fuzz/dbcopy.py | umarcor/prjtrellis | 9b3db7ba9a02e7d2f49c52ce062d5b22e320004c | [
"MIT"
] | 29 | 2020-06-26T10:12:03.000Z | 2022-03-31T14:45:51.000Z | util/fuzz/dbcopy.py | umarcor/prjtrellis | 9b3db7ba9a02e7d2f49c52ce062d5b22e320004c | [
"MIT"
] | 20 | 2020-06-26T10:01:33.000Z | 2022-03-18T22:36:39.000Z | import pytrellis
"""
Database copy utilities
This is used where there are several tiles with different types but the same or similar bit databases - such as all the
CIB tiles, some IO tiles, etc.
"""
def dbcopy(family, device, source, dest, copy_muxes=True, copy_words=True, copy_enums=True, copy_conns=True):
"""
Copy the bit database from one tile type to another
:param family: database family
:param device: database device
:param source: tiletype to copy from
:param dest: tiletype to copy to
:param copy_muxes: include muxes in copy
:param copy_words: include settings words in copy
:param copy_enums: include settings enums in copy
:param copy_conns: include fixed connections in copy
"""
srcdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, source))
dstdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, dest))
if copy_muxes:
sinks = srcdb.get_sinks()
for sink in sinks:
mux = srcdb.get_mux_data_for_sink(sink)
for src in mux.get_sources():
dstdb.add_mux_arc(mux.arcs[src])
if copy_words:
cwords = srcdb.get_settings_words()
for cword in cwords:
wd = srcdb.get_data_for_setword(cword)
dstdb.add_setting_word(wd)
if copy_enums:
cenums = srcdb.get_settings_enums()
for cenum in cenums:
ed = srcdb.get_data_for_enum(cenum)
dstdb.add_setting_enum(ed)
if copy_conns:
fcs = srcdb.get_fixed_conns()
for conn in fcs:
dstdb.add_fixed_conn(conn)
| 32.269231 | 119 | 0.67064 | import pytrellis
"""
Database copy utilities
This is used where there are several tiles with different types but the same or similar bit databases - such as all the
CIB tiles, some IO tiles, etc.
"""
def dbcopy(family, device, source, dest, copy_muxes=True, copy_words=True, copy_enums=True, copy_conns=True):
"""
Copy the bit database from one tile type to another
:param family: database family
:param device: database device
:param source: tiletype to copy from
:param dest: tiletype to copy to
:param copy_muxes: include muxes in copy
:param copy_words: include settings words in copy
:param copy_enums: include settings enums in copy
:param copy_conns: include fixed connections in copy
"""
srcdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, source))
dstdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, dest))
if copy_muxes:
sinks = srcdb.get_sinks()
for sink in sinks:
mux = srcdb.get_mux_data_for_sink(sink)
for src in mux.get_sources():
dstdb.add_mux_arc(mux.arcs[src])
if copy_words:
cwords = srcdb.get_settings_words()
for cword in cwords:
wd = srcdb.get_data_for_setword(cword)
dstdb.add_setting_word(wd)
if copy_enums:
cenums = srcdb.get_settings_enums()
for cenum in cenums:
ed = srcdb.get_data_for_enum(cenum)
dstdb.add_setting_enum(ed)
if copy_conns:
fcs = srcdb.get_fixed_conns()
for conn in fcs:
dstdb.add_fixed_conn(conn)
def copy_muxes_with_predicate(family, device, source, dest, predicate):
srcdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, source))
dstdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, dest))
sinks = srcdb.get_sinks()
for sink in sinks:
mux = srcdb.get_mux_data_for_sink(sink)
for src in mux.get_sources():
if predicate((src, sink)):
dstdb.add_mux_arc(mux.arcs[src])
def copy_conns_with_predicate(family, device, source, dest, predicate):
srcdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, source))
dstdb = pytrellis.get_tile_bitdata(
pytrellis.TileLocator(family, device, dest))
fcs = srcdb.get_fixed_conns()
for conn in fcs:
if predicate(conn):
dstdb.add_fixed_conn(conn)
| 827 | 0 | 46 |
284a914f4a42ea03047f7887872d097a591d2a0d | 3,035 | py | Python | plotsky.py | hagabbar/VItamin | c1ae6dfa27b8ab77193caacddd477fde0dece1c2 | [
"MIT"
] | 13 | 2019-06-26T01:51:54.000Z | 2021-12-15T07:01:28.000Z | plotsky.py | hagabbar/VItamin | c1ae6dfa27b8ab77193caacddd477fde0dece1c2 | [
"MIT"
] | 12 | 2020-02-19T13:32:07.000Z | 2022-03-12T00:00:52.000Z | plotsky.py | hagabbar/VItamin | c1ae6dfa27b8ab77193caacddd477fde0dece1c2 | [
"MIT"
] | 5 | 2019-10-23T01:23:56.000Z | 2022-03-01T14:12:24.000Z | import numpy as np
from ligo.skymap import kde
import matplotlib
matplotlib.use('Agg')
from matplotlib.colors import to_rgb
from matplotlib import pyplot as plt
from mpl_toolkits.basemap import Basemap
#matplotlib.rc('text', usetex=True)
| 39.934211 | 154 | 0.61944 | import numpy as np
from ligo.skymap import kde
import matplotlib
matplotlib.use('Agg')
from matplotlib.colors import to_rgb
from matplotlib import pyplot as plt
from mpl_toolkits.basemap import Basemap
#matplotlib.rc('text', usetex=True)
def greedy(density):
i,j = np.shape(density)
idx = np.argsort(density.flatten())[::-1]
c = np.cumsum(density.flatten()[idx])
c = c/c[-1]
np.append(c,1.0)
p = np.zeros(i*j)
p[idx] = c[:]
return p.reshape(i,j)
def plot_sky(pts,contour=True,filled=False,ax=None,trueloc=None,cmap='Reds',col='red'):
cls = kde.Clustered2DSkyKDE
pts[:,0] = pts[:,0] - np.pi
skypost = cls(pts, trials=5, jobs=8)
# make up some data on a regular lat/lon grid.
# nlats = 145; nlons = 291; delta = 2.*np.pi/(nlons-1)
nlats = 145; nlons = 291; delta = 2.*np.pi/(nlons-1)
lats = (0.5*np.pi-delta*np.indices((nlats,nlons))[0,:,:])
# lons = (delta*np.indices((nlats,nlons))[1,:,:])
lons = (delta*np.indices((nlats,nlons))[1,:,:]-np.pi)
locs = np.column_stack((lons.flatten(),lats.flatten()))
prob = skypost(locs).reshape(nlats,nlons)
p1 = greedy(prob)
# compute mean location of samples
nx = np.cos(pts[:,1])*np.cos(pts[:,0])
ny = np.cos(pts[:,1])*np.sin(pts[:,0])
nz = np.sin(pts[:,1])
mean_n = [np.mean(nx),np.mean(ny),np.mean(nz)]
# bestloc = [np.remainder(np.arctan2(mean_n[1],mean_n[0]),2.0*np.pi),np.arctan2(mean_n[2],np.sqrt(mean_n[0]**2 + mean_n[1]**2))]
bestloc = [trueloc[0],trueloc[1]]
if ax is None:
# map = Basemap(projection='ortho',lon_0=-bestloc[0]*180/np.pi,lat_0=bestloc[1]*180/np.pi,resolution=None,celestial=True)
map = Basemap(projection='moll',lon_0=0,resolution=None,celestial=True)
map.drawmapboundary(fill_color='white')
# draw lat/lon grid lines every 30 degrees.
# map.drawmeridians(np.arange(0,360,30))
meridian = ["-180","-150","-120","-90","-60","-30","0","30","+60","+90","+120","+150"]
map.drawmeridians(np.arange(-180,180,30),labels=[1,1,1,1])
for i in np.arange(len(meridian)):
plt.annotate(r"$\textrm{%s}$" % meridian[i] + u"\u00b0",xy=map(np.arange(-180,180,30)[i],0),xycoords='data')
map.drawparallels(np.arange(-90,90,30),labels=[1,0,0,0])
else:
map = ax
# compute native map projection coordinates of lat/lon grid.
# x, y = map(lons*180./np.pi, lats*180./np.pi)
x, y = map(lons*180./np.pi, lats*180./np.pi)
# contour data over the map.
if filled:
base_color = np.array(to_rgb(col))
opp_color = 1.0 - base_color
cs1 = map.contourf(x,y,1.0-p1,levels=[0.0,0.1,0.5,1.0],colors=[base_color+opp_color,base_color+0.8*opp_color,base_color+0.6*opp_color,base_color])
cs2 = map.contour(x,y,p1,levels=[0.5,0.9],linewidths=2.0,colors=col)
if trueloc is not None:
xx, yy = map((trueloc[0]*180./np.pi)-180.0, trueloc[1]*180./np.pi)
map.plot(xx,yy,marker='+',markersize=20,linewidth=5,color='black')
return map
| 2,747 | 0 | 46 |
8c7e4cea5e9a1f974ea00d3262c69bbdf4f0e3cb | 468 | py | Python | Curso Udemy 2022/Curso_Luiz_Otavio/77_aula_count.py | Matheusfarmaceutico/Exercicios-Python | d1821bd9d11ea0707074c5fe11dead2e85476ebd | [
"MIT"
] | null | null | null | Curso Udemy 2022/Curso_Luiz_Otavio/77_aula_count.py | Matheusfarmaceutico/Exercicios-Python | d1821bd9d11ea0707074c5fe11dead2e85476ebd | [
"MIT"
] | null | null | null | Curso Udemy 2022/Curso_Luiz_Otavio/77_aula_count.py | Matheusfarmaceutico/Exercicios-Python | d1821bd9d11ea0707074c5fe11dead2e85476ebd | [
"MIT"
] | null | null | null | """count - itertools"""
# Apresentação do count
from itertools import count
# aceita número de ponto flutuante como step, mas n aceita um limite.
contador = count(start=5, step=0.05)
for v in contador:
print(round(v, 2)) # arredonda em duas casas decimais
if v > 10:
break
print(separador())
contador = count()
nomes = ['Matheus','Júlia','Rafaela']
nomes = zip(contador,nomes)
for v in nomes:
print(v[0], v[1]) | 26 | 69 | 0.668803 | def separador():
return "-="*30
"""count - itertools"""
# Apresentação do count
from itertools import count
# aceita número de ponto flutuante como step, mas n aceita um limite.
contador = count(start=5, step=0.05)
for v in contador:
print(round(v, 2)) # arredonda em duas casas decimais
if v > 10:
break
print(separador())
contador = count()
nomes = ['Matheus','Júlia','Rafaela']
nomes = zip(contador,nomes)
for v in nomes:
print(v[0], v[1]) | 14 | 0 | 22 |
15d1c16a4bd6e9bc254169bd4f69686b3112f911 | 6,671 | py | Python | mongodb_consistent_backup/Sharding.py | cprato79/mongodb_consistent_backup | d780ad545b603d3a2f807e1813f1de407e81f1ba | [
"Apache-2.0"
] | 1 | 2020-10-20T06:01:21.000Z | 2020-10-20T06:01:21.000Z | mongodb_consistent_backup/Sharding.py | cprato79/mongodb_consistent_backup | d780ad545b603d3a2f807e1813f1de407e81f1ba | [
"Apache-2.0"
] | null | null | null | mongodb_consistent_backup/Sharding.py | cprato79/mongodb_consistent_backup | d780ad545b603d3a2f807e1813f1de407e81f1ba | [
"Apache-2.0"
] | 1 | 2020-10-20T06:01:25.000Z | 2020-10-20T06:01:25.000Z | import logging
from time import sleep
from mongodb_consistent_backup.Common import DB, MongoUri, validate_hostname
from mongodb_consistent_backup.Errors import DBOperationError, Error, OperationError
from mongodb_consistent_backup.Replication import Replset
| 41.69375 | 106 | 0.575326 | import logging
from time import sleep
from mongodb_consistent_backup.Common import DB, MongoUri, validate_hostname
from mongodb_consistent_backup.Errors import DBOperationError, Error, OperationError
from mongodb_consistent_backup.Replication import Replset
class Sharding:
def __init__(self, config, timer, db):
self.config = config
self.timer = timer
self.db = db
self.balancer_wait_secs = self.config.sharding.balancer.wait_secs
self.balancer_sleep = self.config.sharding.balancer.ping_secs
self.timer_name = self.__class__.__name__
self.config_server = None
self.config_db = None
self._balancer_state_start = None
self.restored = False
# Get a DB connection
try:
if isinstance(self.db, DB):
self.connection = self.db.connection()
if not self.db.is_mongos() and not self.db.is_configsvr():
raise DBOperationError('MongoDB connection is not to a mongos or configsvr!')
else:
raise Error("'db' field is not an instance of class: 'DB'!")
except Exception, e:
logging.fatal("Could not get DB connection! Error: %s" % e)
raise DBOperationError(e)
def close(self):
if self.config_db:
self.config_db.close()
return self.restore_balancer_state()
def get_start_state(self):
self._balancer_state_start = self.get_balancer_state()
logging.info("Began with balancer state running: %s" % str(self._balancer_state_start))
return self._balancer_state_start
def shards(self):
try:
if self.db.is_configsvr() and self.db.server_version() < tuple("3.4.0".split(".")):
return self.connection['config'].shards.find()
else:
listShards = self.db.admin_command("listShards")
if 'shards' in listShards:
return listShards['shards']
except Exception, e:
raise DBOperationError(e)
def check_balancer_running(self):
try:
config = self.connection['config']
lock = config['locks'].find_one({'_id': 'balancer'})
if 'state' in lock and int(lock['state']) == 0:
return False
return True
except Exception, e:
raise DBOperationError(e)
def get_balancer_state(self):
try:
config = self.connection['config']
state = config['settings'].find_one({'_id': 'balancer'})
if not state:
return True
elif 'stopped' in state and state.get('stopped') is True:
return False
else:
return True
except Exception, e:
raise DBOperationError(e)
def set_balancer(self, value):
try:
if value is True:
set_value = False
elif value is False:
set_value = True
else:
set_value = True
config = self.connection['config']
config['settings'].update_one({'_id': 'balancer'}, {'$set': {'stopped': set_value}})
except Exception, e:
logging.fatal("Failed to set balancer state! Error: %s" % e)
raise DBOperationError(e)
def restore_balancer_state(self):
if self._balancer_state_start is not None and not self.restored:
try:
logging.info("Restoring balancer state to: %s" % str(self._balancer_state_start))
self.set_balancer(self._balancer_state_start)
self.restored = True
except Exception, e:
logging.fatal("Failed to set balancer state! Error: %s" % e)
raise DBOperationError(e)
def stop_balancer(self):
logging.info("Stopping the balancer and waiting a max of %i sec" % self.balancer_wait_secs)
wait_cnt = 0
self.timer.start(self.timer_name)
self.set_balancer(False)
while wait_cnt < self.balancer_wait_secs:
if self.check_balancer_running():
wait_cnt += self.balancer_sleep
logging.info("Balancer is still running, sleeping for %i sec(s)" % self.balancer_sleep)
sleep(self.balancer_sleep)
else:
self.timer.stop(self.timer_name)
logging.info("Balancer stopped after %.2f seconds" % self.timer.duration(self.timer_name))
return
logging.fatal("Could not stop balancer %s: %s!" % (self.db.uri, e))
raise DBOperationError("Could not stop balancer %s: %s" % (self.db.uri, e))
def get_configdb_hosts(self):
try:
cmdlineopts = self.db.admin_command("getCmdLineOpts")
config_string = None
if cmdlineopts.get('parsed').get('configdb'):
config_string = cmdlineopts.get('parsed').get('configdb')
elif cmdlineopts.get('parsed').get('sharding').get('configDB'):
config_string = cmdlineopts.get('parsed').get('sharding').get('configDB')
if config_string:
return MongoUri(config_string, 27019)
elif self.db.is_configsvr():
return self.db.uri
else:
logging.fatal("Unable to locate config servers for %s!" % self.db.uri)
raise OperationError("Unable to locate config servers for %s!" % self.db.uri)
except Exception, e:
raise OperationError(e)
def get_config_server(self, force=False):
if force or not self.config_server:
configdb_uri = self.get_configdb_hosts()
try:
logging.info("Found sharding config server: %s" % configdb_uri)
if self.db.uri.hosts() == configdb_uri.hosts():
self.config_db = self.db
logging.debug("Re-using seed connection to config server(s)")
else:
self.config_db = DB(configdb_uri, self.config, True)
if self.config_db.is_replset():
self.config_server = Replset(self.config, self.config_db)
else:
self.config_server = { 'host': configdb_uri.hosts() }
self.config_db.close()
except Exception, e:
logging.fatal("Unable to locate config servers using %s: %s!" % (self.db.uri, e))
raise OperationError(e)
return self.config_server
| 6,097 | -6 | 319 |
87032d2863af5326c2d522426973813601f385f1 | 2,817 | py | Python | turbo_palm_tree/utility/parse_subreddit_list.py | jtara1/turbo_palm_tree | 96ebe40aa176f1bb4acd036be8581666aa47ca17 | [
"Apache-2.0"
] | 4 | 2016-10-16T21:39:47.000Z | 2019-11-12T05:56:17.000Z | turbo_palm_tree/utility/parse_subreddit_list.py | jtara1/turbo_palm_tree | 96ebe40aa176f1bb4acd036be8581666aa47ca17 | [
"Apache-2.0"
] | 17 | 2017-12-31T08:43:09.000Z | 2018-06-12T19:24:40.000Z | turbo_palm_tree/utility/parse_subreddit_list.py | jtara1/turbo_palm_tree | 96ebe40aa176f1bb4acd036be8581666aa47ca17 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 30 15:26:13 2016
@author: jtara1
General syntax for subreddits.txt:
: (colon character) denotes folder name
subreddit url or word denotes subreddit
For more examples see https://github.com/jtara1/RedditImageGrab/commit/8e4787ef9ac43ca694fc663be026f69a568bb622
Example of expected input and output:
subreddits.txt = "
pc-wallpapers:
https://www.reddit.com/r/wallpapers/
/r/BackgroundArt/
nature_pics:
http://www.reddit.com/r/EarthPorn/
:
Mountain
"
parse_subreddit_list('/MyPath/subreddits.txt', '/MyPath/') = [
('wallpapers', '/MyPath/pc-wallpaper/wallpapers'),
('BackgroundArt', '/MyPath/pc-wallpaper/BackgroundArt'),
('EarthPorn', '/MyPath/nature-pics/EarthPorn'),
('Mountain', '/MyPath/Mountain')
]
"""
import re
import os
from os import getcwd, mkdir
from .general_utility import get_subreddit_name
def parse_subreddit_list(file_path, base_path=getcwd()):
"""Gets list of subreddits from a file & returns folder for media from each subreddit
:param file_path: path of text file to load subreddits from (relative or full path)
:param base_path: base path that gets returned with each subreddit
:return: list containing tuples of subreddit & its associated folder to get media saved to
:rtype: list
"""
try:
file = open(file_path, 'r')
except IOError as e:
print(e)
raise IOError
output = []
folder_regex = re.compile('([a-zA-Z0-9_\- ]*):\n')
subreddit_regex = re.compile('(?:https?://)?(?:www.)?reddit.com/r/([a-zA-Z0-9_]*)')
subreddit_regex2 = re.compile('(?:/r/)?([a-zA-Z0-9_]*)')
if not os.path.isdir(base_path):
mkdir(base_path)
# iterate through the lines using regex to check if line is subreddit or folder title
path = base_path
for line in file:
if line == '\n':
continue
folder_match = re.match(folder_regex, line)
if folder_match:
if folder_match.group(1) != '':
path = os.path.join(base_path, line[:-2])
if not os.path.isdir(path):
mkdir(path)
else:
path = base_path
continue
subreddit_match = re.match(subreddit_regex, line)
if not subreddit_match:
subreddit_match = re.match(subreddit_regex2, line)
if not subreddit_match:
print('No match at position %s' % file.tell() )
print('parse_subreddit_list Error: No match found, skipping this iteration.')
continue
subreddit = get_subreddit_name(subreddit_match.group(1))
final_path = os.path.join(path, subreddit)
if not os.path.isdir(final_path):
mkdir(final_path)
output.append((subreddit, final_path))
return output
| 29.968085 | 111 | 0.648562 | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 30 15:26:13 2016
@author: jtara1
General syntax for subreddits.txt:
: (colon character) denotes folder name
subreddit url or word denotes subreddit
For more examples see https://github.com/jtara1/RedditImageGrab/commit/8e4787ef9ac43ca694fc663be026f69a568bb622
Example of expected input and output:
subreddits.txt = "
pc-wallpapers:
https://www.reddit.com/r/wallpapers/
/r/BackgroundArt/
nature_pics:
http://www.reddit.com/r/EarthPorn/
:
Mountain
"
parse_subreddit_list('/MyPath/subreddits.txt', '/MyPath/') = [
('wallpapers', '/MyPath/pc-wallpaper/wallpapers'),
('BackgroundArt', '/MyPath/pc-wallpaper/BackgroundArt'),
('EarthPorn', '/MyPath/nature-pics/EarthPorn'),
('Mountain', '/MyPath/Mountain')
]
"""
import re
import os
from os import getcwd, mkdir
from .general_utility import get_subreddit_name
def parse_subreddit_list(file_path, base_path=getcwd()):
"""Gets list of subreddits from a file & returns folder for media from each subreddit
:param file_path: path of text file to load subreddits from (relative or full path)
:param base_path: base path that gets returned with each subreddit
:return: list containing tuples of subreddit & its associated folder to get media saved to
:rtype: list
"""
try:
file = open(file_path, 'r')
except IOError as e:
print(e)
raise IOError
output = []
folder_regex = re.compile('([a-zA-Z0-9_\- ]*):\n')
subreddit_regex = re.compile('(?:https?://)?(?:www.)?reddit.com/r/([a-zA-Z0-9_]*)')
subreddit_regex2 = re.compile('(?:/r/)?([a-zA-Z0-9_]*)')
if not os.path.isdir(base_path):
mkdir(base_path)
# iterate through the lines using regex to check if line is subreddit or folder title
path = base_path
for line in file:
if line == '\n':
continue
folder_match = re.match(folder_regex, line)
if folder_match:
if folder_match.group(1) != '':
path = os.path.join(base_path, line[:-2])
if not os.path.isdir(path):
mkdir(path)
else:
path = base_path
continue
subreddit_match = re.match(subreddit_regex, line)
if not subreddit_match:
subreddit_match = re.match(subreddit_regex2, line)
if not subreddit_match:
print('No match at position %s' % file.tell() )
print('parse_subreddit_list Error: No match found, skipping this iteration.')
continue
subreddit = get_subreddit_name(subreddit_match.group(1))
final_path = os.path.join(path, subreddit)
if not os.path.isdir(final_path):
mkdir(final_path)
output.append((subreddit, final_path))
return output
| 0 | 0 | 0 |
73788cb6f0857ad4d9db4996d005b5a40c26d508 | 2,609 | py | Python | jmm/scripts/entry_point.py | zqmillet/japanese_media_manager | 7f7c9ba9f48e67c5f68f80b6fe09675aded05858 | [
"MIT"
] | null | null | null | jmm/scripts/entry_point.py | zqmillet/japanese_media_manager | 7f7c9ba9f48e67c5f68f80b6fe09675aded05858 | [
"MIT"
] | null | null | null | jmm/scripts/entry_point.py | zqmillet/japanese_media_manager | 7f7c9ba9f48e67c5f68f80b6fe09675aded05858 | [
"MIT"
] | null | null | null | from argparse import ArgumentParser
from jmm.scripts.generate_configuration import generate_configuration
from jmm.scripts.scrape import scrape
from jmm.scripts.valid_configuration import valid_configuration
from jmm.scripts.show_version import show_version
from jmm.scripts import command as COMMAND
if __name__ == '__main__':
main() # pragma: no cover
| 33.448718 | 142 | 0.684553 | from argparse import ArgumentParser
from jmm.scripts.generate_configuration import generate_configuration
from jmm.scripts.scrape import scrape
from jmm.scripts.valid_configuration import valid_configuration
from jmm.scripts.show_version import show_version
from jmm.scripts import command as COMMAND
def main() -> None:
argument_parser = ArgumentParser(
prog='jmm',
description='collect, check and complete your personal adult videos',
)
subparsers = argument_parser.add_subparsers(dest='command')
generate_configuration_parser = subparsers.add_parser(
name=COMMAND.GENERATE_CONFIG,
help='generate custom configuration file'
)
test_config_parser = subparsers.add_parser(
name=COMMAND.VALID_CONFIG,
help='valid custom configuration'
)
scrape_parser = subparsers.add_parser(
name=COMMAND.SCRAPE,
help='scrape metadata of media and manage them'
)
_ = subparsers.add_parser(
name=COMMAND.SHOW_VERSION,
help='show jmm version'
)
generate_configuration_parser.add_argument(
'-f', '--force',
action='store_true',
help='if specify this argument, the custom configuration file will be overwritten forcely'
)
scrape_parser.add_argument(
'-i', '--input-directories',
action='store',
type=str,
nargs='*',
default=[],
help='specify directories which contain media, if this argument is not specified, scraper will read it from config file.'
)
scrape_parser.add_argument(
'-o', '--output-file-path_pattern',
action='store',
type=str,
help='specify the output file path pattern',
default=None
)
test_config_parser.add_argument(
'-n', '--numbers',
type=str,
nargs='+',
help='specify the numbers of media for testing config'
)
arguments = argument_parser.parse_args()
if arguments.command == COMMAND.GENERATE_CONFIG:
generate_configuration(force=arguments.force) # pragma: no cover
elif arguments.command == COMMAND.SCRAPE:
scrape(input_directories=arguments.input_directories, output_file_path_pattern=arguments.output_file_path_pattern) # pragma: no cover
elif arguments.command == COMMAND.VALID_CONFIG:
valid_configuration(numbers=arguments.numbers) # pragma: no cover
elif arguments.command == COMMAND.SHOW_VERSION:
show_version() # pragma: no cover
else:
argument_parser.print_usage()
if __name__ == '__main__':
main() # pragma: no cover
| 2,225 | 0 | 23 |
150ff69ccf3f48a574d3df803edbed10977201dd | 3,168 | py | Python | test/v4.1/06_rat_to_rel_test.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | test/v4.1/06_rat_to_rel_test.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | 60 | 2021-03-29T14:29:49.000Z | 2021-05-03T06:06:19.000Z | test/v4/06_rat_to_rel_test.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | # This file is for testing and verifying the ratio-to-true calculation
# It will be done in real-time rather than as part of test 5
# As it will be much quicker to debug for #54
import pyautogui
import sys
import time
from windowcapture import WindowCapture
import os
print('Press Ctrl-C to quit.')
if __name__ == "__main__":
ct = ConvertTest()
ct.start()
| 39.111111 | 78 | 0.57197 | # This file is for testing and verifying the ratio-to-true calculation
# It will be done in real-time rather than as part of test 5
# As it will be much quicker to debug for #54
import pyautogui
import sys
import time
from windowcapture import WindowCapture
import os
print('Press Ctrl-C to quit.')
class ConvertTest():
def __init__(self) -> None:
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open("gamename.txt") as f:
gamename = f.readline()
self.game_wincap = WindowCapture(gamename)
# self.rely = 0
def convert_click_to_ratio(self, truex, truey):
# This will grab the current rectangle coords of game window
# and then turn the click values into a ratio of positions
# versus the game window
self.game_wincap.update_window_position(border=False)
# Turn the screen pos into window pos
relx = truex - self.game_wincap.window_rect[0] * 1.5
rely = truey - self.game_wincap.window_rect[1] * 1.5
print("relx={}, rely={}".format(relx, rely))
print("winx={}, winy={}".format(
self.game_wincap.window_rect[0], self.game_wincap.window_rect[1]))
print("winwidth={}".format(self.game_wincap.w))
# Then convert to a ratio
ratx = relx/(self.game_wincap.w*1.5)
raty = rely/(self.game_wincap.h*1.5)
return ratx, raty
def convert_ratio_to_click(self, ratx, raty):
# This will grab the current rectangle coords of game window
# and then turn the ratio of positions versus the game window
# into true x,y coords
self.game_wincap.update_window_position(border=False)
# Turn the ratios into relative
relx = int(ratx * self.game_wincap.w)
rely = int(raty * self.game_wincap.h)
# Turn the relative into true
truex = int((relx + self.game_wincap.window_rect[0]) * 1.5)
truey = int((rely + self.game_wincap.window_rect[1]) * 1.5)
return truex, truey
def start(self):
try:
while True:
x, y = pyautogui.position()
ratx, raty = self.convert_click_to_ratio(x, y)
ratx = "{:.4f}".format(ratx)
raty = "{:.4f}".format(raty)
convx, convy = self.convert_ratio_to_click(
float(ratx), float(raty))
positionStr = 'X: ' + \
str(x).rjust(4) + ' Y: ' + str(y).rjust(4)
positionStr += ' ratX: ' + \
str(ratx).rjust(4) + ' ratY: ' + str(raty).rjust(4)
# positionStr += ' convX: ' + \
# str(convx).rjust(4) + ' convY: ' + str(convy).rjust(4)
# positionStr += ' relY: ' + \
# str(self.rely).rjust(4) + ' winY: ' + \
# str(self.game_wincap.window_rect[1]).rjust(4)
print(positionStr, end='')
print('\b' * len(positionStr), end='', flush=True)
time.sleep(0.5)
except KeyboardInterrupt:
print('\n')
if __name__ == "__main__":
ct = ConvertTest()
ct.start()
| 2,670 | -1 | 131 |
f30c69b7f172c3e2a035ed0d520b8882438ecaf2 | 1,426 | py | Python | python/pyclaw/evolve/rp/rp_advection.py | rypjones/D-Claw | 6eed8cc5270bf53768e25fa20fa5259cb3c1532e | [
"BSD-3-Clause"
] | 7 | 2016-11-13T03:11:51.000Z | 2021-09-07T18:59:48.000Z | python/pyclaw/evolve/rp/rp_advection.py | rypjones/D-Claw | 6eed8cc5270bf53768e25fa20fa5259cb3c1532e | [
"BSD-3-Clause"
] | 11 | 2020-01-14T18:00:37.000Z | 2022-03-29T14:25:24.000Z | python/pyclaw/evolve/rp/rp_advection.py | rypjones/D-Claw | 6eed8cc5270bf53768e25fa20fa5259cb3c1532e | [
"BSD-3-Clause"
] | 6 | 2020-01-14T17:15:42.000Z | 2021-12-03T17:28:44.000Z | #!/usr/bin/env python
# encoding: utf-8
r"""
Simple advection Riemann solvers
Basic advection Riemann solvers of the form (1d)
.. math::
q_t + A q_x = 0.
:Authors:
Kyle T. Mandli (2008-2-20): Initial version
"""
# ============================================================================
# Copyright (C) 2008 Kyle T. Mandli <mandli@amath.washington.edu>
#
# Distributed under the terms of the Berkeley Software Distribution (BSD)
# license
# http://www.opensource.org/licenses/
# ============================================================================
import numpy as np
def rp_advection_1d(q_l, q_r, aux_l, aux_r, aux_global):
r"""Basic 1d advection riemann solver
*aux_global* should contain -
- *u* - (float) Determines advection speed
See :ref:`pyclaw_rp` for more details.
:Version: 1.0 (2008-2-20)
"""
# Riemann solver constants
meqn = 1
mwaves = 1
# Number of Riemann problems we are solving
nrp = q_l.shape[0]
# Return values
wave = np.empty((nrp, meqn, mwaves))
s = np.empty((nrp, mwaves))
amdq = np.zeros((nrp, meqn))
apdq = np.zeros((nrp, meqn))
wave[:, 0, 0] = q_r[:, 0] - q_l[:, 0]
s[:, 0] = aux_global["u"]
if aux_global["u"] > 0:
apdq[:, 0] = s[:, 0] * wave[:, 0, 0]
else:
amdq[:, 0] = s[:, 0] * wave[:, 0, 0]
return wave, s, amdq, apdq
| 25.017544 | 78 | 0.517532 | #!/usr/bin/env python
# encoding: utf-8
r"""
Simple advection Riemann solvers
Basic advection Riemann solvers of the form (1d)
.. math::
q_t + A q_x = 0.
:Authors:
Kyle T. Mandli (2008-2-20): Initial version
"""
# ============================================================================
# Copyright (C) 2008 Kyle T. Mandli <mandli@amath.washington.edu>
#
# Distributed under the terms of the Berkeley Software Distribution (BSD)
# license
# http://www.opensource.org/licenses/
# ============================================================================
import numpy as np
def rp_advection_1d(q_l, q_r, aux_l, aux_r, aux_global):
r"""Basic 1d advection riemann solver
*aux_global* should contain -
- *u* - (float) Determines advection speed
See :ref:`pyclaw_rp` for more details.
:Version: 1.0 (2008-2-20)
"""
# Riemann solver constants
meqn = 1
mwaves = 1
# Number of Riemann problems we are solving
nrp = q_l.shape[0]
# Return values
wave = np.empty((nrp, meqn, mwaves))
s = np.empty((nrp, mwaves))
amdq = np.zeros((nrp, meqn))
apdq = np.zeros((nrp, meqn))
wave[:, 0, 0] = q_r[:, 0] - q_l[:, 0]
s[:, 0] = aux_global["u"]
if aux_global["u"] > 0:
apdq[:, 0] = s[:, 0] * wave[:, 0, 0]
else:
amdq[:, 0] = s[:, 0] * wave[:, 0, 0]
return wave, s, amdq, apdq
| 0 | 0 | 0 |
49ee7d7222697c5b890a55e3050851794cb913ba | 410 | py | Python | bezman_shop/accounts/migrations/0002_customer_phone.py | baiztheprogrammer/bezman | 2be9415fa5a6c74ec4922dc5d898f8fb17927be5 | [
"MIT"
] | null | null | null | bezman_shop/accounts/migrations/0002_customer_phone.py | baiztheprogrammer/bezman | 2be9415fa5a6c74ec4922dc5d898f8fb17927be5 | [
"MIT"
] | null | null | null | bezman_shop/accounts/migrations/0002_customer_phone.py | baiztheprogrammer/bezman | 2be9415fa5a6c74ec4922dc5d898f8fb17927be5 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.3 on 2020-12-02 06:26
from django.db import migrations, models
| 20.5 | 49 | 0.585366 | # Generated by Django 3.1.3 on 2020-12-02 06:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='customer',
name='phone',
field=models.IntegerField(default=1),
preserve_default=False,
),
]
| 0 | 296 | 23 |
e40b6cb20eca84551c1c82862d4948b634c023ca | 147 | py | Python | citylocations/views.py | minaelee/django_cityloc_pkg_minaelee | 24aac64007cfd64f5a464005986b5b2bb1c7d3c0 | [
"MIT"
] | 2 | 2022-01-19T02:33:11.000Z | 2022-01-19T02:33:13.000Z | citylocations/views.py | minaelee/django_cityloc_pkg_minaelee | 24aac64007cfd64f5a464005986b5b2bb1c7d3c0 | [
"MIT"
] | null | null | null | citylocations/views.py | minaelee/django_cityloc_pkg_minaelee | 24aac64007cfd64f5a464005986b5b2bb1c7d3c0 | [
"MIT"
] | 1 | 2022-03-04T12:47:45.000Z | 2022-03-04T12:47:45.000Z | from django.shortcuts import render
# Create your views here.
| 21 | 60 | 0.748299 | from django.shortcuts import render
# Create your views here.
def loc_nyc(request):
return render(request, 'citylocations/loc_nyc.html', {})
| 61 | 0 | 22 |
40b3d7567dcaf95a2cec3ebf7551fe018259b5fa | 10,289 | py | Python | aim_va/train.py | sisl/neat | 42758d910f453686366eddfd1aed440e34c94828 | [
"MIT"
] | 183 | 2021-08-18T13:22:37.000Z | 2022-03-31T08:40:48.000Z | aim_va/train.py | sisl/neat | 42758d910f453686366eddfd1aed440e34c94828 | [
"MIT"
] | 10 | 2021-09-24T15:30:06.000Z | 2022-03-25T11:19:23.000Z | aim_va/train.py | sisl/neat | 42758d910f453686366eddfd1aed440e34c94828 | [
"MIT"
] | 21 | 2021-09-11T13:32:54.000Z | 2022-03-23T16:55:53.000Z | import argparse
import json
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
import torch.nn.functional as F
# Enable the cuDNN autotuner: benchmark available convolution algorithms once
# and cache the fastest (pays off when input sizes stay constant across batches).
torch.backends.cudnn.benchmark = True
from model import MultiTaskImageNetwork
from data import CARLA_Data
from class_converter import sub_classes
# Command-line configuration: every hyperparameter of a run is exposed as a
# flag, so an experiment is fully described by its invocation.
parser = argparse.ArgumentParser()
parser.add_argument('--id', type=str, default='aim_vis_abs', help='Unique experiment identifier.')
parser.add_argument('--device', type=str, default='cuda', help='Device to use')
parser.add_argument('--epochs', type=int, default=100, help='Number of train epochs.')
parser.add_argument('--lr', type=float, default=1e-4, help='Learning rate.')
parser.add_argument('--val_every', type=int, default=5, help='Validation frequency (epochs).')
parser.add_argument('--batch_size', type=int, default=24, help='Batch size')
parser.add_argument('--ignore_sides', action='store_true', help='Ignores side cameras')
parser.add_argument('--ignore_rear', action='store_true', help='Ignores rear camera')
# Key into sub_classes: selects the semantic-segmentation class remapping.
parser.add_argument('--classes', type=str, default='no_stop')
parser.add_argument('--seq_len', type=int, default=1, help='Input sequence length (factor of 10)')
parser.add_argument('--pred_len', type=int, default=4, help='number of timesteps to predict')
parser.add_argument('--logdir', type=str, default='log', help='Directory to log data to.')
parser.add_argument('--input_scale', type=int, default=1, help='Inverse input scale factor')
parser.add_argument('--input_crop', type=float, default=0.64, help='Input crop size')

args = parser.parse_args()
# Each run logs into its own subdirectory, keyed by the experiment id.
args.logdir = os.path.join(args.logdir, args.id)
class Engine(object):
"""Engine that runs training and inference.
Args
- cur_epoch (int): Current epoch.
- print_every (int): How frequently (# batches) to print loss.
- validate_every (int): How frequently (# epochs) to run validation.
"""
# Data
root_dir = '/is/rg/avg/kchitta/carla9-10_data/2021/apv3'
train_towns = ['Town01', 'Town02', 'Town03', 'Town04', 'Town05', 'Town06', 'Town07', 'Town10']
val_towns = ['Town01_long', 'Town02_long', 'Town03_long', 'Town04_long', 'Town05_long', 'Town06_long']
train_data, val_data = [], []
for town in train_towns:
train_data.append(os.path.join(root_dir, town))
train_data.append(os.path.join(root_dir, town+'_small'))
for town in val_towns:
val_data.append(os.path.join(root_dir, town))
class_converter = sub_classes[args.classes]
print("classes: ", class_converter)
train_set = CARLA_Data(root=train_data,
pred_len=args.pred_len,
class_converter=class_converter,
ignore_sides=args.ignore_sides,
ignore_rear=args.ignore_rear,
seq_len=args.seq_len,
input_scale=args.input_scale,
input_crop=args.input_crop)
val_set = CARLA_Data(root=val_data,
pred_len=args.pred_len,
class_converter=class_converter,
ignore_sides=args.ignore_sides,
ignore_rear=args.ignore_rear,
seq_len=args.seq_len,
input_scale=args.input_scale,
input_crop=args.input_crop)
dataloader_train = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=4, pin_memory=True)
dataloader_val = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, num_workers=4, pin_memory=True)
# Model
num_segmentation_classes = len(np.unique(class_converter))
num_cameras = 1
if not args.ignore_sides:
num_cameras += 2
if not args.ignore_rear:
num_cameras += 1
model = MultiTaskImageNetwork('cuda', num_segmentation_classes, args.pred_len, num_cameras)
optimizer = optim.AdamW(model.parameters(), lr=args.lr)
conf_log = {
"id": args.id,
"epochs": args.epochs,
"batch_size": args.batch_size,
"lr": args.lr,
"seq_len": args.seq_len,
"pred_len": args.pred_len,
"classes": class_converter,
"class_name": args.classes,
"num_cameras": num_cameras,
}
trainer = Engine(conf_log)
model_parameters = filter(lambda p: p.requires_grad, model.parameters())
params = sum([np.prod(p.size()) for p in model_parameters])
print ('Total trainable parameters: ', params)
# Create logdir
if not os.path.isdir(args.logdir):
os.makedirs(args.logdir)
print('Created dir:', args.logdir)
elif os.path.isfile(os.path.join(args.logdir, 'recent.log')):
print('Loading checkpoint from ' + args.logdir)
with open(os.path.join(args.logdir, 'recent.log'), 'r') as f:
log_table = json.load(f)
# Load variables
trainer.cur_epoch = log_table['epoch']
if 'iter' in log_table: trainer.cur_iter = log_table['iter']
trainer.bestval = log_table['bestval']
trainer.train_loss = log_table['train_loss']
trainer.val_loss = log_table['val_loss']
# Load checkpoint
model.load_state_dict(torch.load(os.path.join(args.logdir, 'model.pth')))
optimizer.load_state_dict(torch.load(os.path.join(args.logdir, 'recent_optim.pth')))
# Log args
with open(os.path.join(args.logdir, 'args.txt'), 'w') as f:
json.dump(args.__dict__, f, indent=2)
for epoch in range(trainer.cur_epoch, args.epochs):
trainer.train()
if epoch % args.val_every == 0:
trainer.validate()
trainer.save() | 32.977564 | 151 | 0.712411 | import argparse
import json
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
import torch.nn.functional as F
torch.backends.cudnn.benchmark = True
from model import MultiTaskImageNetwork
from data import CARLA_Data
from class_converter import sub_classes
parser = argparse.ArgumentParser()
parser.add_argument('--id', type=str, default='aim_vis_abs', help='Unique experiment identifier.')
parser.add_argument('--device', type=str, default='cuda', help='Device to use')
parser.add_argument('--epochs', type=int, default=100, help='Number of train epochs.')
parser.add_argument('--lr', type=float, default=1e-4, help='Learning rate.')
parser.add_argument('--val_every', type=int, default=5, help='Validation frequency (epochs).')
parser.add_argument('--batch_size', type=int, default=24, help='Batch size')
parser.add_argument('--ignore_sides', action='store_true', help='Ignores side cameras')
parser.add_argument('--ignore_rear', action='store_true', help='Ignores rear camera')
parser.add_argument('--classes', type=str, default='no_stop')
parser.add_argument('--seq_len', type=int, default=1, help='Input sequence length (factor of 10)')
parser.add_argument('--pred_len', type=int, default=4, help='number of timesteps to predict')
parser.add_argument('--logdir', type=str, default='log', help='Directory to log data to.')
parser.add_argument('--input_scale', type=int, default=1, help='Inverse input scale factor')
parser.add_argument('--input_crop', type=float, default=0.64, help='Input crop size')
args = parser.parse_args()
args.logdir = os.path.join(args.logdir, args.id)
class Engine(object):
    """Engine that runs training and inference.
    Relies on the module-level globals ``model``, ``optimizer``,
    ``dataloader_train``, ``dataloader_val`` and ``args``.
    Attributes
    - cur_epoch (int): Current epoch (incremented at the end of ``train``).
    - cur_iter (int): Total number of optimizer steps taken so far.
    - bestval (float): Best validation score seen so far (higher is better).
    - bestval_epoch (int): Epoch at which ``bestval`` was reached.
    - train_loss / val_loss (list): Per-epoch histories.
    NOTE(review): the previous docstring advertised ``print_every`` and
    ``validate_every`` parameters that do not exist on this class.
    """
    def __init__(self, conf_log, cur_epoch=0, cur_iter=0):
        # NOTE(review): ``conf_log`` is accepted but never stored or used here.
        self.cur_epoch = cur_epoch
        self.cur_iter = cur_iter
        self.bestval_epoch = cur_epoch
        self.train_loss = []
        self.val_loss = []
        # Scores are compared with ``>=`` in save(); start far below any
        # realistic value of ``1.0 - wp_loss``.
        self.bestval = -1e5
    def train(self):
        """Run one full epoch over ``dataloader_train`` and record the mean L1 waypoint loss."""
        loss_epoch = 0.
        wp_epoch = 0.
        num_batches = 0
        # Accumulator of the loss per predicted waypoint step (length args.pred_len).
        sep_wp_loss = torch.zeros(args.pred_len).to(args.device, dtype=torch.float32)
        model.train()
        # Train loop
        for data in dataloader_train:
            # efficiently zero gradients
            for p in model.parameters():
                p.grad = None
            # create batch and move to GPU
            fronts_in = data['fronts']
            fronts = []
            if not args.ignore_sides:
                lefts_in = data['lefts']
                rights_in = data['rights']
                lefts = []
                rights = []
            if not args.ignore_rear:
                rears_in = data['rears']
                rears = []
            for i in range(args.seq_len):
                fronts.append(fronts_in[i].to(args.device, dtype=torch.float32))
                if not args.ignore_sides:
                    lefts.append(lefts_in[i].to(args.device, dtype=torch.float32))
                    rights.append(rights_in[i].to(args.device, dtype=torch.float32))
                if not args.ignore_rear:
                    rears.append(rears_in[i].to(args.device, dtype=torch.float32))
            # target point
            target_point = torch.stack(data['target_point'], dim=1).to(args.device, dtype=torch.float32)
            # inference
            encoding = [model.image_encoder(fronts)]
            if not args.ignore_sides:
                encoding.append(model.image_encoder(lefts))
                encoding.append(model.image_encoder(rights))
            if not args.ignore_rear:
                encoding.append(model.image_encoder(rears))
            pred_wp = model(encoding, target_point)
            # Ground-truth waypoints for the prediction horizon (input steps skipped).
            gt_waypoints = [torch.stack(data['waypoints'][i], dim=1).to(args.device, dtype=torch.float32) for i in range(args.seq_len, len(data['waypoints']))]
            gt_waypoints = torch.stack(gt_waypoints, dim=1).to(args.device, dtype=torch.float32)
            loss = F.l1_loss(pred_wp, gt_waypoints, reduction='none')
            sep_wp_loss += loss.mean((0,2))
            loss.mean().backward()
            wp_epoch += loss.mean().item()
            num_batches += 1
            optimizer.step()
            self.cur_iter += 1
        loss_epoch = wp_epoch / num_batches
        sep_wp_loss = sep_wp_loss / num_batches
        self.train_loss.append(loss_epoch)
        self.cur_epoch += 1
    def validate(self):
        """Evaluate on ``dataloader_val`` and append ``1.0 - mean_l1`` to ``val_loss``."""
        model.eval()
        with torch.no_grad():
            num_batches = 0
            wp_epoch = 0.
            sep_wp_loss = torch.zeros(args.pred_len).to(args.device, dtype=torch.float32)
            wp_loss_list = [[]]
            # Validation loop
            for data in dataloader_val:
                # create batch and move to GPU
                fronts_in = data['fronts']
                fronts = []
                if not args.ignore_sides:
                    lefts_in = data['lefts']
                    rights_in = data['rights']
                    lefts = []
                    rights = []
                if not args.ignore_rear:
                    rears_in = data['rears']
                    rears = []
                for i in range(args.seq_len):
                    fronts.append(fronts_in[i].to(args.device, dtype=torch.float32))
                    if not args.ignore_sides:
                        lefts.append(lefts_in[i].to(args.device, dtype=torch.float32))
                        rights.append(rights_in[i].to(args.device, dtype=torch.float32))
                    if not args.ignore_rear:
                        rears.append(rears_in[i].to(args.device, dtype=torch.float32))
                # target point
                target_point = torch.stack(data['target_point'], dim=1).to(args.device, dtype=torch.float32)
                # inference
                encoding = [model.image_encoder(fronts)]
                if not args.ignore_sides:
                    encoding.append(model.image_encoder(lefts))
                    encoding.append(model.image_encoder(rights))
                if not args.ignore_rear:
                    encoding.append(model.image_encoder(rears))
                pred_wp = model(encoding, target_point)
                gt_waypoints = [torch.stack(data['waypoints'][i], dim=1).to(args.device, dtype=torch.float32) for i in range(args.seq_len, len(data['waypoints']))]
                gt_waypoints = torch.stack(gt_waypoints, dim=1).to(args.device, dtype=torch.float32)
                loss = F.l1_loss(pred_wp, gt_waypoints, reduction='none')
                wp_epoch += loss.mean().item()
                sep_wp_loss += loss.mean((0,2))
                num_batches += 1
                # Per-sample mean loss; collected but not used beyond this list.
                sep_item_loss = loss.mean((1,2)).detach().cpu().numpy()
                for i, _loss in enumerate(sep_item_loss):
                    wp_loss_list[0].append(_loss)
            wp_loss = wp_epoch / num_batches
            sep_wp_loss = sep_wp_loss / num_batches
            print(f'Epoch {self.cur_epoch:03d}, ' + f' Wp: {wp_loss:3.3f}')
            # Stored as a score (higher is better) so save() can compare with >=.
            self.val_loss.append(1.0 - wp_loss)
    def save(self):
        """Checkpoint model/optimizer and the training log; also keep a copy of the best model."""
        save_best = False
        if self.val_loss[-1] >= self.bestval:
            self.bestval = self.val_loss[-1]
            self.bestval_epoch = self.cur_epoch
            save_best = True
        # Create a dictionary of all data to save
        log_table = {
            'epoch': self.cur_epoch,
            'iter': self.cur_iter,
            'bestval': self.bestval,
            'bestval_epoch': self.bestval_epoch,
            'train_loss': self.train_loss,
            'val_loss': self.val_loss,
        }
        torch.save(model.state_dict(), os.path.join(args.logdir, 'model.pth'))
        torch.save(optimizer.state_dict(), os.path.join(args.logdir, 'recent_optim.pth'))
        # Log other data corresponding to the recent model
        with open(os.path.join(args.logdir, 'recent.log'), 'w') as f:
            f.write(json.dumps(log_table))
        print('====== Saved recent model ======>')
        if save_best:
            torch.save(model.state_dict(), os.path.join(args.logdir, 'best_model.pth'))
            torch.save(optimizer.state_dict(), os.path.join(args.logdir, 'best_optim.pth'))
            print('====== Overwrote best model ======>')
# Data
root_dir = '/is/rg/avg/kchitta/carla9-10_data/2021/apv3'
train_towns = ['Town01', 'Town02', 'Town03', 'Town04', 'Town05', 'Town06', 'Town07', 'Town10']
val_towns = ['Town01_long', 'Town02_long', 'Town03_long', 'Town04_long', 'Town05_long', 'Town06_long']
train_data, val_data = [], []
for town in train_towns:
train_data.append(os.path.join(root_dir, town))
train_data.append(os.path.join(root_dir, town+'_small'))
for town in val_towns:
val_data.append(os.path.join(root_dir, town))
class_converter = sub_classes[args.classes]
print("classes: ", class_converter)
train_set = CARLA_Data(root=train_data,
pred_len=args.pred_len,
class_converter=class_converter,
ignore_sides=args.ignore_sides,
ignore_rear=args.ignore_rear,
seq_len=args.seq_len,
input_scale=args.input_scale,
input_crop=args.input_crop)
val_set = CARLA_Data(root=val_data,
pred_len=args.pred_len,
class_converter=class_converter,
ignore_sides=args.ignore_sides,
ignore_rear=args.ignore_rear,
seq_len=args.seq_len,
input_scale=args.input_scale,
input_crop=args.input_crop)
dataloader_train = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=4, pin_memory=True)
dataloader_val = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, num_workers=4, pin_memory=True)
# Model
num_segmentation_classes = len(np.unique(class_converter))
num_cameras = 1
if not args.ignore_sides:
num_cameras += 2
if not args.ignore_rear:
num_cameras += 1
model = MultiTaskImageNetwork('cuda', num_segmentation_classes, args.pred_len, num_cameras)
optimizer = optim.AdamW(model.parameters(), lr=args.lr)
conf_log = {
"id": args.id,
"epochs": args.epochs,
"batch_size": args.batch_size,
"lr": args.lr,
"seq_len": args.seq_len,
"pred_len": args.pred_len,
"classes": class_converter,
"class_name": args.classes,
"num_cameras": num_cameras,
}
trainer = Engine(conf_log)
model_parameters = filter(lambda p: p.requires_grad, model.parameters())
params = sum([np.prod(p.size()) for p in model_parameters])
print ('Total trainable parameters: ', params)
# Create logdir
if not os.path.isdir(args.logdir):
os.makedirs(args.logdir)
print('Created dir:', args.logdir)
elif os.path.isfile(os.path.join(args.logdir, 'recent.log')):
print('Loading checkpoint from ' + args.logdir)
with open(os.path.join(args.logdir, 'recent.log'), 'r') as f:
log_table = json.load(f)
# Load variables
trainer.cur_epoch = log_table['epoch']
if 'iter' in log_table: trainer.cur_iter = log_table['iter']
trainer.bestval = log_table['bestval']
trainer.train_loss = log_table['train_loss']
trainer.val_loss = log_table['val_loss']
# Load checkpoint
model.load_state_dict(torch.load(os.path.join(args.logdir, 'model.pth')))
optimizer.load_state_dict(torch.load(os.path.join(args.logdir, 'recent_optim.pth')))
# Log args
with open(os.path.join(args.logdir, 'args.txt'), 'w') as f:
json.dump(args.__dict__, f, indent=2)
for epoch in range(trainer.cur_epoch, args.epochs):
trainer.train()
if epoch % args.val_every == 0:
trainer.validate()
trainer.save() | 5,188 | 0 | 96 |
b4efd4a90ea51089667b34a662653fecb46ca69a | 4,575 | py | Python | demo/2D/air_conditioning.py | Mopolino8/pylbm | b457ccdf1e7a1009807bd1136a276886f81a9e7d | [
"BSD-3-Clause"
] | null | null | null | demo/2D/air_conditioning.py | Mopolino8/pylbm | b457ccdf1e7a1009807bd1136a276886f81a9e7d | [
"BSD-3-Clause"
] | null | null | null | demo/2D/air_conditioning.py | Mopolino8/pylbm | b457ccdf1e7a1009807bd1136a276886f81a9e7d | [
"BSD-3-Clause"
] | 1 | 2019-11-24T17:13:26.000Z | 2019-11-24T17:13:26.000Z | import numpy as np
import sympy as sp
import mpi4py.MPI as mpi
import pylbm
X, Y, LA = sp.symbols('X, Y, LA')
rho, qx, qy, T = sp.symbols('rho, qx, qy, T')
def run(dx, Tf, generator="cython", sorder=None, withPlot=True):
"""
Parameters
----------
dx: double
spatial step
Tf: double
final time
generator: pylbm generator
sorder: list
storage order
withPlot: boolean
if True plot the solution otherwise just compute the solution
"""
# parameters
T0 = .5
Tin = -.5
xmin, xmax, ymin, ymax = 0., 1., 0., 1.
Ra = 2000
Pr = 0.71
Ma = 0.01
alpha = .005
la = 1. # velocity of the scheme
rhoo = 1.
g = 9.81
uo = 0.025
nu = np.sqrt(Pr*alpha*9.81*(T0-Tin)*(ymax-ymin)/Ra)
kappa = nu/Pr
eta = nu
#print nu, kappa
snu = 1./(.5+3*nu)
seta = 1./(.5+3*eta)
sq = 8*(2-snu)/(8-snu)
se = seta
sf = [0., 0., 0., seta, se, sq, sq, snu, snu]
#print sf
a = .5
skappa = 1./(.5+10*kappa/(4+a))
#skappa = 1./(.5+np.sqrt(3)/6)
se = 1./(.5+np.sqrt(3)/3)
snu = se
sT = [0., skappa, skappa, se, snu]
#print sT
dico = {
'box':{'x':[xmin, xmax], 'y':[ymin, ymax], 'label':[1, 2, 0, 0]},
'elements':[
pylbm.Parallelogram([xmin, ymin], [ .1, 0], [0, .8], label=0),
pylbm.Parallelogram([xmax, ymin], [-.1, 0], [0, .8], label=0),
],
'space_step':dx,
'scheme_velocity':la,
'schemes':[
{
'velocities': list(range(9)),
'conserved_moments': [rho, qx, qy],
'polynomials':[
1, X, Y,
3*(X**2+Y**2)-4,
sp.Rational(1, 2)*(9*(X**2+Y**2)**2-21*(X**2+Y**2)+8),
3*X*(X**2+Y**2)-5*X, 3*Y*(X**2+Y**2)-5*Y,
X**2-Y**2, X*Y
],
'relaxation_parameters':sf,
'equilibrium':[
rho, qx, qy,
-2*rho + 3*(qx**2+qy**2),
rho - 3*(qx**2+qy**2),
-qx, -qy,
qx**2 - qy**2, qx*qy
],
'source_terms':{qy: alpha*g*T},
'init':{rho: 1., qx: 0., qy: 0.},
},
{
'velocities': list(range(5)),
'conserved_moments':T,
'polynomials':[1, X, Y, 5*(X**2+Y**2) - 4, (X**2-Y**2)],
'equilibrium':[T, T*qx, T*qy, a*T, 0.],
'relaxation_parameters':sT,
'init':{T:(init_T, (T0,))},
},
],
'boundary_conditions':{
0:{'method':{0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value':(bc, (T0,))},
1:{'method':{0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': (bc_in, (T0, Tin, ymax, rhoo, uo))},
2:{'method':{0: pylbm.bc.NeumannX, 1: pylbm.bc.NeumannX},},
},
'generator': generator,
}
sol = pylbm.Simulation(dico)
if withPlot:
# create the viewer to plot the solution
viewer = pylbm.viewer.matplotlib_viewer
fig = viewer.Fig()
ax = fig[0]
im = ax.image(sol.m[T].transpose(), cmap='jet', clim=[Tin, T0])
ax.title = 'solution at t = {0:f}'.format(sol.t)
ax.polygon([[xmin/dx, ymin/dx],[xmin/dx, (ymin+.8)/dx], [(xmin+.1)/dx, (ymin+.8)/dx], [(xmin+.1)/dx, ymin/dx]], 'k')
ax.polygon([[(xmax-.1)/dx, ymin/dx],[(xmax-.1)/dx, (ymin+.8)/dx], [xmax/dx, (ymin+.8)/dx], [xmax/dx, ymin/dx]], 'k')
fig.animate(update, interval=1)
fig.show()
else:
while sol.t < Tf:
sol.one_time_step()
return sol
if __name__ == '__main__':
dx = 1./256
Tf = 10.
run(dx, Tf)
| 28.416149 | 137 | 0.459672 | import numpy as np
import sympy as sp
import mpi4py.MPI as mpi
import pylbm
X, Y, LA = sp.symbols('X, Y, LA')
rho, qx, qy, T = sp.symbols('rho, qx, qy, T')
def init_T(x, y, T0):
    """Initial temperature field: spatially uniform, equal to ``T0``.
    ``x`` and ``y`` are part of the pylbm init-callback signature but are
    unused because the field is uniform.
    """
    del x, y  # unused: uniform initial condition
    return T0
def bc(f, m, x, y, T0):
    """Wall boundary condition: zero momentum and fixed temperature ``T0``.
    Writes into the moments container ``m`` indexed by the module-level
    sympy symbols ``qx``, ``qy`` and ``T``.  ``f``, ``x`` and ``y`` are part
    of the pylbm boundary-callback signature.
    """
    m[qx] = 0.
    m[qy] = 0.
    m[T] = T0
def bc_in(f, m, x, y, T0, Tin, ymax, rhoo, uo):
    """Inlet boundary condition: horizontal momentum ``rhoo*uo`` and a
    temperature profile blending ``T0`` towards ``Tin`` across the inlet.
    """
    m[qx] = rhoo*uo
    m[qy] = 0.
    # Quadratic profile that vanishes at y = ymax and y = .8 (the inlet
    # edges); the factor 100 sets its amplitude -- presumably normalized for
    # an inlet of height ymax - .8 = 0.1.  TODO confirm intended scaling.
    m[T] = T0 + (Tin - T0)*(ymax-y)*(y-.8)*100
def save(mpi_topo, x, y, m, num):
    """Dump the current velocity and temperature fields to an HDF5 file.
    NOTE(review): relies on the globals ``filename``, ``path`` and ``sol``,
    none of which is defined in this module as shown -- confirm they are
    provided elsewhere before calling.  The parameter ``m`` is accepted but
    unused; fields are read from ``sol.m`` instead.
    """
    h5 = pylbm.H5File(mpi_topo, filename, path, num)
    h5.set_grid(x, y)
    h5.add_vector('velocity', [sol.m[qx], sol.m[qy]])
    h5.add_scalar('Vx', sol.m[qx])
    h5.add_scalar('T', sol.m[T])
    h5.save()
def run(dx, Tf, generator="cython", sorder=None, withPlot=True):
    """
    Parameters
    ----------
    dx: double
        spatial step
    Tf: double
        final time
    generator: pylbm generator
    sorder: list
        storage order
    withPlot: boolean
        if True plot the solution otherwise just compute the solution
    Returns
    -------
    sol: pylbm.Simulation
        the simulation object after the run (or while animating)
    """
    # parameters
    T0 = .5
    Tin = -.5
    xmin, xmax, ymin, ymax = 0., 1., 0., 1.
    Ra = 2000
    Pr = 0.71
    Ma = 0.01
    alpha = .005
    la = 1. # velocity of the scheme
    rhoo = 1.
    g = 9.81
    uo = 0.025
    # Viscosity chosen so that the Rayleigh number of the cavity equals Ra.
    nu = np.sqrt(Pr*alpha*9.81*(T0-Tin)*(ymax-ymin)/Ra)
    kappa = nu/Pr
    eta = nu
    #print nu, kappa
    # Relaxation parameters of the 9-velocity flow scheme.
    snu = 1./(.5+3*nu)
    seta = 1./(.5+3*eta)
    sq = 8*(2-snu)/(8-snu)
    se = seta
    sf = [0., 0., 0., seta, se, sq, sq, snu, snu]
    #print sf
    a = .5
    # Relaxation parameters of the 5-velocity temperature scheme.
    skappa = 1./(.5+10*kappa/(4+a))
    #skappa = 1./(.5+np.sqrt(3)/6)
    se = 1./(.5+np.sqrt(3)/3)
    snu = se
    sT = [0., skappa, skappa, se, snu]
    #print sT
    # pylbm simulation dictionary: geometry (cavity with two inlet/outlet
    # obstacles), the two coupled schemes, and the boundary conditions.
    dico = {
        'box':{'x':[xmin, xmax], 'y':[ymin, ymax], 'label':[1, 2, 0, 0]},
        'elements':[
            pylbm.Parallelogram([xmin, ymin], [ .1, 0], [0, .8], label=0),
            pylbm.Parallelogram([xmax, ymin], [-.1, 0], [0, .8], label=0),
        ],
        'space_step':dx,
        'scheme_velocity':la,
        'schemes':[
            {
                'velocities': list(range(9)),
                'conserved_moments': [rho, qx, qy],
                'polynomials':[
                    1, X, Y,
                    3*(X**2+Y**2)-4,
                    sp.Rational(1, 2)*(9*(X**2+Y**2)**2-21*(X**2+Y**2)+8),
                    3*X*(X**2+Y**2)-5*X, 3*Y*(X**2+Y**2)-5*Y,
                    X**2-Y**2, X*Y
                ],
                'relaxation_parameters':sf,
                'equilibrium':[
                    rho, qx, qy,
                    -2*rho + 3*(qx**2+qy**2),
                    rho - 3*(qx**2+qy**2),
                    -qx, -qy,
                    qx**2 - qy**2, qx*qy
                ],
                # Buoyancy: the temperature acts on the vertical momentum.
                'source_terms':{qy: alpha*g*T},
                'init':{rho: 1., qx: 0., qy: 0.},
            },
            {
                'velocities': list(range(5)),
                'conserved_moments':T,
                'polynomials':[1, X, Y, 5*(X**2+Y**2) - 4, (X**2-Y**2)],
                'equilibrium':[T, T*qx, T*qy, a*T, 0.],
                'relaxation_parameters':sT,
                'init':{T:(init_T, (T0,))},
            },
        ],
        'boundary_conditions':{
            0:{'method':{0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value':(bc, (T0,))},
            1:{'method':{0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': (bc_in, (T0, Tin, ymax, rhoo, uo))},
            2:{'method':{0: pylbm.bc.NeumannX, 1: pylbm.bc.NeumannX},},
        },
        'generator': generator,
    }
    sol = pylbm.Simulation(dico)
    if withPlot:
        # create the viewer to plot the solution
        viewer = pylbm.viewer.matplotlib_viewer
        fig = viewer.Fig()
        ax = fig[0]
        im = ax.image(sol.m[T].transpose(), cmap='jet', clim=[Tin, T0])
        ax.title = 'solution at t = {0:f}'.format(sol.t)
        # Draw the two obstacles (in lattice coordinates).
        ax.polygon([[xmin/dx, ymin/dx],[xmin/dx, (ymin+.8)/dx], [(xmin+.1)/dx, (ymin+.8)/dx], [(xmin+.1)/dx, ymin/dx]], 'k')
        ax.polygon([[(xmax-.1)/dx, ymin/dx],[(xmax-.1)/dx, (ymin+.8)/dx], [xmax/dx, (ymin+.8)/dx], [xmax/dx, ymin/dx]], 'k')
        def update(iframe):
            """Advance the simulation by nrep lattice steps and refresh the image."""
            nrep = 64
            for i in range(nrep):
                sol.one_time_step()
            im.set_data(sol.m[T].transpose())
            ax.title = 'temperature at t = {0:f}'.format(sol.t)
        fig.animate(update, interval=1)
        fig.show()
    else:
        while sol.t < Tf:
            sol.one_time_step()
    return sol
if __name__ == '__main__':
    # Default resolution and final time when run as a script.
    dx = 1./256
    Tf = 10.
    run(dx, Tf)
| 591 | 0 | 123 |
6bef6489809c1f89d4a55a23a3a461aeb8d2b118 | 5,341 | py | Python | timeeval_gui/pages/gutentag.py | HPI-Information-Systems/TimeEval-GUI | b6c857d8b5ce74660c6be5cdb1dfb9f509e0bd48 | [
"MIT"
] | null | null | null | timeeval_gui/pages/gutentag.py | HPI-Information-Systems/TimeEval-GUI | b6c857d8b5ce74660c6be5cdb1dfb9f509e0bd48 | [
"MIT"
] | null | null | null | timeeval_gui/pages/gutentag.py | HPI-Information-Systems/TimeEval-GUI | b6c857d8b5ce74660c6be5cdb1dfb9f509e0bd48 | [
"MIT"
] | null | null | null | import warnings
from typing import Tuple, Dict
import streamlit as st
from gutenTAG import GutenTAG
from timeeval_gui.timeseries_config import TimeSeriesConfig
from timeeval_gui.utils import get_base_oscillations, get_anomaly_types, get_anomaly_params, \
get_base_oscillation_parameters
from .page import Page
from ..files import Files
| 39.562963 | 117 | 0.649691 | import warnings
from typing import Tuple, Dict
import streamlit as st
from gutenTAG import GutenTAG
from timeeval_gui.timeseries_config import TimeSeriesConfig
from timeeval_gui.utils import get_base_oscillations, get_anomaly_types, get_anomaly_params, \
get_base_oscillation_parameters
from .page import Page
from ..files import Files
def general_area(ts_config: TimeSeriesConfig) -> TimeSeriesConfig:
    """Render the general-settings widgets and apply the choices to *ts_config*."""
    name = st.text_input("Name")
    ts_config.set_name(name)
    length = st.number_input("Length", min_value=10, value=1000)
    ts_config.set_length(length)
    wants_supervised = st.checkbox("Generate training time series for supervised methods")
    if wants_supervised:
        ts_config.set_supervised()
    wants_semi_supervised = st.checkbox("Generate training time series for semi-supervised methods")
    if wants_semi_supervised:
        ts_config.set_semi_supervised()
    return ts_config
def select_base_oscillation(key="base-oscillation") -> Tuple[str, str]:
    """Show a select box over all base oscillations and return the chosen (id, label) pair."""
    options = get_base_oscillations()
    return st.selectbox("Base-Oscillation", options.items(), format_func=lambda item: item[1], key=key)
def select_anomaly_type(key: str, bo_kind: str) -> Tuple[str, str]:
    """Show a select box of the anomaly types valid for *bo_kind*; return the chosen (id, label) pair."""
    options = get_anomaly_types(bo_kind)
    choice = st.selectbox("Anomaly Type", options.items(), format_func=lambda item: item[1], key=key)
    return choice
def channel_area(c, ts_config: TimeSeriesConfig) -> TimeSeriesConfig:
    """Render the widgets for channel *c* and register the chosen base
    oscillation (plus its parameter values) on *ts_config*.
    """
    base_oscillation = select_base_oscillation(f"base-oscillation-{c}")
    parameters = get_base_oscillation_parameters(base_oscillation[0])
    param_config = {}
    for p in parameters:
        # Widget choice depends on the parameter's declared type.
        if p.tpe == "number":
            param_config[p.key] = st.number_input(p.name, key=f"{p.key}-{c}", help=p.help)
        elif p.tpe == "integer":
            # Coerce the widget value to int for integer parameters.
            param_config[p.key] = int(st.number_input(p.name, key=f"{p.key}-{c}", help=p.help))
        else:
            # Unsupported parameter types are skipped but reported both in
            # the log and in the UI.
            warn_msg = f"Input type ({p.tpe}) for parameter {p.name} of BO {base_oscillation[1]} not supported yet!"
            warnings.warn(warn_msg)
            st.warning(warn_msg)
    ts_config.add_base_oscillation(base_oscillation[0], **param_config)
    return ts_config
def anomaly_area(a, ts_config: TimeSeriesConfig) -> TimeSeriesConfig:
    """Render the widgets for anomaly *a* and add the configured anomaly
    (position, length, channel and list of anomaly types) to *ts_config*.
    """
    position = st.selectbox("Position", key=f"anomaly-position-{a}", options=["beginning", "middle", "end"], index=1)
    length = int(st.number_input("Length", key=f"anomaly-length-{a}", min_value=1))
    channel = st.selectbox("Channel", key=f"anomaly-channel-{a}",
                           options=list(range(len(ts_config.config["base-oscillations"]))))
    n_kinds = st.number_input("Number of Anomaly Types", key=f"anomaly-types-{a}", min_value=1)
    kinds = []
    for t in range(int(n_kinds)):
        st.write(f"##### Type {t}")
        # Offered anomaly types are restricted to those valid for the
        # selected channel's base-oscillation kind.
        bo_kind = ts_config.config["base-oscillations"][channel]["kind"]
        anomaly_type, _ = select_anomaly_type(f"anomaly-type-{a}-{t}", bo_kind)
        parameters = parameter_area(a, t, anomaly_type, bo_kind)
        kinds.append({"kind": anomaly_type, "parameters": parameters})
    ts_config.add_anomaly(position=position, length=length, channel=channel, kinds=kinds)
    return ts_config
def parameter_area(a, t, anomaly_type: str, bo_kind: str) -> Dict:
    """Render input widgets for every parameter of *anomaly_type* and return
    the collected values as ``{parameter name: widget value}``.
    *a* and *t* (anomaly index and type index) only serve to build unique
    Streamlit widget keys.
    """
    param_conf = {}
    parameters = get_anomaly_params(anomaly_type)
    for name, p, desc in parameters:
        # Some parameters only apply to a specific base oscillation.
        if name.lower() == "sinusoid_k" and bo_kind != "sine":
            continue
        if name.lower() == "cbf_pattern_factor" and bo_kind != "cylinder-bell-funnel":
            continue
        key = f"{a}-{t}-{name}"
        # Pick the widget from the parameter's declared type.  Identity
        # comparison (`is`) is the idiomatic way to compare type objects
        # (pycodestyle E721); for these builtins it is equivalent to `==`.
        if p is str:
            param_conf[name] = st.text_input(name.upper(), key=key, help=desc)
        elif p is bool:
            param_conf[name] = st.checkbox(name.upper(), key=key, help=desc)
        elif p is int:
            param_conf[name] = st.number_input(name.upper(), key=key, step=1, help=desc)
        elif p is float:
            param_conf[name] = st.number_input(name.upper(), key=key, help=desc)
    return param_conf
class GutenTAGPage(Page):
    """Streamlit page that interactively builds a GutenTAG time-series
    configuration and can preview ("Build Timeseries") or persist ("Save")
    the generated dataset."""
    def _get_name(self) -> str:
        """Name under which this page is registered in the app."""
        return "GutenTAG"
    def render(self):
        """Draw the whole page: general settings, per-channel and per-anomaly
        sections, and the Build/Save actions."""
        st.image("images/gutentag.png")
        timeseries_config = TimeSeriesConfig()
        st.write("## General Settings")
        timeseries_config = general_area(timeseries_config)
        st.write("## Channels")
        n_channels = st.number_input("Number of Channels", min_value=1)
        for c in range(n_channels):
            with st.expander(f"Channel {c}"):
                timeseries_config = channel_area(c, timeseries_config)
        st.write("## Anomalies")
        n_anomalies = st.number_input("Number of Anomalies", min_value=0)
        for a in range(n_anomalies):
            with st.expander(f"Anomaly {a}"):
                timeseries_config = anomaly_area(a, timeseries_config)
        st.write("---")
        # gt is None at this point, so the first "if gt is None" below is
        # always true; the second one only guards against Build and Save
        # both firing in the same run.
        gt = None
        if st.button("Build Timeseries"):
            if gt is None:
                gt = GutenTAG.from_dict({"timeseries": [timeseries_config.config]}, plot=False)
                gt.generate()
            gt.timeseries[0].plot()
            st.pyplot()
        if st.button("Save"):
            if gt is None:
                gt = GutenTAG.from_dict({"timeseries": [timeseries_config.config]}, plot=False)
                gt.generate()
            Files().store_ts(gt)
            st.success(f"> Successfully saved new time series dataset '{timeseries_config.config['name']}' to disk.")
| 4,774 | 4 | 214 |
c7c34fe453581fe316f348a07f2451f2d0cde38b | 338 | py | Python | bsff/actors/migrations/0002_auto_20210517_0212.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | bsff/actors/migrations/0002_auto_20210517_0212.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | bsff/actors/migrations/0002_auto_20210517_0212.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.11 on 2021-05-17 02:12
from django.db import migrations
| 18.777778 | 48 | 0.579882 | # Generated by Django 3.1.11 on 2021-05-17 02:12
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: set the default ordering of the ``actor``
    model to ``full_name``."""
    # Must be applied on top of the initial actors migration.
    dependencies = [
        ('actors', '0001_initial'),
    ]
    # Only changes model Meta options; no schema (database) change involved.
    operations = [
        migrations.AlterModelOptions(
            name='actor',
            options={'ordering': ['full_name']},
        ),
    ]
| 0 | 231 | 23 |
005e22a20ac5fadd1b8f8ad254b24fcf4dcaa32e | 2,301 | py | Python | networktoolkit/arpspoof.py | mokytis/networktoolkit | 22f2facbb948a30eb63c8cdb2797a98ad771cfde | [
"MIT"
] | 1 | 2021-12-15T22:55:16.000Z | 2021-12-15T22:55:16.000Z | networktoolkit/arpspoof.py | mokytis/networktoolkit | 22f2facbb948a30eb63c8cdb2797a98ad771cfde | [
"MIT"
] | null | null | null | networktoolkit/arpspoof.py | mokytis/networktoolkit | 22f2facbb948a30eb63c8cdb2797a98ad771cfde | [
"MIT"
] | 1 | 2022-02-25T07:04:29.000Z | 2022-02-25T07:04:29.000Z | import time
import click
import scapy.all as scapy
from networktoolkit import networkscan
def gen_arp_response(target_ip, spoof_ip):
"""Generates an ARP Response packet
:param target_ip: ip address to send packet to
:param spoof_ip: ip address to spoof
:return: A scapy packet
"""
if scan_results := networkscan.get_clients(
target_ip, 10
): # checks to see if the target is reachable on the network
target = scan_results[0]
packet = scapy.ARP(
op=2, # ARP response (op=1 would be ARP request). We are spoofing a request packet
pdst=target_ip,
hwdst=target.mac_addr,
psrc=spoof_ip, # ip adddress we are spoofing (pretending to be)
)
return packet
def arpspoof(target_ip, spoof_ip, bi_directional=False, delay=1):
"""Spoof a given ip address by sending ARP Response packets
:param target_ip: ip address of target
:param spoof_ip: ip address to spoof
:param bi_directional: if True, also send ARP Responses to spoof_ip spoofing target_ip
:type bi_directional: bool
"""
packets = []
click.echo(f"[+] Generating ARP Response (dest={target_ip} spoofing={spoof_ip}")
packets.append(gen_arp_response(target_ip, spoof_ip))
if bi_directional:
click.echo(f"[+] Generating ARP Response (dest={spoof_ip} spoofing={target_ip}")
packets.append(gen_arp_response(spoof_ip, target_ip))
counter = 0
try:
while True:
counter += 1
for packet in packets:
scapy.send(packet, verbose=False)
click.echo(
f"Sent ARP Response to {packet.pdst} spoofing {packet.psrc} {counter} time{'s' if counter != 1 else ''}"
)
time.sleep(delay)
except KeyboardInterrupt:
click.echo(f"Detected keyboard interrupt. Exiting...")
@click.command()
@click.argument("target_ip")
@click.argument("spoof_ip")
@click.option("-b", "--bi_directional", is_flag=True, help="Spoof in both directions")
@click.option(
"-d",
"--delay",
default=1,
help="Delay between sending each set of packets (seconds)",
)
| 30.68 | 124 | 0.65189 | import time
import click
import scapy.all as scapy
from networktoolkit import networkscan
def gen_arp_response(target_ip, spoof_ip):
    """Generate an ARP response packet that poisons *target_ip*'s ARP cache.
    :param target_ip: ip address to send packet to (the victim)
    :param spoof_ip: ip address to spoof (the packet claims this address
        belongs to our MAC)
    :return: a scapy ARP packet, or ``None`` when the target did not answer
        the network scan (unreachable / timed out)
    """
    # Resolve the target's MAC address first; without it the forged
    # response cannot be addressed.
    if scan_results := networkscan.get_clients(
        target_ip, 10
    ):  # checks to see if the target is reachable on the network
        target = scan_results[0]
        return scapy.ARP(
            op=2,  # ARP response (op=1 would be ARP request). We are spoofing a response packet
            pdst=target_ip,
            hwdst=target.mac_addr,
            psrc=spoof_ip,  # ip address we are spoofing (pretending to be)
        )
    # Previously the function fell off the end here and returned None
    # implicitly; make the "target unreachable" outcome explicit.
    return None
def arpspoof(target_ip, spoof_ip, bi_directional=False, delay=1):
    """Spoof a given ip address by sending ARP Response packets.
    Runs until interrupted with Ctrl-C.
    :param target_ip: ip address of target
    :param spoof_ip: ip address to spoof
    :param bi_directional: if True, also send ARP Responses to spoof_ip spoofing target_ip
    :type bi_directional: bool
    :param delay: seconds to sleep between successive rounds of packets
    """
    packets = []
    # Fixed: both messages previously lacked the closing parenthesis.
    click.echo(f"[+] Generating ARP Response (dest={target_ip} spoofing={spoof_ip})")
    packets.append(gen_arp_response(target_ip, spoof_ip))
    if bi_directional:
        click.echo(f"[+] Generating ARP Response (dest={spoof_ip} spoofing={target_ip})")
        packets.append(gen_arp_response(spoof_ip, target_ip))
    # gen_arp_response yields None for unreachable hosts; sending None would
    # crash scapy.send, so drop such entries and bail out if nothing is left.
    packets = [packet for packet in packets if packet is not None]
    if not packets:
        click.echo("[-] No reachable targets found; nothing to spoof.")
        return
    counter = 0
    try:
        while True:
            counter += 1
            for packet in packets:
                scapy.send(packet, verbose=False)
                click.echo(
                    f"Sent ARP Response to {packet.pdst} spoofing {packet.psrc} {counter} time{'s' if counter != 1 else ''}"
                )
            time.sleep(delay)
    except KeyboardInterrupt:
        click.echo("Detected keyboard interrupt. Exiting...")
@click.command()
@click.argument("target_ip")
@click.argument("spoof_ip")
@click.option("-b", "--bi_directional", is_flag=True, help="Spoof in both directions")
@click.option(
    "-d",
    "--delay",
    default=1,
    help="Delay between sending each set of packets (seconds)",
)
def cli(target_ip, spoof_ip, bi_directional, delay):
    # Thin CLI wrapper around arpspoof().  Deliberately documented with a
    # comment instead of a docstring: click surfaces docstrings as the
    # command's --help text, which would change observable output.
    arpspoof(target_ip, spoof_ip, bi_directional, delay)
| 88 | 0 | 22 |
817067682d486b5c02eeb1fa01201094f32b22f1 | 1,832 | py | Python | simpleTicket/siteEngine/entities_utils.py | abogeorge/simpleTicket | ca550f4e9817e13e5723ad2483baddc036e435f5 | [
"MIT"
] | null | null | null | simpleTicket/siteEngine/entities_utils.py | abogeorge/simpleTicket | ca550f4e9817e13e5723ad2483baddc036e435f5 | [
"MIT"
] | null | null | null | simpleTicket/siteEngine/entities_utils.py | abogeorge/simpleTicket | ca550f4e9817e13e5723ad2483baddc036e435f5 | [
"MIT"
] | null | null | null | from .models import Ticket, UserProfile, Order, TicketType, OrderType
# Retrieving user profile for the specified user
# Retrieving tickets for the specificied user profile
# Retrieving orders for the specified user profile
# Retrieving the total number of subalterns for a specific user profile
# Retrieving a list of all subalterns for a specific user profile
# Returns the type of the user:
# 0 - admin; 1 - user; 2 - helpdesk;
# Returns all exmployees from the database
# Returns the id of the ticket with the specified title | 32.714286 | 75 | 0.720524 | from .models import Ticket, UserProfile, Order, TicketType, OrderType
# Retrieving user profile for the specified user
def get_profile_for_user(user):
    """Return the UserProfile related to *user*, or None when it does not exist."""
    try:
        return user.userprofile
    except UserProfile.DoesNotExist:
        return None
# Retrieving tickets for the specificied user profile
def get_tickets_for_user_profile(user_profile):
    """Return the Ticket queryset for *user_profile*, or False when empty."""
    matches = Ticket.objects.filter(user_type=user_profile)
    # Historical contract: callers receive False instead of an empty queryset.
    return matches if len(matches) != 0 else False
# Retrieving orders for the specified user profile
def get_orders_for_user_profile(user_profile):
    """Return the Order queryset for *user_profile*, or False when empty."""
    matches = Order.objects.filter(user_type=user_profile)
    # Historical contract: callers receive False instead of an empty queryset.
    return matches if len(matches) != 0 else False
# Retrieving the total number of subalterns for a specific user profile
def get_subalterns_number(user_profile):
    """Count how many UserProfile rows report to *user_profile*."""
    direct_reports = UserProfile.objects.filter(supervisor_user=user_profile)
    return len(direct_reports)
# Retrieving a list of all subalterns for a specific user profile
def get_subalterns(user_profile):
    """Return the queryset of UserProfile rows supervised by *user_profile*."""
    return UserProfile.objects.filter(supervisor_user=user_profile)
# Returns the type of the user:
# 0 - admin; 1 - user; 2 - helpdesk;
def get_user_type(user_profile):
    """Return the numeric role of the user: 0 - admin; 1 - user; 2 - helpdesk."""
    return user_profile.role.role
# Returns all exmployees from the database
def get_employees():
    """Return every UserProfile stored in the database."""
    return UserProfile.objects.all()
# Returns the id of the ticket with the specified title
# Fixed: the original loaded every Ticket row and compared titles in Python,
# and left debug print() calls that spammed stdout on every lookup.  The
# title match now happens database-side via filter().first().
def get_ticket_id_from_title(name):
    """Return the id of the first ticket whose title equals *name*.

    Falls through to the ``return False`` line below when no ticket
    matches, preserving the original contract.
    """
    first_match = Ticket.objects.filter(title=name).first()
    if first_match is not None:
        return first_match.id
return False | 1,119 | 0 | 176 |
da7ed9ea4c2133be2c1a9db446d0ee720683a339 | 961 | py | Python | scheduler/migrations/0004_auto_20181108_1604.py | gijzelaerr/buis | 9878919fe58f600634306da78ce3c5ddba8388c1 | [
"Apache-2.0"
] | null | null | null | scheduler/migrations/0004_auto_20181108_1604.py | gijzelaerr/buis | 9878919fe58f600634306da78ce3c5ddba8388c1 | [
"Apache-2.0"
] | 18 | 2018-09-28T15:42:15.000Z | 2019-12-17T08:56:28.000Z | scheduler/migrations/0004_auto_20181108_1604.py | gijzelaerr/buis | 9878919fe58f600634306da78ce3c5ddba8388c1 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.1.1 on 2018-11-08 16:04
from django.db import migrations, models
import django.db.models.deletion
| 34.321429 | 148 | 0.631634 | # Generated by Django 2.1.1 on 2018-11-08 16:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``scheduler`` app.

    Adds the ``Workflow`` model and adjusts the existing ``repository``
    foreign key on ``repositorystatechange`` (reverse accessor
    ``state_changes``, cascading delete).
    """
    # Must be applied after migration 0003.
    dependencies = [
        ('scheduler', '0003_auto_20181108_1124'),
    ]
    operations = [
        # New Workflow table: auto PK, 32-char external run id, and a
        # cascading FK back to the owning Repository.
        migrations.CreateModel(
            name='Workflow',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('run_id', models.CharField(max_length=32)),
                ('repository', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workflows', to='scheduler.Repository')),
            ],
        ),
        migrations.AlterField(
            model_name='repositorystatechange',
            name='repository',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='state_changes', to='scheduler.Repository'),
        ),
    ]
| 0 | 814 | 23 |
35bdd77688b7d457b6ece73ef67063f6a8a6f99b | 5,467 | py | Python | tests/transaction_test.py | rbbonfim/pagarme-python | a7079fbdd01bd855ea09f89847d6374b499be76b | [
"MIT"
] | 79 | 2015-08-25T14:43:34.000Z | 2021-12-01T18:16:54.000Z | tests/transaction_test.py | rbbonfim/pagarme-python | a7079fbdd01bd855ea09f89847d6374b499be76b | [
"MIT"
] | 103 | 2015-09-15T15:24:14.000Z | 2021-12-20T22:52:02.000Z | tests/transaction_test.py | rbbonfim/pagarme-python | a7079fbdd01bd855ea09f89847d6374b499be76b | [
"MIT"
] | 53 | 2015-09-11T12:06:26.000Z | 2022-02-23T02:59:57.000Z | from pagarme import transaction
from tests.resources.dictionaries import transaction_dictionary
import pytest
import time
| 42.053846 | 122 | 0.788001 | from pagarme import transaction
from tests.resources.dictionaries import transaction_dictionary
import pytest
import time
# Integration tests for the pagarme `transaction` SDK module.  Every test
# calls the live API with fixture payloads from `transaction_dictionary`.
# The `retry` parameter is presumably a pytest fixture that re-polls until
# the searched transaction becomes visible -- TODO confirm in conftest.
def test_calculate_installments_amount():
    array_installments = transaction.calculate_installments_amount(transaction_dictionary.CALCULATE_INTALLMENTS_AMOUNT)
    assert array_installments['installments'] is not None
def test_capture_transaction():
    # A transaction created with capture disabled must become 'paid' on capture.
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION_CAPTURE_FALSE)
    capture_transaction = transaction.capture(trx['id'], transaction_dictionary.REFUNDED_OR_CAPTURE_TRANSACTION)
    assert 'paid' == capture_transaction['status']
def test_create_transaction():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    assert trx['id'] is not None
def test_create_transaction_with_split_rule_amount():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION_WITH_SPLIT_RULE_AMOUNT)
    assert trx['split_rules'] is not None
def test_create_transaction_with_split_rule_percentage():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION_WITH_SPLIT_RULE_PERCENTAGE)
    assert trx['split_rules'] is not None
def test_error_request():
    # Invalid payload must raise, and the API error text should mention 'valor'.
    with pytest.raises(Exception) as PagarMeException:
        transaction.create(transaction_dictionary.INVALID_REQUEST)
    assert 'valor' in str(PagarMeException.value)
def test_find_all_postbacks(retry):
    _transaction = transaction.create(transaction_dictionary.BOLETO_TRANSACTION)
    transaction.pay_boleto(_transaction['id'], transaction_dictionary.PAY_BOLETO)
    search_params = {'id': _transaction['id']}
    _transaction_paid = retry(lambda: transaction.find_by(search_params))
    _postbacks = transaction.postbacks(_transaction_paid[0]['id'])
    assert _postbacks[0]['model_id'] == str(_transaction_paid[0]['id'])
def test_find_all_transaction_events():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    all_events = transaction.events(trx['id'])
    assert all_events is not None
def test_find_all_transaction_operations():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    all_operations = transaction.operations(trx['id'])
    assert all_operations[0]['id'] is not None
def test_find_all_transaction_payables():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    all_payables = transaction.payables(trx['id'])
    assert all_payables is not None
def test_find_all_transactions():
    all_transactions = transaction.find_all()
    assert all_transactions is not None
def test_find_by(retry):
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    search_params = {'id': trx['id']}
    find_trx = retry(lambda: transaction.find_by(search_params))
    assert trx['id'] == find_trx[0]['id']
def test_find_by_id(retry):
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    found_trx = retry(lambda: transaction.find_by_id(trx['id']))
    assert trx['id'] == found_trx['id']
def test_find_specific_payable():
    trx = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    all_payables = transaction.payables(trx['id'])
    specific_payable = transaction.specific_payable(trx['id'], all_payables[0]['id'])
    assert specific_payable['id'] is not None
def test_generate_card_hash_key():
    card_hash_key = transaction.generate_card_hash_key()
    assert card_hash_key is not None
def test_pay_boleto():
    trx = transaction.create(transaction_dictionary.BOLETO_TRANSACTION)
    pay_transaction = transaction.pay_boleto(trx['id'], transaction_dictionary.PAY_BOLETO)
    assert 'paid' == pay_transaction['status']
def test_postbacks_redeliver(retry):
    _transaction = transaction.create(transaction_dictionary.BOLETO_TRANSACTION)
    transaction.pay_boleto(_transaction['id'], transaction_dictionary.PAY_BOLETO)
    search_params = {'id': _transaction['id']}
    _transaction_paid = retry(lambda: transaction.find_by(search_params))
    _postbacks = transaction.postbacks(_transaction_paid[0]['id'])
    redeliver = transaction.postback_redeliver(_transaction_paid[0]['id'], _postbacks[0]['id'])
    assert redeliver['status'] == 'pending_retry'
def test_refund_transaction(retry):
    # A boleto is created and paid first (presumably to fund the refund --
    # confirm), then a card charge is refunded and the status re-checked.
    trx_boleto = transaction.create(transaction_dictionary.BOLETO_TRANSACTION)
    transaction.pay_boleto(trx_boleto['id'], transaction_dictionary.PAY_BOLETO)
    trx_credit_card = transaction.create(transaction_dictionary.VALID_CREDIT_CARD_TRANSACTION)
    refund_transaction = transaction.refund(trx_credit_card['id'], transaction_dictionary.REFUNDED_OR_CAPTURE_TRANSACTION)
    search_params = {'id': str(refund_transaction['id'])}
    refunded_transaction = retry(lambda: transaction.find_by(search_params))
    assert 'refunded' == refunded_transaction[0]['status']
def test_specific_postback(retry):
    _transaction = transaction.create(transaction_dictionary.BOLETO_TRANSACTION)
    transaction.pay_boleto(_transaction['id'], transaction_dictionary.PAY_BOLETO)
    search_params = {'id': _transaction['id']}
    transaction_paid = retry(lambda: transaction.find_by(search_params))
    postbacks = transaction.postbacks(transaction_paid[0]['id'])
    specific_postback = transaction.specific_postback(transaction_paid[0]['id'], postbacks[0]['id'])
    assert specific_postback['id'] == postbacks[0]['id']
| 4,889 | 0 | 437 |
b7c4c865ea08866f15cac2e1cf22972cf9599e7a | 794 | py | Python | database.py | ngmhprogramming/Contact-Book | 664a76087e8b35e1dfbb1f8fd33b1866c81849fb | [
"MIT"
] | 4 | 2018-09-24T18:29:07.000Z | 2019-03-04T13:58:19.000Z | database.py | ngmhprogramming/Contact-Book | 664a76087e8b35e1dfbb1f8fd33b1866c81849fb | [
"MIT"
] | null | null | null | database.py | ngmhprogramming/Contact-Book | 664a76087e8b35e1dfbb1f8fd33b1866c81849fb | [
"MIT"
] | null | null | null | #Database setup file
# One-shot bootstrap: drops and recreates the ``users`` and ``books``
# tables, then prints the resulting table list as a sanity check.
import sqlite3

connection = sqlite3.connect("/home/contactbook/website/storage.db")
cursor = connection.cursor()

# Rebuild the users table from scratch (any existing data is lost).
cursor.execute("DROP TABLE IF EXISTS users;")
cursor.execute("""
CREATE TABLE IF NOT EXISTS users(
id integer PRIMARY KEY,
username text NOT NULL,
password text NOT NULL,
pnumber text NOT NULL
);
""")

# Rebuild the books table from scratch.
cursor.execute("DROP TABLE IF EXISTS books;")
cursor.execute("""
CREATE TABLE IF NOT EXISTS books(
id integer PRIMARY KEY,
time integer NOT NULL,
bookname text NOT NULL,
username text NOT NULL,
public text NOT NULL
);
""")

# Ensure that the tables were actually created.
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
created = cursor.fetchall()
print(created)
| 23.352941 | 64 | 0.680101 | #Database setup file
#Useful when setting up for first time
import sqlite3
# NOTE: hard-coded deployment path; connect() creates the file if missing.
conn = sqlite3.connect("/home/contactbook/website/storage.db")
c = conn.cursor()
#Create tables for users and books
# Existing tables are dropped first, so re-running wipes all stored data.
c.execute("DROP TABLE IF EXISTS users;")
c.execute("""
CREATE TABLE IF NOT EXISTS users(
id integer PRIMARY KEY,
username text NOT NULL,
password text NOT NULL,
pnumber text NOT NULL
);
""")
c.execute("DROP TABLE IF EXISTS books;")
c.execute("""
CREATE TABLE IF NOT EXISTS books(
id integer PRIMARY KEY,
time integer NOT NULL,
bookname text NOT NULL,
username text NOT NULL,
public text NOT NULL
);
""")
#Ensure that the tables are created
c.execute("SELECT name FROM sqlite_master WHERE type='table';")
r = c.fetchall()
print(r)
# NOTE(review): the script never calls conn.commit()/conn.close();
# verify the tables persist as intended on this sqlite3 version.
| 0 | 0 | 0 |
140a2bed33d4c40bd9b60cc12d01904376b73824 | 1,665 | py | Python | powergate/deals-POW-to-db.py | deplatformr/open-images | 3726c9802bda1d7ecbbbd9920d5566daaecc9faa | [
"MIT"
] | 2 | 2020-10-12T02:37:54.000Z | 2020-10-14T15:16:49.000Z | powergate/deals-POW-to-db.py | deplatformr/open-images | 3726c9802bda1d7ecbbbd9920d5566daaecc9faa | [
"MIT"
] | null | null | null | powergate/deals-POW-to-db.py | deplatformr/open-images | 3726c9802bda1d7ecbbbd9920d5566daaecc9faa | [
"MIT"
] | null | null | null | import os
import sqlite3
from datetime import datetime
from pygate_grpc.client import PowerGateClient
# Powergate endpoint and auth token come from the environment.
api = os.getenv('POWERGATE_API')
token = os.getenv('POWERGATE_TOKEN')
powergate = PowerGateClient(api, False)
# get final storage deals info
storage_deals = powergate.deals.storage_deal_records(
    include_pending=False, include_final=True, token=token
)
total_deals = len(storage_deals)
print(str(total_deals) + " finalized storage deals found.")
if total_deals > 0:
    # The workflow DB lives in the sibling "pipeline" directory of the cwd.
    abs_path = os.getcwd()
    split = os.path.split(abs_path)
    db_path = os.path.join(
        split[0], "pipeline/deplatformr_open_images_workflow.sqlite")
    workflow_db = sqlite3.connect(db_path)
    cursor = workflow_db.cursor()
    for deal in storage_deals:
        try:
            price = deal["dealInfo"]["pricePerEpoch"]
        except:
            # Missing price info defaults to 0.
            price = 0
        utc_date = datetime.utcfromtimestamp(int(deal["time"]))
        cid = deal["rootCid"]
        cursor.execute("SELECT name from packages where cid = ?", (cid,),)
        # NOTE(review): `filename` is fetched but never used afterwards.
        filename = cursor.fetchone()
        # INSERT OR IGNORE: rows violating a uniqueness constraint are
        # skipped, so re-running does not duplicate already-recorded deals.
        cursor.execute("INSERT OR IGNORE INTO deals (deal_id, payload_cid, piece_cid, timestamp, piece_size, miner_id, start_epoch, activation_epoch, duration, price, wallet, state) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", (
            deal["dealInfo"]["dealId"], deal["rootCid"], deal["dealInfo"]["pieceCid"], utc_date, deal["dealInfo"]["size"], deal["dealInfo"]["miner"], deal["dealInfo"]["startEpoch"], deal["dealInfo"]["activationEpoch"], deal["dealInfo"]["duration"], price, deal["address"], deal["dealInfo"]["stateName"]),)
        workflow_db.commit()
    workflow_db.close()
print("Database updated.") | 37.840909 | 305 | 0.678078 | import os
import sqlite3
from datetime import datetime
from pygate_grpc.client import PowerGateClient
# Powergate endpoint and auth token come from the environment.
api = os.getenv('POWERGATE_API')
token = os.getenv('POWERGATE_TOKEN')
powergate = PowerGateClient(api, False)
# get final storage deals info
storage_deals = powergate.deals.storage_deal_records(
    include_pending=False, include_final=True, token=token
)
total_deals = len(storage_deals)
print(str(total_deals) + " finalized storage deals found.")
if total_deals > 0:
    # The workflow DB lives in the sibling "pipeline" directory of the cwd.
    abs_path = os.getcwd()
    split = os.path.split(abs_path)
    db_path = os.path.join(
        split[0], "pipeline/deplatformr_open_images_workflow.sqlite")
    workflow_db = sqlite3.connect(db_path)
    cursor = workflow_db.cursor()
    for deal in storage_deals:
        try:
            price = deal["dealInfo"]["pricePerEpoch"]
        except:
            # Missing price info defaults to 0.
            price = 0
        utc_date = datetime.utcfromtimestamp(int(deal["time"]))
        cid = deal["rootCid"]
        cursor.execute("SELECT name from packages where cid = ?", (cid,),)
        # NOTE(review): `filename` is fetched but never used afterwards.
        filename = cursor.fetchone()
        # INSERT OR IGNORE: rows violating a uniqueness constraint are
        # skipped, so re-running does not duplicate already-recorded deals.
        cursor.execute("INSERT OR IGNORE INTO deals (deal_id, payload_cid, piece_cid, timestamp, piece_size, miner_id, start_epoch, activation_epoch, duration, price, wallet, state) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", (
            deal["dealInfo"]["dealId"], deal["rootCid"], deal["dealInfo"]["pieceCid"], utc_date, deal["dealInfo"]["size"], deal["dealInfo"]["miner"], deal["dealInfo"]["startEpoch"], deal["dealInfo"]["activationEpoch"], deal["dealInfo"]["duration"], price, deal["address"], deal["dealInfo"]["stateName"]),)
        workflow_db.commit()
    workflow_db.close()
print("Database updated.") | 0 | 0 | 0 |
8fd90e04dbe2c00d88ef581ceaa121652977d3e3 | 4,981 | py | Python | sqlany_django/introspection.py | TheProjecter/sqlany-django | 363be27eab819d879135422a78623b6ed487799a | [
"BSD-3-Clause"
] | null | null | null | sqlany_django/introspection.py | TheProjecter/sqlany-django | 363be27eab819d879135422a78623b6ed487799a | [
"BSD-3-Clause"
] | null | null | null | sqlany_django/introspection.py | TheProjecter/sqlany-django | 363be27eab819d879135422a78623b6ed487799a | [
"BSD-3-Clause"
] | null | null | null | from django.db.backends import BaseDatabaseIntrospection
from sqlanydb import ProgrammingError, OperationalError
import re
import sqlanydb
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
| 47.894231 | 114 | 0.566754 | from django.db.backends import BaseDatabaseIntrospection
from sqlanydb import ProgrammingError, OperationalError
import re
import sqlanydb
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """Django introspection backend for SQL Anywhere (sqlanydb).

    Maps native sqlanydb DT_* column-type codes onto Django field class
    names and implements the table/relation/index discovery hooks Django
    uses for ``inspectdb``.
    """
    # sqlanydb type code -> Django model field class name.
    data_types_reverse = { sqlanydb.DT_DATE : 'DateField',
                           sqlanydb.DT_TIME : 'DateTimeField',
                           sqlanydb.DT_TIMESTAMP : 'DateTimeField',
                           sqlanydb.DT_VARCHAR : 'CharField',
                           sqlanydb.DT_FIXCHAR : 'CharField',
                           sqlanydb.DT_LONGVARCHAR : 'CharField',
                           sqlanydb.DT_STRING : 'CharField',
                           sqlanydb.DT_DOUBLE : 'FloatField',
                           sqlanydb.DT_FLOAT : 'FloatField',
                           sqlanydb.DT_DECIMAL : 'IntegerField',
                           sqlanydb.DT_INT : 'IntegerField',
                           sqlanydb.DT_SMALLINT : 'IntegerField',
                           sqlanydb.DT_BINARY : 'BlobField',
                           sqlanydb.DT_LONGBINARY : 'BlobField',
                           sqlanydb.DT_TINYINT : 'IntegerField',
                           sqlanydb.DT_BIGINT : 'BigIntegerField',
                           sqlanydb.DT_UNSINT : 'IntegerField',
                           sqlanydb.DT_UNSSMALLINT : 'IntegerField',
                           sqlanydb.DT_UNSBIGINT : 'BigIntegerField',
                           sqlanydb.DT_BIT : 'IntegerField',
                           sqlanydb.DT_LONGNVARCHAR : 'CharField'
                           }
    def get_table_list(self, cursor):
        "Returns a list of table names in the current database."
        cursor.execute("SELECT table_name FROM sys.SYSTAB WHERE creator = USER_ID()")
        return [row[0] for row in cursor.fetchall()]
    def get_table_description(self, cursor, table_name):
        "Returns a description of the table, with the DB-API cursor.description interface."
        # SELECT FIRST fetches a single row just to populate column metadata.
        cursor.execute("SELECT FIRST * FROM %s" %
                       self.connection.ops.quote_name(table_name))
        return tuple((c[0], t, None, c[3], c[4], c[5], int(c[6]) == 1) for c, t in cursor.columns())
    def _name_to_index(self, cursor, table_name):
        """
        Returns a dictionary of {field_name: field_index} for the given table.
        Indexes are 0-based.
        """
        return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])
    def get_relations(self, cursor, table_name):
        """
        Returns a dictionary of {field_index: (field_index_other_table, other_table)}
        representing all relationships to the given table. Indexes are 0-based.
        """
        my_field_dict = self._name_to_index(cursor, table_name)
        constraints = []
        relations = {}
        # Join SYSFKEY against both index-column catalogs to recover the
        # (local column position, referenced table, referenced column
        # position) triple for every foreign key on this table.
        cursor.execute("""
        SELECT (fidx.column_id - 1), t2.table_name, (pidx.column_id - 1) FROM SYSTAB t1
        INNER JOIN SYSFKEY f ON f.foreign_table_id = t1.table_id
        INNER JOIN SYSTAB t2 ON t2.table_id = f.primary_table_id
        INNER JOIN SYSIDXCOL fidx ON fidx.table_id = f.foreign_table_id AND fidx.index_id = f.foreign_index_id
        INNER JOIN SYSIDXCOL pidx ON pidx.table_id = f.primary_table_id AND pidx.index_id = f.primary_index_id
        WHERE t1.table_name = %s""", [table_name])
        constraints.extend(cursor.fetchall())
        for my_field_index, other_table, other_field_index in constraints:
            relations[my_field_index] = (other_field_index, other_table)
        return relations
    def get_indexes(self, cursor, table_name):
        """
        Returns a dictionary of fieldname -> infodict for the given table,
        where each infodict is in the format:
            {'primary_key': boolean representing whether it's the primary key,
             'unique': boolean representing whether it's a unique index}
        """
        # We need to skip multi-column indexes.
        cursor.execute("""
            select max(c.column_name),
                   max(ix.index_category),
                   max(ix."unique")
              from SYSIDX ix, SYSTABLE t, SYSIDXCOL ixc, SYSCOLUMN c
             where ix.table_id = t.table_id
               and ixc.table_id = t.table_id
               and ixc.index_id = ix.index_id
               and ixc.table_id = c.table_id
               and ixc.column_id = c.column_id
               and t.table_name = %s
             group by ix.index_id
            having count(*) = 1
             order by ix.index_id
            """, [table_name])
        indexes = {}
        for col_name, cat, unique in cursor.fetchall():
            # index_category 1 marks the primary key; "unique" 1 or 2 marks
            # a unique index/constraint -- confirm against SQL Anywhere docs.
            indexes[col_name] = {
                'primary_key': (cat == 1),
                'unique': (unique == 1 or unique == 2) }
        return indexes
| 0 | 4,703 | 23 |
6124b4133af3c7aa4d43412211a0b2674e595a90 | 635 | py | Python | test/cross_check_sklearn.py | Anselmoo/python-neuralnetwork | 0c2d1e457f065b7f1a505f8a8182b134d6a8d3a0 | [
"MIT"
] | null | null | null | test/cross_check_sklearn.py | Anselmoo/python-neuralnetwork | 0c2d1e457f065b7f1a505f8a8182b134d6a8d3a0 | [
"MIT"
] | null | null | null | test/cross_check_sklearn.py | Anselmoo/python-neuralnetwork | 0c2d1e457f065b7f1a505f8a8182b134d6a8d3a0 | [
"MIT"
] | null | null | null | from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import cross_validate
if __name__ == '__main__':
sklearn_reference_XOR() | 35.277778 | 82 | 0.63622 | from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import cross_validate
def sklearn_reference_XOR():
    """Train a tiny MLP on the XOR truth table and print reference scores."""
    inputs = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]
    targets = [0., 1., 1., 0.]
    classifier = MLPClassifier(
        solver='lbfgs',
        alpha=1e-5,
        hidden_layer_sizes=(2, ),
        random_state=42,
        max_iter=200000,
    )
    classifier.fit(inputs, targets)
    print("Training set score: %f" % classifier.score(inputs, targets))
    cv_results = cross_validate(
        classifier, X=inputs, y=targets, cv=2,
        scoring='neg_mean_squared_error')
    print(cv_results)
if __name__ == '__main__':
sklearn_reference_XOR() | 457 | 0 | 23 |
2d7d85f85898bc2dd76286805e3dc6b7cdaca0f7 | 632 | py | Python | src/py/bundle.py | iTNTPiston/celer-compiler | af9c1175eab3ee44eecd78f54f4e38e6d6475045 | [
"MIT"
] | null | null | null | src/py/bundle.py | iTNTPiston/celer-compiler | af9c1175eab3ee44eecd78f54f4e38e6d6475045 | [
"MIT"
] | 1 | 2022-03-07T20:17:50.000Z | 2022-03-07T20:17:50.000Z | src/py/bundle.py | iTNTPiston/celer-compiler | af9c1175eab3ee44eecd78f54f4e38e6d6475045 | [
"MIT"
] | null | null | null | # This is a standalone bundler. You can use this script by itself
# The output is a minimized JSON, which can be distributed
# Usage: py gbundle.py <inputPath>
# Output: bundle.json
# PY_INJECT
__main__()
| 23.407407 | 66 | 0.65981 | # This is a standalone bundler. You can use this script by itself
# The output is a minimized JSON, which can be distributed
# Usage: py gbundle.py <inputPath>
# Output: bundle.json
# PY_INJECT
def __main__():
    """CLI entry point: validate argv and bundle the given input file."""
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <input> ")
        sys.exit(1)
    inputFile = sys.argv[1]
    print(f"Bundling... {inputFile}")
    rebuildBundle(inputFile)
def rebuildBundle(inputFile):
    """Bundle *inputFile* via the injected helper.

    ``rebundleHelper`` is not defined in this file -- presumably spliced in
    by the PY_INJECT build step noted above; the two boolean flags' meaning
    is defined by that generator -- TODO confirm.
    """
    rebundleHelper(inputFile, False, True, invokeJsBundle)
def invokeJsBundle(obj):
    """Evaluate the injected JS bundler over *obj* inside dukpy."""
    # JS_INJECT_NEXT_LINE
    return dukpy.evaljs("JS_INJECT", input=obj)
# Run immediately on import/execution (this file is a standalone script).
__main__()
| 334 | 0 | 75 |
33bbccef791bf6998e92aa9345e31ada5e955e49 | 397 | py | Python | python_on_whales/components/volume/models.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | [
"MIT"
] | null | null | null | python_on_whales/components/volume/models.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | [
"MIT"
] | null | null | null | python_on_whales/components/volume/models.py | haruishi43/python-on-whales | e26c3e4367428588bad0b028c7258124a111c0c6 | [
"MIT"
] | null | null | null | from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Optional
from python_on_whales.utils import DockerCamelModel
| 23.352941 | 51 | 0.740554 | from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Optional
from python_on_whales.utils import DockerCamelModel
class VolumeInspectResult(DockerCamelModel):
    """Typed view of one entry of ``docker volume inspect`` output.

    DockerCamelModel presumably maps these snake_case attribute names onto
    the camelCase keys of Docker's JSON -- confirm in python_on_whales.utils.
    """
    name: str
    driver: str
    # Host path where the volume's data is mounted.
    mountpoint: Path
    created_at: datetime
    # Driver-specific status; optional in Docker's output.
    status: Optional[Dict[str, Any]]
    labels: Optional[Dict[str, str]]
    scope: str
    options: Optional[Dict[str, str]]
| 0 | 226 | 23 |
dfba76ff816591f4dcdb97195c8669ae29966c72 | 569 | py | Python | 04_TreesAndGraphs/Graph/class_Node.py | Chipdelmal/DataStructures | 426e30b91e8f27843be1d9385dc800e5d0dc8d3f | [
"MIT"
] | 1 | 2019-07-11T22:31:15.000Z | 2019-07-11T22:31:15.000Z | 04_TreesAndGraphs/Graph/class_Node.py | Chipdelmal/DataStructures | 426e30b91e8f27843be1d9385dc800e5d0dc8d3f | [
"MIT"
] | null | null | null | 04_TreesAndGraphs/Graph/class_Node.py | Chipdelmal/DataStructures | 426e30b91e8f27843be1d9385dc800e5d0dc8d3f | [
"MIT"
] | null | null | null | ##############################################################################
# Node Class
##############################################################################
| 21.884615 | 78 | 0.42355 | ##############################################################################
# Node Class
##############################################################################
class Node(object):
    """A single graph node: a data payload plus an optional links collection."""

    def __init__(self, data, links=None):
        self._payload = data
        self._edges = links

    @property
    def data(self):
        """The node's stored payload."""
        return self._payload

    @data.setter
    def data(self, data):
        self._payload = data

    @property
    def links(self):
        """The node's outgoing links (None when unset)."""
        return self._edges

    @links.setter
    def links(self, links=None):
        self._edges = links
| 179 | 195 | 23 |
450f23a1a2007d7e44f1b7e317ae1cae7a8ed2e1 | 5,133 | py | Python | gimmemotifs/config.py | kipkurui/gimmemotifs | 51bd0c6700877f79179f08e5bab7de70fc2eab94 | [
"MIT"
] | 1 | 2019-07-14T08:28:25.000Z | 2019-07-14T08:28:25.000Z | gimmemotifs/config.py | kipkurui/gimmemotifs | 51bd0c6700877f79179f08e5bab7de70fc2eab94 | [
"MIT"
] | null | null | null | gimmemotifs/config.py | kipkurui/gimmemotifs | 51bd0c6700877f79179f08e5bab7de70fc2eab94 | [
"MIT"
] | null | null | null | # Copyright (c) 2009-2013 Simon van Heeringen <s.vanheeringen@ncmls.ru.nl>
#
# This module is free software. You can redistribute it and/or modify it under
# the terms of the MIT License, see the file COPYING included with this
# distribution.
""" Configuration for GimmeMotifs """
import ConfigParser
import distutils.sysconfig
import os
### CONSTANTS ###
GM_VERSION = "0.8.5"
BG_TYPES = ["random", "random_genomic", "matched_genomic", "random_promoter"]
FA_VALID_BGS = ["random", "promoter", "user"]
BED_VALID_BGS = ["genomic_matched", "random", "promoter", "user"]
BG_RANK = {"user":1, "promoter":2, "genomic_matched":3, "random":4}
#if __name__ == "__main__":
# m = MotifConfig()
# print m.is_configured("meme")
| 32.08125 | 79 | 0.606078 | # Copyright (c) 2009-2013 Simon van Heeringen <s.vanheeringen@ncmls.ru.nl>
#
# This module is free software. You can redistribute it and/or modify it under
# the terms of the MIT License, see the file COPYING included with this
# distribution.
""" Configuration for GimmeMotifs """
import ConfigParser
import distutils.sysconfig
import os
### CONSTANTS ###
GM_VERSION = "0.8.5"
BG_TYPES = ["random", "random_genomic", "matched_genomic", "random_promoter"]
FA_VALID_BGS = ["random", "promoter", "user"]
BED_VALID_BGS = ["genomic_matched", "random", "promoter", "user"]
BG_RANK = {"user":1, "promoter":2, "genomic_matched":3, "random":4}
class MotifConfig:
    """Borg-style configuration object for GimmeMotifs (Python 2 code).

    Every instance shares one state via ``__shared_state``; the underlying
    ConfigParser is loaded once from the first readable file in ``configs``
    unless an explicit path is supplied.
    """
    __shared_state = {}
    prefix = distutils.sysconfig.get_config_var("prefix")
    config_dir = "share/gimmemotifs/gimmemotifs.cfg"
    # Candidate config locations, passed in order to ConfigParser.read().
    configs = [
        'cfg/gimmemotifs.cfg.example',
        os.path.join('/usr', config_dir),
        os.path.join(prefix, config_dir),
        os.path.expanduser('~/.gimmemotifs.cfg')
    ]
    config = None
    TOOL_SECTION = "tools"
    def __init__(self, use_config=""):
        # Borg pattern: alias every instance __dict__ to the shared state.
        self.__dict__ = self.__shared_state
        if use_config:
            self.config = ConfigParser.ConfigParser()
            cfg = self.config.read(use_config)
        elif not self.config:
            self.config = ConfigParser.ConfigParser()
            cfg = self.config.read(self.configs)
            if not cfg:
                raise ValueError, "Configuration file not found!"
    def bin(self, program):
        # Configured executable path for *program*.
        try:
            bin = self.config.get(program, "bin")
        except:
            raise ValueError, "No configuration found for %s" % program
        return bin
    def set_default_params(self, params):
        if not self.config.has_section("params"):
            self.config.add_section("params")
        for k,v in params.items():
            self.config.set("params", k, v)
    def get_default_params(self):
        return dict(self.config.items("params"))
    def get_seqlogo(self):
        # Returns None (rather than raising) when seqlogo is unconfigured.
        try:
            bin = self.config.get("main", "seqlogo")
            return bin
        except:
            return None
    def dir(self, program):
        # Explicit "dir" option wins; otherwise derive it from the binary path.
        if self.config.has_section(program):
            if self.config.has_option(program, "dir"):
                try:
                    return self.config.get(program, "dir")
                except:
                    return None
            else:
                return os.path.dirname(self.bin(program))
        else:
            raise ValueError, "No configuration found for %s" % program
    def set_program(self, program, d):
        if not self.config.has_section(program):
            self.config.add_section(program)
        for par,value in d.items():
            self.config.set(program, par, value)
    def set_template_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "template_dir", path)
    def get_template_dir(self):
        return self.config.get("main", "template_dir")
    def set_score_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "score_dir", path)
    def get_score_dir(self):
        return self.config.get("main", "score_dir")
    def set_seqlogo(self, bin):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "seqlogo",bin)
    def set_index_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "index_dir", path)
    def get_index_dir(self):
        return self.config.get("main", "index_dir")
    def set_motif_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "motif_databases", path)
    def get_motif_dir(self):
        return self.config.get("main", "motif_databases")
    def set_gene_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "gene_dir", path)
    def get_gene_dir(self):
        return self.config.get("main", "gene_dir")
    def set_bg_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "bg", path)
    def get_bg_dir(self):
        return self.config.get("main", "bg")
    def set_tools_dir(self, path):
        if not self.config.has_section("main"):
            self.config.add_section("main")
        self.config.set("main", "tools", path)
    def get_tools_dir(self):
        return self.config.get("main", "tools")
    def is_configured(self, program):
        return self.config.has_section(program)
    def save(self):
        # Persist the (shared) configuration to the user's home directory.
        self.config.write(open(os.path.expanduser('~/.gimmemotifs.cfg'), "w"))
    def write(self, fo):
        self.config.write(fo)
#if __name__ == "__main__":
# m = MotifConfig()
# print m.is_configured("meme")
| 3,298 | 1,085 | 23 |
3daae4c1808fd8866972b8fbcba8e9f6495ccb2a | 2,914 | py | Python | MODULES/Persistence_RegistryRunKeys_SharpHide.py | evi1hack/viperpython | 04bf8e31e21385edb58ea9d25296df062197df39 | [
"BSD-3-Clause"
] | null | null | null | MODULES/Persistence_RegistryRunKeys_SharpHide.py | evi1hack/viperpython | 04bf8e31e21385edb58ea9d25296df062197df39 | [
"BSD-3-Clause"
] | null | null | null | MODULES/Persistence_RegistryRunKeys_SharpHide.py | evi1hack/viperpython | 04bf8e31e21385edb58ea9d25296df062197df39 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# @File : SimpleRewMsfModule.py
# @Date : 2019/1/11
# @Desc :
from Lib.ModuleAPI import *
| 38.853333 | 107 | 0.578243 | # -*- coding: utf-8 -*-
# @File : SimpleRewMsfModule.py
# @Date : 2019/1/11
# @Desc :
from Lib.ModuleAPI import *
class PostModule(PostMSFCSharpModule):
    """Persist on Windows by having SharpHide.exe write a hidden registry Run key."""
    NAME_ZH = "Windows注册表Run键值持久化(C#)"
    DESC_ZH = "模块通过调用SharpHide.exe写入隐藏的注册表键值,实现持久化.\n" \
              "SharpHide.exe会将目标exe路径写入到注册表Run键值中.\n"
    NAME_EN = "Windows registry Run key persistence (C#)"
    DESC_EN = "The module realizes persistence by calling Sharphide.exe to write hidden registry keys.\n" \
              "SharpHide.exe will write the target exe path into the registry Run key.\n"
    MODULETYPE = TAG2TYPE.Persistence
    PLATFORM = ["Windows"]  # platform
    PERMISSIONS = ["User", "Administrator", "SYSTEM", ]  # required privileges
    ATTCK = ["T1037"]  # ATT&CK technique
    README = ["https://www.yuque.com/vipersec/module/npl2d8"]
    REFERENCES = ["https://github.com/outflanknl/SharpHide"]
    AUTHOR = ["Viper"]
    OPTIONS = register_options([
        OptionEnum(name='action',
                   tag_zh="执行动作",
                   desc_zh="针对键值的执行的命令",
                   tag_en="Action", desc_en="Action",
                   required=True,
                   enum_list=[
                       {'tag_zh': "创建", 'tag_en': "Create", 'value': "create"},
                       {'tag_zh': "删除", 'tag_en': "Delete", 'value': "delete"},
                   ],
                   length=6),
        OptionStr(name='keyvalue',
                  tag_zh="可执行文件目录",
                  desc_zh="输入开启启动的exe文件路径.",
                  tag_en="Exe file directory", desc_en="Enter the path of the exe file to start.",
                  required=True,
                  length=18),
        OptionStr(name='arguments',
                  tag_zh="命令行参数", required=False,
                  desc_zh="执行exe是的命令行参数",
                  tag_en="Command line parameters", desc_en="Command line parameters for executing exe",
                  length=24),
    ])
    def __init__(self, sessionid, ipaddress, custom_param):
        super().__init__(sessionid, ipaddress, custom_param)
    def check(self):
        """Pre-execution check: validate the session and build the SharpHide command line."""
        session = Session(self._sessionid)
        if session.is_windows is not True:
            return False, "此模块只支持Windows的Meterpreter", "This module only supports Meterpreter for Windows"
        self.set_assembly("SharpHide")
        if self.param("action") == "delete":
            self.set_arguments("action=delete")
        else:
            # Build "action=create keyvalue='...'" and append the optional
            # command-line arguments when supplied.
            param_keyvalue = self.param("keyvalue")
            arguments = f"action=create keyvalue='{param_keyvalue}'"
            param_arguments = self.param("arguments")
            if param_arguments is not None:
                arguments += f" arguments='{param_arguments}'"
            self.set_arguments(arguments)
        return True, None
    def callback(self, status, message, data):
        # Forward SharpHide's console output straight to the module log.
        assembly_out = self.get_console_output(status, message, data)
        self.log_raw(assembly_out)
| 221 | 2,785 | 23 |
68dcb0dea54dfb7d43b1ba7318299abe0fbf09ba | 12,135 | py | Python | Inference.py | Charlottecuc/Glow_TTS | bb2bd6384cc4ee13e61496c5f380f06d434f168b | [
"MIT"
] | 31 | 2020-07-29T08:02:52.000Z | 2022-03-19T12:57:53.000Z | Inference.py | Charlottecuc/Glow_TTS | bb2bd6384cc4ee13e61496c5f380f06d434f168b | [
"MIT"
] | 3 | 2020-11-02T20:14:26.000Z | 2021-04-08T01:12:33.000Z | Inference.py | CODEJIN/Glow_TTS | bb2bd6384cc4ee13e61496c5f380f06d434f168b | [
"MIT"
] | 9 | 2020-09-09T03:48:44.000Z | 2022-03-24T13:15:21.000Z | import torch
import numpy as np
import logging, yaml, os, sys, argparse, time, math
from tqdm import tqdm
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from scipy.io import wavfile
from random import sample
from Modules import GlowTTS
from Datasets import Text_to_Token, Token_Stack, Mel_Stack, Mel_for_GE2E_Stack, Pitch_Stack
from Pattern_Generator import Pattern_Generate, Text_Filtering
from Speaker_Embedding.Modules import Encoder as Speaker_Embedding, Normalize
from Arg_Parser import Recursive_Parse
hp = Recursive_Parse(yaml.load(
open('Hyper_Parameters.yaml', encoding='utf-8'),
Loader=yaml.Loader
))
if not hp.Device is None:
os.environ['CUDA_VISIBLE_DEVICES']= hp.Device
if not torch.cuda.is_available():
device = torch.device('cpu')
else:
device = torch.device('cuda:0')
torch.backends.cudnn.benchmark = True
torch.cuda.set_device(0)
logging.basicConfig(
level=logging.INFO, stream=sys.stdout,
format= '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s'
)
if hp.Use_Mixed_Precision:
try:
from apex import amp
except:
logging.info('There is no apex modules in the environment. Mixed precision does not work.')
hp.Use_Mixed_Precision = False
if __name__ == '__main__':
argParser = argparse.ArgumentParser()
argParser.add_argument('-c', '--checkpoint', required= True)
args = argParser.parse_args()
labels = [
'Alpha',
'Bravo'
]
texts = [
'Birds of a feather flock together.',
'A creative artist works on his next composition because he was not satisfied with his previous one.'
]
scales = [1.0, 0.9]
speakers = [0, 1]
refereces = [
'./Wav_for_Inference/LJ.LJ050-0278.wav',
'./Wav_for_Inference/VCTK.p361_209.wav'
]
inferencer = Inferencer(checkpoint_path= args.checkpoint)
inferencer.Inference(
labels= labels,
texts= texts,
scales= scales,
speakers= speakers,
references= refereces,
inference_path= 'XXX'
) | 38.769968 | 248 | 0.602802 | import torch
import numpy as np
import logging, yaml, os, sys, argparse, time, math
from tqdm import tqdm
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from scipy.io import wavfile
from random import sample
from Modules import GlowTTS
from Datasets import Text_to_Token, Token_Stack, Mel_Stack, Mel_for_GE2E_Stack, Pitch_Stack
from Pattern_Generator import Pattern_Generate, Text_Filtering
from Speaker_Embedding.Modules import Encoder as Speaker_Embedding, Normalize
from Arg_Parser import Recursive_Parse
hp = Recursive_Parse(yaml.load(
open('Hyper_Parameters.yaml', encoding='utf-8'),
Loader=yaml.Loader
))
if not hp.Device is None:
os.environ['CUDA_VISIBLE_DEVICES']= hp.Device
if not torch.cuda.is_available():
device = torch.device('cpu')
else:
device = torch.device('cuda:0')
torch.backends.cudnn.benchmark = True
torch.cuda.set_device(0)
logging.basicConfig(
level=logging.INFO, stream=sys.stdout,
format= '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s'
)
if hp.Use_Mixed_Precision:
try:
from apex import amp
except:
logging.info('There is no apex modules in the environment. Mixed precision does not work.')
hp.Use_Mixed_Precision = False
class Dataset(torch.utils.data.Dataset):
    """Inference dataset: one pattern per (label, text, scale, speaker, reference)."""
    def __init__(self, labels, texts, scales, speakers= None, references= None):
        super(Dataset, self).__init__()
        # Fall back to per-utterance None placeholders when not supplied.
        speakers = speakers or [None] * len(texts)
        references = references or [None] * len(texts)
        self.patterns = list(zip(labels, texts, scales, speakers, references))
    def __getitem__(self, idx):
        label, raw_text, scale, speaker, reference = self.patterns[idx]
        text = Text_Filtering(raw_text)
        token = Text_to_Token(text)
        pitch = None
        # A reference wav supplies the prosody mel and pitch track.
        if reference is not None:
            _, reference, pitch = Pattern_Generate(reference, top_db= 30)
        return token, scale, speaker, reference, pitch, label, text
    def __len__(self):
        return len(self.patterns)
class Collater:
    """Collates inference patterns into padded batch tensors for GlowTTS."""
    def __call__(self, batch):
        tokens, scales, speakers, references, pitches, labels, texts = zip(*batch)
        token_lengths = torch.LongTensor([t.shape[0] for t in tokens])  # [Batch]
        tokens = torch.LongTensor(Token_Stack(tokens))  # [Batch, Time]
        scales = torch.FloatTensor(scales)  # [Batch]
        if any(ref is None for ref in references):
            # No reference audio for at least one item: disable prosody inputs.
            prosodies = None
            prosody_lengths = None
            ge2es = None
            pitches = None
            pitch_lengths = None
        else:
            # Length lists must be taken before the arrays are re-stacked.
            prosody_lengths = [mel.shape[0] for mel in references]
            pitch_lengths = [p.shape[0] for p in pitches]
            prosodies = torch.FloatTensor(Mel_Stack(references)).transpose(2, 1)  # [Batch, Mel_dim, Time]
            prosody_lengths = torch.LongTensor(prosody_lengths)  # [Batch]
            ge2es = torch.FloatTensor(Mel_for_GE2E_Stack(references)).transpose(2, 1)  # [Batch, Mel_dim, Time]
            pitches = torch.FloatTensor(Pitch_Stack(pitches))  # [Batch, Time]
            pitch_lengths = torch.LongTensor(pitch_lengths)  # [Batch]
        if any(s is None for s in speakers):
            speakers = None
        else:
            speakers = torch.LongTensor(speakers)  # [Batch]
        return tokens, token_lengths, prosodies, prosody_lengths, speakers, ge2es, pitches, pitch_lengths, scales, labels, texts
class Inferencer:
    """End-to-end GlowTTS inference.

    Builds the model, loads a checkpoint, then renders mel-spectrograms and
    attention maps for batches of text, writing PNG previews and NPY dumps.
    """
    def __init__(self, checkpoint_path):
        self.Model_Generate()
        self.Load_Checkpoint(checkpoint_path)
    def Model_Generate(self):
        """Instantiate GlowTTS (and the optional GE2E speaker encoder) in eval mode."""
        self.model_Dict = {
            'GlowTTS': GlowTTS().to(device)
            }
        if hp.Speaker_Embedding.GE2E.Checkpoint_Path is not None:
            self.model_Dict['Speaker_Embedding'] = Speaker_Embedding(
                mel_dims= hp.Sound.Mel_Dim,
                lstm_size= hp.Speaker_Embedding.GE2E.LSTM.Sizes,
                lstm_stacks= hp.Speaker_Embedding.GE2E.LSTM.Stacks,
                embedding_size= hp.Speaker_Embedding.Embedding_Size,
                ).to(device)
        if hp.Use_Mixed_Precision:
            self.model_Dict['GlowTTS'] = amp.initialize(
                models= self.model_Dict['GlowTTS']
                )
        for model in self.model_Dict.values():
            model.eval()
    @torch.no_grad()
    def Inference_Step(self, tokens, token_lengths, prosodies, prosody_lengths, speakers, ge2es, pitches, pitch_lengths, length_scales, labels, texts, start_index= 0, tag_index= False, inference_path= './inference'):
        """Run one batch through GlowTTS; save PNG previews and NPY mel/attention dumps.

        Optional tensors (prosody / speaker / GE2E / pitch inputs) may be None
        and are only moved to the device when present.
        """
        tokens = tokens.to(device)
        token_lengths = token_lengths.to(device)
        prosodies = prosodies if prosodies is None else prosodies.to(device)
        prosody_lengths = prosody_lengths if prosody_lengths is None else prosody_lengths.to(device)
        speakers = speakers if speakers is None else speakers.to(device)
        ge2es = ge2es if ge2es is None else ge2es.to(device)
        pitches = pitches if pitches is None else pitches.to(device)
        pitch_lengths = pitch_lengths if pitch_lengths is None else pitch_lengths.to(device)
        length_scales = length_scales.to(device)
        mels, mel_Lengths, attentions = self.model_Dict['GlowTTS'].inference(
            tokens= tokens,
            token_lengths= token_lengths,
            mels_for_prosody= prosodies,
            mel_lengths_for_prosody= prosody_lengths,
            speakers= speakers,
            mels_for_ge2e= ge2es,
            pitches= pitches,
            pitch_lengths= pitch_lengths,
            length_scale= length_scales
            )
        # One output file stem per batch item; optionally tagged with its global index.
        files = []
        for index, label in enumerate(labels):
            tags = []
            tags.append(label)
            if tag_index: tags.append('IDX_{}'.format(index + start_index))
            files.append('.'.join(tags))
        os.makedirs(os.path.join(inference_path, 'PNG').replace('\\', '/'), exist_ok= True)
        for index, (mel, mel_Length, attention, label, text, length_Scale, file) in enumerate(zip(
            mels.cpu().numpy(),
            mel_Lengths.cpu().numpy(),
            attentions.cpu().numpy(),
            labels,
            texts,
            length_scales,
            files
            )):
            # Trim padding frames; attention rows cover <S> + characters + <E>.
            mel = mel[:, :mel_Length]
            attention = attention[:len(text) + 2, :mel_Length]
            new_Figure = plt.figure(figsize=(20, 5 * 3), dpi=100)
            plt.subplot2grid((3, 1), (0, 0))
            plt.imshow(mel, aspect='auto', origin='lower')
            plt.title('Mel    Label: {}    Text: {}    Length scale: {:.3f}'.format(label, text if len(text) < 70 else text[:70] + '…', length_Scale))
            plt.colorbar()
            plt.subplot2grid((3, 1), (1, 0), rowspan= 2)
            plt.imshow(attention, aspect='auto', origin='lower', interpolation= 'none')
            plt.title('Attention    Label: {}    Text: {}    Length scale: {:.3f}'.format(label, text if len(text) < 70 else text[:70] + '…', length_Scale))
            plt.yticks(
                range(len(text) + 2),
                ['<S>'] + list(text) + ['<E>'],
                fontsize = 10
                )
            plt.colorbar()
            plt.tight_layout()
            plt.savefig(os.path.join(inference_path, 'PNG', '{}.PNG'.format(file)).replace('\\', '/'))
            plt.close(new_Figure)
        os.makedirs(os.path.join(inference_path, 'NPY').replace('\\', '/'), exist_ok= True)
        os.makedirs(os.path.join(inference_path, 'NPY', 'Mel').replace('\\', '/'), exist_ok= True)
        os.makedirs(os.path.join(inference_path, 'NPY', 'Attention').replace('\\', '/'), exist_ok= True)
        attentions_Array = attentions.cpu().numpy()
        for index, (mel, mel_Length, file) in enumerate(zip(
            mels.cpu().numpy(),
            mel_Lengths.cpu().numpy(),
            files
            )):
            # Bug fix: the original loop recomputed `attention` here from the
            # stale `attention`/`text` variables left over from the PNG loop
            # above; the result was never used. The saved attention remains
            # the full (unsliced) matrix to preserve the on-disk format.
            mel = mel[:, :mel_Length]
            np.save(
                os.path.join(inference_path, 'NPY', 'Mel', file).replace('\\', '/'),
                mel.T,
                allow_pickle= False
                )
            np.save(
                os.path.join(inference_path, 'NPY', 'Attention', file).replace('\\', '/'),
                attentions_Array[index],
                allow_pickle= False
                )
    def Inference(
        self,
        labels,
        texts,
        scales,
        speakers= None,
        references= None,
        inference_path= './inference'
        ):
        """Batch the given utterances through Inference_Step, writing to inference_path."""
        logging.info('Start inference.')
        dataLoader = torch.utils.data.DataLoader(
            dataset= Dataset(
                labels= labels,
                texts= texts,
                scales= scales,
                speakers= speakers,
                references= references
                ),
            shuffle= False,
            collate_fn= Collater(),
            batch_size= hp.Inference_Batch_Size or hp.Train.Batch_Size,
            num_workers= hp.Train.Num_Workers,
            pin_memory= True
            )
        logging.info('The number of inference patterns = {}.'.format(len(dataLoader.dataset)))
        for step, (tokens, token_Lengths, prosodies, prosody_Lengths, speakers, ge2es, pitches, pitch_Lengths, scales, labels, texts) in tqdm(
            enumerate(dataLoader),
            desc='[Inference]',
            total= math.ceil(len(dataLoader.dataset) / (hp.Inference_Batch_Size or hp.Train.Batch_Size))
            ):
            self.Inference_Step(tokens, token_Lengths, prosodies, prosody_Lengths, speakers, ge2es, pitches, pitch_Lengths, scales, labels, texts, start_index= step * (hp.Inference_Batch_Size or hp.Train.Batch_Size), inference_path= inference_path)
    def Load_Checkpoint(self, checkpoint_path):
        """Load model weights (and AMP state when mixed precision is enabled)."""
        state_Dict = torch.load(checkpoint_path, map_location= 'cpu')
        self.model_Dict['GlowTTS'].load_state_dict(state_Dict['Model'])
        if hp.Use_Mixed_Precision:
            if 'AMP' not in state_Dict.keys():
                logging.info('No AMP state dict is in the checkpoint. Model regards this checkpoint is trained without mixed precision.')
            else:
                amp.load_state_dict(state_Dict['AMP'])
        for flow in self.model_Dict['GlowTTS'].layer_Dict['Decoder'].layer_Dict['Flows']:
            flow.layers[0].initialized = True   # Activation_Norm is already initialized when checkpoint is loaded.
        logging.info('Checkpoint loaded at {} steps.'.format(state_Dict['Steps']))
        if 'GE2E' in self.model_Dict['GlowTTS'].layer_Dict.keys():
            self.GE2E_Load_Checkpoint()
    def GE2E_Load_Checkpoint(self):
        """Load the pretrained GE2E speaker-embedding weights from hp's path."""
        state_Dict = torch.load(
            hp.Speaker_Embedding.GE2E.Checkpoint_Path,
            map_location= 'cpu'
            )
        self.model_Dict['GlowTTS'].layer_Dict['GE2E'].load_state_dict(state_Dict['Model'])
        logging.info('Speaker embedding checkpoint \'{}\' loaded.'.format(hp.Speaker_Embedding.GE2E.Checkpoint_Path))
if __name__ == '__main__':
    argParser = argparse.ArgumentParser()
    argParser.add_argument('-c', '--checkpoint', required= True)
    args = argParser.parse_args()
    # Demo inputs: one label / text / length-scale / speaker id / reference wav
    # per utterance.
    labels = [
        'Alpha',
        'Bravo'
        ]
    texts = [
        'Birds of a feather flock together.',
        'A creative artist works on his next composition because he was not satisfied with his previous one.'
        ]
    scales = [1.0, 0.9]
    speakers = [0, 1]
    references = [  # renamed from misspelled 'refereces'
        './Wav_for_Inference/LJ.LJ050-0278.wav',
        './Wav_for_Inference/VCTK.p361_209.wav'
        ]
    inferencer = Inferencer(checkpoint_path= args.checkpoint)
    inferencer.Inference(
        labels= labels,
        texts= texts,
        scales= scales,
        speakers= speakers,
        references= references,
        inference_path= 'XXX'
        )
75f57034047c546f47d90812c02bf87a3533167e | 728 | py | Python | corehq/sql_proxy_accessors/migrations/0035_livequery_sql.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 471 | 2015-01-10T02:55:01.000Z | 2022-03-29T18:07:18.000Z | corehq/sql_proxy_accessors/migrations/0035_livequery_sql.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 14,354 | 2015-01-01T07:38:23.000Z | 2022-03-31T20:55:14.000Z | corehq/sql_proxy_accessors/migrations/0035_livequery_sql.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 175 | 2015-01-06T07:16:47.000Z | 2022-03-29T13:27:01.000Z | # Generated by Django 1.10.7 on 2017-07-03 21:23
from django.conf import settings
from django.db import migrations
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('corehq', 'sql_proxy_accessors', 'sql_templates'), {
'PL_PROXY_CLUSTER_NAME': settings.PL_PROXY_CLUSTER_NAME
})
| 26 | 80 | 0.696429 | # Generated by Django 1.10.7 on 2017-07-03 21:23
from django.conf import settings
from django.db import migrations
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('corehq', 'sql_proxy_accessors', 'sql_templates'), {
'PL_PROXY_CLUSTER_NAME': settings.PL_PROXY_CLUSTER_NAME
})
class Migration(migrations.Migration):
    """Replaces the livequery PL/Proxy functions and drops an obsolete one."""
    dependencies = [
        ('sql_proxy_accessors', '0034_livequery_sql'),
    ]
    operations = [
        # Re-render the templated SQL functions with current settings.
        migrator.get_migration('get_modified_case_ids.sql'),
        migrator.get_migration('get_closed_and_deleted_ids.sql'),
        # Forward: drop the unused function; reverse: no-op ('SELECT 1').
        migrations.RunSQL(
            'DROP FUNCTION IF EXISTS filter_open_case_ids(TEXT, TEXT[])',
            'SELECT 1'
        ),
    ]
| 0 | 388 | 23 |
df4317f0c3c77e1436e79549c46cfdffd452373a | 5,772 | py | Python | lib/services/vpc/ncloud_vpc/model/route_parameter.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 12 | 2018-11-20T04:30:49.000Z | 2021-11-09T12:34:26.000Z | lib/services/vpc/ncloud_vpc/model/route_parameter.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 1 | 2019-01-24T15:56:15.000Z | 2019-05-31T07:56:55.000Z | lib/services/vpc/ncloud_vpc/model/route_parameter.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 6 | 2018-06-29T03:45:50.000Z | 2022-03-18T01:51:45.000Z | # coding: utf-8
"""
vpc
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class RouteParameter(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'destination_cidr_block': 'str',
'target_type_code': 'str',
'target_no': 'str',
'target_name': 'str'
}
attribute_map = {
'destination_cidr_block': 'destinationCidrBlock',
'target_type_code': 'targetTypeCode',
'target_no': 'targetNo',
'target_name': 'targetName'
}
def __init__(self, destination_cidr_block=None, target_type_code=None, target_no=None, target_name=None): # noqa: E501
"""RouteParameter - a model defined in Swagger""" # noqa: E501
self._destination_cidr_block = None
self._target_type_code = None
self._target_no = None
self._target_name = None
self.discriminator = None
self.destination_cidr_block = destination_cidr_block
self.target_type_code = target_type_code
if target_no is not None:
self.target_no = target_no
if target_name is not None:
self.target_name = target_name
@property
def destination_cidr_block(self):
"""Gets the destination_cidr_block of this RouteParameter. # noqa: E501
목적지CIDR블록 # noqa: E501
:return: The destination_cidr_block of this RouteParameter. # noqa: E501
:rtype: str
"""
return self._destination_cidr_block
@destination_cidr_block.setter
def destination_cidr_block(self, destination_cidr_block):
"""Sets the destination_cidr_block of this RouteParameter.
목적지CIDR블록 # noqa: E501
:param destination_cidr_block: The destination_cidr_block of this RouteParameter. # noqa: E501
:type: str
"""
if destination_cidr_block is None:
raise ValueError("Invalid value for `destination_cidr_block`, must not be `None`") # noqa: E501
self._destination_cidr_block = destination_cidr_block
@property
def target_type_code(self):
"""Gets the target_type_code of this RouteParameter. # noqa: E501
목적지유형코드 # noqa: E501
:return: The target_type_code of this RouteParameter. # noqa: E501
:rtype: str
"""
return self._target_type_code
@target_type_code.setter
def target_type_code(self, target_type_code):
"""Sets the target_type_code of this RouteParameter.
목적지유형코드 # noqa: E501
:param target_type_code: The target_type_code of this RouteParameter. # noqa: E501
:type: str
"""
if target_type_code is None:
raise ValueError("Invalid value for `target_type_code`, must not be `None`") # noqa: E501
self._target_type_code = target_type_code
@property
def target_no(self):
"""Gets the target_no of this RouteParameter. # noqa: E501
목적지번호 # noqa: E501
:return: The target_no of this RouteParameter. # noqa: E501
:rtype: str
"""
return self._target_no
@target_no.setter
def target_no(self, target_no):
"""Sets the target_no of this RouteParameter.
목적지번호 # noqa: E501
:param target_no: The target_no of this RouteParameter. # noqa: E501
:type: str
"""
self._target_no = target_no
@property
def target_name(self):
"""Gets the target_name of this RouteParameter. # noqa: E501
목적지이름 # noqa: E501
:return: The target_name of this RouteParameter. # noqa: E501
:rtype: str
"""
return self._target_name
@target_name.setter
def target_name(self, target_name):
"""Sets the target_name of this RouteParameter.
목적지이름 # noqa: E501
:param target_name: The target_name of this RouteParameter. # noqa: E501
:type: str
"""
self._target_name = target_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RouteParameter):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 29.151515 | 123 | 0.602044 | # coding: utf-8
"""
vpc
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class RouteParameter(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'destination_cidr_block': 'str',
        'target_type_code': 'str',
        'target_no': 'str',
        'target_name': 'str'
    }
    attribute_map = {
        'destination_cidr_block': 'destinationCidrBlock',
        'target_type_code': 'targetTypeCode',
        'target_no': 'targetNo',
        'target_name': 'targetName'
    }
    def __init__(self, destination_cidr_block=None, target_type_code=None, target_no=None, target_name=None):  # noqa: E501
        """RouteParameter - a model defined in Swagger"""  # noqa: E501
        self._destination_cidr_block = None
        self._target_type_code = None
        self._target_no = None
        self._target_name = None
        self.discriminator = None
        # destination_cidr_block and target_type_code are required; the setters
        # below raise ValueError when they are assigned None.
        self.destination_cidr_block = destination_cidr_block
        self.target_type_code = target_type_code
        if target_no is not None:
            self.target_no = target_no
        if target_name is not None:
            self.target_name = target_name
    @property
    def destination_cidr_block(self):
        """Gets the destination_cidr_block of this RouteParameter.  # noqa: E501
        Destination CIDR block  # noqa: E501
        :return: The destination_cidr_block of this RouteParameter.  # noqa: E501
        :rtype: str
        """
        return self._destination_cidr_block
    @destination_cidr_block.setter
    def destination_cidr_block(self, destination_cidr_block):
        """Sets the destination_cidr_block of this RouteParameter.
        Destination CIDR block  # noqa: E501
        :param destination_cidr_block: The destination_cidr_block of this RouteParameter.  # noqa: E501
        :type: str
        """
        if destination_cidr_block is None:
            raise ValueError("Invalid value for `destination_cidr_block`, must not be `None`")  # noqa: E501
        self._destination_cidr_block = destination_cidr_block
    @property
    def target_type_code(self):
        """Gets the target_type_code of this RouteParameter.  # noqa: E501
        Target type code  # noqa: E501
        :return: The target_type_code of this RouteParameter.  # noqa: E501
        :rtype: str
        """
        return self._target_type_code
    @target_type_code.setter
    def target_type_code(self, target_type_code):
        """Sets the target_type_code of this RouteParameter.
        Target type code  # noqa: E501
        :param target_type_code: The target_type_code of this RouteParameter.  # noqa: E501
        :type: str
        """
        if target_type_code is None:
            raise ValueError("Invalid value for `target_type_code`, must not be `None`")  # noqa: E501
        self._target_type_code = target_type_code
    @property
    def target_no(self):
        """Gets the target_no of this RouteParameter.  # noqa: E501
        Target number  # noqa: E501
        :return: The target_no of this RouteParameter.  # noqa: E501
        :rtype: str
        """
        return self._target_no
    @target_no.setter
    def target_no(self, target_no):
        """Sets the target_no of this RouteParameter.
        Target number  # noqa: E501
        :param target_no: The target_no of this RouteParameter.  # noqa: E501
        :type: str
        """
        self._target_no = target_no
    @property
    def target_name(self):
        """Gets the target_name of this RouteParameter.  # noqa: E501
        Target name  # noqa: E501
        :return: The target_name of this RouteParameter.  # noqa: E501
        :rtype: str
        """
        return self._target_name
    @target_name.setter
    def target_name(self, target_name):
        """Sets the target_name of this RouteParameter.
        Target name  # noqa: E501
        :param target_name: The target_name of this RouteParameter.  # noqa: E501
        :type: str
        """
        self._target_name = target_name
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, RouteParameter):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 0 | 0 | 0 |
d25d20d59baab1a7b7b3a00000bbbf30e1321af4 | 406 | py | Python | src/morpyengine/Systems/Render.py | Morgiver/morpyengine | c02cb575edfb4d4cec3e09cec5869c53ef9772ab | [
"MIT"
] | null | null | null | src/morpyengine/Systems/Render.py | Morgiver/morpyengine | c02cb575edfb4d4cec3e09cec5869c53ef9772ab | [
"MIT"
] | null | null | null | src/morpyengine/Systems/Render.py | Morgiver/morpyengine | c02cb575edfb4d4cec3e09cec5869c53ef9772ab | [
"MIT"
] | null | null | null | import sdl2
import sdl2.ext
| 23.882353 | 66 | 0.628079 | import sdl2
import sdl2.ext
def line(surface, x1, y1, x2, y2, color):
    """Draw a line from (x1, y1) to (x2, y2) on surface in the given RGBA color."""
    r, g, b, a = color[0], color[1], color[2], color[3]
    sdl2.ext.line(surface, sdl2.ext.Color(r, g, b, a), (x1, y1, x2, y2))
def rect(surface, x, y, w, h, color):
    """Fill a w-by-h rectangle at (x, y) on surface with the given RGBA color."""
    r, g, b, a = color[0], color[1], color[2], color[3]
    sdl2.ext.fill(surface, sdl2.ext.Color(r, g, b, a), (x, y, w, h))
def clear_surface(surface):
    """Clear the whole surface to black (pixel value 0)."""
    sdl2.ext.fill(surface, 0)
| 306 | 0 | 69 |
507fea8ae32e927570bfb219cf5df38fee51a960 | 801 | py | Python | algo/binary_search.py | xta0/Python-Playground | 513ebd2ad7f0a8c69f2f04b4f7524b31e76fa5bc | [
"MIT"
] | null | null | null | algo/binary_search.py | xta0/Python-Playground | 513ebd2ad7f0a8c69f2f04b4f7524b31e76fa5bc | [
"MIT"
] | null | null | null | algo/binary_search.py | xta0/Python-Playground | 513ebd2ad7f0a8c69f2f04b4f7524b31e76fa5bc | [
"MIT"
] | null | null | null | import random
array = random.sample(range(1,100),10)
print(array)
array.sort()
print(array)
print(binary_search(array,array[3]) == 3)
print(binary_search_recursive(array,array[3],0,9) == 3) | 23.558824 | 58 | 0.58427 | import random
def binary_search(array, num):
    """Return an index where num occurs in the sorted array, or -1 if absent."""
    lo, hi = 0, len(array) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        candidate = array[mid]
        if candidate == num:
            return mid
        if candidate < num:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
def binary_search_recursive(array, num, lo=0, hi=None):
    """Recursively search the sorted array for num within array[lo:hi+1].

    Generalized with backward-compatible defaults: lo defaults to 0 and hi
    to len(array) - 1, so the whole array can be searched with two arguments.
    Returns an index of num, or -1 if it is not present.
    """
    if hi is None:
        hi = len(array) - 1
    if lo > hi:
        return -1
    mid = (lo + hi) // 2
    if array[mid] == num:
        return mid
    if array[mid] > num:
        return binary_search_recursive(array, num, lo, mid - 1)
    return binary_search_recursive(array, num, mid + 1, hi)
# Demo: draw 10 distinct random values, sort them, then verify both search
# implementations find the element at index 3 (values are distinct, so the
# returned index must be exactly 3). Both prints should show True.
array = random.sample(range(1,100),10)
print(array)
array.sort()
print(array)
print(binary_search(array,array[3]) == 3)
print(binary_search_recursive(array,array[3],0,9) == 3)
6b4ac45c387dae4b8a7314dd745975b9f471d987 | 2,606 | py | Python | Ball.py | andresnowak/Pong | 6c655de33707d7b59387624739e32c000ede6de0 | [
"MIT"
] | null | null | null | Ball.py | andresnowak/Pong | 6c655de33707d7b59387624739e32c000ede6de0 | [
"MIT"
] | null | null | null | Ball.py | andresnowak/Pong | 6c655de33707d7b59387624739e32c000ede6de0 | [
"MIT"
] | null | null | null | import pygame
| 29.280899 | 95 | 0.592863 | import pygame
class Ball:
    """Pong ball: draws itself with pygame and bounces off walls and paddles."""
    RADIUS = 20
    HIDE = pygame.Color("black")  # color to hide the ball with background
    SHOW = pygame.Color("white")  # color to show the ball
    def __init__(self, x, y, vx, vy, screen):
        """Create the ball at (x, y) with velocity (vx, vy), drawing onto screen."""
        self.x = x
        self.y = y
        self.vx = vx
        self.vy = vy
        # Rect is centered horizontally on x; y is the top edge.
        self.ball = pygame.Rect(x - self.RADIUS // 2, y,
                                self.RADIUS, self.RADIUS)
        self.screen = screen
    def show(self, color):
        """Draw the ball as an ellipse in the given color."""
        pygame.draw.ellipse(self.screen, color, self.ball)
    def move(self, player1Rect, player2Rect, player1, player2, screen_height, screen_border):
        """
        moves the ball one step, bouncing off walls and paddles

        NOTE(review): player1 and player2 appear unused here (collision uses
        player1Rect/player2Rect) — confirm against callers before removing.
        """
        new_pos_x = self.ball.x + self.vx
        new_pos_y = self.ball.y + self.vy
        # erase at the old position before moving
        self.show(self.HIDE)
        # update position of ball
        self.ball.x = new_pos_x
        self.ball.y = new_pos_y
        # check if ball has crashed and make it bounce if true
        self.crashed_wall(screen_border, screen_height)
        self.crashed_player(player1Rect, player2Rect)
        self.show(self.SHOW)
    def stop_ball_going_out_of_bounds(self, screen_border, screen_height):
        """Clamp the ball inside the playfield's top/bottom borders."""
        if self.ball.top <= screen_border:
            self.ball.top = screen_border
        elif self.ball.bottom >= screen_height - screen_border:
            self.ball.bottom = screen_height - screen_border
    def crashed_player(self, player1, player2):
        """
        Check if the ball has crashed with a player and bounce it
        """
        if self.ball.colliderect(player1) or self.ball.colliderect(player2):
            # reverse horizontal direction and push the ball off the paddle
            self.vx *= -1
            if self.ball.colliderect(player1):
                self.ball.right = player1.left
            elif self.ball.colliderect(player2):
                self.ball.left = player2.right
    def crashed_wall(self, screen_border, screen_height):
        """Bounce vertically off the top/bottom borders and clamp in bounds."""
        if self.ball.top <= screen_border or self.ball.bottom >= screen_height - screen_border:
            self.vy *= -1
            self.stop_ball_going_out_of_bounds(screen_border, screen_height)
    def out_of_bounds(self, screen_width_bounds):
        """Return (True, scoring player name) if the ball left the field, else (False, '')."""
        if self.ball.right <= 0:
            return True, "player1"
        elif self.ball.left >= screen_width_bounds:
            return True, "player2"
        else:
            return False, ""
    def spawn(self, x, y):
        """
        spawns the ball to the position it gets in x and y
        """
        # hide the ball
        self.show(self.HIDE)
        self.ball.x = x - self.RADIUS // 2
        self.ball.y = y
        # show the ball
        self.show(self.SHOW)
| 1,015 | 1,553 | 23 |
484cf743c24ab1fbee7284ccc093c584638eaedf | 3,381 | py | Python | standard-training/utils/figure_3_plotter.py | kaustubhsridhar/PoE-robustness | 878bd94c64534afc4fdff04ada9e12aab6ed4b28 | [
"MIT"
] | null | null | null | standard-training/utils/figure_3_plotter.py | kaustubhsridhar/PoE-robustness | 878bd94c64534afc4fdff04ada9e12aab6ed4b28 | [
"MIT"
] | null | null | null | standard-training/utils/figure_3_plotter.py | kaustubhsridhar/PoE-robustness | 878bd94c64534afc4fdff04ada9e12aab6ed4b28 | [
"MIT"
] | 1 | 2022-03-10T05:28:58.000Z | 2022-03-10T05:28:58.000Z | import numpy as np
import matplotlib.pyplot as plt
plt.style.use('seaborn')
plt.rc('font', size=24) # controls default text sizes
plt.rc('axes', titlesize=24) # fontsize of the axes title
plt.rc('axes', labelsize=24) # fontsize of the x and y labels
plt.rc('xtick', labelsize=24) # fontsize of the tick labels
plt.rc('ytick', labelsize=24) # fontsize of the tick labels
plt.rc('legend', fontsize=24) # legend fontsize
plt.rc('figure', titlesize=30) # fontsize of the figure title
| 41.740741 | 97 | 0.598639 | import numpy as np
import matplotlib.pyplot as plt
plt.style.use('seaborn')
plt.rc('font', size=24) # controls default text sizes
plt.rc('axes', titlesize=24) # fontsize of the axes title
plt.rc('axes', labelsize=24) # fontsize of the x and y labels
plt.rc('xtick', labelsize=24) # fontsize of the tick labels
plt.rc('ytick', labelsize=24) # fontsize of the tick labels
plt.rc('legend', fontsize=24) # legend fontsize
plt.rc('figure', titlesize=30) # fontsize of the figure title
def get_adv_err_for_ep(EPOCH, LR, loc_PGD):
    """Read the PGD (adversarial) error from the last line of an epoch's log file."""
    path = '{}/resnet-20_ep{}_{}/log.txt'.format(loc_PGD, EPOCH, LR)
    with open(path) as log:
        for line in log:
            pass
        last_line = line
    # fields are tab separated; the adversarial error is third from the end
    return float(last_line.rsplit('\t')[-3])
def get_adv_errs(LR, loc_PGD):
    """Collect adversarial errors at epochs 5, 10, ..., 160 plus the final epoch 164."""
    epoch_numbers = list(np.arange(5, 164, 5)) + [164]
    adv_errs = [get_adv_err_for_ep(epoch, LR, loc_PGD) for epoch in epoch_numbers]
    return epoch_numbers, adv_errs
def get_val_errs(LR, loc_checkpoint):
    """Parse validation errors (second-to-last tab field) from every row after the header."""
    path = '{}/resnet-20_{}/log.txt'.format(loc_checkpoint, LR)
    with open(path) as log:
        rows = log.readlines()
    # first row is the header; skip it
    return [float(row.rsplit('\t')[-2]) for row in rows[1:]]
def make_figure_3(Lvals, loc_checkpoint, loc_PGD, option=1):
    """Plot clean accuracy (left) and PGD accuracy (right) vs. epochs for the
    baseline learning rate 0.1 and for 1/L and 2/L rates derived from Lvals.

    NOTE(review): lrs packs (1/L, 2/L) but lr_conv (the first element) is
    labeled '$\\eta^1$ = 2/L' in every legend and print below — the tuple
    order and the labels disagree; confirm which is intended.
    """
    # Lvals = [10.9005, 7.6474, 3.6954, 7.9951, 6.0226]
    lrs = [(round(1./Ls, 4), round(2./Ls, 4)) for Ls in Lvals]
    marks = ['o', 's', '^', 'd', 'v', 'p', 'h', '>', '<']
    clr = ['b', 'g', 'k', 'r', 'p']
    legend_list = []
    fig = plt.figure(figsize=(24,7))
    # Left panel: clean (validation) accuracy per epoch.
    plt.subplot(1, 2, 1)
    yc = get_val_errs(0.1, loc_checkpoint)
    plt.plot(yc, linestyle = '-', linewidth=4.0, markersize = 10.0)
    legend_list.append( '$\eta^1$ = 0.1' )
    for i, (lr_conv, lr_pers) in enumerate(lrs):
        y1c = get_val_errs(lr_conv, loc_checkpoint)
        plt.plot(y1c, linestyle = '--', linewidth=4.0, markersize = 10.0)
        legend_list.append( '$\eta^1$ = 2/{}'.format(Lvals[i]) )
        y2c = get_val_errs(lr_pers, loc_checkpoint)
        plt.plot(y2c, linestyle = '-', linewidth=4.0, markersize = 10.0)
        legend_list.append( '$\eta^1$ = 1/{}'.format(Lvals[i]) )
    plt.legend(legend_list)
    plt.xlabel('epochs')
    plt.ylabel('Clean Accuracy (%)')
    # Right panel: PGD (adversarial) accuracy at sampled epochs.
    plt.subplot(1, 2, 2)
    x, y = get_adv_errs(0.1, loc_PGD)
    plt.plot(x, y, linestyle = '-', marker = marks[0], linewidth=4.0, markersize = 10.0)
    legend_list.append( '$\eta^1$ = 0.1' )
    for i, (lr_conv, lr_pers) in enumerate(lrs):
        x1, y1 = get_adv_errs(lr_conv, loc_PGD)
        x2, y2 = get_adv_errs(lr_pers, loc_PGD)
        plt.plot(x1, y1, linestyle = '--', marker = marks[i+1], linewidth=4.0, markersize = 10.0)
        legend_list.append( '$\eta^1$ = 2/{}'.format(Lvals[i]) )
        plt.plot(x2, y2, linestyle = '-', marker = marks[i+1], linewidth=4.0, markersize = 10.0)
        legend_list.append( '$\eta^1$ = 1/{}'.format(Lvals[i]) )
    plt.legend(legend_list, loc='lower right')
    plt.xlabel('epochs')
    plt.ylabel('PGD Accuracy (%)')
    # option selects the y-axis floor of the PGD panel.
    if option==1:
        plt.ylim(bottom=7)
    elif option==2:
        plt.ylim(bottom=12)
    # NOTE(review): these prints use the loop variables from the LAST iteration
    # only, and raise NameError if Lvals is empty — confirm intended.
    print('Clean: basline {} | 2/L {} | 1/L {}'.format(yc[-1], y1c[-1], y2c[-1]))
    print('PGD: basline {} | 2/L {} | 1/L {}'.format(y[-1], y1[-1], y2[-1]))
| 2,780 | 0 | 92 |
d2a704e29e850f8c185306db4f5217c7f428ffa9 | 8,393 | py | Python | app.py | brand-fabian/ood-xdmod | 0d232bed6905e66ccc270f8e96af6bc0c7fbc425 | [
"MIT"
] | null | null | null | app.py | brand-fabian/ood-xdmod | 0d232bed6905e66ccc270f8e96af6bc0c7fbc425 | [
"MIT"
] | null | null | null | app.py | brand-fabian/ood-xdmod | 0d232bed6905e66ccc270f8e96af6bc0c7fbc425 | [
"MIT"
] | null | null | null | import requests
import base64
import zlib
import re
from urllib.parse import quote, unquote
import urllib3
from flask import Flask, redirect, request, Response
from uuid import uuid4
from textwrap import wrap
from lxml import etree
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
xdmod = Flask(__name__)
OOD_URL = "https://ood.example.org/pun/dev/xdmod"
XDMOD_URL = "https://xdmod.example.org"
EXCL_HEADERS = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
VERIFY=False
REPLACE_URI = [ r'/rest', r'/gui']
PRIVATE_KEY = "PRIVATE_KEY"
CERT = "CERT"
@xdmod.route("/<path:path>", methods=['GET', 'POST', 'DELETE', 'PUT', 'PATCH'])
@xdmod.route("/")
if __name__ == "__main__":
xdmod.run()
| 35.264706 | 109 | 0.577028 | import requests
import base64
import zlib
import re
from urllib.parse import quote, unquote
import urllib3
from flask import Flask, redirect, request, Response
from uuid import uuid4
from textwrap import wrap
from lxml import etree
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
xdmod = Flask(__name__)
OOD_URL = "https://ood.example.org/pun/dev/xdmod"
XDMOD_URL = "https://xdmod.example.org"
EXCL_HEADERS = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
VERIFY=False
REPLACE_URI = [ r'/rest', r'/gui']
PRIVATE_KEY = "PRIVATE_KEY"
CERT = "CERT"
def _pem_format(value, is_key=True):
if is_key:
return '-----BEGIN PRIVATE KEY-----\n' + '\n'.join(wrap(value, 64)) + '\n-----END PRIVATE KEY-----\n'
else:
return '-----BEGIN CERTIFICATE-----\n' + '\n'.join(wrap(value, 64)) + '\n-----END CERTIFICATE-----\n'
def sign_xml(
    value,
    key = PRIVATE_KEY,
    cert = CERT,
    # NOTE(review): several prefix->URI pairs below look shifted relative to
    # the conventional SAML namespace table (e.g. 'md' mapped to the SOAP
    # envelope URI). Only 'saml', 'samlp' and 'ds' are used by the XPath
    # queries in this function -- confirm the rest before relying on them.
    namespaces={
        'saml': 'urn:oasis:names:tc:SAML:2.0:assertion',
        'samlp': 'urn:oasis:names:tc:SAML:2.0:protocol',
        'md': 'http://schemas.xmlsoap.org/soap/envelope/',
        'xs': 'urn:oasis:names:tc:SAML:2.0:metadata',
        'xsi': 'http://www.w3.org/2001/XMLSchema',
        'xenc': 'http://www.w3.org/2001/XMLSchema-instance',
        'ds': 'http://www.w3.org/2000/09/xmldsig#',
    }
):
    """Re-sign a parsed SAML Response with the proxy's own key/cert.

    Removes the existing ds:Signature, builds a fresh enveloped
    RSA-SHA256/SHA256 signature template right after saml:Issuer, signs it
    with *key*, embeds *cert* as X509 data, and returns the serialized XML.

    :param value: parsed SAML Response element (lxml); modified in place
    :param key: base64 private-key body (no PEM armor; armored by _pem_format)
    :param cert: base64 certificate body (no PEM armor)
    :return: serialized signed XML (bytes)
    :raises Exception: if the response has no saml:Issuer element
    :raises IndexError: if the response has no existing ds:Signature to strip
    """
    # Imported lazily so the module can be loaded without xmlsec installed.
    import xmlsec
    sign_algorithm_transform = xmlsec.Transform.RSA_SHA256
    digest_algorithm_transform = xmlsec.Transform.SHA256
    # Strip the signature produced by the original IdP; it no longer matches
    # the document after the URL rewriting done by the proxy.
    sig = value.xpath('/samlp:Response/ds:Signature', namespaces=namespaces)[0]
    sig.getparent().remove(sig)
    signature = xmlsec.template.create(value, xmlsec.Transform.EXCL_C14N,
        sign_algorithm_transform, ns='dsig')
    # The new Signature element must sit directly after saml:Issuer.
    issuer = value.xpath('/samlp:Response/saml:Issuer', namespaces=namespaces)
    if len(issuer) > 0:
        issuer = issuer[0]
        root = issuer.getparent()
        root.insert(root.index(issuer)+1, signature)
        elem_to_sign = root
    else:
        raise Exception("No issuer found in xml.")
    # Reference the signed element by its ID attribute (generated if absent).
    elem_id = elem_to_sign.get('ID', None)
    if elem_id is not None:
        if elem_id:
            elem_id = '#' + elem_id
    else:
        elem_id = '#' + uuid4().hex
    xmlsec.tree.add_ids(elem_to_sign, ["ID"])
    ref = xmlsec.template.add_reference(signature, digest_algorithm_transform,
        uri=elem_id)
    # Enveloped signature over the exclusive-C14N form of the document.
    xmlsec.template.add_transform(ref, xmlsec.Transform.ENVELOPED)
    xmlsec.template.add_transform(ref, xmlsec.Transform.EXCL_C14N)
    key_info = xmlsec.template.ensure_key_info(signature)
    xmlsec.template.add_x509_data(key_info)
    dsig_ctx = xmlsec.SignatureContext()
    sign_key = xmlsec.Key.from_memory(_pem_format(key), xmlsec.KeyFormat.PEM, None)
    sign_key.load_cert_from_memory(_pem_format(cert, is_key=False), xmlsec.KeyFormat.PEM)
    dsig_ctx.key = sign_key
    dsig_ctx.sign(signature)
    return etree.tostring(value)
def decode_to_etree(value):
    """Decode a URL-quoted, base64-encoded SAML payload into an lxml element."""
    return etree.fromstring(base64.b64decode(unquote(value)))
def decode_and_inflate(value):
    """Base64-decode *value* and inflate it as a raw DEFLATE stream.

    This is the inverse of deflate_and_encode (SAML HTTP-Redirect binding);
    wbits=-15 selects headerless (raw) DEFLATE.
    """
    return zlib.decompress(base64.b64decode(value), -15)
def deflate_and_encode(value):
    """Raw-DEFLATE a text string and base64-encode the result.

    Slicing [2:-4] strips the 2-byte zlib header and 4-byte Adler-32
    trailer, leaving the headerless stream the SAML redirect binding expects.
    """
    deflated = zlib.compress(value.encode())[2:-4]
    return base64.b64encode(deflated)
def _proxy_url(path):
    """Build the backend XDMoD URL for *path*, preserving query arguments.

    A 'ReturnTo' argument (any case) is unquoted, stripped of surrounding
    double quotes, and fully re-quoted; all other arguments pass through
    unchanged.
    """
    url = '{}/{}'.format(XDMOD_URL, path)
    if not request.args:
        return url
    parts = []
    for name, val in request.args.items():
        if name.lower() == 'returnto':
            parts.append('{}={}'.format(
                name, quote(unquote(val).strip('"'), safe='')
            ))
        else:
            parts.append('{}={}'.format(name, val))
    return url + '?' + '&'.join(parts)
def _proxy(path, *args, **kwargs):
    """Forward the current Flask request to XDMoD and adapt the response.

    Rewrites SAML payloads, absolute URLs in js/html/php bodies, and 302/303
    redirect Locations so the browser only ever sees OOD_URL while the
    backend sees XDMOD_URL. *args*/*kwargs* are accepted but unused.

    :param path: URL path (relative to XDMOD_URL) to forward to
    :return: flask.Response mirroring the backend response
    """
    # Handle incoming login data
    data = request.get_data().decode()
    if 'SAMLResponse' in data:
        match = re.search(r"SAMLResponse=(.*?)&", data)
        if match:
            # Rewrite the audience/destination URLs inside the SAML response,
            # then re-sign it, since the rewrite invalidates the signature.
            saml_resp = match.group(1)
            saml_str = base64.b64decode(unquote(saml_resp)).decode()
            saml_str = saml_str.replace(OOD_URL, XDMOD_URL)
            # NOTE(review): this quote/b64encode is immediately undone by
            # decode_to_etree on the next line -- a round trip kept as-is.
            saml_str = quote(base64.b64encode(saml_str.encode()))
            saml_str = sign_xml(decode_to_etree(saml_str))
            saml_str = quote(base64.b64encode(saml_str))
            data = data.replace(saml_resp, saml_str)
        if 'RelayState' in data:
            # RelayState carries a URL-encoded return URL; swap hosts there too.
            data = data.replace(
                quote(OOD_URL + '/', safe=''),
                quote(XDMOD_URL + '/', safe=''),
            )
    data = data.encode()
    # Make backend request
    # (redirects are NOT followed so Location headers can be rewritten below;
    # verify=VERIFY may disable TLS verification -- see module constant)
    resp = requests.request(
        method=request.method,
        url=_proxy_url(path),
        headers={key: value for (key, value) in request.headers if key != 'Host'},
        data=data,
        cookies=request.cookies,
        allow_redirects=False,
        verify=VERIFY,
    )
    # Drop hop-by-hop / length headers; the body below may change size.
    headers = [(name, value) for (name, value) in resp.raw.headers.items()
               if name.lower() not in EXCL_HEADERS]
    # Replace absolute URL's in javascript and php/html files
    if path.endswith('js') or path.endswith('html') or path.endswith('php'):
        content = resp.content.decode()
        for rep in REPLACE_URI:
            content = content.replace(rep, rep[1:])
        content = content.encode()
        # Hack XDMod login
        if path.endswith('index.php'):
            # Inject the custom login script after line 15 of the index page.
            new_content = []
            for line_no, line in enumerate(content.decode().split('\n')):
                new_content.append(line.strip())
                if line_no == 15:
                    new_content.append('<script type="text/javascript" src="static/login.js"></script>')
            content = '\n'.join(new_content).encode()
        elif path.endswith('login.php'):
            # Make login links absolute against the OOD proxy host.
            new_content = []
            for line_no, line in enumerate(content.decode().split('\n')):
                if '/index.php' in line:
                    new_content.append(line.replace(
                        '/index.php',
                        OOD_URL + '/index.php',
                    ))
                else:
                    new_content.append(line)
            content = '\n'.join(new_content).encode()
    else:
        content = resp.content
    # Handle login redirects
    if resp.status_code == 302:
        headers_302 = []
        for name, value in headers:
            if name == 'Location':
                # Rewrite the host inside an embedded SAMLRequest, if any.
                match = re.search(r"SAMLRequest=(.*?)&", value)
                if match:
                    req = match.group(1)
                    val = decode_and_inflate(unquote(req)).decode()
                    val = val.replace(XDMOD_URL, OOD_URL)
                    val = deflate_and_encode(val)
                    val = quote(val, safe='')
                    target = value.replace(req, val)
                else:
                    target = value
                # Rewrite the redirect target itself (plain or URL-encoded).
                if XDMOD_URL in target:
                    target = target.replace(XDMOD_URL, OOD_URL)
                elif quote(XDMOD_URL + '/', safe='') in target:
                    target = target.replace(
                        quote(XDMOD_URL + '/', safe=''),
                        quote(OOD_URL + '/', safe=''),
                    )
                # Strip simplesaml/module... part of the url
                if target.endswith('gui/general/login.php'):
                    target = OOD_URL + '/gui/general/login.php'
            else:
                target = value
            headers_302.append((name, target))
        headers = headers_302
    elif resp.status_code == 303:
        headers_303 = []
        for name, value in headers:
            if name == 'Location':
                if value.endswith('gui/general/login.php'):
                    target = OOD_URL + '/gui/general/login.php'
                elif XDMOD_URL in value:
                    target = value.replace(XDMOD_URL, OOD_URL)
                else:
                    target = value
            else:
                target = value
            headers_303.append((name, target))
        headers = headers_303
    response = Response(content, resp.status_code, headers)
    return response
@xdmod.route("/<path:path>", methods=['GET', 'POST', 'DELETE', 'PUT', 'PATCH'])
def proxy(path):
return _proxy(path)
@xdmod.route("/")
def default():
return redirect('index.php')
if __name__ == "__main__":
xdmod.run()
| 7,441 | 0 | 205 |
fe6b21eaf265ba3e318f32fa7935241ff691cad5 | 364 | py | Python | source code samples/Network/client.py | NelsonBilber/python-overview | 7438eddebaf33b534eb8a06da7769744e54052b1 | [
"MIT"
] | null | null | null | source code samples/Network/client.py | NelsonBilber/python-overview | 7438eddebaf33b534eb8a06da7769744e54052b1 | [
"MIT"
] | null | null | null | source code samples/Network/client.py | NelsonBilber/python-overview | 7438eddebaf33b534eb8a06da7769744e54052b1 | [
"MIT"
] | null | null | null | # echo server - client
# source code from here -> https://realpython.com/python-sockets/
import socket
HOST = '127.0.0.1'
PORT = 65432
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((HOST, PORT))
s.sendall(b'Hello from client script')
data = s.recv(1024)
print ('Received reply(echo) from server: ', repr(data)) | 26 | 66 | 0.67033 | # echo server - client
# Adapted from https://realpython.com/python-sockets/
# Minimal TCP echo client: sends one message to the local echo server and
# prints the echoed reply.
import socket

HOST = '127.0.0.1'  # the echo server's address (localhost)
PORT = 65432        # must match the port the echo server listens on

MESSAGE = b'Hello from client script'

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.connect((HOST, PORT))
    s.sendall(MESSAGE)
    data = s.recv(1024)  # read up to 1024 bytes of the reply
    print('Received reply(echo) from server: ', repr(data))
f04935366d11084ad6a2d168289b83338e9f447a | 6,500 | py | Python | scale/ingest/triggers/configuration/ingest_trigger_rule_1_0.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | scale/ingest/triggers/configuration/ingest_trigger_rule_1_0.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | scale/ingest/triggers/configuration/ingest_trigger_rule_1_0.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | """Defines the configuration for an ingest trigger"""
from __future__ import unicode_literals
import logging
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from ingest.triggers.ingest_trigger_condition import IngestTriggerCondition
from job.configuration.data.job_connection import JobConnection
from recipe.configuration.data.recipe_connection import LegacyRecipeConnection
from recipe.triggers.configuration.trigger_rule import RecipeTriggerRuleConfiguration
from storage.models import Workspace
from trigger.configuration.exceptions import InvalidTriggerRule
logger = logging.getLogger(__name__)
INGEST_TRIGGER_SCHEMA = {
"type": "object",
"required": ["data"],
"additionalProperties": False,
"properties": {
"version": {
"description": "Version of the ingest trigger schema",
"type": "string",
},
"condition": {
"description": "Condition for an ingested file to trigger an event",
"type": "object",
"additionalProperties": False,
"properties": {
"media_type": {
"description": "Media type required by an ingested file to trigger an event",
"type": "string",
},
"data_types": {
"description": "Data types required by an ingested file to trigger an event",
"type": "array",
"items": {"$ref": "#/definitions/data_type_tag"}
},
}
},
"data": {
"description": "The input data to pass to a triggered job/recipe",
"type": "object",
"required": ["input_data_name", "workspace_name"],
"additionalProperties": False,
"properties": {
"input_data_name": {
"description": "The name of the job/recipe input data to pass the ingested file to",
"type": "string",
},
"workspace_name": {
"description": "The name of the workspace to use for the triggered job/recipe",
"type": "string",
}
}
}
},
"definitions": {
"data_type_tag": {
"description": "A simple data type tag string",
"type": "string",
}
}
}
class IngestTriggerRuleConfiguration(RecipeTriggerRuleConfiguration):
    """Represents a rule that triggers when ingested source files meet the defined conditions
    """

    def __init__(self, trigger_rule_type, configuration):
        """Creates an ingest trigger from the given configuration

        :param trigger_rule_type: The trigger rule type
        :type trigger_rule_type: str
        :param configuration: The ingest trigger configuration
        :type configuration: dict

        :raises trigger.configuration.exceptions.InvalidTriggerRule: If the configuration is invalid
        """

        super(IngestTriggerRuleConfiguration, self).__init__(trigger_rule_type, configuration)

        # Schema validation runs before defaults are filled in, so only the
        # keys marked "required" in INGEST_TRIGGER_SCHEMA must be present.
        try:
            validate(configuration, INGEST_TRIGGER_SCHEMA)
        except ValidationError as validation_error:
            raise InvalidTriggerRule(validation_error)

        self._populate_default_values()

        # Only schema version 1.0 is understood by this class.
        version = self._dict['version']
        if version != '1.0':
            raise InvalidTriggerRule('%s is an unsupported version number' % version)

    def get_condition(self):
        """Returns the condition for this ingest trigger rule

        :return: The trigger condition
        :rtype: :class:`ingest.triggers.ingest_trigger_condition.IngestTriggerCondition`
        """

        # An empty media_type (the default) means "match any media type".
        media_type = None
        if self._dict['condition']['media_type']:
            media_type = self._dict['condition']['media_type']
        data_types = set(self._dict['condition']['data_types'])
        return IngestTriggerCondition(media_type, data_types)

    def get_input_data_name(self):
        """Returns the name of the input data that the ingested file should be passed to

        :return: The input data name
        :rtype: str
        """

        return self._dict['data']['input_data_name']

    def get_workspace_name(self):
        """Returns the name of the workspace to use for the triggered job/recipe

        :return: The workspace name
        :rtype: str
        """

        return self._dict['data']['workspace_name']

    def validate(self):
        """See :meth:`trigger.configuration.trigger_rule.TriggerRuleConfiguration.validate`
        """

        # The referenced workspace must already exist in the database.
        workspace_name = self.get_workspace_name()
        if Workspace.objects.filter(name=workspace_name).count() == 0:
            raise InvalidTriggerRule('%s is an invalid workspace name' % workspace_name)

    def validate_trigger_for_job(self, job_interface):
        """See :meth:`job.triggers.configuration.trigger_rule.JobTriggerRuleConfiguration.validate_trigger_for_job`
        """

        input_file_name = self.get_input_data_name()
        media_type = self.get_condition().get_media_type()
        # None means the connection accepts any media type.
        media_types = [media_type] if media_type else None
        connection = JobConnection()
        connection.add_input_file(input_file_name, False, media_types, False, False)
        connection.add_workspace()
        return job_interface.validate_connection(connection)

    def validate_trigger_for_recipe(self, recipe_definition):
        """See :meth:`recipe.triggers.configuration.trigger_rule.RecipeTriggerRuleConfiguration.validate_trigger_for_recipe`
        """

        input_file_name = self.get_input_data_name()
        media_type = self.get_condition().get_media_type()
        media_types = [media_type] if media_type else None
        connection = LegacyRecipeConnection()
        connection.add_input_file(input_file_name, False, media_types, False)
        connection.add_workspace()
        return recipe_definition.validate_connection(connection)

    def _populate_default_values(self):
        """Populates any missing default values in the configuration
        """

        if 'version' not in self._dict:
            self._dict['version'] = '1.0'
        if 'condition' not in self._dict:
            self._dict['condition'] = {}
        if 'media_type' not in self._dict['condition']:
            self._dict['condition']['media_type'] = ''
        if 'data_types' not in self._dict['condition']:
            self._dict['condition']['data_types'] = []
| 36.111111 | 124 | 0.638154 | """Defines the configuration for an ingest trigger"""
from __future__ import unicode_literals
import logging
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from ingest.triggers.ingest_trigger_condition import IngestTriggerCondition
from job.configuration.data.job_connection import JobConnection
from recipe.configuration.data.recipe_connection import LegacyRecipeConnection
from recipe.triggers.configuration.trigger_rule import RecipeTriggerRuleConfiguration
from storage.models import Workspace
from trigger.configuration.exceptions import InvalidTriggerRule
logger = logging.getLogger(__name__)
INGEST_TRIGGER_SCHEMA = {
"type": "object",
"required": ["data"],
"additionalProperties": False,
"properties": {
"version": {
"description": "Version of the ingest trigger schema",
"type": "string",
},
"condition": {
"description": "Condition for an ingested file to trigger an event",
"type": "object",
"additionalProperties": False,
"properties": {
"media_type": {
"description": "Media type required by an ingested file to trigger an event",
"type": "string",
},
"data_types": {
"description": "Data types required by an ingested file to trigger an event",
"type": "array",
"items": {"$ref": "#/definitions/data_type_tag"}
},
}
},
"data": {
"description": "The input data to pass to a triggered job/recipe",
"type": "object",
"required": ["input_data_name", "workspace_name"],
"additionalProperties": False,
"properties": {
"input_data_name": {
"description": "The name of the job/recipe input data to pass the ingested file to",
"type": "string",
},
"workspace_name": {
"description": "The name of the workspace to use for the triggered job/recipe",
"type": "string",
}
}
}
},
"definitions": {
"data_type_tag": {
"description": "A simple data type tag string",
"type": "string",
}
}
}
class IngestTriggerRuleConfiguration(RecipeTriggerRuleConfiguration):
    """Trigger rule that fires when an ingested source file satisfies the
    configured media-type / data-type condition.
    """

    def __init__(self, trigger_rule_type, configuration):
        """Validates *configuration* against INGEST_TRIGGER_SCHEMA and fills
        in default values.

        :param trigger_rule_type: The trigger rule type
        :type trigger_rule_type: str
        :param configuration: The ingest trigger configuration
        :type configuration: dict

        :raises trigger.configuration.exceptions.InvalidTriggerRule: If the configuration is invalid
        """

        super(IngestTriggerRuleConfiguration, self).__init__(trigger_rule_type, configuration)

        try:
            validate(configuration, INGEST_TRIGGER_SCHEMA)
        except ValidationError as schema_error:
            raise InvalidTriggerRule(schema_error)

        self._populate_default_values()

        # Only schema version 1.0 is supported.
        declared_version = self._dict['version']
        if declared_version != '1.0':
            raise InvalidTriggerRule('%s is an unsupported version number' % declared_version)

    def get_condition(self):
        """Builds the trigger condition for this rule.

        :return: The trigger condition
        :rtype: :class:`ingest.triggers.ingest_trigger_condition.IngestTriggerCondition`
        """

        # An empty media_type (the default) maps to None, i.e. "any type".
        media_type = self._dict['condition']['media_type'] or None
        return IngestTriggerCondition(media_type, set(self._dict['condition']['data_types']))

    def get_input_data_name(self):
        """Returns the name of the job/recipe input the ingested file feeds.

        :return: The input data name
        :rtype: str
        """

        return self._dict['data']['input_data_name']

    def get_workspace_name(self):
        """Returns the workspace name used by the triggered job/recipe.

        :return: The workspace name
        :rtype: str
        """

        return self._dict['data']['workspace_name']

    def validate(self):
        """See :meth:`trigger.configuration.trigger_rule.TriggerRuleConfiguration.validate`
        """

        # The configured workspace must already exist.
        workspace_name = self.get_workspace_name()
        if Workspace.objects.filter(name=workspace_name).count() == 0:
            raise InvalidTriggerRule('%s is an invalid workspace name' % workspace_name)

    def validate_trigger_for_job(self, job_interface):
        """See :meth:`job.triggers.configuration.trigger_rule.JobTriggerRuleConfiguration.validate_trigger_for_job`
        """

        media_type = self.get_condition().get_media_type()
        media_types = None
        if media_type:
            media_types = [media_type]
        connection = JobConnection()
        connection.add_input_file(self.get_input_data_name(), False, media_types, False, False)
        connection.add_workspace()
        return job_interface.validate_connection(connection)

    def validate_trigger_for_recipe(self, recipe_definition):
        """See :meth:`recipe.triggers.configuration.trigger_rule.RecipeTriggerRuleConfiguration.validate_trigger_for_recipe`
        """

        media_type = self.get_condition().get_media_type()
        media_types = None
        if media_type:
            media_types = [media_type]
        connection = LegacyRecipeConnection()
        connection.add_input_file(self.get_input_data_name(), False, media_types, False)
        connection.add_workspace()
        return recipe_definition.validate_connection(connection)

    def _populate_default_values(self):
        """Fills in any configuration values that were omitted."""

        self._dict.setdefault('version', '1.0')
        condition = self._dict.setdefault('condition', {})
        condition.setdefault('media_type', '')
        condition.setdefault('data_types', [])
| 0 | 0 | 0 |
b48d92b8f54660405f3f06cb6e06183b868aa1a4 | 5,481 | py | Python | pikka_bird_collector/parsers/erlang.py | tiredpixel/pikka-bird-collector-py | 5273b9f1ee89831a84044e863db480bbf4730552 | [
"MIT"
] | 2 | 2015-06-12T19:55:16.000Z | 2019-04-05T22:33:05.000Z | pikka_bird_collector/parsers/erlang.py | tiredpixel/pikka-bird-collector-py | 5273b9f1ee89831a84044e863db480bbf4730552 | [
"MIT"
] | 3 | 2015-06-06T20:56:03.000Z | 2020-01-29T14:41:54.000Z | pikka_bird_collector/parsers/erlang.py | tiredpixel/pikka-bird-collector-py | 5273b9f1ee89831a84044e863db480bbf4730552 | [
"MIT"
] | null | null | null | import json
import re
from .base import Base
class Erlang(Base):
"""
Parses simple Erlang data format, as output by RabbitMQ status.
e.g.
[{pid,296},
{running_applications,
[{rabbitmq_management_visualiser,"RabbitMQ Visualiser","3.5.1"}]}]
"""
RE_STRINGY = [re.compile(r'<<"(.*)">>'), r'"\g<1>"']
CONTEXT_STRUCTURE = 0
CONTEXT_KEY = 1
CONTEXT_VALUE = 2
CHAR_ARR_S = '{'
CHAR_ARR_E = '}'
CHAR_OBJ_S = '['
CHAR_OBJ_E = ']'
CHAR_QUOTE = '"'
CHAR_SEP = ','
CHAR_WSP = [' ', '\n']
CHAR_E = [CHAR_ARR_E, CHAR_OBJ_E]
CHAR_SKIP = CHAR_WSP + [CHAR_ARR_E]
@staticmethod
def dict_set(d, k, v):
"""
Set value within arbitrary-depth dict, referenced by key path.
Note this uses simple recursion, and will blow the stack if too
deep.
PARAMETERS:
d : dict
dict to update
k : list
reverse-ordered key (e.g. ['depth-3', 'depth-2', 'depth-1'])
v : type
value to set
"""
if len(k) == 1:
d[k[0]] = v
else:
k2 = k.pop()
if k2 not in d:
d[k2] = {}
Erlang.dict_set(d[k2], k, v)
@staticmethod
| 28.252577 | 83 | 0.494253 | import json
import re
from .base import Base
class Erlang(Base):
    """
    Parses simple Erlang data format, as output by RabbitMQ status.

    e.g.
        [{pid,296},
         {running_applications,
             [{rabbitmq_management_visualiser,"RabbitMQ Visualiser","3.5.1"}]}]
    """

    # [pattern, replacement]: rewrites Erlang binaries <<"...">> as "..."
    RE_STRINGY = [re.compile(r'<<"(.*)">>'), r'"\g<1>"']

    # Parser states: what the character at the read pointer represents.
    CONTEXT_STRUCTURE = 0
    CONTEXT_KEY = 1
    CONTEXT_VALUE = 2

    # Structural characters of the Erlang term syntax.
    CHAR_ARR_S = '{'
    CHAR_ARR_E = '}'
    CHAR_OBJ_S = '['
    CHAR_OBJ_E = ']'
    CHAR_QUOTE = '"'
    CHAR_SEP = ','
    CHAR_WSP = [' ', '\n']

    # Any closing character / characters ignorable while scanning ahead.
    CHAR_E = [CHAR_ARR_E, CHAR_OBJ_E]
    CHAR_SKIP = CHAR_WSP + [CHAR_ARR_E]

    @staticmethod
    def dict_set(d, k, v):
        """
        Set value within arbitrary-depth dict, referenced by key path.

        Note this uses simple recursion, and will blow the stack if too
        deep.

        PARAMETERS:
            d : dict
                dict to update
            k : list
                reverse-ordered key (e.g. ['depth-3', 'depth-2', 'depth-1'])
            v : type
                value to set
        """
        if len(k) == 1:
            d[k[0]] = v
        else:
            k2 = k.pop()  # mutates the caller's list (keys consumed outermost-last)
            if k2 not in d:
                d[k2] = {}
            Erlang.dict_set(d[k2], k, v)

    def parse2(self, raw):
        """Parse *raw* Erlang-term text and return the accumulated dict.

        Runs a character-at-a-time state machine: ``r_context`` selects
        which ``__parse_*`` handler consumes the character at ``r_i``.
        """
        self.__reset(raw)
        parse_contexts = {
            Erlang.CONTEXT_STRUCTURE: self.__parse_structure,
            Erlang.CONTEXT_KEY: self.__parse_key,
            Erlang.CONTEXT_VALUE: self.__parse_value}
        while self.r_i < len(self.raw):
            parse_contexts[self.r_context]()
            self.r_i += 1
        # NOTE(review): self.ds is assumed to be initialized by Base -- confirm.
        return self.ds

    def __parse_structure(self):
        # Skip whitespace/closers; a '[' followed by '{' opens a key context.
        c = self.__read_skip()
        if c == Erlang.CHAR_OBJ_S and self.__read_lookahead_structure():
            self.r_i += 1
            self.r_context = Erlang.CONTEXT_KEY

    def __parse_key(self):
        # Accumulate key characters until the ',' separating key from value.
        c = self.__read_char(self.r_i)
        if c == Erlang.CHAR_SEP:
            if self.e_context_p == Erlang.CONTEXT_KEY:
                self.r_depth += 1 # dive, dive!
            self.__emit_key()
            self.r_context = Erlang.CONTEXT_VALUE
        else:
            self.r_buffer += c

    def __parse_value(self):
        # Accumulate value characters, tracking quoting, until a closer ends
        # the tuple/list; nested '[{' switches back to structure parsing.
        c = self.__read_char(self.r_i)
        if c == Erlang.CHAR_QUOTE:
            self.r_quoted *= -1 # toggle
        if self.r_quoted == -1 and c in Erlang.CHAR_E:
            if self.e_context_p == Erlang.CONTEXT_KEY:
                self.__emit_value()
            self.r_i += 1
            c = self.__read_skip()
            if c == Erlang.CHAR_SEP:
                self.r_context = Erlang.CONTEXT_STRUCTURE
            elif c == Erlang.CHAR_OBJ_E:
                self.r_depth -= 1 # going up!
        elif self.r_quoted == -1 and c == Erlang.CHAR_OBJ_S:
            if self.__read_lookahead_structure():
                self.r_context = Erlang.CONTEXT_STRUCTURE
        elif self.r_quoted == 1 or c != Erlang.CHAR_ARR_S:
            self.r_buffer += c

    def __reset(self, raw):
        # Reinitialize all parser state for a fresh input string.
        self.raw = raw

        self.r_i = 0 # read pointer
        self.r_context = Erlang.CONTEXT_STRUCTURE # type of data
        self.r_buffer = '' # buffer for data
        self.r_depth = 0 # depth within structure
        self.r_quoted = -1 # within quotes? t: 1, f: -1

        self.e_context_p = None # previous type of data
        self.e_buffer = [] # buffer for data
        self.e_key = [] # full path of key

    def __read_char(self, i):
        # Character at index i, or None past the end of input.
        if i < len(self.raw):
            return self.raw[i]

    def __read_skip(self):
        # Advance past whitespace and '}' closers; return the next
        # significant character (None at end of input).
        c = self.__read_char(self.r_i)
        while c in Erlang.CHAR_SKIP:
            self.r_i += 1
            c = self.__read_char(self.r_i)
        return c

    def __read_lookahead_structure(self):
        # Peek (without consuming) whether the next significant character
        # after the current one is '{', i.e. a nested proplist starts here.
        r_i_0 = self.r_i
        self.r_i += 1
        c = self.__read_skip()
        if c == Erlang.CHAR_ARR_S:
            status = True
        else:
            status = False
        self.r_i = r_i_0
        return status

    def __emit_key(self):
        # Record the buffered key at the current depth, truncating any
        # deeper key-path segments left over from earlier branches.
        k = self.r_buffer.strip() # hacky strip
        k = self.converter_key(k)

        if len(self.e_key) > self.r_depth:
            self.e_key[self.r_depth] = k
        else:
            self.e_key.append(k)

        self.e_buffer = [k]
        self.e_key = self.e_key[:(self.r_depth + 1)]
        self.e_context_p = Erlang.CONTEXT_KEY
        self.r_buffer = ''

    def __emit_value(self):
        # Convert the buffered value and store it under the current key path.
        v = Erlang.__parse_str_setting_value(self.r_buffer)
        v = self.converter_value(v)

        self.e_buffer.append(v)
        Erlang.dict_set(self.ds, self.e_key[::-1], v)
        self.e_context_p = Erlang.CONTEXT_VALUE
        self.r_buffer = ''

    @staticmethod
    def __parse_str_setting_value(v):
        # Best-effort conversion: try JSON (numbers, strings, booleans);
        # fall back to a comma split; unwrap 0/1-element results.
        v = Erlang.RE_STRINGY[0].sub(Erlang.RE_STRINGY[1], v)
        try:
            v = json.loads('[' + v + ']')
        except ValueError:
            v = v.split(Erlang.CHAR_SEP)
        if len(v) == 0:
            v = None
        elif len(v) == 1:
            v = v[0]
        return v
| 3,704 | 0 | 336 |