content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
class Solution:
def climbStairs(self, n):
"""
:type n: int
:rtype: int
"""
dp1 = 1
dp2 = 2
dp_step = 0
if n <= 1:
return dp1
if n == 2:
return dp2
while n > 2:
dp_step = dp1 + dp2
dp1 = dp2
dp2 = dp_step
n -= 1
return dp_step
| class Solution:
def climb_stairs(self, n):
"""
:type n: int
:rtype: int
"""
dp1 = 1
dp2 = 2
dp_step = 0
if n <= 1:
return dp1
if n == 2:
return dp2
while n > 2:
dp_step = dp1 + dp2
dp1 = dp2
dp2 = dp_step
n -= 1
return dp_step |
numero = int(input('Digite um numero para ver sua tabuada: '))
print('---------------')
print('{} x 1 = {}'.format(numero, numero * 1))
print('{} x 2 = {}'.format(numero, numero * 2))
print('{} x 3 = {}'.format(numero, numero * 3))
print('{} x 4 = {}'.format(numero, numero * 4))
print('{} x 5 = {}'.format(numero, numero * 5))
print('{} x 6 = {}'.format(numero, numero * 6))
print('{} x 7 = {}'.format(numero, numero * 7))
print('{} x 8 = {}'.format(numero, numero * 8))
print('{} x 9 = {}'.format(numero, numero * 9))
print('{} x 10 = {}'.format(numero, numero * 10))
print('---------------') | numero = int(input('Digite um numero para ver sua tabuada: '))
print('---------------')
print('{} x 1 = {}'.format(numero, numero * 1))
print('{} x 2 = {}'.format(numero, numero * 2))
print('{} x 3 = {}'.format(numero, numero * 3))
print('{} x 4 = {}'.format(numero, numero * 4))
print('{} x 5 = {}'.format(numero, numero * 5))
print('{} x 6 = {}'.format(numero, numero * 6))
print('{} x 7 = {}'.format(numero, numero * 7))
print('{} x 8 = {}'.format(numero, numero * 8))
print('{} x 9 = {}'.format(numero, numero * 9))
print('{} x 10 = {}'.format(numero, numero * 10))
print('---------------') |
'''
We want to make a row of bricks that is goal inches long. We have a number of
small bricks (1 inch each) and big bricks (5 inches each). Return True if it
is possible to make the goal by choosing from the given bricks. This is a
little harder than it looks and can be done without any loops.
'''
def make_bricks(small, big, goal):
if small + 5*big < goal:
return False
if small >= goal % 5:
return True
return False
| """
We want to make a row of bricks that is goal inches long. We have a number of
small bricks (1 inch each) and big bricks (5 inches each). Return True if it
is possible to make the goal by choosing from the given bricks. This is a
little harder than it looks and can be done without any loops.
"""
def make_bricks(small, big, goal):
if small + 5 * big < goal:
return False
if small >= goal % 5:
return True
return False |
def validacao_de_nota():
notas_validas = soma = 0
while True:
if notas_validas == 2:
break
nota = float(input())
if 0 <= nota <= 10:
soma += nota
notas_validas += 1
else:
print('nota invalida')
media = soma / 2
print(f'media = {media:.2f}')
validacao_de_nota()
| def validacao_de_nota():
notas_validas = soma = 0
while True:
if notas_validas == 2:
break
nota = float(input())
if 0 <= nota <= 10:
soma += nota
notas_validas += 1
else:
print('nota invalida')
media = soma / 2
print(f'media = {media:.2f}')
validacao_de_nota() |
# -*- coding: utf-8 -*-
def main(names):
def get_format(is_angy):
return "{0}.My name is {1}" if is_angy else "{0}.{1} is my classmate"
for i, n in enumerate(names):
print(get_format(n == "Angy").format(i, n))
if __name__ == "__main__":
names = ("Bill", "Anne", "Angy", "Cony", "Daniel", "Occhan")
main(names)
| def main(names):
def get_format(is_angy):
return '{0}.My name is {1}' if is_angy else '{0}.{1} is my classmate'
for (i, n) in enumerate(names):
print(get_format(n == 'Angy').format(i, n))
if __name__ == '__main__':
names = ('Bill', 'Anne', 'Angy', 'Cony', 'Daniel', 'Occhan')
main(names) |
# # 6. write a function that takes an integer n and prints a square of n*n #
def quadrat(n):
sq=n*n
print(sq)
return sq
quadrat(10) | def quadrat(n):
sq = n * n
print(sq)
return sq
quadrat(10) |
'''test for having the file checked .
if we request the channels from the file and there are no channels
and when you call the channels from file .. you specify which file to call from .(which list)
if the stream is already there then you dont want it to add the stream
and notify the user that it already exists.
''' | """test for having the file checked .
if we request the channels from the file and there are no channels
and when you call the channels from file .. you specify which file to call from .(which list)
if the stream is already there then you dont want it to add the stream
and notify the user that it already exists.
""" |
"""Errors used in pydexcom."""
class DexcomError(Exception):
"""Base class for all Dexcom errors."""
pass
class AccountError(DexcomError):
"""Errors involving Dexcom Share API credentials."""
pass
class SessionError(DexcomError):
"""Errors involving Dexcom Share API session."""
pass
class ArguementError(DexcomError):
"""Error involving arguements."""
pass
| """Errors used in pydexcom."""
class Dexcomerror(Exception):
"""Base class for all Dexcom errors."""
pass
class Accounterror(DexcomError):
"""Errors involving Dexcom Share API credentials."""
pass
class Sessionerror(DexcomError):
"""Errors involving Dexcom Share API session."""
pass
class Arguementerror(DexcomError):
"""Error involving arguements."""
pass |
with open("day2.txt", "rt") as file:
data = file.readlines()
valid = 0
valid2 = 0
for entry in data:
parts = entry.split(' ')
limits = parts[0].split('-')
letter = parts[1].split(':')[0]
password = parts[2]
count = 0
for ch in password:
if ch == letter:
count += 1
if (count >= int(limits[0]) and count <= int(limits[1])):
valid += 1
if (password[int(limits[0])-1] == letter or password[int(limits[1])-1] == letter):
if (not password[int(limits[0])-1] == password[int(limits[1])-1]):
valid2 += 1
print(valid, valid2)
| with open('day2.txt', 'rt') as file:
data = file.readlines()
valid = 0
valid2 = 0
for entry in data:
parts = entry.split(' ')
limits = parts[0].split('-')
letter = parts[1].split(':')[0]
password = parts[2]
count = 0
for ch in password:
if ch == letter:
count += 1
if count >= int(limits[0]) and count <= int(limits[1]):
valid += 1
if password[int(limits[0]) - 1] == letter or password[int(limits[1]) - 1] == letter:
if not password[int(limits[0]) - 1] == password[int(limits[1]) - 1]:
valid2 += 1
print(valid, valid2) |
# SPDX-License-Identifier: BSD-2-Clause
"""osdk-manager exceptions.
Manage osdk and opm binary installation, and help to scaffold, release, and
version Operator SDK-based Kubernetes operators.
This file contains the custom exceptions utilized for the osdk_manager.
"""
class ContainerRuntimeException(Exception):
"""Unable to identify a container runtime in your current environment."""
pass
class ShellRuntimeException(RuntimeError):
"""Shell command returned non-zero return code.
Attributes:
code -- the return code from the shell command
"""
def __init__(self, code: int = None):
"""Save the code with the exception."""
self.code = code
| """osdk-manager exceptions.
Manage osdk and opm binary installation, and help to scaffold, release, and
version Operator SDK-based Kubernetes operators.
This file contains the custom exceptions utilized for the osdk_manager.
"""
class Containerruntimeexception(Exception):
"""Unable to identify a container runtime in your current environment."""
pass
class Shellruntimeexception(RuntimeError):
"""Shell command returned non-zero return code.
Attributes:
code -- the return code from the shell command
"""
def __init__(self, code: int=None):
"""Save the code with the exception."""
self.code = code |
TRAINING_FILE_ORIG = '../input/adult.csv'
TRAINING_FILE = '../input/adult_folds.csv'
| training_file_orig = '../input/adult.csv'
training_file = '../input/adult_folds.csv' |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for devil.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into depot_tools.
"""
def _RunPylint(input_api, output_api):
return input_api.RunTests(
input_api.canned_checks.RunPylint(
input_api, output_api, pylintrc='pylintrc'))
def _RunUnitTests(input_api, output_api):
def J(*dirs):
"""Returns a path relative to presubmit directory."""
return input_api.os_path.join(input_api.PresubmitLocalPath(), 'devil',
*dirs)
test_env = dict(input_api.environ)
test_env.update({
'PYTHONDONTWRITEBYTECODE': '1',
'PYTHONPATH': ':'.join([J(), J('..')]),
})
message_type = (output_api.PresubmitError if input_api.is_committing else
output_api.PresubmitPromptWarning)
return input_api.RunTests([
input_api.Command(
name='devil/bin/run_py_tests',
cmd=[
input_api.os_path.join(input_api.PresubmitLocalPath(), 'bin',
'run_py_tests')
],
kwargs={'env': test_env},
message=message_type)
])
def _EnsureNoPylibUse(input_api, output_api):
def other_python_files(f):
this_presubmit_file = input_api.os_path.join(input_api.PresubmitLocalPath(),
'PRESUBMIT.py')
return (f.LocalPath().endswith('.py')
and not f.AbsoluteLocalPath() == this_presubmit_file)
changed_files = input_api.AffectedSourceFiles(other_python_files)
import_error_re = input_api.re.compile(
r'(from pylib.* import)|(import pylib)')
errors = []
for f in changed_files:
errors.extend('%s:%d' % (f.LocalPath(), line_number)
for line_number, line_text in f.ChangedContents()
if import_error_re.search(line_text))
if errors:
return [
output_api.PresubmitError(
'pylib modules should not be imported from devil modules.',
items=errors)
]
return []
def CommonChecks(input_api, output_api):
output = []
output += _RunPylint(input_api, output_api)
output += _RunUnitTests(input_api, output_api)
output += _EnsureNoPylibUse(input_api, output_api)
return output
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CommonChecks(input_api, output_api)
| """Presubmit script for devil.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into depot_tools.
"""
def __run_pylint(input_api, output_api):
return input_api.RunTests(input_api.canned_checks.RunPylint(input_api, output_api, pylintrc='pylintrc'))
def __run_unit_tests(input_api, output_api):
def j(*dirs):
"""Returns a path relative to presubmit directory."""
return input_api.os_path.join(input_api.PresubmitLocalPath(), 'devil', *dirs)
test_env = dict(input_api.environ)
test_env.update({'PYTHONDONTWRITEBYTECODE': '1', 'PYTHONPATH': ':'.join([j(), j('..')])})
message_type = output_api.PresubmitError if input_api.is_committing else output_api.PresubmitPromptWarning
return input_api.RunTests([input_api.Command(name='devil/bin/run_py_tests', cmd=[input_api.os_path.join(input_api.PresubmitLocalPath(), 'bin', 'run_py_tests')], kwargs={'env': test_env}, message=message_type)])
def __ensure_no_pylib_use(input_api, output_api):
def other_python_files(f):
this_presubmit_file = input_api.os_path.join(input_api.PresubmitLocalPath(), 'PRESUBMIT.py')
return f.LocalPath().endswith('.py') and (not f.AbsoluteLocalPath() == this_presubmit_file)
changed_files = input_api.AffectedSourceFiles(other_python_files)
import_error_re = input_api.re.compile('(from pylib.* import)|(import pylib)')
errors = []
for f in changed_files:
errors.extend(('%s:%d' % (f.LocalPath(), line_number) for (line_number, line_text) in f.ChangedContents() if import_error_re.search(line_text)))
if errors:
return [output_api.PresubmitError('pylib modules should not be imported from devil modules.', items=errors)]
return []
def common_checks(input_api, output_api):
output = []
output += __run_pylint(input_api, output_api)
output += __run_unit_tests(input_api, output_api)
output += __ensure_no_pylib_use(input_api, output_api)
return output
def check_change_on_upload(input_api, output_api):
return common_checks(input_api, output_api)
def check_change_on_commit(input_api, output_api):
return common_checks(input_api, output_api) |
# -*- coding: utf-8 -*-
"""Top-level package for filter_classified_reads."""
__author__ = """Peter Kruczkiewicz"""
__email__ = 'peter.kruczkiewicz@gmail.com'
__version__ = '0.2.1'
| """Top-level package for filter_classified_reads."""
__author__ = 'Peter Kruczkiewicz'
__email__ = 'peter.kruczkiewicz@gmail.com'
__version__ = '0.2.1' |
"""Styles for the frontend."""
async def style():
"""Return styles."""
return """
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css">
<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.8.2/css/all.css" integrity="sha384-oS3vJWv+0UjzBfQzYUhtDYW+Pj2yciDJxpsK1OYPAYjqT085Qq/1cq5FLXAZQ7Ay" crossorigin="anonymous">
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
<style>
.yaml {
font-family: monospace, monospace;
font-size: 1em;
border-style: solid;
border-width: thin;
margin: 0;
}
a {
color: #ffab40;
}
</style>
"""
| """Styles for the frontend."""
async def style():
"""Return styles."""
return '\n <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css">\n <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.8.2/css/all.css" integrity="sha384-oS3vJWv+0UjzBfQzYUhtDYW+Pj2yciDJxpsK1OYPAYjqT085Qq/1cq5FLXAZQ7Ay" crossorigin="anonymous">\n <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>\n <style>\n .yaml {\n font-family: monospace, monospace;\n font-size: 1em;\n border-style: solid;\n border-width: thin;\n margin: 0;\n }\n a {\n color: #ffab40;\n }\n </style>\n ' |
open_brackets = ["[","{","("]
close_brackets = ["]","}",")"]
def validate_brackets(string):
stack = []
for i in string:
if i in open_brackets:
stack.append(i)
elif i in close_brackets:
pos = close_brackets.index(i)
if ((len(stack) > 0) and
(open_brackets[pos] == stack[len(stack)-1])):
stack.pop()
else:
return "invalid"
if len(stack) == 0:
return "valid"
else:
return "invalid"
| open_brackets = ['[', '{', '(']
close_brackets = [']', '}', ')']
def validate_brackets(string):
stack = []
for i in string:
if i in open_brackets:
stack.append(i)
elif i in close_brackets:
pos = close_brackets.index(i)
if len(stack) > 0 and open_brackets[pos] == stack[len(stack) - 1]:
stack.pop()
else:
return 'invalid'
if len(stack) == 0:
return 'valid'
else:
return 'invalid' |
def my_name(name):
# import ipdb;ipdb.set_trace()
return f"My name is: {name}"
if __name__ == "__main__":
my_name("bob")
| def my_name(name):
return f'My name is: {name}'
if __name__ == '__main__':
my_name('bob') |
'''
Created on 25 apr 2019
@author: Matteo
'''
CD_RETURN_IMMEDIATELY = 1
CD_ADD_AND_CONTINUE_WAITING = 2
CD_CONTINUE_WAITING = 0
CD_ABORT_AND_RETRY = 3
| """
Created on 25 apr 2019
@author: Matteo
"""
cd_return_immediately = 1
cd_add_and_continue_waiting = 2
cd_continue_waiting = 0
cd_abort_and_retry = 3 |
load("@fbcode_macros//build_defs/lib:cpp_common.bzl", "cpp_common")
load("@fbcode_macros//build_defs/lib:src_and_dep_helpers.bzl", "src_and_dep_helpers")
load("@fbcode_macros//build_defs/lib:string_macros.bzl", "string_macros")
load("@fbcode_macros//build_defs/lib:target_utils.bzl", "target_utils")
load("@fbcode_macros//build_defs/lib:visibility.bzl", "get_visibility")
load("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils")
# save original native module before it's shadowed by the attribute
_native = native
def _convert_ocaml(
name,
rule_type,
srcs = (),
deps = (),
compiler_flags = None,
ocamldep_flags = None,
native = True,
warnings_flags = None,
supports_coverage = None,
external_deps = (),
visibility = None,
ppx_flag = None,
nodefaultlibs = False):
_ignore = supports_coverage
base_path = _native.package_name()
is_binary = rule_type == "ocaml_binary"
# Translate visibility
visibility = get_visibility(visibility, name)
platform = platform_utils.get_platform_for_base_path(base_path)
attributes = {}
attributes["name"] = name
attributes["srcs"] = src_and_dep_helpers.convert_source_list(base_path, srcs)
attributes["visibility"] = visibility
if warnings_flags:
attributes["warnings_flags"] = warnings_flags
attributes["compiler_flags"] = ["-warn-error", "+a", "-safe-string"]
if compiler_flags:
attributes["compiler_flags"].extend(
string_macros.convert_args_with_macros(
compiler_flags,
platform = platform,
),
)
attributes["ocamldep_flags"] = []
if ocamldep_flags:
attributes["ocamldep_flags"].extend(ocamldep_flags)
if ppx_flag != None:
attributes["compiler_flags"].extend(["-ppx", ppx_flag])
attributes["ocamldep_flags"].extend(["-ppx", ppx_flag])
if not native:
attributes["bytecode_only"] = True
if rule_type == "ocaml_binary":
attributes["platform"] = platform_utils.get_buck_platform_for_base_path(base_path)
dependencies = []
# Add the C/C++ build info lib to deps.
if rule_type == "ocaml_binary":
cxx_build_info = cpp_common.cxx_build_info_rule(
base_path,
name,
rule_type,
platform,
visibility = visibility,
)
dependencies.append(cxx_build_info)
# Translate dependencies.
for dep in deps:
dependencies.append(target_utils.parse_target(dep, default_base_path = base_path))
# Translate external dependencies.
for dep in external_deps:
dependencies.append(src_and_dep_helpers.normalize_external_dep(dep))
# Add in binary-specific link deps.
if is_binary:
dependencies.extend(
cpp_common.get_binary_link_deps(
base_path,
name,
default_deps = not nodefaultlibs,
),
)
# If any deps were specified, add them to the output attrs.
if dependencies:
attributes["deps"], attributes["platform_deps"] = (
src_and_dep_helpers.format_all_deps(dependencies)
)
platform = platform_utils.get_platform_for_base_path(base_path)
ldflags = cpp_common.get_ldflags(
base_path,
name,
rule_type,
binary = is_binary,
platform = platform if is_binary else None,
)
if nodefaultlibs:
ldflags.append("-nodefaultlibs")
if "-flto" in ldflags:
attributes["compiler_flags"].extend(["-ccopt", "-flto", "-cclib", "-flto"])
if "-flto=thin" in ldflags:
attributes["compiler_flags"].extend(["-ccopt", "-flto=thin", "-cclib", "-flto=thin"])
return attributes
ocaml_common = struct(
convert_ocaml = _convert_ocaml,
)
| load('@fbcode_macros//build_defs/lib:cpp_common.bzl', 'cpp_common')
load('@fbcode_macros//build_defs/lib:src_and_dep_helpers.bzl', 'src_and_dep_helpers')
load('@fbcode_macros//build_defs/lib:string_macros.bzl', 'string_macros')
load('@fbcode_macros//build_defs/lib:target_utils.bzl', 'target_utils')
load('@fbcode_macros//build_defs/lib:visibility.bzl', 'get_visibility')
load('@fbcode_macros//build_defs:platform_utils.bzl', 'platform_utils')
_native = native
def _convert_ocaml(name, rule_type, srcs=(), deps=(), compiler_flags=None, ocamldep_flags=None, native=True, warnings_flags=None, supports_coverage=None, external_deps=(), visibility=None, ppx_flag=None, nodefaultlibs=False):
_ignore = supports_coverage
base_path = _native.package_name()
is_binary = rule_type == 'ocaml_binary'
visibility = get_visibility(visibility, name)
platform = platform_utils.get_platform_for_base_path(base_path)
attributes = {}
attributes['name'] = name
attributes['srcs'] = src_and_dep_helpers.convert_source_list(base_path, srcs)
attributes['visibility'] = visibility
if warnings_flags:
attributes['warnings_flags'] = warnings_flags
attributes['compiler_flags'] = ['-warn-error', '+a', '-safe-string']
if compiler_flags:
attributes['compiler_flags'].extend(string_macros.convert_args_with_macros(compiler_flags, platform=platform))
attributes['ocamldep_flags'] = []
if ocamldep_flags:
attributes['ocamldep_flags'].extend(ocamldep_flags)
if ppx_flag != None:
attributes['compiler_flags'].extend(['-ppx', ppx_flag])
attributes['ocamldep_flags'].extend(['-ppx', ppx_flag])
if not native:
attributes['bytecode_only'] = True
if rule_type == 'ocaml_binary':
attributes['platform'] = platform_utils.get_buck_platform_for_base_path(base_path)
dependencies = []
if rule_type == 'ocaml_binary':
cxx_build_info = cpp_common.cxx_build_info_rule(base_path, name, rule_type, platform, visibility=visibility)
dependencies.append(cxx_build_info)
for dep in deps:
dependencies.append(target_utils.parse_target(dep, default_base_path=base_path))
for dep in external_deps:
dependencies.append(src_and_dep_helpers.normalize_external_dep(dep))
if is_binary:
dependencies.extend(cpp_common.get_binary_link_deps(base_path, name, default_deps=not nodefaultlibs))
if dependencies:
(attributes['deps'], attributes['platform_deps']) = src_and_dep_helpers.format_all_deps(dependencies)
platform = platform_utils.get_platform_for_base_path(base_path)
ldflags = cpp_common.get_ldflags(base_path, name, rule_type, binary=is_binary, platform=platform if is_binary else None)
if nodefaultlibs:
ldflags.append('-nodefaultlibs')
if '-flto' in ldflags:
attributes['compiler_flags'].extend(['-ccopt', '-flto', '-cclib', '-flto'])
if '-flto=thin' in ldflags:
attributes['compiler_flags'].extend(['-ccopt', '-flto=thin', '-cclib', '-flto=thin'])
return attributes
ocaml_common = struct(convert_ocaml=_convert_ocaml) |
##
# File: PdbxChemCompConstants.py
# Date: 21-Feb-2012 John Westbrook
#
# Update:
# 21-Feb-2012 jdw add to chemcomputil repository
# 1-Feb-2017 jdw unified with chem_ref_data
#
##
"""
A collection of chemical data and information.
"""
__docformat__ = "restructuredtext en"
__author__ = "John Westbrook"
__email__ = "john.westbrook@rcsb.org"
__license__ = "Apache 2.0"
class PdbxChemCompConstants(object):
periodicTable = [
"H",
"HE",
"LI",
"BE",
"B",
"C",
"N",
"O",
"F",
"NE",
"NA",
"MG",
"AL",
"SI",
"P",
"S",
"CL",
"AR",
"K",
"CA",
"SC",
"TI",
"V",
"CR",
"MN",
"FE",
"CO",
"NI",
"CU",
"ZN",
"GA",
"GE",
"AS",
"SE",
"BR",
"KR",
"RB",
"SR",
"Y",
"ZR",
"NB",
"MO",
"TC",
"RU",
"RH",
"PD",
"AG",
"CD",
"IN",
"SN",
"SB",
"TE",
"I",
"XE",
"CS",
"BA",
"LA",
"CE",
"PR",
"ND",
"PM",
"SM",
"EU",
"GD",
"TB",
"DY",
"HO",
"ER",
"TM",
"YB",
"LU",
"HF",
"TA",
"W",
"RE",
"OS",
"IR",
"PT",
"AU",
"HG",
"TL",
"PB",
"BI",
"PO",
"AT",
"RN",
"FR",
"RA",
"AC",
"TH",
"PA",
"U",
"NP",
"PU",
"AM",
"CM",
"BK",
"CF",
"ES",
"FM",
"MD",
"NO",
"LR",
"UNQ",
"UNP",
"UNH",
"UNS",
"UNO",
"UNE",
]
| """
A collection of chemical data and information.
"""
__docformat__ = 'restructuredtext en'
__author__ = 'John Westbrook'
__email__ = 'john.westbrook@rcsb.org'
__license__ = 'Apache 2.0'
class Pdbxchemcompconstants(object):
periodic_table = ['H', 'HE', 'LI', 'BE', 'B', 'C', 'N', 'O', 'F', 'NE', 'NA', 'MG', 'AL', 'SI', 'P', 'S', 'CL', 'AR', 'K', 'CA', 'SC', 'TI', 'V', 'CR', 'MN', 'FE', 'CO', 'NI', 'CU', 'ZN', 'GA', 'GE', 'AS', 'SE', 'BR', 'KR', 'RB', 'SR', 'Y', 'ZR', 'NB', 'MO', 'TC', 'RU', 'RH', 'PD', 'AG', 'CD', 'IN', 'SN', 'SB', 'TE', 'I', 'XE', 'CS', 'BA', 'LA', 'CE', 'PR', 'ND', 'PM', 'SM', 'EU', 'GD', 'TB', 'DY', 'HO', 'ER', 'TM', 'YB', 'LU', 'HF', 'TA', 'W', 'RE', 'OS', 'IR', 'PT', 'AU', 'HG', 'TL', 'PB', 'BI', 'PO', 'AT', 'RN', 'FR', 'RA', 'AC', 'TH', 'PA', 'U', 'NP', 'PU', 'AM', 'CM', 'BK', 'CF', 'ES', 'FM', 'MD', 'NO', 'LR', 'UNQ', 'UNP', 'UNH', 'UNS', 'UNO', 'UNE'] |
class AverageMeter(object):
"""Stores the summation and counts the number to compute the average value.
"""
def __init__(self):
self._sum = 0
self._count = 0
@property
def avg(self):
return self._sum / self._count if self._count != 0 else 0
@property
def count(self):
return self._count
@property
def sum(self):
return self._sum
def update(self, value, n=1):
self._sum += value * n
self._count += n
def reset(self):
self._sum = 0
self._count = 0
| class Averagemeter(object):
"""Stores the summation and counts the number to compute the average value.
"""
def __init__(self):
self._sum = 0
self._count = 0
@property
def avg(self):
return self._sum / self._count if self._count != 0 else 0
@property
def count(self):
return self._count
@property
def sum(self):
return self._sum
def update(self, value, n=1):
self._sum += value * n
self._count += n
def reset(self):
self._sum = 0
self._count = 0 |
r'''
.. _snippets-cli-tagging:
Command Line Interface: Tagging
===============================
This is the tested source code for the snippets used in :ref:`cli-tagging`. The
config file we're using in this example can be downloaded
:download:`here <../../examples/snippets/resources/datafs_mongo.yml>`.
Example 1
---------
Displayed example 1 code:
.. EXAMPLE-BLOCK-1-START
.. code-block:: bash
$ datafs create archive1 --tag "foo" --tag "bar" --description \
> "tag test 1 has bar"
created versioned archive <DataArchive local://archive1>
$ datafs create archive2 --tag "foo" --tag "baz" --description \
> "tag test 2 has baz"
created versioned archive <DataArchive local://archive2>
.. EXAMPLE-BLOCK-1-END
Example 2
---------
.. EXAMPLE-BLOCK-2-START
.. code-block:: bash
$ datafs search bar
archive1
$ datafs search baz
archive2
$ datafs search foo # doctest: +SKIP
archive1
archive2
.. EXAMPLE-BLOCK-2-END
Example 3
---------
.. EXAMPLE-BLOCK-3-START
.. code-block:: bash
$ datafs create archive3 --tag "foo" --tag "bar" --tag "baz" \
> --description 'tag test 3 has all the tags!'
created versioned archive <DataArchive local://archive3>
$ datafs search bar foo # doctest: +SKIP
archive1
archive3
$ datafs search bar foo baz
archive3
.. EXAMPLE-BLOCK-3-END
Example 4
---------
.. EXAMPLE-BLOCK-4-START
.. code-block:: bash
$ datafs search qux
$ datafs search foo qux
.. EXAMPLE-BLOCK-4-END
Example 5
---------
.. EXAMPLE-BLOCK-5-START
.. code-block:: bash
$ datafs get_tags archive1
foo bar
.. EXAMPLE-BLOCK-5-END
Example 6
---------
.. EXAMPLE-BLOCK-6-START
.. code-block:: bash
$ datafs add_tags archive1 qux
$ datafs search foo qux
archive1
.. EXAMPLE-BLOCK-6-END
Example 7
---------
.. EXAMPLE-BLOCK-7-START
.. code-block:: bash
$ datafs delete_tags archive1 foo bar
$ datafs search foo bar
archive3
.. EXAMPLE-BLOCK-7-END
Teardown
--------
.. code-block:: bash
$ datafs delete archive1
deleted archive <DataArchive local://archive1>
$ datafs delete archive2
deleted archive <DataArchive local://archive2>
$ datafs delete archive3
deleted archive <DataArchive local://archive3>
'''
| """
.. _snippets-cli-tagging:
Command Line Interface: Tagging
===============================
This is the tested source code for the snippets used in :ref:`cli-tagging`. The
config file we're using in this example can be downloaded
:download:`here <../../examples/snippets/resources/datafs_mongo.yml>`.
Example 1
---------
Displayed example 1 code:
.. EXAMPLE-BLOCK-1-START
.. code-block:: bash
$ datafs create archive1 --tag "foo" --tag "bar" --description \\
> "tag test 1 has bar"
created versioned archive <DataArchive local://archive1>
$ datafs create archive2 --tag "foo" --tag "baz" --description \\
> "tag test 2 has baz"
created versioned archive <DataArchive local://archive2>
.. EXAMPLE-BLOCK-1-END
Example 2
---------
.. EXAMPLE-BLOCK-2-START
.. code-block:: bash
$ datafs search bar
archive1
$ datafs search baz
archive2
$ datafs search foo # doctest: +SKIP
archive1
archive2
.. EXAMPLE-BLOCK-2-END
Example 3
---------
.. EXAMPLE-BLOCK-3-START
.. code-block:: bash
$ datafs create archive3 --tag "foo" --tag "bar" --tag "baz" \\
> --description 'tag test 3 has all the tags!'
created versioned archive <DataArchive local://archive3>
$ datafs search bar foo # doctest: +SKIP
archive1
archive3
$ datafs search bar foo baz
archive3
.. EXAMPLE-BLOCK-3-END
Example 4
---------
.. EXAMPLE-BLOCK-4-START
.. code-block:: bash
$ datafs search qux
$ datafs search foo qux
.. EXAMPLE-BLOCK-4-END
Example 5
---------
.. EXAMPLE-BLOCK-5-START
.. code-block:: bash
$ datafs get_tags archive1
foo bar
.. EXAMPLE-BLOCK-5-END
Example 6
---------
.. EXAMPLE-BLOCK-6-START
.. code-block:: bash
$ datafs add_tags archive1 qux
$ datafs search foo qux
archive1
.. EXAMPLE-BLOCK-6-END
Example 7
---------
.. EXAMPLE-BLOCK-7-START
.. code-block:: bash
$ datafs delete_tags archive1 foo bar
$ datafs search foo bar
archive3
.. EXAMPLE-BLOCK-7-END
Teardown
--------
.. code-block:: bash
$ datafs delete archive1
deleted archive <DataArchive local://archive1>
$ datafs delete archive2
deleted archive <DataArchive local://archive2>
$ datafs delete archive3
deleted archive <DataArchive local://archive3>
""" |
class Funcionario:
def __init__(self, nome, salario):
self.nome=nome
self.salario=float(salario)
def aum_salario(self, pct):
self.salario += (self.salario*pct/100)
def get_salario(self):
return self.salario | class Funcionario:
def __init__(self, nome, salario):
self.nome = nome
self.salario = float(salario)
def aum_salario(self, pct):
self.salario += self.salario * pct / 100
def get_salario(self):
return self.salario |
class Node(object):
def __init__(self, val=None, children=None):
self.val = val
self.children = children
| class Node(object):
def __init__(self, val=None, children=None):
self.val = val
self.children = children |
num = int(input('digite um numero:'))
if num / 1 and num/ num:
print('esse numero primo')
else:
print('esse numerp nap e primo') | num = int(input('digite um numero:'))
if num / 1 and num / num:
print('esse numero primo')
else:
print('esse numerp nap e primo') |
'''
https://www.hackerrank.com/challenges/python-loops/problem
Task
====
The provided code stub reads and integer, , from STDIN. For all non-negative integers , print .
Example
=======
The list of non-negative integers that are less than is . Print the square of each number on a separate line.
0
1
4
Input Format
============
The first and only line contains the integer, .
Constraints
===========
Output Format
===========
Print lines, one corresponding to each .
Sample Input 0
5
Sample Output 0
0
1
4
9
16
'''
if __name__ == '__main__':
n = int(input())
a = [0] * n
print(a[0])
for i in range(1, n):
a[i] = a[i-1] + ((i-1) << 1) + 1
# i , 1, 2, 3
# 2(i-1)+1 0, 1, 3, 5
# a[i] 0, 1, 4, 9
print(a[i])
| """
https://www.hackerrank.com/challenges/python-loops/problem
Task
====
The provided code stub reads and integer, , from STDIN. For all non-negative integers , print .
Example
=======
The list of non-negative integers that are less than is . Print the square of each number on a separate line.
0
1
4
Input Format
============
The first and only line contains the integer, .
Constraints
===========
Output Format
===========
Print lines, one corresponding to each .
Sample Input 0
5
Sample Output 0
0
1
4
9
16
"""
if __name__ == '__main__':
n = int(input())
a = [0] * n
print(a[0])
for i in range(1, n):
a[i] = a[i - 1] + (i - 1 << 1) + 1
print(a[i]) |
# Time: O(m*n), m = len(word1), n = len(word2)
# Space: O(m*n)
class Solution:
def minDistance(self, word1: str, word2: str) -> int:
"""
3 options for each char
if chars match don't need to do anything
subprobem(i,j) = subproblem(i-1,j-1)
else:
subproblem(i,j) = 1 + min of(1,2,3)
1.Insertion: subproblem(word1[i], word2[j-1])
2.Deletion: subproblem(word[i-1], word2[j])
3.Replacement: subproblem(word1[i-1], word2[j-1])
"""
dp_mat = [[0 for _ in range(len(word2)+1)] for _ in range(len(word1)+1)]
for sublen in range(len(word1)+1): # if second word is empty
dp_mat[sublen][0] = sublen
for sublen in range(len(word2)+1): # if first word is empty
dp_mat[0][sublen] = sublen
for i in range(1, len(word1)+1):
for j in range(1, len(word2)+1):
if word1[i-1]==word2[j-1]:
dp_mat[i][j] = dp_mat[i-1][j-1]
else:
insertion = dp_mat[i][j-1]
deletion = dp_mat[i-1][j]
replace = dp_mat[i-1][j-1]
dp_mat[i][j] = 1 + min(insertion, deletion, replace)
return dp_mat[-1][-1]
| class Solution:
def min_distance(self, word1: str, word2: str) -> int:
"""
3 options for each char
if chars match don't need to do anything
subprobem(i,j) = subproblem(i-1,j-1)
else:
subproblem(i,j) = 1 + min of(1,2,3)
1.Insertion: subproblem(word1[i], word2[j-1])
2.Deletion: subproblem(word[i-1], word2[j])
3.Replacement: subproblem(word1[i-1], word2[j-1])
"""
dp_mat = [[0 for _ in range(len(word2) + 1)] for _ in range(len(word1) + 1)]
for sublen in range(len(word1) + 1):
dp_mat[sublen][0] = sublen
for sublen in range(len(word2) + 1):
dp_mat[0][sublen] = sublen
for i in range(1, len(word1) + 1):
for j in range(1, len(word2) + 1):
if word1[i - 1] == word2[j - 1]:
dp_mat[i][j] = dp_mat[i - 1][j - 1]
else:
insertion = dp_mat[i][j - 1]
deletion = dp_mat[i - 1][j]
replace = dp_mat[i - 1][j - 1]
dp_mat[i][j] = 1 + min(insertion, deletion, replace)
return dp_mat[-1][-1] |
'''
Created on 24.07.2019
@author: LK
'''
# TODO: Inheritance of tmcl interface
class Landungsbruecke(object):
GP_VitalSignsErrorMask = 1
GP_DriversEnable = 2
GP_DebugMode = 3
GP_BoardAssignment = 4
GP_HWID = 5
GP_PinState = 6
| """
Created on 24.07.2019
@author: LK
"""
class Landungsbruecke(object):
gp__vital_signs_error_mask = 1
gp__drivers_enable = 2
gp__debug_mode = 3
gp__board_assignment = 4
gp_hwid = 5
gp__pin_state = 6 |
def strategy(history, memory):
if history.shape[1] % 3 == 2: # CC
return 0, None # D
else:
return 1, None # C | def strategy(history, memory):
if history.shape[1] % 3 == 2:
return (0, None)
else:
return (1, None) |
class PixivException(Exception):
pass
class DownloadException(PixivException):
pass
class APIException(PixivException):
pass
class LoginPasswordError(APIException):
pass
class LoginTokenError(APIException):
pass
| class Pixivexception(Exception):
pass
class Downloadexception(PixivException):
pass
class Apiexception(PixivException):
pass
class Loginpassworderror(APIException):
pass
class Logintokenerror(APIException):
pass |
def createMessageForArduino(flags, device_id, datasize, data):
# prepare data, datasize depends whether we are working on
# strings or not and therefor calculate it again
byteData, datasize = getBytesForData(data)
message = b'\xff'
message += bytes([flags])
message += bytes([int(device_id)])
message += bytes([datasize])
if datasize > 0:
message += byteData
message += b'\xfe'
print("Message to be send to arduino: ", message, " with length: ", len(message))
return message
def createDeviceInitializationMessage(device_id, sensor):
if sensor:
return createMessageForArduino(flags=128, device_id=device_id, datasize=0, data=[])
else:
return createMessageForArduino(flags=(128 + 32), device_id=device_id, datasize=0, data=[])
def createActuatorNewValueMessage(device_id, data):
return createMessageForArduino(flags=32, device_id=device_id, datasize=len(data), data=data)
def getBytesForData(data):
newData = b''
datasize = 0
if isinstance(data, list):
for item in data:
newDataPoint, datasizeForPoint = getBytesForDatapoint(item)
newData += newDataPoint
datasize += datasizeForPoint
else:
newData, datasize = getBytesForDatapoint(data)
return newData, datasize
def getBytesForDatapoint(datapoint):
# handle integer and strings differently
try:
newBytes = bytes([int(datapoint)])
except:
newBytes = str.encode(datapoint)
return newBytes, len(newBytes)
| def create_message_for_arduino(flags, device_id, datasize, data):
(byte_data, datasize) = get_bytes_for_data(data)
message = b'\xff'
message += bytes([flags])
message += bytes([int(device_id)])
message += bytes([datasize])
if datasize > 0:
message += byteData
message += b'\xfe'
print('Message to be send to arduino: ', message, ' with length: ', len(message))
return message
def create_device_initialization_message(device_id, sensor):
if sensor:
return create_message_for_arduino(flags=128, device_id=device_id, datasize=0, data=[])
else:
return create_message_for_arduino(flags=128 + 32, device_id=device_id, datasize=0, data=[])
def create_actuator_new_value_message(device_id, data):
return create_message_for_arduino(flags=32, device_id=device_id, datasize=len(data), data=data)
def get_bytes_for_data(data):
new_data = b''
datasize = 0
if isinstance(data, list):
for item in data:
(new_data_point, datasize_for_point) = get_bytes_for_datapoint(item)
new_data += newDataPoint
datasize += datasizeForPoint
else:
(new_data, datasize) = get_bytes_for_datapoint(data)
return (newData, datasize)
def get_bytes_for_datapoint(datapoint):
try:
new_bytes = bytes([int(datapoint)])
except:
new_bytes = str.encode(datapoint)
return (newBytes, len(newBytes)) |
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
class SDKError(Exception):
"""Base Error for Amazon Transcribe Streaming SDK"""
class HTTPException(SDKError):
"""Base error for HTTP related exceptions"""
class ServiceException(SDKError):
"""Errors returned by the service"""
class UnknownServiceException(ServiceException):
def __init__(self, status_code, error_code, message):
self.status_code: int = status_code
self.error_code: int = error_code
self.message: str = message
class BadRequestException(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 400
class ConflictException(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code = 409
class InternalFailureException(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 500
class LimitExceededException(ServiceException):
def __init__(self, message):
self.message: str = message
self.error: int = 429
class ServiceUnavailableException(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 503
class SerializationException(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 400
class SerializerException(SDKError):
"""Encountered an issue when seralizing a request or event"""
class ValidationException(SDKError):
"""Encountered an issue validating a given value"""
class CredentialsException(SDKError):
"""Encountered an issue while resolving or using credentials"""
| class Sdkerror(Exception):
"""Base Error for Amazon Transcribe Streaming SDK"""
class Httpexception(SDKError):
"""Base error for HTTP related exceptions"""
class Serviceexception(SDKError):
"""Errors returned by the service"""
class Unknownserviceexception(ServiceException):
def __init__(self, status_code, error_code, message):
self.status_code: int = status_code
self.error_code: int = error_code
self.message: str = message
class Badrequestexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 400
class Conflictexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code = 409
class Internalfailureexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 500
class Limitexceededexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.error: int = 429
class Serviceunavailableexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 503
class Serializationexception(ServiceException):
def __init__(self, message):
self.message: str = message
self.status_code: int = 400
class Serializerexception(SDKError):
"""Encountered an issue when seralizing a request or event"""
class Validationexception(SDKError):
"""Encountered an issue validating a given value"""
class Credentialsexception(SDKError):
"""Encountered an issue while resolving or using credentials""" |
# -*- coding: utf-8 -*-
class ChainingHash(object):
""" Hash table implementation which resolves collisions by using chaining.
Attributes:
num_buckets: int, how large should the internal array be to store data.
data: list, a storage for hash data.
"""
def __init__(self, num_buckets):
""" Constructs hash table given the number of buckets.
Args:
num_buckets: should be a prime and it should not be close to a
power of 2 or 10.
"""
# Size of the internal storage list.
self.num_buckets = num_buckets
# Stores the hash buckets. It's a list of lists.
self.data = [[] for i in xrange(self.num_buckets)]
def insert(self, value):
""" Add value to the hash table.
Complexity: O(1)
"""
index = self.hash_function(value)
self.data[index].append(value)
def lookup(self, value):
""" Checks whether value is in the hash table or not.
Complexity: O(1)
Returns:
bool, whether value is in the hash table or not.
"""
index = self.hash_function(value)
return value in self.data[index]
def delete(self, value):
""" Removes value from the hash.
Complexity: O(1)
Returns:
bool, whether the value was present in the data structure.
"""
index = self.hash_function(value)
if value in self.data[index]:
self.data[index].remove(value)
return True
return False
def export(self):
""" Exports a plain list with the enclosed elements.
Complexity: On()
"""
output = []
for bucket in self.data:
output.extend(bucket)
return output
def hash_function(self, value):
""" Hashing function used to compute a key for the given value.
This is done in two steps:
`hash code`: key -> really big number
`compression function`: really big number -> bucket number
Args:
value: any value hashable to be added to the hash table.
Return:
int, the position in the array where the items is located.
"""
hash_code = abs(hash(value))
bucket_index = hash_code % self.num_buckets
return bucket_index
class OpenAddressingHash(object):
""" Implements a hash table data structure using open addressing (with
double hashing) for collision resolution.
Two solutions for collision resolution using open addressing:
- liniar probing: if the first position given by the hash function is not
available, go to the next in line position and so on. Better for deletes.
- double hashing: use two hash functions, when the position indicated by
the first hash function is taken, use the second function to compute the
next position. Better space efficiency.
Args:
num_buckets: int, number of buckets for the hash function.
data: list, the input data.
max_attempts: int, how many attempts to insert/lookup the data.
"""
def __init__(self, num_buckets, max_attempts=10):
self.num_buckets = num_buckets
self.data = [None for i in xrange(self.num_buckets)]
self.max_attempts = max_attempts
def insert(self, value):
""" Attempt to insert the data into the hash table.
Uses the first hash function to compute the initial index. If that's
taken, use the second function to compute offsets to search for empty
slots.
Raises:
Exception, when the number of insert attempts exceeds a configured
threshold.
"""
first_index = self.primary_hash_function(value)
if (self.data[first_index] == None):
self.data[first_index] = value
return
attempt_count = 0
second_index = first_index
while (True):
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if (self.data[second_index] == None):
self.data[second_index] = value
return
attempt_count += 1
if attempt_count == self.max_attempts:
raise Exception('Failed to insert the data in {count} attempts' \
.format(count=self.max_attempts))
def lookup(self, value):
""" Attempt to locate the data in the hash table.
Returns:
bool, whether or not the input value is present.
"""
first_index = self.primary_hash_function(value)
if (self.data[first_index] != None):
return True
attempt_count = 0
second_index = first_index
while (True):
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if (self.data[second_index] != None):
return True
attempt_count += 1
if attempt_count == self.max_attempts:
return False
def delete(self, value):
""" Removes the value from the data structure.
Returns:
bool, whether or not the data structure contained the given value.
"""
first_index = self.primary_hash_function(value)
if (self.data[first_index] != None):
self.data[first_index] = None
return True
attempt_count = 0
second_index = first_index
while (True):
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if (self.data[second_index] != None):
return True
attempt_count += 1
if attempt_count == self.max_attempts:
return False
def export(self):
""" Exports the contents of the hash table into a list. """
return [num for num in self.data if num != None]
def primary_hash_function(self, value):
""" First hash function for the initial lookup.
Args:
value: mixed, can be any hashable python value.
Returns:
int, an index in the array data structure.
"""
hash_code = abs(hash(value))
bucket_index = hash_code % self.num_buckets
return bucket_index
def secondary_hash_function(self, value):
""" Second hash function used to offset the indexes produced by the
first hash function.
Args:
value: mixed, can be any hashable python value.
Returns:
int, an index in the array data structure.
"""
hash_code = abs(hash(value)+1319497)
bucket_index = hash_code % self.num_buckets
return bucket_index
class SequentialProbingHash(object):
""" Implement a hash table using sequential probing to resolve collitions.
The hash function is used to compute the array index. If that bucket is
occupied then the array is traversed until an open position is found.
TODO: how to implement the this?!
Args:
num_buckets: int, the size of the array containing the data.
data: list, the actual array containing the data.
"""
def two_sum_problem_sort(data, total, distinct=False):
""" Returns the pairs of number in input list which sum to the given total.
Complexity O(nlogn)
Args:
data: list, all the numbers available to compute the sums.
total: int, the sum to look for.
distinct: boolean, whether to accept distinct values when computing sums.
Returns:
list, of pairs of numbers from data which sum up to total.
"""
out = []
data.sort()
for i in data:
if i > total:
continue
other = total - i
if (other in data) and ((distinct == True and i != other) or (distinct == False)):
out.append((i, other))
return out
def two_sum_problem_hash(data, total, distinct=False):
""" Returns the pairs of number in input list which sum to the given total.
Complexity O(n)
Args:
data: list, all the numbers available to compute the sums.
total: int, the sum to look for.
distinct: boolean, whether to accept distinct values when computing sums.
Returns:
list, of pairs of numbers from data which sum up to total.
"""
h = {} # Using python's native hash table which is much more performant.
for i in data:
h[i] = True
out = []
for i in data:
other = total - i
if (other in h) and ((distinct == True and i != other) or (distinct == False)):
out.append((i, other))
return out
| class Chaininghash(object):
""" Hash table implementation which resolves collisions by using chaining.
Attributes:
num_buckets: int, how large should the internal array be to store data.
data: list, a storage for hash data.
"""
def __init__(self, num_buckets):
""" Constructs hash table given the number of buckets.
Args:
num_buckets: should be a prime and it should not be close to a
power of 2 or 10.
"""
self.num_buckets = num_buckets
self.data = [[] for i in xrange(self.num_buckets)]
def insert(self, value):
""" Add value to the hash table.
Complexity: O(1)
"""
index = self.hash_function(value)
self.data[index].append(value)
def lookup(self, value):
""" Checks whether value is in the hash table or not.
Complexity: O(1)
Returns:
bool, whether value is in the hash table or not.
"""
index = self.hash_function(value)
return value in self.data[index]
def delete(self, value):
""" Removes value from the hash.
Complexity: O(1)
Returns:
bool, whether the value was present in the data structure.
"""
index = self.hash_function(value)
if value in self.data[index]:
self.data[index].remove(value)
return True
return False
def export(self):
""" Exports a plain list with the enclosed elements.
Complexity: On()
"""
output = []
for bucket in self.data:
output.extend(bucket)
return output
def hash_function(self, value):
""" Hashing function used to compute a key for the given value.
This is done in two steps:
`hash code`: key -> really big number
`compression function`: really big number -> bucket number
Args:
value: any value hashable to be added to the hash table.
Return:
int, the position in the array where the items is located.
"""
hash_code = abs(hash(value))
bucket_index = hash_code % self.num_buckets
return bucket_index
class Openaddressinghash(object):
""" Implements a hash table data structure using open addressing (with
double hashing) for collision resolution.
Two solutions for collision resolution using open addressing:
- liniar probing: if the first position given by the hash function is not
available, go to the next in line position and so on. Better for deletes.
- double hashing: use two hash functions, when the position indicated by
the first hash function is taken, use the second function to compute the
next position. Better space efficiency.
Args:
num_buckets: int, number of buckets for the hash function.
data: list, the input data.
max_attempts: int, how many attempts to insert/lookup the data.
"""
def __init__(self, num_buckets, max_attempts=10):
self.num_buckets = num_buckets
self.data = [None for i in xrange(self.num_buckets)]
self.max_attempts = max_attempts
def insert(self, value):
""" Attempt to insert the data into the hash table.
Uses the first hash function to compute the initial index. If that's
taken, use the second function to compute offsets to search for empty
slots.
Raises:
Exception, when the number of insert attempts exceeds a configured
threshold.
"""
first_index = self.primary_hash_function(value)
if self.data[first_index] == None:
self.data[first_index] = value
return
attempt_count = 0
second_index = first_index
while True:
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if self.data[second_index] == None:
self.data[second_index] = value
return
attempt_count += 1
if attempt_count == self.max_attempts:
raise exception('Failed to insert the data in {count} attempts'.format(count=self.max_attempts))
def lookup(self, value):
""" Attempt to locate the data in the hash table.
Returns:
bool, whether or not the input value is present.
"""
first_index = self.primary_hash_function(value)
if self.data[first_index] != None:
return True
attempt_count = 0
second_index = first_index
while True:
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if self.data[second_index] != None:
return True
attempt_count += 1
if attempt_count == self.max_attempts:
return False
def delete(self, value):
""" Removes the value from the data structure.
Returns:
bool, whether or not the data structure contained the given value.
"""
first_index = self.primary_hash_function(value)
if self.data[first_index] != None:
self.data[first_index] = None
return True
attempt_count = 0
second_index = first_index
while True:
offset = self.secondary_hash_function(value)
second_index = (second_index + offset) % self.num_buckets
if self.data[second_index] != None:
return True
attempt_count += 1
if attempt_count == self.max_attempts:
return False
def export(self):
""" Exports the contents of the hash table into a list. """
return [num for num in self.data if num != None]
def primary_hash_function(self, value):
""" First hash function for the initial lookup.
Args:
value: mixed, can be any hashable python value.
Returns:
int, an index in the array data structure.
"""
hash_code = abs(hash(value))
bucket_index = hash_code % self.num_buckets
return bucket_index
def secondary_hash_function(self, value):
""" Second hash function used to offset the indexes produced by the
first hash function.
Args:
value: mixed, can be any hashable python value.
Returns:
int, an index in the array data structure.
"""
hash_code = abs(hash(value) + 1319497)
bucket_index = hash_code % self.num_buckets
return bucket_index
class Sequentialprobinghash(object):
""" Implement a hash table using sequential probing to resolve collitions.
The hash function is used to compute the array index. If that bucket is
occupied then the array is traversed until an open position is found.
TODO: how to implement the this?!
Args:
num_buckets: int, the size of the array containing the data.
data: list, the actual array containing the data.
"""
def two_sum_problem_sort(data, total, distinct=False):
""" Returns the pairs of number in input list which sum to the given total.
Complexity O(nlogn)
Args:
data: list, all the numbers available to compute the sums.
total: int, the sum to look for.
distinct: boolean, whether to accept distinct values when computing sums.
Returns:
list, of pairs of numbers from data which sum up to total.
"""
out = []
data.sort()
for i in data:
if i > total:
continue
other = total - i
if other in data and (distinct == True and i != other or distinct == False):
out.append((i, other))
return out
def two_sum_problem_hash(data, total, distinct=False):
""" Returns the pairs of number in input list which sum to the given total.
Complexity O(n)
Args:
data: list, all the numbers available to compute the sums.
total: int, the sum to look for.
distinct: boolean, whether to accept distinct values when computing sums.
Returns:
list, of pairs of numbers from data which sum up to total.
"""
h = {}
for i in data:
h[i] = True
out = []
for i in data:
other = total - i
if other in h and (distinct == True and i != other or distinct == False):
out.append((i, other))
return out |
def max_sublist_sum(arr):
max_ending_here = 0
max_so_far = 0
for x in arr:
max_ending_here = max_ending_here + x
max_so_far = max(max_so_far, max_ending_here)
return max_so_far
"""
Max Sublist Sum
max-sublist-sum
Efficient equivalent to max(sum(arr[i:j]) for 0 <= i <= j <= len(arr))
Algorithm source: WordAligned.org by Thomas Guest
Input:
arr: A list of ints
Output:
The maximum sublist sum
Example:
>>> max_sublist_sum([4, -5, 2, 1, -1, 3])
5
"""
| def max_sublist_sum(arr):
max_ending_here = 0
max_so_far = 0
for x in arr:
max_ending_here = max_ending_here + x
max_so_far = max(max_so_far, max_ending_here)
return max_so_far
'\nMax Sublist Sum\nmax-sublist-sum\n\nEfficient equivalent to max(sum(arr[i:j]) for 0 <= i <= j <= len(arr))\n\nAlgorithm source: WordAligned.org by Thomas Guest\n\n\nInput:\n arr: A list of ints\n\nOutput:\n The maximum sublist sum\n\nExample:\n >>> max_sublist_sum([4, -5, 2, 1, -1, 3])\n 5\n' |
# List is python's version of array, zero-based indexing
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
print(months[0])
print(months[2])
print(months[-1])
list_of_random_things = [1, 3.4, 'a string', True]
# watch for indexing errors
# list_of_random_things[len(list_of_random_things)]
list_of_random_things[len(list_of_random_things)-1]
print(list_of_random_things[len(list_of_random_things)-1])
# Slicing
# upperbound is inclusive, lowerbound is exclusive
list_of_random_things = [1, 3.4, 'a string', True]
list_of_random_things[1:2]
# [3.4 notice returns a list rather vs indexing single element
q3 = months[6:9]
print(q3)
first_half = months[:6]
print(first_half)
second_half = months[6:]
print(second_half)
greeting = "Hello there"
print(len(greeting), len(months))
print(greeting[6:9], months[6:9])
# Membership Operators
# in and not in
print('her' in greeting, 'her' not in greeting)
print('Sunday' in months, 'Sunday' not in months)
# Lists are mutable (strings aren't)
months[3] = 'Friday'
print(months)
# QUIZ List indexing
month = 8
days_in_month = [31,28,31,30,31,30,31,31,30,31,30,31]
# use list indexing to determine the number of days in month
num_days = days_in_month[month-1]
print(num_days)
# QUIZ Slicing Lists
eclipse_dates = ['June 21, 2001', 'December 4, 2002', 'November 23, 2003',
'March 29, 2006', 'August 1, 2008', 'July 22, 2009',
'July 11, 2010', 'November 13, 2012', 'March 20, 2015',
'March 9, 2016']
# TODO: Modify this line so it prints the last three elements of the list
print(eclipse_dates[-3:])
sentence1 = "I wish to register a complaint."
print(sentence1[30])
# New Section: List Methods
name = 'Jim'
student = name
name = 'Tim'
print(name)
print(student) #Jim
scores = ["B", "C", "A", "D", "B", "A"]
grades = scores
print("scores: " + str(scores))
print("grades: " + str(grades))
scores[3] = "B"
print("scores: " + str(scores))
print("grades: " + str(grades))
# useful functions: len() max() -> highest number or last alphabetically
# min() sorted()
sizes = [15, 6, 89, 34, 65, 35]
print(sorted(sizes))
print(sorted(sizes, reverse=True))
# join method list to strings
nautical_directions = "\n".join(["fore", "aft", "starboard", "port"])
print(nautical_directions)
names = ["Garcia", "O'Kelly", "Davis"]
print("-".join(names))
# don't forget the comma
names = ["Garcia" "O'Kelly" "Davis"]
print("-".join(names))
# append Method
python_varieties = ['Burmese Python', 'African Rock Python', 'Ball Python', 'Reticulated Python', 'Angolan Python']
python_varieties.append('Blood Python')
print(python_varieties)
# QUIZ
a = [1, 5, 8]
b = [2, 6, 9, 10]
c = [100, 200]
print(max([len(a), len(b), len(c)]))
print(min([len(a), len(b), len(c)]))
# 4,2
names = ["Carol", "Albert", "Ben", "Donna"]
print(" & ".join(sorted(names)))
# Albert & Ben & Carol & Donna
names = ["Carol", "Albert", "Ben", "Donna"]
names.append("Eugenia")
print(sorted(names))
# ["Albert", "Ben", "Carol", "Donna", "Eugenia"]
| months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
print(months[0])
print(months[2])
print(months[-1])
list_of_random_things = [1, 3.4, 'a string', True]
list_of_random_things[len(list_of_random_things) - 1]
print(list_of_random_things[len(list_of_random_things) - 1])
list_of_random_things = [1, 3.4, 'a string', True]
list_of_random_things[1:2]
q3 = months[6:9]
print(q3)
first_half = months[:6]
print(first_half)
second_half = months[6:]
print(second_half)
greeting = 'Hello there'
print(len(greeting), len(months))
print(greeting[6:9], months[6:9])
print('her' in greeting, 'her' not in greeting)
print('Sunday' in months, 'Sunday' not in months)
months[3] = 'Friday'
print(months)
month = 8
days_in_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
num_days = days_in_month[month - 1]
print(num_days)
eclipse_dates = ['June 21, 2001', 'December 4, 2002', 'November 23, 2003', 'March 29, 2006', 'August 1, 2008', 'July 22, 2009', 'July 11, 2010', 'November 13, 2012', 'March 20, 2015', 'March 9, 2016']
print(eclipse_dates[-3:])
sentence1 = 'I wish to register a complaint.'
print(sentence1[30])
name = 'Jim'
student = name
name = 'Tim'
print(name)
print(student)
scores = ['B', 'C', 'A', 'D', 'B', 'A']
grades = scores
print('scores: ' + str(scores))
print('grades: ' + str(grades))
scores[3] = 'B'
print('scores: ' + str(scores))
print('grades: ' + str(grades))
sizes = [15, 6, 89, 34, 65, 35]
print(sorted(sizes))
print(sorted(sizes, reverse=True))
nautical_directions = '\n'.join(['fore', 'aft', 'starboard', 'port'])
print(nautical_directions)
names = ['Garcia', "O'Kelly", 'Davis']
print('-'.join(names))
names = ["GarciaO'KellyDavis"]
print('-'.join(names))
python_varieties = ['Burmese Python', 'African Rock Python', 'Ball Python', 'Reticulated Python', 'Angolan Python']
python_varieties.append('Blood Python')
print(python_varieties)
a = [1, 5, 8]
b = [2, 6, 9, 10]
c = [100, 200]
print(max([len(a), len(b), len(c)]))
print(min([len(a), len(b), len(c)]))
names = ['Carol', 'Albert', 'Ben', 'Donna']
print(' & '.join(sorted(names)))
names = ['Carol', 'Albert', 'Ben', 'Donna']
names.append('Eugenia')
print(sorted(names)) |
# --- Day 11: Seating System ---
# Your plane lands with plenty of time to spare. The final leg of your journey is a ferry that goes directly to the tropical island where you can finally start your vacation. As you reach the waiting area to board the ferry, you realize you're so early, nobody else has even arrived yet!
# By modeling the process people use to choose (or abandon) their seat in the waiting area, you're pretty sure you can predict the best place to sit. You make a quick map of the seat layout (your puzzle input).
# The seat layout fits neatly on a grid. Each position is either floor (.), an empty seat (L), or an occupied seat (#). For example, the initial seat layout might look like this:
# L.LL.LL.LL
# LLLLLLL.LL
# L.L.L..L..
# LLLL.LL.LL
# L.LL.LL.LL
# L.LLLLL.LL
# ..L.L.....
# LLLLLLLLLL
# L.LLLLLL.L
# L.LLLLL.LL
# Now, you just need to model the people who will be arriving shortly. Fortunately, people are entirely predictable and always follow a simple set of rules. All decisions are based on the number of occupied seats adjacent to a given seat (one of the eight positions immediately up, down, left, right, or diagonal from the seat). The following rules are applied to every seat simultaneously:
# If a seat is empty (L) and there are no occupied seats adjacent to it, the seat becomes occupied.
# If a seat is occupied (#) and four or more seats adjacent to it are also occupied, the seat becomes empty.
# Otherwise, the seat's state does not change.
# Floor (.) never changes; seats don't move, and nobody sits on the floor.
# After one round of these rules, every seat in the example layout becomes occupied:
# #.##.##.##
# #######.##
# #.#.#..#..
# ####.##.##
# #.##.##.##
# #.#####.##
# ..#.#.....
# ##########
# #.######.#
# #.#####.##
# After a second round, the seats with four or more occupied adjacent seats become empty again:
# #.LL.L#.##
# #LLLLLL.L#
# L.L.L..L..
# #LLL.LL.L#
# #.LL.LL.LL
# #.LLLL#.##
# ..L.L.....
# #LLLLLLLL#
# #.LLLLLL.L
# #.#LLLL.##
# This process continues for three more rounds:
# #.##.L#.##
# #L###LL.L#
# L.#.#..#..
# #L##.##.L#
# #.##.LL.LL
# #.###L#.##
# ..#.#.....
# #L######L#
# #.LL###L.L
# #.#L###.##
# #.#L.L#.##
# #LLL#LL.L#
# L.L.L..#..
# #LLL.##.L#
# #.LL.LL.LL
# #.LL#L#.##
# ..L.L.....
# #L#LLLL#L#
# #.LLLLLL.L
# #.#L#L#.##
# #.#L.L#.##
# #LLL#LL.L#
# L.#.L..#..
# #L##.##.L#
# #.#L.LL.LL
# #.#L#L#.##
# ..L.L.....
# #L#L##L#L#
# #.LLLLLL.L
# #.#L#L#.##
# At this point, something interesting happens: the chaos stabilizes and further applications of these rules cause no seats to change state! Once people stop moving around, you count 37 occupied seats.
# Simulate your seating area by applying the seating rules repeatedly until no seats change state. How many seats end up occupied?
def fileInput():
f = open(inputFile, 'r')
with open(inputFile) as f:
read_data = f.read().split('\n')
f.close()
return read_data
def splitSeats(data):
splitData = []
for row in data:
rowData = [seat for seat in row]
splitData.append(rowData)
return splitData
#size - [rows-1,cols-1]
#position - [x,y]
def listAdjSeats(position):
# [
# [-1,-1][0,-1][1,-1]
# [-1, 0][X, X][1, 0]
# [-1, 1][0, 1][1, 1]
# ]
x = position[0]
y = position[1]
allAdjSeats = [[x-1,y-1],[x,y-1],[x+1,y-1],[x-1,y],[x+1,y],[x-1,y+1],[x,y+1],[x+1,y+1]]
adjSeats = []
for location in allAdjSeats:
if (0 <= location[0] <= size[0]) and (0 <= location[1] <= size[1]):
adjSeats.append(location)
return adjSeats
# L = Empty Seat
# # = Taken Seat
# . = Floor
# Rule 1: If seat is empty and all adjacent (up,down,left,right,all diagonals) are empty, seat is taken
# Rule 2: If seat is taken, look if 4 or more adjacent seats are taken.
# - If more than 4 are taken, empty seat
# - If less than 4 are taken, seat doesn't change
# Compare the input with the output. If the same, END
def processSeats(data):
newData = []
rowCount = 0
for row in data:
rowData = []
colCount = 0
for seat in row:
if seat != '.': #if not a floor
adjCount = 0
adjSeats = listAdjSeats([rowCount,colCount])
for adjSeat in adjSeats:
if data[adjSeat[0]][adjSeat[1]] == '#':
adjCount += 1
if seat == 'L' and adjCount == 0: #if rule 1 is good
rowData.append('#')
elif seat == '#' and adjCount < 4: #if rule 2 is good
rowData.append('#')
else:
rowData.append('L')
# print(adjCount,'[',rowCount,colCount,']', adjSeats)
else: #if floor
rowData.append('.')
colCount += 1
newData.append(rowData)
rowCount += 1
return newData
def countTakenSeats(data):
seatCount = 0
for row in data:
for seat in row:
if seat == '#':
seatCount += 1
return seatCount
def checkSeats(data):
newData = processSeats(data)
# print('------')
# print(newData)
if data == newData:
return countTakenSeats(newData)
else:
return checkSeats(newData)
#///////////////////////////////////////////////////
inputFile = 'day11-input.txt'
if __name__ == "__main__":
data = fileInput()
data = splitSeats(data)
size = [len(data)-1,len(data[0])-1]
seatsTaken = checkSeats(data)
print(seatsTaken)
| def file_input():
f = open(inputFile, 'r')
with open(inputFile) as f:
read_data = f.read().split('\n')
f.close()
return read_data
def split_seats(data):
split_data = []
for row in data:
row_data = [seat for seat in row]
splitData.append(rowData)
return splitData
def list_adj_seats(position):
x = position[0]
y = position[1]
all_adj_seats = [[x - 1, y - 1], [x, y - 1], [x + 1, y - 1], [x - 1, y], [x + 1, y], [x - 1, y + 1], [x, y + 1], [x + 1, y + 1]]
adj_seats = []
for location in allAdjSeats:
if 0 <= location[0] <= size[0] and 0 <= location[1] <= size[1]:
adjSeats.append(location)
return adjSeats
def process_seats(data):
new_data = []
row_count = 0
for row in data:
row_data = []
col_count = 0
for seat in row:
if seat != '.':
adj_count = 0
adj_seats = list_adj_seats([rowCount, colCount])
for adj_seat in adjSeats:
if data[adjSeat[0]][adjSeat[1]] == '#':
adj_count += 1
if seat == 'L' and adjCount == 0:
rowData.append('#')
elif seat == '#' and adjCount < 4:
rowData.append('#')
else:
rowData.append('L')
else:
rowData.append('.')
col_count += 1
newData.append(rowData)
row_count += 1
return newData
def count_taken_seats(data):
seat_count = 0
for row in data:
for seat in row:
if seat == '#':
seat_count += 1
return seatCount
def check_seats(data):
new_data = process_seats(data)
if data == newData:
return count_taken_seats(newData)
else:
return check_seats(newData)
input_file = 'day11-input.txt'
if __name__ == '__main__':
data = file_input()
data = split_seats(data)
size = [len(data) - 1, len(data[0]) - 1]
seats_taken = check_seats(data)
print(seatsTaken) |
# WiFi credentials
wifi_ssid = "YourSSID"
wifi_password = "YourPassword"
# Ubidots credentials
ubidots_token = "YourToken"
| wifi_ssid = 'YourSSID'
wifi_password = 'YourPassword'
ubidots_token = 'YourToken' |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 28 20:38:28 2020
@author: Chetan Patil
"""
# server properties
KAFKA_SERVERS='server1:9092,server2:9092,server3:9092'
KAFKA_SERVERS_DEV='localhost:9092'
# transaction message properties
TRANSACTION_MESSAGE_MERCHANT_ID='merchant_id'
TRANSACTION_MESSAGE_USER_ID='user_id'
TRANSACTION_MESSAGE_TRANSACTION_TIMESTAMP='transaction_ts'
TRANSACTION_MESSAGE_TRANSACTION_MILLIS='transaction_millis'
TRANSACTION_MESSAGE_TRANSACTION_AMOUNT='amount'
TRANSACTION_MESSAGE_TRANSACTION_LATITUDE='transaction_latitude'
TRANSACTION_MESSAGE_TRANSACTION_LONGITUDE='transaction_longitude'
# user location message properties
USER_LOCATION_MESSAGE_USER_ID='user_id'
USER_LOCATION_MESSAGE_TIMESTAMP='user_location_ts'
USER_LOCATION_MESSAGE_MILLIS='user_location_millis'
USER_LOCATION_MESSAGE_LATITUDE='user_location_latitude'
USER_LOCATION_MESSAGE_LONGITUDE='user_location_longitude'
# consumer json message properties
DISTANCE='DISTANCE'
ULOC_MESSAGE_USER_ID='ULOC_USER_ID'
ULOC_MESSAGE_LOCATION_TIME='USER_LOCATION_TIME'
ULOC_MESSAGE_LOCATION_MILLIS='USER_LOCATION_MILLIS'
ULOC_MESSAGE_ULOC_LATITUDE='ULOC_LATITUDE'
ULOC_MESSAGE_ULOC_LONGITUDE='ULOC_LONGITUDE'
TLOC_MESSAGE_USER_ID='USER_ID'
TLOC_MESSAGE_MERCHANT_ID='MERCHANT_ID'
TLOC_MESSAGE_TRANSACTION_TIME='TRANSACTION_TIME'
TLOC_MESSAGE_TRANSACTION_MILLIS='TRANSACTION_MILLIS'
TLOC_MESSAGE_AMOUNT='AMOUNT'
TLOC_MESSAGE_TLOC_LATITUDE='TLOC_LATITUDE'
TLOC_MESSAGE_TLOC_LONGITUDE='TLOC_LONGITUDE'
# Topic Names
TRANSACTION_TOPIC_NAME='credit_card_transaction'
USER_TOPIC_NAME='user_location'
MY_CONSUMER_GROUP='myconsumergroup'
COMBINED_TRANSACTION_USER_LOCATION_TOPIC_NAME='COMBINED_TRANSACTION_USER_LOCATION'
CONSUMER_OFFSET_RESET_EARLIEST='earliest'
| """
Created on Sun Jun 28 20:38:28 2020
@author: Chetan Patil
"""
kafka_servers = 'server1:9092,server2:9092,server3:9092'
kafka_servers_dev = 'localhost:9092'
transaction_message_merchant_id = 'merchant_id'
transaction_message_user_id = 'user_id'
transaction_message_transaction_timestamp = 'transaction_ts'
transaction_message_transaction_millis = 'transaction_millis'
transaction_message_transaction_amount = 'amount'
transaction_message_transaction_latitude = 'transaction_latitude'
transaction_message_transaction_longitude = 'transaction_longitude'
user_location_message_user_id = 'user_id'
user_location_message_timestamp = 'user_location_ts'
user_location_message_millis = 'user_location_millis'
user_location_message_latitude = 'user_location_latitude'
user_location_message_longitude = 'user_location_longitude'
distance = 'DISTANCE'
uloc_message_user_id = 'ULOC_USER_ID'
uloc_message_location_time = 'USER_LOCATION_TIME'
uloc_message_location_millis = 'USER_LOCATION_MILLIS'
uloc_message_uloc_latitude = 'ULOC_LATITUDE'
uloc_message_uloc_longitude = 'ULOC_LONGITUDE'
tloc_message_user_id = 'USER_ID'
tloc_message_merchant_id = 'MERCHANT_ID'
tloc_message_transaction_time = 'TRANSACTION_TIME'
tloc_message_transaction_millis = 'TRANSACTION_MILLIS'
tloc_message_amount = 'AMOUNT'
tloc_message_tloc_latitude = 'TLOC_LATITUDE'
tloc_message_tloc_longitude = 'TLOC_LONGITUDE'
transaction_topic_name = 'credit_card_transaction'
user_topic_name = 'user_location'
my_consumer_group = 'myconsumergroup'
combined_transaction_user_location_topic_name = 'COMBINED_TRANSACTION_USER_LOCATION'
consumer_offset_reset_earliest = 'earliest' |
number_one = int(input())
number_two = int(input())
number_three = int(input())
for one in range(2, number_one + 1, 2):
for two in range(2, number_two + 1):
for three in range(2, number_three + 1, 2):
if two == 2 or two == 3 or two == 5 or two == 7:
print(f"{one} {two} {three}") | number_one = int(input())
number_two = int(input())
number_three = int(input())
for one in range(2, number_one + 1, 2):
for two in range(2, number_two + 1):
for three in range(2, number_three + 1, 2):
if two == 2 or two == 3 or two == 5 or (two == 7):
print(f'{one} {two} {three}') |
"""
This program shows how changing an outside variable from within a function
makes another variable!
In this program, print_something has its own variable called z. It can no longer
access the outer variable called z.
"""
z = 6.5
def print_something():
z = "hi"
print(z * 3)
print_something()
print(z * 3) | """
This program shows how changing an outside variable from within a function
makes another variable!
In this program, print_something has its own variable called z. It can no longer
access the outer variable called z.
"""
z = 6.5
def print_something():
z = 'hi'
print(z * 3)
print_something()
print(z * 3) |
#! /usr/bin/python
HOME_PATH = './'
CACHE_PATH = '/var/cache/obmc/'
FLASH_DOWNLOAD_PATH = "/tmp"
GPIO_BASE = 320
SYSTEM_NAME = "Garrison"
## System states
## state can change to next state in 2 ways:
## - a process emits a GotoSystemState signal with state name to goto
## - objects specified in EXIT_STATE_DEPEND have started
SYSTEM_STATES = [
'BASE_APPS',
'BMC_STARTING',
'BMC_READY',
'HOST_POWERING_ON',
'HOST_POWERED_ON',
'HOST_BOOTING',
'HOST_BOOTED',
'HOST_POWERED_OFF',
]
EXIT_STATE_DEPEND = {
'BASE_APPS' : {
'/org/openbmc/sensors': 0,
},
'BMC_STARTING' : {
'/org/openbmc/control/chassis0': 0,
'/org/openbmc/control/power0' : 0,
'/org/openbmc/control/host0' : 0,
'/org/openbmc/control/flash/bios' : 0,
},
}
## method will be called when state is entered
ENTER_STATE_CALLBACK = {
'HOST_POWERED_ON' : {
'boot' : {
'bus_name' : 'org.openbmc.control.Host',
'obj_name' : '/org/openbmc/control/host0',
'interface_name' : 'org.openbmc.control.Host',
},
},
'HOST_POWERED_OFF' : {
'setOff' : {
'bus_name' : 'org.openbmc.control.led',
'obj_name' : '/org/openbmc/control/led/identify',
'interface_name' : 'org.openbmc.Led',
}
},
'BMC_READY' : {
'setOn' : {
'bus_name' : 'org.openbmc.control.led',
'obj_name' : '/org/openbmc/control/led/beep',
'interface_name' : 'org.openbmc.Led',
},
'init' : {
'bus_name' : 'org.openbmc.control.Flash',
'obj_name' : '/org/openbmc/control/flash/bios',
'interface_name' : 'org.openbmc.Flash',
}
}
}
APPS = {
'startup_hacks' : {
'system_state' : 'BASE_APPS',
'start_process' : True,
'monitor_process' : False,
'process_name' : 'startup_hacks.sh',
},
'inventory' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'inventory_items.py',
'args' : [ SYSTEM_NAME ]
},
'hwmon' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'hwmon.py',
'args' : [ SYSTEM_NAME ]
},
'sensor_manager' : {
'system_state' : 'BASE_APPS',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'sensor_manager2.py',
'args' : [ SYSTEM_NAME ]
},
'host_watchdog' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'host_watchdog.exe',
},
'power_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'power_control.exe',
'args' : [ '3000', '10' ]
},
'power_button' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'button_power.exe',
},
'reset_button' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'button_reset.exe',
},
'led_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'led_controller.exe',
},
'flash_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'flash_bios.exe',
},
'bmc_flash_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'bmc_update.py',
},
'download_manager' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'download_manager.py',
'args' : [ SYSTEM_NAME ]
},
'host_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'control_host.exe',
},
'chassis_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'chassis_control.py',
},
'restore' : {
'system_state' : 'BMC_READY',
'start_process' : True,
'monitor_process' : False,
'process_name' : 'discover_system_state.py',
},
'bmc_control' : {
'system_state' : 'BMC_STARTING',
'start_process' : True,
'monitor_process' : True,
'process_name' : 'control_bmc.exe',
},
}
CACHED_INTERFACES = {
"org.openbmc.InventoryItem" : True,
"org.openbmc.control.Chassis" : True,
}
INVENTORY_ROOT = '/org/openbmc/inventory'
FRU_INSTANCES = {
'<inventory_root>/system' : { 'fru_type' : 'SYSTEM','is_fru' : True, 'present' : "True" },
'<inventory_root>/system/bios' : { 'fru_type' : 'SYSTEM','is_fru' : True, 'present' : "True" },
'<inventory_root>/system/misc' : { 'fru_type' : 'SYSTEM','is_fru' : False, },
'<inventory_root>/system/chassis' : { 'fru_type' : 'SYSTEM','is_fru' : True, 'present' : "True" },
'<inventory_root>/system/chassis/motherboard' : { 'fru_type' : 'MAIN_PLANAR','is_fru' : True, },
'<inventory_root>/system/systemevent' : { 'fru_type' : 'SYSTEM_EVENT', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/refclock' : { 'fru_type' : 'MAIN_PLANAR', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/pcieclock': { 'fru_type' : 'MAIN_PLANAR', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/todclock' : { 'fru_type' : 'MAIN_PLANAR', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/apss' : { 'fru_type' : 'MAIN_PLANAR', 'is_fru' : False, },
'<inventory_root>/system/chassis/fan0' : { 'fru_type' : 'FAN','is_fru' : True, },
'<inventory_root>/system/chassis/fan1' : { 'fru_type' : 'FAN','is_fru' : True, },
'<inventory_root>/system/chassis/fan2' : { 'fru_type' : 'FAN','is_fru' : True, },
'<inventory_root>/system/chassis/fan3' : { 'fru_type' : 'FAN','is_fru' : True, },
'<inventory_root>/system/chassis/motherboard/bmc' : { 'fru_type' : 'BMC','is_fru' : False, 'manufacturer' : 'ASPEED' },
'<inventory_root>/system/chassis/motherboard/cpu0' : { 'fru_type' : 'CPU', 'is_fru' : True, },
'<inventory_root>/system/chassis/motherboard/cpu1' : { 'fru_type' : 'CPU', 'is_fru' : True, },
'<inventory_root>/system/chassis/motherboard/cpu0/core0' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core1' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core2' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core3' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core4' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core5' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core6' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core7' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core8' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core9' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core10': { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu0/core11': { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core0' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core1' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core2' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core3' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core4' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core5' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core6' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core7' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core8' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core9' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core10' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/cpu1/core11' : { 'fru_type' : 'CORE', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf0' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf1' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf2' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf3' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf4' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf5' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf6' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/membuf7' : { 'fru_type' : 'MEMORY_BUFFER', 'is_fru' : False, },
'<inventory_root>/system/chassis/motherboard/dimm0' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm1' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm2' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm3' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm4' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm5' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm6' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm7' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm8' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm9' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm10' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm11' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm12' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm13' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm14' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm15' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm16' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm17' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm18' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm19' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm20' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm21' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm22' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm23' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm24' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm25' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm26' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm27' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm28' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm29' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm30' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
'<inventory_root>/system/chassis/motherboard/dimm31' : { 'fru_type' : 'DIMM', 'is_fru' : True,},
}
ID_LOOKUP = {
'FRU' : {
0x01 : '<inventory_root>/system/chassis/motherboard/cpu0',
0x02 : '<inventory_root>/system/chassis/motherboard/cpu1',
0x03 : '<inventory_root>/system/chassis/motherboard',
0x04 : '<inventory_root>/system/chassis/motherboard/membuf0',
0x05 : '<inventory_root>/system/chassis/motherboard/membuf1',
0x06 : '<inventory_root>/system/chassis/motherboard/membuf2',
0x07 : '<inventory_root>/system/chassis/motherboard/membuf3',
0x08 : '<inventory_root>/system/chassis/motherboard/membuf4',
0x09 : '<inventory_root>/system/chassis/motherboard/membuf5',
0x0c : '<inventory_root>/system/chassis/motherboard/dimm0',
0x0d : '<inventory_root>/system/chassis/motherboard/dimm1',
0x0e : '<inventory_root>/system/chassis/motherboard/dimm2',
0x0f : '<inventory_root>/system/chassis/motherboard/dimm3',
0x10 : '<inventory_root>/system/chassis/motherboard/dimm4',
0x11 : '<inventory_root>/system/chassis/motherboard/dimm5',
0x12 : '<inventory_root>/system/chassis/motherboard/dimm6',
0x13 : '<inventory_root>/system/chassis/motherboard/dimm7',
0x14 : '<inventory_root>/system/chassis/motherboard/dimm8',
0x15 : '<inventory_root>/system/chassis/motherboard/dimm9',
0x16 : '<inventory_root>/system/chassis/motherboard/dimm10',
0x17 : '<inventory_root>/system/chassis/motherboard/dimm11',
0x18 : '<inventory_root>/system/chassis/motherboard/dimm12',
0x19 : '<inventory_root>/system/chassis/motherboard/dimm13',
0x1a : '<inventory_root>/system/chassis/motherboard/dimm14',
0x1b : '<inventory_root>/system/chassis/motherboard/dimm15',
0x1c : '<inventory_root>/system/chassis/motherboard/dimm16',
0x1d : '<inventory_root>/system/chassis/motherboard/dimm17',
0x1e : '<inventory_root>/system/chassis/motherboard/dimm18',
0x1f : '<inventory_root>/system/chassis/motherboard/dimm19',
0x20 : '<inventory_root>/system/chassis/motherboard/dimm20',
0x21 : '<inventory_root>/system/chassis/motherboard/dimm21',
0x22 : '<inventory_root>/system/chassis/motherboard/dimm22',
0x23 : '<inventory_root>/system/chassis/motherboard/dimm23',
0x24 : '<inventory_root>/system/chassis/motherboard/dimm24',
0x25 : '<inventory_root>/system/chassis/motherboard/dimm25',
0x26 : '<inventory_root>/system/chassis/motherboard/dimm26',
0x27 : '<inventory_root>/system/chassis/motherboard/dimm27',
0x28 : '<inventory_root>/system/chassis/motherboard/dimm28',
0x29 : '<inventory_root>/system/chassis/motherboard/dimm29',
0x2a : '<inventory_root>/system/chassis/motherboard/dimm30',
0x2b : '<inventory_root>/system/chassis/motherboard/dimm31',
},
'FRU_STR' : {
'PRODUCT_0' : '<inventory_root>/system/bios',
'BOARD_1' : '<inventory_root>/system/chassis/motherboard/cpu0',
'BOARD_2' : '<inventory_root>/system/chassis/motherboard/cpu1',
'CHASSIS_3' : '<inventory_root>/system/chassis/motherboard',
'BOARD_3' : '<inventory_root>/system/misc',
'BOARD_4' : '<inventory_root>/system/chassis/motherboard/membuf0',
'BOARD_5' : '<inventory_root>/system/chassis/motherboard/membuf1',
'BOARD_6' : '<inventory_root>/system/chassis/motherboard/membuf2',
'BOARD_7' : '<inventory_root>/system/chassis/motherboard/membuf3',
'BOARD_8' : '<inventory_root>/system/chassis/motherboard/membuf4',
'BOARD_9' : '<inventory_root>/system/chassis/motherboard/membuf5',
'BOARD_10' : '<inventory_root>/system/chassis/motherboard/membuf6',
'BOARD_11' : '<inventory_root>/system/chassis/motherboard/membuf7',
'PRODUCT_12' : '<inventory_root>/system/chassis/motherboard/dimm0',
'PRODUCT_13' : '<inventory_root>/system/chassis/motherboard/dimm1',
'PRODUCT_14' : '<inventory_root>/system/chassis/motherboard/dimm2',
'PRODUCT_15' : '<inventory_root>/system/chassis/motherboard/dimm3',
'PRODUCT_16' : '<inventory_root>/system/chassis/motherboard/dimm4',
'PRODUCT_17' : '<inventory_root>/system/chassis/motherboard/dimm5',
'PRODUCT_18' : '<inventory_root>/system/chassis/motherboard/dimm6',
'PRODUCT_19' : '<inventory_root>/system/chassis/motherboard/dimm7',
'PRODUCT_20' : '<inventory_root>/system/chassis/motherboard/dimm8',
'PRODUCT_21' : '<inventory_root>/system/chassis/motherboard/dimm9',
'PRODUCT_22' : '<inventory_root>/system/chassis/motherboard/dimm10',
'PRODUCT_23' : '<inventory_root>/system/chassis/motherboard/dimm11',
'PRODUCT_24' : '<inventory_root>/system/chassis/motherboard/dimm12',
'PRODUCT_25' : '<inventory_root>/system/chassis/motherboard/dimm13',
'PRODUCT_26' : '<inventory_root>/system/chassis/motherboard/dimm14',
'PRODUCT_27' : '<inventory_root>/system/chassis/motherboard/dimm15',
'PRODUCT_28' : '<inventory_root>/system/chassis/motherboard/dimm16',
'PRODUCT_29' : '<inventory_root>/system/chassis/motherboard/dimm17',
'PRODUCT_30' : '<inventory_root>/system/chassis/motherboard/dimm18',
'PRODUCT_31' : '<inventory_root>/system/chassis/motherboard/dimm19',
'PRODUCT_32' : '<inventory_root>/system/chassis/motherboard/dimm20',
'PRODUCT_33' : '<inventory_root>/system/chassis/motherboard/dimm21',
'PRODUCT_34' : '<inventory_root>/system/chassis/motherboard/dimm22',
'PRODUCT_35' : '<inventory_root>/system/chassis/motherboard/dimm23',
'PRODUCT_36' : '<inventory_root>/system/chassis/motherboard/dimm24',
'PRODUCT_37' : '<inventory_root>/system/chassis/motherboard/dimm25',
'PRODUCT_38' : '<inventory_root>/system/chassis/motherboard/dimm26',
'PRODUCT_39' : '<inventory_root>/system/chassis/motherboard/dimm27',
'PRODUCT_40' : '<inventory_root>/system/chassis/motherboard/dimm28',
'PRODUCT_41' : '<inventory_root>/system/chassis/motherboard/dimm29',
'PRODUCT_42' : '<inventory_root>/system/chassis/motherboard/dimm30',
'PRODUCT_43' : '<inventory_root>/system/chassis/motherboard/dimm31',
'PRODUCT_47' : '<inventory_root>/system/misc',
},
'SENSOR' : {
0x04 : '/org/openbmc/sensors/host/HostStatus',
0x05 : '/org/openbmc/sensors/host/BootProgress',
0x08 : '/org/openbmc/sensors/host/cpu0/OccStatus',
0x09 : '/org/openbmc/sensors/host/cpu1/OccStatus',
0x0c : '<inventory_root>/system/chassis/motherboard/cpu0',
0x0e : '<inventory_root>/system/chassis/motherboard/cpu1',
0x1e : '<inventory_root>/system/chassis/motherboard/dimm3',
0x1f : '<inventory_root>/system/chassis/motherboard/dimm2',
0x20 : '<inventory_root>/system/chassis/motherboard/dimm1',
0x21 : '<inventory_root>/system/chassis/motherboard/dimm0',
0x22 : '<inventory_root>/system/chassis/motherboard/dimm7',
0x23 : '<inventory_root>/system/chassis/motherboard/dimm6',
0x24 : '<inventory_root>/system/chassis/motherboard/dimm5',
0x25 : '<inventory_root>/system/chassis/motherboard/dimm4',
0x26 : '<inventory_root>/system/chassis/motherboard/dimm11',
0x27 : '<inventory_root>/system/chassis/motherboard/dimm10',
0x28 : '<inventory_root>/system/chassis/motherboard/dimm9',
0x29 : '<inventory_root>/system/chassis/motherboard/dimm8',
0x2a : '<inventory_root>/system/chassis/motherboard/dimm15',
0x2b : '<inventory_root>/system/chassis/motherboard/dimm14',
0x2c : '<inventory_root>/system/chassis/motherboard/dimm13',
0x2d : '<inventory_root>/system/chassis/motherboard/dimm12',
0x2e : '<inventory_root>/system/chassis/motherboard/dimm19',
0x2f : '<inventory_root>/system/chassis/motherboard/dimm18',
0x30 : '<inventory_root>/system/chassis/motherboard/dimm17',
0x31 : '<inventory_root>/system/chassis/motherboard/dimm16',
0x32 : '<inventory_root>/system/chassis/motherboard/dimm23',
0x33 : '<inventory_root>/system/chassis/motherboard/dimm22',
0x34 : '<inventory_root>/system/chassis/motherboard/dimm21',
0x35 : '<inventory_root>/system/chassis/motherboard/dimm20',
0x36 : '<inventory_root>/system/chassis/motherboard/dimm27',
0x37 : '<inventory_root>/system/chassis/motherboard/dimm26',
0x38 : '<inventory_root>/system/chassis/motherboard/dimm25',
0x39 : '<inventory_root>/system/chassis/motherboard/dimm24',
0x3a : '<inventory_root>/system/chassis/motherboard/dimm31',
0x3b : '<inventory_root>/system/chassis/motherboard/dimm30',
0x3c : '<inventory_root>/system/chassis/motherboard/dimm29',
0x3d : '<inventory_root>/system/chassis/motherboard/dimm28',
0x3e : '<inventory_root>/system/chassis/motherboard/cpu0/core0',
0x3f : '<inventory_root>/system/chassis/motherboard/cpu0/core1',
0x40 : '<inventory_root>/system/chassis/motherboard/cpu0/core2',
0x41 : '<inventory_root>/system/chassis/motherboard/cpu0/core3',
0x42 : '<inventory_root>/system/chassis/motherboard/cpu0/core4',
0x43 : '<inventory_root>/system/chassis/motherboard/cpu0/core5',
0x44 : '<inventory_root>/system/chassis/motherboard/cpu0/core6',
0x45 : '<inventory_root>/system/chassis/motherboard/cpu0/core7',
0x46 : '<inventory_root>/system/chassis/motherboard/cpu0/core8',
0x47 : '<inventory_root>/system/chassis/motherboard/cpu0/core9',
0x48 : '<inventory_root>/system/chassis/motherboard/cpu0/core10',
0x49 : '<inventory_root>/system/chassis/motherboard/cpu0/core11',
0x4a : '<inventory_root>/system/chassis/motherboard/cpu1/core0',
0x4b : '<inventory_root>/system/chassis/motherboard/cpu1/core1',
0x4c : '<inventory_root>/system/chassis/motherboard/cpu1/core2',
0x4d : '<inventory_root>/system/chassis/motherboard/cpu1/core3',
0x4e : '<inventory_root>/system/chassis/motherboard/cpu1/core4',
0x4f : '<inventory_root>/system/chassis/motherboard/cpu1/core5',
0x50 : '<inventory_root>/system/chassis/motherboard/cpu1/core6',
0x51 : '<inventory_root>/system/chassis/motherboard/cpu1/core7',
0x52 : '<inventory_root>/system/chassis/motherboard/cpu1/core8',
0x53 : '<inventory_root>/system/chassis/motherboard/cpu1/core9',
0x54 : '<inventory_root>/system/chassis/motherboard/cpu1/core10',
0x55 : '<inventory_root>/system/chassis/motherboard/cpu1/core11',
0x56 : '<inventory_root>/system/chassis/motherboard/membuf0',
0x57 : '<inventory_root>/system/chassis/motherboard/membuf1',
0x58 : '<inventory_root>/system/chassis/motherboard/membuf2',
0x59 : '<inventory_root>/system/chassis/motherboard/membuf3',
0x5a : '<inventory_root>/system/chassis/motherboard/membuf4',
0x5b : '<inventory_root>/system/chassis/motherboard/membuf5',
0x5c : '<inventory_root>/system/chassis/motherboard/membuf6',
0x5d : '<inventory_root>/system/chassis/motherboard/membuf7',
0x5f : '/org/openbmc/sensors/host/BootCount',
0x60 : '<inventory_root>/system/chassis/motherboard',
0x61 : '<inventory_root>/system/systemevent',
0x62 : '<inventory_root>/system/powerlimit',
0x63 : '<inventory_root>/system/chassis/motherboard/refclock',
0x64 : '<inventory_root>/system/chassis/motherboard/pcieclock',
0xb1 : '<inventory_root>/system/chassis/motherboard/todclock',
0xb2 : '<inventory_root>/system/chassis/motherboard/apss',
0xb3 : '/org/openbmc/sensors/host/powercap',
0xb5 : '/org/openbmc/sensors/host/OperatingSystemStatus',
0xb6 : '<inventory_root>/system/chassis/motherboard/pcielink',
},
'GPIO_PRESENT' : {}
}
GPIO_CONFIG = {}
GPIO_CONFIG['BMC_POWER_UP'] = \
{'gpio_pin': 'D1', 'direction': 'out'}
GPIO_CONFIG['SYS_PWROK_BUFF'] = \
{'gpio_pin': 'D2', 'direction': 'in'}
GPIO_CONFIG['BMC_WD_CLEAR_PULSE_N'] = \
{'gpio_pin': 'N4', 'direction': 'out'}
GPIO_CONFIG['CM1_OE_R_N'] = \
{'gpio_pin': 'Q6', 'direction': 'out'}
GPIO_CONFIG['BMC_CP0_RESET_N'] = \
{'gpio_pin': 'O2', 'direction': 'out'}
GPIO_CONFIG['BMC_CFAM_RESET_N_R'] = \
{'gpio_pin': 'J2', 'direction': 'out'}
GPIO_CONFIG['PEX8718_DEVICES_RESET_N'] = \
{'gpio_pin': 'B6', 'direction': 'out'}
GPIO_CONFIG['CP0_DEVICES_RESET_N'] = \
{'gpio_pin': 'N3', 'direction': 'out'}
GPIO_CONFIG['CP1_DEVICES_RESET_N'] = \
{'gpio_pin': 'N5', 'direction': 'out'}
GPIO_CONFIG['FSI_DATA'] = \
{'gpio_pin': 'A5', 'direction': 'out'}
GPIO_CONFIG['FSI_CLK'] = \
{'gpio_pin': 'A4', 'direction': 'out'}
GPIO_CONFIG['FSI_ENABLE'] = \
{'gpio_pin': 'D0', 'direction': 'out'}
GPIO_CONFIG['CRONUS_SEL'] = \
{'gpio_pin': 'A6', 'direction': 'out'}
GPIO_CONFIG['BMC_THROTTLE'] = \
{'gpio_pin': 'J3', 'direction': 'out'}
GPIO_CONFIG['IDBTN'] = \
{ 'gpio_pin': 'Q7', 'direction': 'out' }
GPIO_CONFIG['POWER_BUTTON'] = \
{'gpio_pin': 'E0', 'direction': 'both'}
GPIO_CONFIG['RESET_BUTTON'] = \
{'gpio_pin': 'E4', 'direction': 'both'}
GPIO_CONFIG['PS0_PRES_N'] = \
{'gpio_pin': 'P7', 'direction': 'in'}
GPIO_CONFIG['PS1_PRES_N'] = \
{'gpio_pin': 'N0', 'direction': 'in'}
GPIO_CONFIG['CARD_PRES_N'] = \
{'gpio_pin': 'J0', 'direction': 'in'}
def convertGpio(name):
name = name.upper()
c = name[0:1]
offset = int(name[1:])
a = ord(c)-65
base = a*8+GPIO_BASE
return base+offset
HWMON_CONFIG = {
'4-0050' : {
'names' : {
'caps_curr_powercap' : { 'object_path' : 'powercap/curr_cap','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
'caps_curr_powerreading' : { 'object_path' : 'powercap/system_power','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
'caps_max_powercap' : { 'object_path' : 'powercap/max_cap','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
'caps_min_powercap' : { 'object_path' : 'powercap/min_cap','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
'caps_norm_powercap' : { 'object_path' : 'powercap/n_cap','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
'caps_user_powerlimit' : { 'object_path' : 'powercap/user_cap','poll_interval' : 10000,'scale' : 1,'units' : 'W' },
},
'labels' : {
'176' : { 'object_path' : 'temperature/cpu0/core0','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'177' : { 'object_path' : 'temperature/cpu0/core1','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'178' : { 'object_path' : 'temperature/cpu0/core2','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'179' : { 'object_path' : 'temperature/cpu0/core3','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'180' : { 'object_path' : 'temperature/cpu0/core4','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'181' : { 'object_path' : 'temperature/cpu0/core5','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'182' : { 'object_path' : 'temperature/cpu0/core6','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'183' : { 'object_path' : 'temperature/cpu0/core7','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'184' : { 'object_path' : 'temperature/cpu0/core8','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'185' : { 'object_path' : 'temperature/cpu0/core9','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'186' : { 'object_path' : 'temperature/cpu0/core10','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'187' : { 'object_path' : 'temperature/cpu0/core11','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'102' : { 'object_path' : 'temperature/dimm0','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'103' : { 'object_path' : 'temperature/dimm1','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'104' : { 'object_path' : 'temperature/dimm2','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'105' : { 'object_path' : 'temperature/dimm3','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'106' : { 'object_path' : 'temperature/dimm4','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'107' : { 'object_path' : 'temperature/dimm5','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'108' : { 'object_path' : 'temperature/dimm6','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'109' : { 'object_path' : 'temperature/dimm7','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'110' : { 'object_path' : 'temperature/dimm8','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'111' : { 'object_path' : 'temperature/dimm9','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'112' : { 'object_path' : 'temperature/dimm10','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'113' : { 'object_path' : 'temperature/dimm11','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'114' : { 'object_path' : 'temperature/dimm12','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'115' : { 'object_path' : 'temperature/dimm13','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'116' : { 'object_path' : 'temperature/dimm14','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'117' : { 'object_path' : 'temperature/dimm15','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'94' : { 'object_path' : 'temperature/membuf0','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'95' : { 'object_path' : 'temperature/membuf1','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'96' : { 'object_path' : 'temperature/membuf2','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'97' : { 'object_path' : 'temperature/membuf3','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
}
},
'5-0050' : {
'labels' : {
'188' : { 'object_path' : 'temperature/cpu1/core0','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'189' : { 'object_path' : 'temperature/cpu1/core1','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'190' : { 'object_path' : 'temperature/cpu1/core2','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'191' : { 'object_path' : 'temperature/cpu1/core3','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'192' : { 'object_path' : 'temperature/cpu1/core4','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'193' : { 'object_path' : 'temperature/cpu1/core5','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'194' : { 'object_path' : 'temperature/cpu1/core6','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'195' : { 'object_path' : 'temperature/cpu1/core7','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'196' : { 'object_path' : 'temperature/cpu1/core8','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'197' : { 'object_path' : 'temperature/cpu1/core9','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'198' : { 'object_path' : 'temperature/cpu1/core10','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'199' : { 'object_path' : 'temperature/cpu1/core11','poll_interval' : 5000,'scale' : 1000,'units' : 'C',
'critical_upper' : 100, 'critical_lower' : -100, 'warning_upper' : 90, 'warning_lower' : -99, 'emergency_enabled' : True },
'118' : { 'object_path' : 'temperature/dimm16','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'119' : { 'object_path' : 'temperature/dimm17','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'120' : { 'object_path' : 'temperature/dimm18','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'121' : { 'object_path' : 'temperature/dimm19','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'122' : { 'object_path' : 'temperature/dimm20','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'123' : { 'object_path' : 'temperature/dimm21','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'124' : { 'object_path' : 'temperature/dimm22','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'125' : { 'object_path' : 'temperature/dimm23','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'126' : { 'object_path' : 'temperature/dimm24','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'127' : { 'object_path' : 'temperature/dimm25','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'128' : { 'object_path' : 'temperature/dimm26','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'129' : { 'object_path' : 'temperature/dimm27','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'130' : { 'object_path' : 'temperature/dimm28','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'131' : { 'object_path' : 'temperature/dimm29','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'132' : { 'object_path' : 'temperature/dimm30','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'133' : { 'object_path' : 'temperature/dimm31','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'98' : { 'object_path' : 'temperature/membuf4','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'99' : { 'object_path' : 'temperature/membuf5','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'100' : { 'object_path' : 'temperature/membuf6','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
'101' : { 'object_path' : 'temperature/membuf7','poll_interval' : 5000,'scale' : 1000,'units' : 'C' },
}
},
}
# Miscellaneous non-poll sensor with system specific properties.
# The sensor id is the same as those defined in ID_LOOKUP['SENSOR'].
MISC_SENSORS = {
0x5f : { 'class' : 'BootCountSensor' },
0x05 : { 'class' : 'BootProgressSensor' },
0x08 : { 'class' : 'OccStatusSensor',
'os_path' : '/sys/class/i2c-adapter/i2c-3/3-0050/online' },
0x09 : { 'class' : 'OccStatusSensor',
'os_path' : '/sys/class/i2c-adapter/i2c-3/3-0051/online' },
0xb5 : { 'class' : 'OperatingSystemStatusSensor' },
0xb3 : { 'class' : 'PowerCap',
'os_path' : '/sys/class/hwmon/hwmon3/user_powercap' },
}
| home_path = './'
cache_path = '/var/cache/obmc/'
flash_download_path = '/tmp'
gpio_base = 320
system_name = 'Garrison'
system_states = ['BASE_APPS', 'BMC_STARTING', 'BMC_READY', 'HOST_POWERING_ON', 'HOST_POWERED_ON', 'HOST_BOOTING', 'HOST_BOOTED', 'HOST_POWERED_OFF']
exit_state_depend = {'BASE_APPS': {'/org/openbmc/sensors': 0}, 'BMC_STARTING': {'/org/openbmc/control/chassis0': 0, '/org/openbmc/control/power0': 0, '/org/openbmc/control/host0': 0, '/org/openbmc/control/flash/bios': 0}}
enter_state_callback = {'HOST_POWERED_ON': {'boot': {'bus_name': 'org.openbmc.control.Host', 'obj_name': '/org/openbmc/control/host0', 'interface_name': 'org.openbmc.control.Host'}}, 'HOST_POWERED_OFF': {'setOff': {'bus_name': 'org.openbmc.control.led', 'obj_name': '/org/openbmc/control/led/identify', 'interface_name': 'org.openbmc.Led'}}, 'BMC_READY': {'setOn': {'bus_name': 'org.openbmc.control.led', 'obj_name': '/org/openbmc/control/led/beep', 'interface_name': 'org.openbmc.Led'}, 'init': {'bus_name': 'org.openbmc.control.Flash', 'obj_name': '/org/openbmc/control/flash/bios', 'interface_name': 'org.openbmc.Flash'}}}
apps = {'startup_hacks': {'system_state': 'BASE_APPS', 'start_process': True, 'monitor_process': False, 'process_name': 'startup_hacks.sh'}, 'inventory': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'inventory_items.py', 'args': [SYSTEM_NAME]}, 'hwmon': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'hwmon.py', 'args': [SYSTEM_NAME]}, 'sensor_manager': {'system_state': 'BASE_APPS', 'start_process': True, 'monitor_process': True, 'process_name': 'sensor_manager2.py', 'args': [SYSTEM_NAME]}, 'host_watchdog': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'host_watchdog.exe'}, 'power_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'power_control.exe', 'args': ['3000', '10']}, 'power_button': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'button_power.exe'}, 'reset_button': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'button_reset.exe'}, 'led_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'led_controller.exe'}, 'flash_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'flash_bios.exe'}, 'bmc_flash_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'bmc_update.py'}, 'download_manager': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'download_manager.py', 'args': [SYSTEM_NAME]}, 'host_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'control_host.exe'}, 'chassis_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'chassis_control.py'}, 'restore': {'system_state': 'BMC_READY', 'start_process': True, 'monitor_process': False, 'process_name': 'discover_system_state.py'}, 'bmc_control': {'system_state': 'BMC_STARTING', 'start_process': True, 'monitor_process': True, 'process_name': 'control_bmc.exe'}}
cached_interfaces = {'org.openbmc.InventoryItem': True, 'org.openbmc.control.Chassis': True}
inventory_root = '/org/openbmc/inventory'
fru_instances = {'<inventory_root>/system': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': 'True'}, '<inventory_root>/system/bios': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': 'True'}, '<inventory_root>/system/misc': {'fru_type': 'SYSTEM', 'is_fru': False}, '<inventory_root>/system/chassis': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': 'True'}, '<inventory_root>/system/chassis/motherboard': {'fru_type': 'MAIN_PLANAR', 'is_fru': True}, '<inventory_root>/system/systemevent': {'fru_type': 'SYSTEM_EVENT', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/refclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/pcieclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/todclock': {'fru_type': 'MAIN_PLANAR', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/apss': {'fru_type': 'MAIN_PLANAR', 'is_fru': False}, '<inventory_root>/system/chassis/fan0': {'fru_type': 'FAN', 'is_fru': True}, '<inventory_root>/system/chassis/fan1': {'fru_type': 'FAN', 'is_fru': True}, '<inventory_root>/system/chassis/fan2': {'fru_type': 'FAN', 'is_fru': True}, '<inventory_root>/system/chassis/fan3': {'fru_type': 'FAN', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/bmc': {'fru_type': 'BMC', 'is_fru': False, 'manufacturer': 'ASPEED'}, '<inventory_root>/system/chassis/motherboard/cpu0': {'fru_type': 'CPU', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/cpu1': {'fru_type': 'CPU', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/cpu0/core0': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core1': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core2': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core3': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core4': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core5': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core6': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core7': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core8': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core9': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core10': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu0/core11': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core0': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core1': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core2': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core3': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core4': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core5': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core6': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core7': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core8': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core9': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core10': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/cpu1/core11': {'fru_type': 'CORE', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf0': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf1': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf2': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf3': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf4': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf5': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf6': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/membuf7': {'fru_type': 'MEMORY_BUFFER', 'is_fru': False}, '<inventory_root>/system/chassis/motherboard/dimm0': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm1': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm2': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm3': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm4': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm5': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm6': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm7': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm8': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm9': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm10': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm11': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm12': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm13': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm14': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm15': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm16': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm17': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm18': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm19': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm20': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm21': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm22': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm23': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm24': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm25': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm26': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm27': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm28': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm29': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm30': {'fru_type': 'DIMM', 'is_fru': True}, '<inventory_root>/system/chassis/motherboard/dimm31': {'fru_type': 'DIMM', 'is_fru': True}}
id_lookup = {'FRU': {1: '<inventory_root>/system/chassis/motherboard/cpu0', 2: '<inventory_root>/system/chassis/motherboard/cpu1', 3: '<inventory_root>/system/chassis/motherboard', 4: '<inventory_root>/system/chassis/motherboard/membuf0', 5: '<inventory_root>/system/chassis/motherboard/membuf1', 6: '<inventory_root>/system/chassis/motherboard/membuf2', 7: '<inventory_root>/system/chassis/motherboard/membuf3', 8: '<inventory_root>/system/chassis/motherboard/membuf4', 9: '<inventory_root>/system/chassis/motherboard/membuf5', 12: '<inventory_root>/system/chassis/motherboard/dimm0', 13: '<inventory_root>/system/chassis/motherboard/dimm1', 14: '<inventory_root>/system/chassis/motherboard/dimm2', 15: '<inventory_root>/system/chassis/motherboard/dimm3', 16: '<inventory_root>/system/chassis/motherboard/dimm4', 17: '<inventory_root>/system/chassis/motherboard/dimm5', 18: '<inventory_root>/system/chassis/motherboard/dimm6', 19: '<inventory_root>/system/chassis/motherboard/dimm7', 20: '<inventory_root>/system/chassis/motherboard/dimm8', 21: '<inventory_root>/system/chassis/motherboard/dimm9', 22: '<inventory_root>/system/chassis/motherboard/dimm10', 23: '<inventory_root>/system/chassis/motherboard/dimm11', 24: '<inventory_root>/system/chassis/motherboard/dimm12', 25: '<inventory_root>/system/chassis/motherboard/dimm13', 26: '<inventory_root>/system/chassis/motherboard/dimm14', 27: '<inventory_root>/system/chassis/motherboard/dimm15', 28: '<inventory_root>/system/chassis/motherboard/dimm16', 29: '<inventory_root>/system/chassis/motherboard/dimm17', 30: '<inventory_root>/system/chassis/motherboard/dimm18', 31: '<inventory_root>/system/chassis/motherboard/dimm19', 32: '<inventory_root>/system/chassis/motherboard/dimm20', 33: '<inventory_root>/system/chassis/motherboard/dimm21', 34: '<inventory_root>/system/chassis/motherboard/dimm22', 35: '<inventory_root>/system/chassis/motherboard/dimm23', 36: '<inventory_root>/system/chassis/motherboard/dimm24', 37: '<inventory_root>/system/chassis/motherboard/dimm25', 38: '<inventory_root>/system/chassis/motherboard/dimm26', 39: '<inventory_root>/system/chassis/motherboard/dimm27', 40: '<inventory_root>/system/chassis/motherboard/dimm28', 41: '<inventory_root>/system/chassis/motherboard/dimm29', 42: '<inventory_root>/system/chassis/motherboard/dimm30', 43: '<inventory_root>/system/chassis/motherboard/dimm31'}, 'FRU_STR': {'PRODUCT_0': '<inventory_root>/system/bios', 'BOARD_1': '<inventory_root>/system/chassis/motherboard/cpu0', 'BOARD_2': '<inventory_root>/system/chassis/motherboard/cpu1', 'CHASSIS_3': '<inventory_root>/system/chassis/motherboard', 'BOARD_3': '<inventory_root>/system/misc', 'BOARD_4': '<inventory_root>/system/chassis/motherboard/membuf0', 'BOARD_5': '<inventory_root>/system/chassis/motherboard/membuf1', 'BOARD_6': '<inventory_root>/system/chassis/motherboard/membuf2', 'BOARD_7': '<inventory_root>/system/chassis/motherboard/membuf3', 'BOARD_8': '<inventory_root>/system/chassis/motherboard/membuf4', 'BOARD_9': '<inventory_root>/system/chassis/motherboard/membuf5', 'BOARD_10': '<inventory_root>/system/chassis/motherboard/membuf6', 'BOARD_11': '<inventory_root>/system/chassis/motherboard/membuf7', 'PRODUCT_12': '<inventory_root>/system/chassis/motherboard/dimm0', 'PRODUCT_13': '<inventory_root>/system/chassis/motherboard/dimm1', 'PRODUCT_14': '<inventory_root>/system/chassis/motherboard/dimm2', 'PRODUCT_15': '<inventory_root>/system/chassis/motherboard/dimm3', 'PRODUCT_16': '<inventory_root>/system/chassis/motherboard/dimm4', 'PRODUCT_17': '<inventory_root>/system/chassis/motherboard/dimm5', 'PRODUCT_18': '<inventory_root>/system/chassis/motherboard/dimm6', 'PRODUCT_19': '<inventory_root>/system/chassis/motherboard/dimm7', 'PRODUCT_20': '<inventory_root>/system/chassis/motherboard/dimm8', 'PRODUCT_21': '<inventory_root>/system/chassis/motherboard/dimm9', 'PRODUCT_22': '<inventory_root>/system/chassis/motherboard/dimm10', 'PRODUCT_23': '<inventory_root>/system/chassis/motherboard/dimm11', 'PRODUCT_24': '<inventory_root>/system/chassis/motherboard/dimm12', 'PRODUCT_25': '<inventory_root>/system/chassis/motherboard/dimm13', 'PRODUCT_26': '<inventory_root>/system/chassis/motherboard/dimm14', 'PRODUCT_27': '<inventory_root>/system/chassis/motherboard/dimm15', 'PRODUCT_28': '<inventory_root>/system/chassis/motherboard/dimm16', 'PRODUCT_29': '<inventory_root>/system/chassis/motherboard/dimm17', 'PRODUCT_30': '<inventory_root>/system/chassis/motherboard/dimm18', 'PRODUCT_31': '<inventory_root>/system/chassis/motherboard/dimm19', 'PRODUCT_32': '<inventory_root>/system/chassis/motherboard/dimm20', 'PRODUCT_33': '<inventory_root>/system/chassis/motherboard/dimm21', 'PRODUCT_34': '<inventory_root>/system/chassis/motherboard/dimm22', 'PRODUCT_35': '<inventory_root>/system/chassis/motherboard/dimm23', 'PRODUCT_36': '<inventory_root>/system/chassis/motherboard/dimm24', 'PRODUCT_37': '<inventory_root>/system/chassis/motherboard/dimm25', 'PRODUCT_38': '<inventory_root>/system/chassis/motherboard/dimm26', 'PRODUCT_39': '<inventory_root>/system/chassis/motherboard/dimm27', 'PRODUCT_40': '<inventory_root>/system/chassis/motherboard/dimm28', 'PRODUCT_41': '<inventory_root>/system/chassis/motherboard/dimm29', 'PRODUCT_42': '<inventory_root>/system/chassis/motherboard/dimm30', 'PRODUCT_43': '<inventory_root>/system/chassis/motherboard/dimm31', 'PRODUCT_47': '<inventory_root>/system/misc'}, 'SENSOR': {4: '/org/openbmc/sensors/host/HostStatus', 5: '/org/openbmc/sensors/host/BootProgress', 8: '/org/openbmc/sensors/host/cpu0/OccStatus', 9: '/org/openbmc/sensors/host/cpu1/OccStatus', 12: '<inventory_root>/system/chassis/motherboard/cpu0', 14: '<inventory_root>/system/chassis/motherboard/cpu1', 30: '<inventory_root>/system/chassis/motherboard/dimm3', 31: '<inventory_root>/system/chassis/motherboard/dimm2', 32: '<inventory_root>/system/chassis/motherboard/dimm1', 33: '<inventory_root>/system/chassis/motherboard/dimm0', 34: '<inventory_root>/system/chassis/motherboard/dimm7', 35: '<inventory_root>/system/chassis/motherboard/dimm6', 36: '<inventory_root>/system/chassis/motherboard/dimm5', 37: '<inventory_root>/system/chassis/motherboard/dimm4', 38: '<inventory_root>/system/chassis/motherboard/dimm11', 39: '<inventory_root>/system/chassis/motherboard/dimm10', 40: '<inventory_root>/system/chassis/motherboard/dimm9', 41: '<inventory_root>/system/chassis/motherboard/dimm8', 42: '<inventory_root>/system/chassis/motherboard/dimm15', 43: '<inventory_root>/system/chassis/motherboard/dimm14', 44: '<inventory_root>/system/chassis/motherboard/dimm13', 45: '<inventory_root>/system/chassis/motherboard/dimm12', 46: '<inventory_root>/system/chassis/motherboard/dimm19', 47: '<inventory_root>/system/chassis/motherboard/dimm18', 48: '<inventory_root>/system/chassis/motherboard/dimm17', 49: '<inventory_root>/system/chassis/motherboard/dimm16', 50: '<inventory_root>/system/chassis/motherboard/dimm23', 51: '<inventory_root>/system/chassis/motherboard/dimm22', 52: '<inventory_root>/system/chassis/motherboard/dimm21', 53: '<inventory_root>/system/chassis/motherboard/dimm20', 54: '<inventory_root>/system/chassis/motherboard/dimm27', 55: '<inventory_root>/system/chassis/motherboard/dimm26', 56: '<inventory_root>/system/chassis/motherboard/dimm25', 57: '<inventory_root>/system/chassis/motherboard/dimm24', 58: '<inventory_root>/system/chassis/motherboard/dimm31', 59: '<inventory_root>/system/chassis/motherboard/dimm30', 60: '<inventory_root>/system/chassis/motherboard/dimm29', 61: '<inventory_root>/system/chassis/motherboard/dimm28', 62: '<inventory_root>/system/chassis/motherboard/cpu0/core0', 63: '<inventory_root>/system/chassis/motherboard/cpu0/core1', 64: '<inventory_root>/system/chassis/motherboard/cpu0/core2', 65: '<inventory_root>/system/chassis/motherboard/cpu0/core3', 66: '<inventory_root>/system/chassis/motherboard/cpu0/core4', 67: '<inventory_root>/system/chassis/motherboard/cpu0/core5', 68: '<inventory_root>/system/chassis/motherboard/cpu0/core6', 69: '<inventory_root>/system/chassis/motherboard/cpu0/core7', 70: '<inventory_root>/system/chassis/motherboard/cpu0/core8', 71: '<inventory_root>/system/chassis/motherboard/cpu0/core9', 72: '<inventory_root>/system/chassis/motherboard/cpu0/core10', 73: '<inventory_root>/system/chassis/motherboard/cpu0/core11', 74: '<inventory_root>/system/chassis/motherboard/cpu1/core0', 75: '<inventory_root>/system/chassis/motherboard/cpu1/core1', 76: '<inventory_root>/system/chassis/motherboard/cpu1/core2', 77: '<inventory_root>/system/chassis/motherboard/cpu1/core3', 78: '<inventory_root>/system/chassis/motherboard/cpu1/core4', 79: '<inventory_root>/system/chassis/motherboard/cpu1/core5', 80: '<inventory_root>/system/chassis/motherboard/cpu1/core6', 81: '<inventory_root>/system/chassis/motherboard/cpu1/core7', 82: '<inventory_root>/system/chassis/motherboard/cpu1/core8', 83: '<inventory_root>/system/chassis/motherboard/cpu1/core9', 84: '<inventory_root>/system/chassis/motherboard/cpu1/core10', 85: '<inventory_root>/system/chassis/motherboard/cpu1/core11', 86: '<inventory_root>/system/chassis/motherboard/membuf0', 87: '<inventory_root>/system/chassis/motherboard/membuf1', 88: '<inventory_root>/system/chassis/motherboard/membuf2', 89: '<inventory_root>/system/chassis/motherboard/membuf3', 90: '<inventory_root>/system/chassis/motherboard/membuf4', 91: '<inventory_root>/system/chassis/motherboard/membuf5', 92: '<inventory_root>/system/chassis/motherboard/membuf6', 93: '<inventory_root>/system/chassis/motherboard/membuf7', 95: '/org/openbmc/sensors/host/BootCount', 96: '<inventory_root>/system/chassis/motherboard', 97: '<inventory_root>/system/systemevent', 98: '<inventory_root>/system/powerlimit', 99: '<inventory_root>/system/chassis/motherboard/refclock', 100: '<inventory_root>/system/chassis/motherboard/pcieclock', 177: '<inventory_root>/system/chassis/motherboard/todclock', 178: '<inventory_root>/system/chassis/motherboard/apss', 179: '/org/openbmc/sensors/host/powercap', 181: '/org/openbmc/sensors/host/OperatingSystemStatus', 182: '<inventory_root>/system/chassis/motherboard/pcielink'}, 'GPIO_PRESENT': {}}
gpio_config = {}
GPIO_CONFIG['BMC_POWER_UP'] = {'gpio_pin': 'D1', 'direction': 'out'}
GPIO_CONFIG['SYS_PWROK_BUFF'] = {'gpio_pin': 'D2', 'direction': 'in'}
GPIO_CONFIG['BMC_WD_CLEAR_PULSE_N'] = {'gpio_pin': 'N4', 'direction': 'out'}
GPIO_CONFIG['CM1_OE_R_N'] = {'gpio_pin': 'Q6', 'direction': 'out'}
GPIO_CONFIG['BMC_CP0_RESET_N'] = {'gpio_pin': 'O2', 'direction': 'out'}
GPIO_CONFIG['BMC_CFAM_RESET_N_R'] = {'gpio_pin': 'J2', 'direction': 'out'}
GPIO_CONFIG['PEX8718_DEVICES_RESET_N'] = {'gpio_pin': 'B6', 'direction': 'out'}
GPIO_CONFIG['CP0_DEVICES_RESET_N'] = {'gpio_pin': 'N3', 'direction': 'out'}
GPIO_CONFIG['CP1_DEVICES_RESET_N'] = {'gpio_pin': 'N5', 'direction': 'out'}
GPIO_CONFIG['FSI_DATA'] = {'gpio_pin': 'A5', 'direction': 'out'}
GPIO_CONFIG['FSI_CLK'] = {'gpio_pin': 'A4', 'direction': 'out'}
GPIO_CONFIG['FSI_ENABLE'] = {'gpio_pin': 'D0', 'direction': 'out'}
GPIO_CONFIG['CRONUS_SEL'] = {'gpio_pin': 'A6', 'direction': 'out'}
GPIO_CONFIG['BMC_THROTTLE'] = {'gpio_pin': 'J3', 'direction': 'out'}
GPIO_CONFIG['IDBTN'] = {'gpio_pin': 'Q7', 'direction': 'out'}
GPIO_CONFIG['POWER_BUTTON'] = {'gpio_pin': 'E0', 'direction': 'both'}
GPIO_CONFIG['RESET_BUTTON'] = {'gpio_pin': 'E4', 'direction': 'both'}
GPIO_CONFIG['PS0_PRES_N'] = {'gpio_pin': 'P7', 'direction': 'in'}
GPIO_CONFIG['PS1_PRES_N'] = {'gpio_pin': 'N0', 'direction': 'in'}
GPIO_CONFIG['CARD_PRES_N'] = {'gpio_pin': 'J0', 'direction': 'in'}
def convert_gpio(name):
name = name.upper()
c = name[0:1]
offset = int(name[1:])
a = ord(c) - 65
base = a * 8 + GPIO_BASE
return base + offset
hwmon_config = {'4-0050': {'names': {'caps_curr_powercap': {'object_path': 'powercap/curr_cap', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}, 'caps_curr_powerreading': {'object_path': 'powercap/system_power', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}, 'caps_max_powercap': {'object_path': 'powercap/max_cap', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}, 'caps_min_powercap': {'object_path': 'powercap/min_cap', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}, 'caps_norm_powercap': {'object_path': 'powercap/n_cap', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}, 'caps_user_powerlimit': {'object_path': 'powercap/user_cap', 'poll_interval': 10000, 'scale': 1, 'units': 'W'}}, 'labels': {'176': {'object_path': 'temperature/cpu0/core0', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '177': {'object_path': 'temperature/cpu0/core1', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '178': {'object_path': 'temperature/cpu0/core2', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '179': {'object_path': 'temperature/cpu0/core3', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '180': {'object_path': 'temperature/cpu0/core4', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '181': {'object_path': 'temperature/cpu0/core5', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '182': {'object_path': 'temperature/cpu0/core6', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '183': {'object_path': 'temperature/cpu0/core7', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '184': {'object_path': 'temperature/cpu0/core8', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '185': {'object_path': 'temperature/cpu0/core9', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '186': {'object_path': 'temperature/cpu0/core10', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '187': {'object_path': 'temperature/cpu0/core11', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '102': {'object_path': 'temperature/dimm0', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '103': {'object_path': 'temperature/dimm1', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '104': {'object_path': 'temperature/dimm2', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '105': {'object_path': 'temperature/dimm3', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '106': {'object_path': 'temperature/dimm4', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '107': {'object_path': 'temperature/dimm5', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '108': {'object_path': 'temperature/dimm6', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '109': {'object_path': 'temperature/dimm7', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '110': {'object_path': 'temperature/dimm8', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '111': {'object_path': 'temperature/dimm9', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '112': {'object_path': 'temperature/dimm10', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '113': {'object_path': 'temperature/dimm11', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '114': {'object_path': 'temperature/dimm12', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '115': {'object_path': 'temperature/dimm13', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '116': {'object_path': 'temperature/dimm14', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '117': {'object_path': 'temperature/dimm15', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '94': {'object_path': 'temperature/membuf0', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '95': {'object_path': 'temperature/membuf1', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '96': {'object_path': 'temperature/membuf2', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '97': {'object_path': 'temperature/membuf3', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}}}, '5-0050': {'labels': {'188': {'object_path': 'temperature/cpu1/core0', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '189': {'object_path': 'temperature/cpu1/core1', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '190': {'object_path': 'temperature/cpu1/core2', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '191': {'object_path': 'temperature/cpu1/core3', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '192': {'object_path': 'temperature/cpu1/core4', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '193': {'object_path': 'temperature/cpu1/core5', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '194': {'object_path': 'temperature/cpu1/core6', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '195': {'object_path': 'temperature/cpu1/core7', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '196': {'object_path': 'temperature/cpu1/core8', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '197': {'object_path': 'temperature/cpu1/core9', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '198': {'object_path': 'temperature/cpu1/core10', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '199': {'object_path': 'temperature/cpu1/core11', 'poll_interval': 5000, 'scale': 1000, 'units': 'C', 'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90, 'warning_lower': -99, 'emergency_enabled': True}, '118': {'object_path': 'temperature/dimm16', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '119': {'object_path': 'temperature/dimm17', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '120': {'object_path': 'temperature/dimm18', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '121': {'object_path': 'temperature/dimm19', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '122': {'object_path': 'temperature/dimm20', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '123': {'object_path': 'temperature/dimm21', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '124': {'object_path': 'temperature/dimm22', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '125': {'object_path': 'temperature/dimm23', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '126': {'object_path': 'temperature/dimm24', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '127': {'object_path': 'temperature/dimm25', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '128': {'object_path': 'temperature/dimm26', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '129': {'object_path': 'temperature/dimm27', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '130': {'object_path': 'temperature/dimm28', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '131': {'object_path': 'temperature/dimm29', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '132': {'object_path': 'temperature/dimm30', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '133': {'object_path': 'temperature/dimm31', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '98': {'object_path': 'temperature/membuf4', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '99': {'object_path': 'temperature/membuf5', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '100': {'object_path': 'temperature/membuf6', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}, '101': {'object_path': 'temperature/membuf7', 'poll_interval': 5000, 'scale': 1000, 'units': 'C'}}}}
misc_sensors = {95: {'class': 'BootCountSensor'}, 5: {'class': 'BootProgressSensor'}, 8: {'class': 'OccStatusSensor', 'os_path': '/sys/class/i2c-adapter/i2c-3/3-0050/online'}, 9: {'class': 'OccStatusSensor', 'os_path': '/sys/class/i2c-adapter/i2c-3/3-0051/online'}, 181: {'class': 'OperatingSystemStatusSensor'}, 179: {'class': 'PowerCap', 'os_path': '/sys/class/hwmon/hwmon3/user_powercap'}} |
def fahrenheit_to_celsius(temp_f):
"""Convert temperature in Fahrenheit to Celsius
"""
temp_c = (temp_f-32)*(5.0/9.0)
return temp_c
| def fahrenheit_to_celsius(temp_f):
"""Convert temperature in Fahrenheit to Celsius
"""
temp_c = (temp_f - 32) * (5.0 / 9.0)
return temp_c |
class Table(object):
"""docstring for Table"""
def __init__(self, arg):
self.arg = arg
| class Table(object):
"""docstring for Table"""
def __init__(self, arg):
self.arg = arg |
A,B,C,D = map(float,input().split())
A = (A*2+B*3+C*4+D*1)/10
print(f'Media: {A:.1f}')
if A>=7.0:
print("Aluno aprovado.")
elif A<5.0:
print("Aluno reprovado.")
elif A>=5.0 and A<7.0:
print("Aluno em exame.")
N = float(input())
print(f'Nota do exame: {N:.1f}')
N = (A+N)/2
if N>=5.0:
print("Aluno aprovado.")
print(f'Media final: {N:.1f}')
else:
print("Aluno reprovado.")
print(f'Media final: {N:.1f}') | (a, b, c, d) = map(float, input().split())
a = (A * 2 + B * 3 + C * 4 + D * 1) / 10
print(f'Media: {A:.1f}')
if A >= 7.0:
print('Aluno aprovado.')
elif A < 5.0:
print('Aluno reprovado.')
elif A >= 5.0 and A < 7.0:
print('Aluno em exame.')
n = float(input())
print(f'Nota do exame: {N:.1f}')
n = (A + N) / 2
if N >= 5.0:
print('Aluno aprovado.')
print(f'Media final: {N:.1f}')
else:
print('Aluno reprovado.')
print(f'Media final: {N:.1f}') |
# this file contains the ascii art for our equipment
# HELMET
#
#
# SHIELD ARMOR WEAPON
#
#
# OTHER ITEMS ......
equipment = {'sword':[ ' /\ ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
' || ',
'o==o',
' II ',
' II ',
' II ',
' ** ',
],
'shield':[ ' | `-._/\_.-` |',
' | || |',
' | ___o()o___ |',
' | __((<>))__ |',
' | ___o()o___ |',
' | \/ |',
' \ o\/o /',
' \ || /',
' \ || /',
' ".||."',
],
'helmet': [ ' _,. ',
' ,` -.) ',
r' ( _/-\\-._ ',
' /,|`--._,-^|',
' \_| |`-._/||',
' | `-, / |',
' | || |',
' `r-._||/ ',
],
'armor': [ ' /-\__________/-\ ',
r' / \\ ||| ; \ ',
r' /_____\....::../\\ \ ',
' _/____/# \___,,__/--\_\ ',
' /____/ ######## \/\__\ ',
' /____/ ####### .\___\ ',
],
'staff': [ " .||, ",
" \.`',/ ",
" = ,. = ",
" / || \ ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
" || ",
],
'axe':[ ' /-./\_',
' : ||,>',
' \.--||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
' ||',
],
}
| equipment = {'sword': [' /\\ ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', 'o==o', ' II ', ' II ', ' II ', ' ** '], 'shield': [' | `-._/\\_.-` |', ' | || |', ' | ___o()o___ |', ' | __((<>))__ |', ' | ___o()o___ |', ' | \\/ |', ' \\ o\\/o /', ' \\ || /', ' \\ || /', ' ".||."'], 'helmet': [' _,. ', ' ,` -.) ', ' ( _/-\\\\-._ ', ' /,|`--._,-^|', ' \\_| |`-._/||', ' | `-, / |', ' | || |', ' `r-._||/ '], 'armor': [' /-\\__________/-\\ ', ' / \\\\ ||| ; \\ ', ' /_____\\....::../\\\\ \\ ', ' _/____/# \\___,,__/--\\_\\ ', ' /____/ ######## \\/\\__\\ ', ' /____/ ####### .\\___\\ '], 'staff': [' .||, ', " \\.`',/ ", ' = ,. = ', ' / || \\ ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || ', ' || '], 'axe': [' /-./\\_', ' : ||,>', ' \\.--||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||', ' ||']} |
'''
Given a fixed length array arr of integers, duplicate each occurrence of zero, shifting the remaining elements to the right.
Note that elements beyond the length of the original array are not written.
Do the above modifications to the input array in place, do not return anything from your function.
Example 1:
Input: [1,0,2,3,0,4,5,0]
Output: null
Explanation: After calling your function, the input array is modified to: [1,0,0,2,3,0,0,4]
Example 2:
Input: [1,2,3]
Output: null
Explanation: After calling your function, the input array is modified to: [1,2,3]
'''
def duplicateZeros2(arr):
z=1
for i in range(1,len(arr)):
if z<len(arr):
print("z is ",z, "and arr[i-1] is ",arr[i-1])
if arr[z-1]==0:
j=len(arr)-1
while j!=z:
arr[j]=arr[j-1]
j-=1
arr[j]=0
z+=1
print(arr)
z+=1
print(arr)
def duplicateZeros(arr):
i=1
while i<len(arr):
print("in while, i is ", i)
if arr[i-1]==0:
arr.insert(i,0)
arr.pop()
i+=1
i+=1
print(arr)
# print(duplicateZeros([1,0,2,3,0,4,5,0]))
print(duplicateZeros([1,5,2,0,6,8,0,6,0]))
# print(duplicateZeros([1,2,3])) | """
Given a fixed length array arr of integers, duplicate each occurrence of zero, shifting the remaining elements to the right.
Note that elements beyond the length of the original array are not written.
Do the above modifications to the input array in place, do not return anything from your function.
Example 1:
Input: [1,0,2,3,0,4,5,0]
Output: null
Explanation: After calling your function, the input array is modified to: [1,0,0,2,3,0,0,4]
Example 2:
Input: [1,2,3]
Output: null
Explanation: After calling your function, the input array is modified to: [1,2,3]
"""
def duplicate_zeros2(arr):
z = 1
for i in range(1, len(arr)):
if z < len(arr):
print('z is ', z, 'and arr[i-1] is ', arr[i - 1])
if arr[z - 1] == 0:
j = len(arr) - 1
while j != z:
arr[j] = arr[j - 1]
j -= 1
arr[j] = 0
z += 1
print(arr)
z += 1
print(arr)
def duplicate_zeros(arr):
i = 1
while i < len(arr):
print('in while, i is ', i)
if arr[i - 1] == 0:
arr.insert(i, 0)
arr.pop()
i += 1
i += 1
print(arr)
print(duplicate_zeros([1, 5, 2, 0, 6, 8, 0, 6, 0])) |
linha1 = input()
linha2 = input()
linha3 = input()
if(linha1 == 'vertebrado'):
if(linha2 == 'ave'):
if(linha3 == 'carnivoro'):
print('aguia')
else:
print('pomba')
else:
if(linha3 == 'onivoro'):
print('homem')
else:
print('vaca')
else:
if (linha2 == 'inseto'):
if (linha3 == 'hematofago'):
print('pulga')
else:
print('lagarta')
else:
if (linha3 == 'hematofago'):
print('sanguessuga')
else:
print('minhoca') | linha1 = input()
linha2 = input()
linha3 = input()
if linha1 == 'vertebrado':
if linha2 == 'ave':
if linha3 == 'carnivoro':
print('aguia')
else:
print('pomba')
elif linha3 == 'onivoro':
print('homem')
else:
print('vaca')
elif linha2 == 'inseto':
if linha3 == 'hematofago':
print('pulga')
else:
print('lagarta')
elif linha3 == 'hematofago':
print('sanguessuga')
else:
print('minhoca') |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def isUnivalTree(self, root: TreeNode) -> bool:
if root:
self.k=root.val
self.x=[]
self.x.append(root.val)
else:
return True
def abc(root):
if root:
if root.left:
self.x.append(root.left.val)
abc(root.left)
if root.right:
self.x.append(root.right.val)
abc(root.right)
abc(root)
print(len(self.x))
if len(set(self.x))!=1:
return False
return True
| class Solution:
def is_unival_tree(self, root: TreeNode) -> bool:
if root:
self.k = root.val
self.x = []
self.x.append(root.val)
else:
return True
def abc(root):
if root:
if root.left:
self.x.append(root.left.val)
abc(root.left)
if root.right:
self.x.append(root.right.val)
abc(root.right)
abc(root)
print(len(self.x))
if len(set(self.x)) != 1:
return False
return True |
def most_cash_prep(one_slot_size, start_p, wor_p, gap, lot_enhance):
"""
:param one_slot_size:
:param start_p:
:param wor_p:
:param gap: in percent, 0.05 means gap is 5% in strategy
:param lot_enhance: in percent, 0.3 means 1.3 in strategy
:return:
"""
grids = round(((start_p - wor_p) / start_p) / gap)
most = 0
for gid in range(0, grids + 1):
current_lot = round(one_slot_size * ((1 + lot_enhance) ** gid), 3)
most += current_lot
print("Gid ", gid,
(("%.0f" % (-1 * gid * gap * 100))+"%").ljust(4),
": price %.3f" % (start_p * (1 - gap * gid)),
", lot %.3f" % current_lot, ", whole lot %.3f" % most)
return most
# 500ETF 2018.07.18
print('500ETF')
most_cash_prep(one_slot_size=10000, start_p=5.436, wor_p=4.884, gap=0.02, lot_enhance=0.4)
print('--------------------------------')
# HuaBao 2018.07.18
print('HuaBao')
most_cash_prep(one_slot_size=7000, start_p=0.694, wor_p=0.5, gap=0.05, lot_enhance=0.4)
print('--------------------------------')
# 50ETF 2018.07.20
print('50ETF')
most_cash_prep(one_slot_size=10000, start_p=2.540, wor_p=2.000, gap=0.04, lot_enhance=0.4)
| def most_cash_prep(one_slot_size, start_p, wor_p, gap, lot_enhance):
"""
:param one_slot_size:
:param start_p:
:param wor_p:
:param gap: in percent, 0.05 means gap is 5% in strategy
:param lot_enhance: in percent, 0.3 means 1.3 in strategy
:return:
"""
grids = round((start_p - wor_p) / start_p / gap)
most = 0
for gid in range(0, grids + 1):
current_lot = round(one_slot_size * (1 + lot_enhance) ** gid, 3)
most += current_lot
print('Gid ', gid, ('%.0f' % (-1 * gid * gap * 100) + '%').ljust(4), ': price %.3f' % (start_p * (1 - gap * gid)), ', lot %.3f' % current_lot, ', whole lot %.3f' % most)
return most
print('500ETF')
most_cash_prep(one_slot_size=10000, start_p=5.436, wor_p=4.884, gap=0.02, lot_enhance=0.4)
print('--------------------------------')
print('HuaBao')
most_cash_prep(one_slot_size=7000, start_p=0.694, wor_p=0.5, gap=0.05, lot_enhance=0.4)
print('--------------------------------')
print('50ETF')
most_cash_prep(one_slot_size=10000, start_p=2.54, wor_p=2.0, gap=0.04, lot_enhance=0.4) |
T = int(input())
for _ in range(T):
M, H = map(int, input().split())
B = M//H**2
if B<=18:
print(1)
elif B in range(19, 25):
print(2)
elif B in range(25, 30):
print(3)
else:
print(4) | t = int(input())
for _ in range(T):
(m, h) = map(int, input().split())
b = M // H ** 2
if B <= 18:
print(1)
elif B in range(19, 25):
print(2)
elif B in range(25, 30):
print(3)
else:
print(4) |
'''
URL: https://leetcode.com/problems/regular-expression-matching/description/
Time complexity: O(n*m)
Space complexity: O(n*m)
'''
class Solution(object):
def isMatch(self, s, p):
"""
:type s: str
:type p: str
:rtype: bool
"""
is_match = [[False for j in range(len(p)+1)] for i in range(len(s)+1)]
is_match[0][0] = True
one_only_match = None
for j in range(1, len(p)+1):
if p[j-1] == "." or p[j-1].isalpha():
is_match[0][j] = False
if not one_only_match:
one_only_match = j
else:
if one_only_match == j-1 or not one_only_match:
is_match[0][j] = True
one_only_match = None
else:
is_match[0][j] = False
for i in range(1, len(s)+1):
for j in range(1, len(p)+1):
if s[i-1] == p[j-1] or p[j-1] == ".":
is_match[i][j] = is_match[i-1][j-1]
if is_match[i][j]: continue
if p[j-1] == "*":
is_match[i][j] = is_match[i][j-2]
if is_match[i][j]: continue
if s[i-1] == p[j-2] or p[j-2] == ".":
is_match[i][j] = is_match[i-1][j]
return is_match[-1][-1]
| """
URL: https://leetcode.com/problems/regular-expression-matching/description/
Time complexity: O(n*m)
Space complexity: O(n*m)
"""
class Solution(object):
def is_match(self, s, p):
"""
:type s: str
:type p: str
:rtype: bool
"""
is_match = [[False for j in range(len(p) + 1)] for i in range(len(s) + 1)]
is_match[0][0] = True
one_only_match = None
for j in range(1, len(p) + 1):
if p[j - 1] == '.' or p[j - 1].isalpha():
is_match[0][j] = False
if not one_only_match:
one_only_match = j
elif one_only_match == j - 1 or not one_only_match:
is_match[0][j] = True
one_only_match = None
else:
is_match[0][j] = False
for i in range(1, len(s) + 1):
for j in range(1, len(p) + 1):
if s[i - 1] == p[j - 1] or p[j - 1] == '.':
is_match[i][j] = is_match[i - 1][j - 1]
if is_match[i][j]:
continue
if p[j - 1] == '*':
is_match[i][j] = is_match[i][j - 2]
if is_match[i][j]:
continue
if s[i - 1] == p[j - 2] or p[j - 2] == '.':
is_match[i][j] = is_match[i - 1][j]
return is_match[-1][-1] |
ls = open('logs.txt').readlines()
for line in ls:
g = line.strip()
g = "/d/c186/FarnettoApps/" + g
print("echo loading %s"%(g))
print("java -Dserverid=farnetto -cp $CP farnetto.log4jconverter.Loader file:/%s 2>&1 | grep ERROR"%(g))
| ls = open('logs.txt').readlines()
for line in ls:
g = line.strip()
g = '/d/c186/FarnettoApps/' + g
print('echo loading %s' % g)
print('java -Dserverid=farnetto -cp $CP farnetto.log4jconverter.Loader file:/%s 2>&1 | grep ERROR' % g) |
#Your function definition goes here
def valid_date(date_str):
if len(date_str) == 8:
for ch in date_str:
if ch == "/":
return False
if ch.isalpha():
return False
date_str.split(".")
day = int(date_str[:2])
month = int(date_str[3:5])
year = int(date_str[-2:])
if day > 0 and day <= 31 and month >= 1 and month <= 12 and year >= 0 and year <= 99:
return True
return False
date_str = input("Enter a date: ")
if valid_date(date_str):
print("Date is valid")
else:
print("Date is invalid") | def valid_date(date_str):
if len(date_str) == 8:
for ch in date_str:
if ch == '/':
return False
if ch.isalpha():
return False
date_str.split('.')
day = int(date_str[:2])
month = int(date_str[3:5])
year = int(date_str[-2:])
if day > 0 and day <= 31 and (month >= 1) and (month <= 12) and (year >= 0) and (year <= 99):
return True
return False
date_str = input('Enter a date: ')
if valid_date(date_str):
print('Date is valid')
else:
print('Date is invalid') |
#Created with the Terminal ASCII Paint app by Michele Morelli - https://github.com/MicheleMorelli
def draw_house():
print(" "*64+"\n"+" "*64+"\n"+" "*5+"_"*33+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*28+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*28+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*2+" "*2+"|"+"/"+"|"+"#"*2+" "*3+"|"+"/"+"|"+"#"*5+" "*3+"|"+"/"+"|"+"#"*2+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*2+" "*2+"|"+"/"+"|"+"#"*2+" "*3+"|"+"/"+"|"+"#"*5+" "*3+"|"+"/"+"|"+"#"*2+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*2+" "*2+"|"+"/"+"|"+"#"*2+" "*3+"|"+"/"+"|"+"#"*5+" "*3+"|"+"/"+"|"+"#"*2+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*3+"|"+"/"+"|"+"#"*13+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*28+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*28+" "*26+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*7+"|"+"/"+"|"+"#"*9+" "*2+"_"*16+" "*8+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*7+"|"+"/"+"|"+"#"*9+" "+"|"+"/"*2+"|"+"#"*13+"|"+" "*7+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*7+"|"+"/"+"|"+"#"*9+" "+"|"+"/"*2+"|"+"#"*13+"|"+" "*7+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*7+"|"+"/"+"|"+"#"*9+" "+"|"+"/"*2+"|"+"#"*13+"|"+" "*7+"\n"+" "*4+"|"+"/"*4+"|"+"#"*9+" "*7+"|"+"/"+"|"+"#"*9+" "+"|"+"/"*2+"|"+"#"*13+"|"+" "*7+"\n")
draw_house()
def draw_sdfsdf():
print(" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n"+" "*64+"\n")
| def draw_house():
print(' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 5 + '_' * 33 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 28 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 28 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 2 + ' ' * 2 + '|' + '/' + '|' + '#' * 2 + ' ' * 3 + '|' + '/' + '|' + '#' * 5 + ' ' * 3 + '|' + '/' + '|' + '#' * 2 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 2 + ' ' * 2 + '|' + '/' + '|' + '#' * 2 + ' ' * 3 + '|' + '/' + '|' + '#' * 5 + ' ' * 3 + '|' + '/' + '|' + '#' * 2 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 2 + ' ' * 2 + '|' + '/' + '|' + '#' * 2 + ' ' * 3 + '|' + '/' + '|' + '#' * 5 + ' ' * 3 + '|' + '/' + '|' + '#' * 2 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 3 + '|' + '/' + '|' + '#' * 13 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 28 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 28 + ' ' * 26 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 7 + '|' + '/' + '|' + '#' * 9 + ' ' * 2 + '_' * 16 + ' ' * 8 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 7 + '|' + '/' + '|' + '#' * 9 + ' ' + '|' + '/' * 2 + '|' + '#' * 13 + '|' + ' ' * 7 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 7 + '|' + '/' + '|' + '#' * 9 + ' ' + '|' + '/' * 2 + '|' + '#' * 13 + '|' + ' ' * 7 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 7 + '|' + '/' + '|' + '#' * 9 + ' ' + '|' + '/' * 2 + '|' + '#' * 13 + '|' + ' ' * 7 + '\n' + ' ' * 4 + '|' + '/' * 4 + '|' + '#' * 9 + ' ' * 7 + '|' + '/' + '|' + '#' * 9 + ' ' + '|' + '/' * 2 + '|' + '#' * 13 + '|' + ' ' * 7 + '\n')
draw_house()
def draw_sdfsdf():
print(' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n' + ' ' * 64 + '\n') |
"""
observer pattern in python
"""
class Subject:
"""
# Represents what is being 'observed'
"""
def __init__(self):
self._observers = (
[]
) # This where references to all the observers are being kept
# Note that this is a one-to-many relationship: there will be one subject to be observed by multiple _observers
def attach(self, observer):
"""
# If the observer is not already in the observers list
# append the observer to the list
:param observer:
:return:
"""
if observer not in self._observers:
self._observers.append(observer)
def detach(self, observer):
"""
Simply remove the observer
:param observer:
:return:
"""
try:
self._observers.remove(observer)
except ValueError:
pass
def notify(self, modifier=None):
"""
notify
For all the observers in the list
Don't notify the observer who is actually updating the temperature
:param modifier:
:return:
"""
for observer in self._observers:
if modifier != observer:
observer.update(self) # Alert the observers!
class Core(Subject):
"""
# Inherits from the Subject class
"""
def __init__(self, name=""):
Subject.__init__(self)
self.core_name = name # Set the name of the core
self.core_temp = 0 # Initialize the temperature of the core
@property # Getter that gets the core temperature
def temp(self):
"""
:return:
"""
return self.core_temp
@temp.setter # Setter that sets the core temperature
def temp(self, temp):
self.core_temp = temp
self.notify() # Notify the observers whenever somebody changes the core temperature
class TempViewer:
"""
TempViewer
"""
@staticmethod
def update(subject):
"""
Alert method that is invoked when the notify() method in a concrete subject is invoked
:param subject:
:return:
"""
print(
"Temperature Viewer: {} has Temperature {}".format(
subject.core_name, subject.core_temp
)
)
def main():
"""
Let's create our subjects
Let's create our observers
Let's attach our observers to the first core
Let's change the temperature of our first core
:return:
"""
core1 = Core("Core 1")
core2 = Core("Core 2")
viewer1 = TempViewer()
viewer2 = TempViewer()
core1.attach(viewer1)
core1.attach(viewer2)
core1.temp = 80
core1.temp = 90
core2.temp = 100
if __name__ == "__main__":
main()
| """
observer pattern in python
"""
class Subject:
"""
# Represents what is being 'observed'
"""
def __init__(self):
self._observers = []
def attach(self, observer):
"""
# If the observer is not already in the observers list
# append the observer to the list
:param observer:
:return:
"""
if observer not in self._observers:
self._observers.append(observer)
def detach(self, observer):
"""
Simply remove the observer
:param observer:
:return:
"""
try:
self._observers.remove(observer)
except ValueError:
pass
def notify(self, modifier=None):
"""
notify
For all the observers in the list
Don't notify the observer who is actually updating the temperature
:param modifier:
:return:
"""
for observer in self._observers:
if modifier != observer:
observer.update(self)
class Core(Subject):
"""
# Inherits from the Subject class
"""
def __init__(self, name=''):
Subject.__init__(self)
self.core_name = name
self.core_temp = 0
@property
def temp(self):
"""
:return:
"""
return self.core_temp
@temp.setter
def temp(self, temp):
self.core_temp = temp
self.notify()
class Tempviewer:
"""
TempViewer
"""
@staticmethod
def update(subject):
"""
Alert method that is invoked when the notify() method in a concrete subject is invoked
:param subject:
:return:
"""
print('Temperature Viewer: {} has Temperature {}'.format(subject.core_name, subject.core_temp))
def main():
"""
Let's create our subjects
Let's create our observers
Let's attach our observers to the first core
Let's change the temperature of our first core
:return:
"""
core1 = core('Core 1')
core2 = core('Core 2')
viewer1 = temp_viewer()
viewer2 = temp_viewer()
core1.attach(viewer1)
core1.attach(viewer2)
core1.temp = 80
core1.temp = 90
core2.temp = 100
if __name__ == '__main__':
main() |
class Rectangle:
def __init__(self, width, height):
self.width = width
self.height = height
def set_width(self, width):
self.width = width
def set_height(self, height):
self.height = height
def get_area(self):
return self.height * self.width
def get_perimeter(self):
return 2 * (self.height + self.width)
def get_diagonal(self):
return (self.height ** 2 + self.width ** 2) ** 0.5
def get_picture(self):
ret_val = "*" * self.width
ret_val += "\n"
ret_val = ret_val * self.height
return ret_val
def get_amount_inside(self, shape):
self_area = self.get_area()
shape_area = shape.get_area()
assert(self_area > shape_area)
return self_area // shape_area
def __str__(self):
txt = "{classname} (width={width},height={height})"
return txt.format(classname=self.__class__.__name__, width=self.width, height=self.height)
class Square(Rectangle):
def __init__(self, side):
super().__init__(side, side)
def set_side(self, side):
self.width = side
self.height = side
def __str__(self):
txt = "{classname} (side={side})"
return txt.format(classname=self.__class__.__name__, side=self.width)
| class Rectangle:
def __init__(self, width, height):
self.width = width
self.height = height
def set_width(self, width):
self.width = width
def set_height(self, height):
self.height = height
def get_area(self):
return self.height * self.width
def get_perimeter(self):
return 2 * (self.height + self.width)
def get_diagonal(self):
return (self.height ** 2 + self.width ** 2) ** 0.5
def get_picture(self):
ret_val = '*' * self.width
ret_val += '\n'
ret_val = ret_val * self.height
return ret_val
def get_amount_inside(self, shape):
self_area = self.get_area()
shape_area = shape.get_area()
assert self_area > shape_area
return self_area // shape_area
def __str__(self):
txt = '{classname} (width={width},height={height})'
return txt.format(classname=self.__class__.__name__, width=self.width, height=self.height)
class Square(Rectangle):
def __init__(self, side):
super().__init__(side, side)
def set_side(self, side):
self.width = side
self.height = side
def __str__(self):
txt = '{classname} (side={side})'
return txt.format(classname=self.__class__.__name__, side=self.width) |
def details():
name = input("What is your name? ")
age = input("What is your age? ")
username = input("What is your Reddit username? ")
with open("python_get_details_details.txt", "a+", encoding="utf-8") as file:
file.write(name + " " + age + " " + username + "\n")
print("Your name is " + name + ", you are " + age + " years old, and your username is " + username + "!")
if __name__ == "__main__":
details()
| def details():
name = input('What is your name? ')
age = input('What is your age? ')
username = input('What is your Reddit username? ')
with open('python_get_details_details.txt', 'a+', encoding='utf-8') as file:
file.write(name + ' ' + age + ' ' + username + '\n')
print('Your name is ' + name + ', you are ' + age + ' years old, and your username is ' + username + '!')
if __name__ == '__main__':
details() |
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a sample controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - call exposes all registered services (none by default)
#########################################################################
@auth.requires_login()
def index():
response.flash=T('Welcome!')
notes=[lambda project:
A('Notes',_class="btn",_href=URL("default","note",args=[project.id]))]
grid=SQLFORM.grid(db.project, create=False,links=notes,
fields=[db.project.name,db.project.employee_name,db.project.company_name,
db.project.start_date,db.project.due_date,db.project.completed], deletable=False,
maxtextlength=50
)
return locals()
@auth.requires_login()
def note():
project=db.project(request.args(0))
db.note.post_id.default=project.id
form=crud.create(db.note) if auth.user else "Login to Post to the Project"
allnotes= db(db.note.post_id==project.id).select()
return locals()
@auth.requires_login()
def add():
project_form=SQLFORM(db.project).process()
return dict(project_form=project_form)
@auth.requires_login()
def company():
company_form=SQLFORM(db.company).process()
grid=SQLFORM.grid(db.company,create=False,deletable=False,editable=False, maxtextlength=50,
orderby=db.company.company_name)
return locals()
@auth.requires_login()
def employee():
employee_form=SQLFORM(db.auth_user).process()
grid=SQLFORM.grid(db.auth_user,create=False,fields=[db.auth_user.first_name,
db.auth_user.last_name,
db.auth_user.email],
deletable=False,editable=False,
maxtextlength=50
)
return locals()
def tester():
return locals()
def user():
"""
exposes:
http://..../[app]/default/user/login
http://..../[app]/default/user/logout
http://..../[app]/default/user/register
http://..../[app]/default/user/profile
http://..../[app]/default/user/retrieve_password
http://..../[app]/default/user/change_password
http://..../[app]/default/user/manage_users (requires membership in
use @auth.requires_login()
@auth.requires_membership('group name')
@auth.requires_permission('read','table name',record_id)
to decorate functions that need access control
"""
return dict(form=auth())
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
def call():
"""
exposes services. for example:
http://..../[app]/default/call/jsonrpc
decorate with @services.jsonrpc the functions to expose
supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
"""
return service()
@auth.requires_signature()
def data():
"""
http://..../[app]/default/data/tables
http://..../[app]/default/data/create/[table]
http://..../[app]/default/data/read/[table]/[id]
http://..../[app]/default/data/update/[table]/[id]
http://..../[app]/default/data/delete/[table]/[id]
http://..../[app]/default/data/select/[table]
http://..../[app]/default/data/search/[table]
but URLs must be signed, i.e. linked with
A('table',_href=URL('data/tables',user_signature=True))
or with the signed load operator
LOAD('default','data.load',args='tables',ajax=True,user_signature=True)
"""
return dict(form=crud())
| @auth.requires_login()
def index():
response.flash = t('Welcome!')
notes = [lambda project: a('Notes', _class='btn', _href=url('default', 'note', args=[project.id]))]
grid = SQLFORM.grid(db.project, create=False, links=notes, fields=[db.project.name, db.project.employee_name, db.project.company_name, db.project.start_date, db.project.due_date, db.project.completed], deletable=False, maxtextlength=50)
return locals()
@auth.requires_login()
def note():
project = db.project(request.args(0))
db.note.post_id.default = project.id
form = crud.create(db.note) if auth.user else 'Login to Post to the Project'
allnotes = db(db.note.post_id == project.id).select()
return locals()
@auth.requires_login()
def add():
project_form = sqlform(db.project).process()
return dict(project_form=project_form)
@auth.requires_login()
def company():
company_form = sqlform(db.company).process()
grid = SQLFORM.grid(db.company, create=False, deletable=False, editable=False, maxtextlength=50, orderby=db.company.company_name)
return locals()
@auth.requires_login()
def employee():
employee_form = sqlform(db.auth_user).process()
grid = SQLFORM.grid(db.auth_user, create=False, fields=[db.auth_user.first_name, db.auth_user.last_name, db.auth_user.email], deletable=False, editable=False, maxtextlength=50)
return locals()
def tester():
return locals()
def user():
"""
exposes:
http://..../[app]/default/user/login
http://..../[app]/default/user/logout
http://..../[app]/default/user/register
http://..../[app]/default/user/profile
http://..../[app]/default/user/retrieve_password
http://..../[app]/default/user/change_password
http://..../[app]/default/user/manage_users (requires membership in
use @auth.requires_login()
@auth.requires_membership('group name')
@auth.requires_permission('read','table name',record_id)
to decorate functions that need access control
"""
return dict(form=auth())
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
def call():
"""
exposes services. for example:
http://..../[app]/default/call/jsonrpc
decorate with @services.jsonrpc the functions to expose
supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
"""
return service()
@auth.requires_signature()
def data():
"""
http://..../[app]/default/data/tables
http://..../[app]/default/data/create/[table]
http://..../[app]/default/data/read/[table]/[id]
http://..../[app]/default/data/update/[table]/[id]
http://..../[app]/default/data/delete/[table]/[id]
http://..../[app]/default/data/select/[table]
http://..../[app]/default/data/search/[table]
but URLs must be signed, i.e. linked with
A('table',_href=URL('data/tables',user_signature=True))
or with the signed load operator
LOAD('default','data.load',args='tables',ajax=True,user_signature=True)
"""
return dict(form=crud()) |
"""
rapidapi.livescore
~~~~~~~~~~~~~~~~~~
RapidAPI LiveScore API modules.
@author: z33k
""" | """
rapidapi.livescore
~~~~~~~~~~~~~~~~~~
RapidAPI LiveScore API modules.
@author: z33k
""" |
def solve(n):
notebook = {}
for _ in range(n):
student, grade = input().split(' ')
if student not in notebook:
notebook[student] = []
notebook[student] += [float(grade)]
for st, gr in notebook.items():
print(f"{st} ->", end=' ')
[print(f"{x:.2f}", end=' ') for x in gr]
print(f"(avg: {(sum(gr)/len(gr)):.2f})")
solve(int(input()))
| def solve(n):
notebook = {}
for _ in range(n):
(student, grade) = input().split(' ')
if student not in notebook:
notebook[student] = []
notebook[student] += [float(grade)]
for (st, gr) in notebook.items():
print(f'{st} ->', end=' ')
[print(f'{x:.2f}', end=' ') for x in gr]
print(f'(avg: {sum(gr) / len(gr):.2f})')
solve(int(input())) |
"""
If the function can access the variables in the global scope.
So why do we need arguments? Explain with example.
"""
def greet(name):
return f'Hello, {name} Good morning!'
name1 = "Raj Nath Patel"
name2 = "Raj Kumar"
return_value = greet(name1) # Call function with return value
print(return_value)
return_value = greet(name2) # Call function with return value
print(return_value)
# Case 1: We have two names that we want to print
# Case 2: We don't want to create any variable
# Example positional argument
def greet_msg(name, msg):
return f'Hello, {name} {msg}'
name1 = "Raj Nath Patel"
name2 = "RaJ Kumar"
return_value = greet_msg("Raj Kumar", "Good morning!") # Call function with return value
print(return_value)
return_value = greet_msg(name1, "Good morning!") # Call function with return value
print(return_value)
return_value = greet_msg(name2, "How are you?") # Call function with return value
print(return_value)
# Example with default argument
def greet_msg_with_default(name, msg="Good Morning"):
return f'Hello, {name} {msg}'
return_value = greet_msg_with_default("Raj Kumar") # Will use default value for "msg" argument
print(return_value)
return_value = greet_msg_with_default("Raj Kumar", "How are you?") # Call function with return value
print(return_value)
# Example with Keyword arguments (related to function call)
return_value = greet_msg_with_default(msg="How are you?", name="Raj Nath Patel") # Call function with return value
print(return_value)
return_value = greet_msg_with_default(name="Raj Kumar", msg="How are you?", ) # Call function with return value
print(return_value)
return_value = greet_msg_with_default("Raj Nath Patel", msg="How are you?", ) # This is allowed
print(return_value)
# return_value = greet_msg(msg="How are you?", "Raj Nath Patel") # Uncommenting this will throw an error
# print(return_value)
| """
If the function can access the variables in the global scope.
So why do we need arguments? Explain with example.
"""
def greet(name):
return f'Hello, {name} Good morning!'
name1 = 'Raj Nath Patel'
name2 = 'Raj Kumar'
return_value = greet(name1)
print(return_value)
return_value = greet(name2)
print(return_value)
def greet_msg(name, msg):
return f'Hello, {name} {msg}'
name1 = 'Raj Nath Patel'
name2 = 'RaJ Kumar'
return_value = greet_msg('Raj Kumar', 'Good morning!')
print(return_value)
return_value = greet_msg(name1, 'Good morning!')
print(return_value)
return_value = greet_msg(name2, 'How are you?')
print(return_value)
def greet_msg_with_default(name, msg='Good Morning'):
return f'Hello, {name} {msg}'
return_value = greet_msg_with_default('Raj Kumar')
print(return_value)
return_value = greet_msg_with_default('Raj Kumar', 'How are you?')
print(return_value)
return_value = greet_msg_with_default(msg='How are you?', name='Raj Nath Patel')
print(return_value)
return_value = greet_msg_with_default(name='Raj Kumar', msg='How are you?')
print(return_value)
return_value = greet_msg_with_default('Raj Nath Patel', msg='How are you?')
print(return_value) |
class Element():
def __init__(self, identifier, name, symbol):
self.identifier = identifier
self.name = name
self.symbol = symbol
self.metabolites = []
def add_compound(self, compound):
if compound not in self.metabolites:
self.metabolites.append(compound)
def __repr__(self):
return '<{0} {1!r}>'.format(self.__class__.__name__,
(self.identifier, self.name, self.symbol))
| class Element:
def __init__(self, identifier, name, symbol):
self.identifier = identifier
self.name = name
self.symbol = symbol
self.metabolites = []
def add_compound(self, compound):
if compound not in self.metabolites:
self.metabolites.append(compound)
def __repr__(self):
return '<{0} {1!r}>'.format(self.__class__.__name__, (self.identifier, self.name, self.symbol)) |
#Name Cases.
Personal_Name = "joey Tribionny"
print("Person's name in lower case: " + Personal_Name.lower())
print("Person's name in upper case: " + Personal_Name.upper())
print("Person's name in title case: " + Personal_Name.title())
| personal__name = 'joey Tribionny'
print("Person's name in lower case: " + Personal_Name.lower())
print("Person's name in upper case: " + Personal_Name.upper())
print("Person's name in title case: " + Personal_Name.title()) |
# 1) Function that takes a string as a paratmeter and returns true if str contains at least 3 g false otherwise.
# def threeg(stri):
# gsum = 0
# for letter in stri.upper():
# if letter == 'G':
# gsum += 1
# while gsum < 3:
# return False
# print(threeg('ggg')
def g_count(any_str):
gnum = 0 # need to count the g's
for char in any_str.upper(): # char itterates through each char of the string .upper() ensures that no matter which string the format will be recognized.
if char == 'G': # only if char is tha same as the string 'G'
gnum += 1 # gnum increases by one.
if gnum >= 3: # this if must be BEHIND the firs if, so it only starts after first loop is completed.
return True
else:
return False
print(g_count('attggg')) | def g_count(any_str):
gnum = 0
for char in any_str.upper():
if char == 'G':
gnum += 1
if gnum >= 3:
return True
else:
return False
print(g_count('attggg')) |
# import pytest
class TestFormatHandler:
def test_read(self): # synced
assert True
def test_write(self): # synced
assert True
def test_append(self): # synced
assert True
def test_read_help(self): # synced
assert True
def test_write_help(self): # synced
assert True
def test__ensure_format(self): # synced
assert True
def test_add_format(self): # synced
assert True
class TestFormatMeta:
def test___new__(self): # synced
assert True
class TestFormat:
def test_initialize(self): # synced
assert True
def test_read(self): # synced
assert True
def test_read_help(self): # synced
assert True
def test_write(self): # synced
assert True
def test_write_help(self): # synced
assert True
| class Testformathandler:
def test_read(self):
assert True
def test_write(self):
assert True
def test_append(self):
assert True
def test_read_help(self):
assert True
def test_write_help(self):
assert True
def test__ensure_format(self):
assert True
def test_add_format(self):
assert True
class Testformatmeta:
def test___new__(self):
assert True
class Testformat:
def test_initialize(self):
assert True
def test_read(self):
assert True
def test_read_help(self):
assert True
def test_write(self):
assert True
def test_write_help(self):
assert True |
num = []
soma = 0
for i in range(11):
num.append(int(input()))
n = len(num)
for i in num:
soma = soma + i
media = soma / n
print(media)
| num = []
soma = 0
for i in range(11):
num.append(int(input()))
n = len(num)
for i in num:
soma = soma + i
media = soma / n
print(media) |
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar")
def _pre_render_script_ops_impl(ctx):
output_filename = "{}.yaml".format(ctx.attr.name)
output_yaml = ctx.actions.declare_file(output_filename)
outputs = [output_yaml]
ctx.actions.run(
inputs = [ctx.file.script],
outputs = outputs,
progress_message = "Generating pre_render_script ops-file {}".format(output_filename),
executable = ctx.executable._generator,
env = {
"INSTANCE_GROUP": ctx.attr.instance_group,
"JOB": ctx.attr.job,
"PRE_RENDER_SCRIPT": ctx.file.script.path,
"OUTPUT": output_yaml.path,
"TYPE": ctx.attr.script_type,
},
)
return [DefaultInfo(files = depset(outputs))]
pre_render_script_ops = rule(
implementation = _pre_render_script_ops_impl,
attrs = {
"instance_group": attr.string(
mandatory = True,
),
"job": attr.string(
mandatory = True,
),
"script_type": attr.string(
mandatory = True,
),
"script": attr.label(
allow_single_file = True,
mandatory = True,
),
"_generator": attr.label(
allow_single_file = True,
cfg = "host",
default = "//bosh/releases/generators/pre_render_scripts:generator.sh",
executable = True,
),
},
)
def generate_pre_render_script_ops(name, srcs):
scripts = [_map_pre_render_script(src) for src in srcs]
for script in scripts:
pre_render_script_ops(
name = script.ops_file_target_name,
instance_group = script.instance_group,
job = script.job,
script = script.src_target,
script_type = script.script_type,
)
pkg_tar(
name = name,
package_dir = "assets/operations/pre_render_scripts",
srcs = [":{}".format(script.ops_file_target_name) for script in scripts],
)
def _map_pre_render_script(src):
script_type = paths.basename(paths.dirname(src))
job = paths.basename(paths.dirname(paths.dirname(src)))
instance_group = paths.basename(paths.dirname(paths.dirname(paths.dirname(src))))
src_target = ":{}".format(src)
src_basename = paths.basename(src)
return struct(
job = job,
instance_group = instance_group,
script_type = script_type,
src_target = src_target,
ops_file_target_name = "{}_{}_{}".format(instance_group, job, src_basename.replace(".", "_")),
)
| load('@bazel_skylib//lib:paths.bzl', 'paths')
load('@bazel_tools//tools/build_defs/pkg:pkg.bzl', 'pkg_tar')
def _pre_render_script_ops_impl(ctx):
output_filename = '{}.yaml'.format(ctx.attr.name)
output_yaml = ctx.actions.declare_file(output_filename)
outputs = [output_yaml]
ctx.actions.run(inputs=[ctx.file.script], outputs=outputs, progress_message='Generating pre_render_script ops-file {}'.format(output_filename), executable=ctx.executable._generator, env={'INSTANCE_GROUP': ctx.attr.instance_group, 'JOB': ctx.attr.job, 'PRE_RENDER_SCRIPT': ctx.file.script.path, 'OUTPUT': output_yaml.path, 'TYPE': ctx.attr.script_type})
return [default_info(files=depset(outputs))]
pre_render_script_ops = rule(implementation=_pre_render_script_ops_impl, attrs={'instance_group': attr.string(mandatory=True), 'job': attr.string(mandatory=True), 'script_type': attr.string(mandatory=True), 'script': attr.label(allow_single_file=True, mandatory=True), '_generator': attr.label(allow_single_file=True, cfg='host', default='//bosh/releases/generators/pre_render_scripts:generator.sh', executable=True)})
def generate_pre_render_script_ops(name, srcs):
scripts = [_map_pre_render_script(src) for src in srcs]
for script in scripts:
pre_render_script_ops(name=script.ops_file_target_name, instance_group=script.instance_group, job=script.job, script=script.src_target, script_type=script.script_type)
pkg_tar(name=name, package_dir='assets/operations/pre_render_scripts', srcs=[':{}'.format(script.ops_file_target_name) for script in scripts])
def _map_pre_render_script(src):
script_type = paths.basename(paths.dirname(src))
job = paths.basename(paths.dirname(paths.dirname(src)))
instance_group = paths.basename(paths.dirname(paths.dirname(paths.dirname(src))))
src_target = ':{}'.format(src)
src_basename = paths.basename(src)
return struct(job=job, instance_group=instance_group, script_type=script_type, src_target=src_target, ops_file_target_name='{}_{}_{}'.format(instance_group, job, src_basename.replace('.', '_'))) |
# # # # # a = 215
# # # # a = int(input("Input a"))
# # # a = 9000
# # # a = 3
# # #
# if True: # after : is the code block, must be indented
# print("True")
# print("This always runs because if statement is True")
# print("Still working in if block")
# # # if block has ended
# print("This runs no matter what because we are outside if ")
# # # # # after we go back to our normal indentation the if block is ended
# # # #
# a = 25
# if a > 10: # in Python when you see : next line will be indented
# # runs only when statement after if is True
# print("Do this when a is larger than 10")
# print(f"Still only runs when a > {a}")
# # we can keep doing things when a > 10 here
# # #
# # # # # here we have exited if
# print("This will always print no matter what")
# # # # # # # #
# # # # # # #
# # # # # # a = -333
# # # # # # a = 200
# a = 44
# a = 15
# if a > 10: # in Python when you see : next line will be indented
# # runs only when statement after if is True
# print("Again Do this when a is larger than 10")
# print("Indeed a is", a)
# else: # when a is <= 10
# print("Only when a is less or equal to 10")
# print("Indeed a is only", a)
# # we could do more stuff here when a is not larger than 10
# # # #
# # # # # # a = 10
a = 200
a = -95
a = 10
# # a = -355
# if we need more than 2 distinct paths
# if a > 10: # in Python when you see : next line will be indented
# # runs only when statement after if is True
# print("Again Do this when a is larger than 10", a)
# elif a < 10:
# print("ahh a is less than 10", a)
# else: # so a must be 10 no other choices you do not need to check, after all other choices are exhausted
# print("Only when a is equal to 10 since we checked other cases", a)
# # we could do more stuff here when a is not larger than 10
# # # # # # #
# print("Back to normal program flow which always runs no matter what a is")
# # # # # #
# # # # # #
# # # # # # #
# # # # without else both of these could run
# a = 20
# # # a = 7
# if a > 5:
# print("a is larger than 5")
# # the below if statement is not related to the if statement above
# if a > 10:
# print("a is larger than 10")
# # # # # #
# # # # # #
# # # # # #
# # # # # #
# # # # # #
# # # # # #
# # # # # #
# # # # # #
# # # # # # # if else elif
# # # # # a = 190
# a = int(input("give me an a! "))
# if a > 10:
# print("a is larger than 10")
# print("This will only happen when a > 10")
# if a >= 200: # so we can nest ifs inside another if
# print("a is truly big over or equal 200")
# else:
# print("a is more than 10 but no more than 199")
# elif a < 10:
# print("a is less than 10", a)
# else: # if a == 10
# print("a is equal to 10", a)
# # #
# # # # print("This will always happen no matter the a value")
# # # #
# # # # b = -8500
# # # # b = 6
# # # # b = 555
# # # # b = 9000
# # # # if b < 0:
# # # # print("Less than 0", b)
# # # # elif b < 10:
# # # # print("Less than 10 but more or equal to 0", b)
# # # # elif b < 9000:
# # # # pass # empty operation
# # # # # print("At least 10 but less than 9000", b)
# # # # else:
# # # # print("9000 or more!", b)
# # # # #
# # # # if b < 0:
# # # # print("Less than 0", b)
# # # #
# # # # if b < 10:
# # # # print("Less than 10", b)
# # # #
# # # # if b < 9000:
# # # # print("less than 9000", b)
# # # # else:
# # # # print("9000 or more!", b)
# # # # # #
# # # # c = None
# # # # c = 5
# # # # if c == None:
# # # # print("There is Nothing")
# # # # else:
# # # # print("There is something")
# # # # # #
# # # #
a = -100
if 2 < 3 < 8 < a:
print(f"2 < 3 < 8 < {a} is it a True statement? ", 2 < 3 < 8 < a)
else:
print(f"2 < 3 < 8 < {a} is it a True statement?", 2 < 3 < 8 < a) | a = 200
a = -95
a = 10
a = -100
if 2 < 3 < 8 < a:
print(f'2 < 3 < 8 < {a} is it a True statement? ', 2 < 3 < 8 < a)
else:
print(f'2 < 3 < 8 < {a} is it a True statement?', 2 < 3 < 8 < a) |
def test1():
l = []
for i in range(1000):
l = l + [i]
def test2():
l = []
for i in range(1000):
l.append(i)
def test3():
l = [i for i in range(1000)]
def test4():
l = list(range(1000))
| def test1():
l = []
for i in range(1000):
l = l + [i]
def test2():
l = []
for i in range(1000):
l.append(i)
def test3():
l = [i for i in range(1000)]
def test4():
l = list(range(1000)) |
print("Listing Primes")
prime_list = []
i = 0
while i < 10000:
if i % 1000 == 0:
print("Processed %d primes" % i)
i += 1
prime = True
for n in range(i):
if n != 0 and n!= 1 and n!= i:
if i % n == 0: prime = False
if prime == True:
prime_list.append(i)
for i in range(10):
count = 0
for n in prime_list:
if n == 0: continue
if n % 10 == i: count += 1
print("Number of primes ending in %d: %d" % (i, count))
| print('Listing Primes')
prime_list = []
i = 0
while i < 10000:
if i % 1000 == 0:
print('Processed %d primes' % i)
i += 1
prime = True
for n in range(i):
if n != 0 and n != 1 and (n != i):
if i % n == 0:
prime = False
if prime == True:
prime_list.append(i)
for i in range(10):
count = 0
for n in prime_list:
if n == 0:
continue
if n % 10 == i:
count += 1
print('Number of primes ending in %d: %d' % (i, count)) |
class Verb(object):
def __init__(self, verb, subject):
self.verb = verb
self.subject = subject
self.value = ""
def format(self, context):
subject = self.subject(context).value
if subject.player:
self.value = self.verb
else:
self.value = third_personify(self.verb)
return self
def __str__(self):
return self.value
mapping = {
"y": lambda verb: verb[:-1] + "ies",
"s": lambda verb: verb + "es",
"z": lambda verb: verb + "es",
"h": lambda verb: verb + "es",
"x": lambda verb: verb + "es",
"o": lambda verb: verb + "es",
}
direct_mapping = {
"are": "is"
}
def third_personify(verb):
direct = direct_mapping.get(verb)
if direct is not None:
return direct
last_letter = verb[-1]
result = mapping.get(last_letter, lambda v: v + "s")
return result(verb)
| class Verb(object):
def __init__(self, verb, subject):
self.verb = verb
self.subject = subject
self.value = ''
def format(self, context):
subject = self.subject(context).value
if subject.player:
self.value = self.verb
else:
self.value = third_personify(self.verb)
return self
def __str__(self):
return self.value
mapping = {'y': lambda verb: verb[:-1] + 'ies', 's': lambda verb: verb + 'es', 'z': lambda verb: verb + 'es', 'h': lambda verb: verb + 'es', 'x': lambda verb: verb + 'es', 'o': lambda verb: verb + 'es'}
direct_mapping = {'are': 'is'}
def third_personify(verb):
direct = direct_mapping.get(verb)
if direct is not None:
return direct
last_letter = verb[-1]
result = mapping.get(last_letter, lambda v: v + 's')
return result(verb) |
""" Module with functionalities for blocking based on a dictionary of records,
where a blocking function must return a dictionary with block identifiers
as keys and values being sets or lists of record identifiers in that block.
"""
# =============================================================================
def noBlocking(rec_dict):
"""A function which does no blocking but simply puts all records from the
given dictionary into one block.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys and
corresponding list of record values
"""
print("Run 'no' blocking:")
print(' Number of records to be blocked: '+str(len(rec_dict)))
print('')
rec_id_list = list(rec_dict.keys())
block_dict = {'all_rec':rec_id_list}
return block_dict
# -----------------------------------------------------------------------------
def simpleBlocking(rec_dict, blk_attr_list):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
A blocking is implemented that simply concatenates attribute values.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys
and corresponding list of record values
blk_attr_list : List of blocking key attributes to use
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
Examples:
If the blocking is based on 'postcode' then:
block_dict = {'2000': [rec1_id, rec2_id, rec3_id, ...],
'2600': [rec4_id, rec5_id, ...],
...
}
while if the blocking is based on 'postcode' and 'gender' then:
block_dict = {'2000f': [rec1_id, rec3_id, ...],
'2000m': [rec2_id, ...],
'2600f': [rec5_id, ...],
'2600m': [rec4_id, ...],
...
}
"""
block_dict = {} # The dictionary with blocks to be generated and returned
print('Run simple blocking:')
print(' List of blocking key attributes: '+str(blk_attr_list))
print(' Number of records to be blocked: '+str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = '' # Initialise the blocking key value for this record
# Process selected blocking attributes
#
for attr in blk_attr_list:
attr_val = rec_values[attr]
rec_bkv += attr_val
# Insert the blocking key value and record into blocking dictionary
#
if (rec_bkv in block_dict): # Block key value in block index
# Only need to add the record
#
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else: # Block key value not in block index
# Create a new block and add the record identifier
#
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list # Store the new block
return block_dict
# -----------------------------------------------------------------------------
def phoneticBlocking(rec_dict, blk_attr_list):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
A blocking is implemented that concatenates Soundex encoded values of
attribute values.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys
and corresponding list of record values
blk_attr_list : List of blocking key attributes to use
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
"""
block_dict = {} # The dictionary with blocks to be generated and returned
print('Run phonetic blocking:')
print(' List of blocking key attributes: '+str(blk_attr_list))
print(' Number of records to be blocked: '+str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = '' # Initialise the blocking key value for this record
# Process selected blocking attributes
#
for attr in blk_attr_list:
attr_val = rec_values[attr]
if (attr_val == ''):
rec_bkv += 'z000' # Often used as Soundex code for empty values
else: # Convert the value into its Soundex code
attr_val = attr_val.lower()
sndx_val = attr_val[0] # Keep first letter
for c in attr_val[1:]: # Loop over all other letters
if (c in 'aehiouwy'): # Not inlcuded into Soundex code
pass
elif (c in 'bfpv'):
if (sndx_val[-1] != '1'): # Don't add duplicates of digits
sndx_val += '1'
elif (c in 'cgjkqsxz'):
if (sndx_val[-1] != '2'): # Don't add duplicates of digits
sndx_val += '2'
elif (c in 'dt'):
if (sndx_val[-1] != '3'): # Don't add duplicates of digits
sndx_val += '3'
elif (c in 'l'):
if (sndx_val[-1] != '4'): # Don't add duplicates of digits
sndx_val += '4'
elif (c in 'mn'):
if (sndx_val[-1] != '5'): # Don't add duplicates of digits
sndx_val += '5'
elif (c in 'r'):
if (sndx_val[-1] != '6'): # Don't add duplicates of digits
sndx_val += '6'
if (len(sndx_val) < 4):
sndx_val += '000' # Ensure enough digits
sndx_val = sndx_val[:4] # Maximum length is 4
rec_bkv += sndx_val
# Insert the blocking key value and record into blocking dictionary
#
if (rec_bkv in block_dict): # Block key value in block index
# Only need to add the record
#
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else: # Block key value not in block index
# Create a new block and add the record identifier
#
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list # Store the new block
return block_dict
# -----------------------------------------------------------------------------
def slkBlocking(rec_dict, fam_name_attr_ind, giv_name_attr_ind,
dob_attr_ind, gender_attr_ind):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
This function should implement the statistical linkage key (SLK-581)
blocking approach as used in real-world linkage applications:
http://www.aihw.gov.au/WorkArea/DownloadAsset.aspx?id=60129551915
A SLK-581 blocking key is the based on the concatenation of:
- 3 letters of family name
- 2 letters of given name
- Date of birth
- Sex
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as
keys and corresponding list of record values
fam_name_attr_ind : The number (index) of the attribute that contains
family name (last name)
giv_name_attr_ind : The number (index) of the attribute that contains
given name (first name)
dob_attr_ind : The number (index) of the attribute that contains
date of birth
gender_attr_ind : The number (index) of the attribute that contains
gender (sex)
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
"""
block_dict = {} # The dictionary with blocks to be generated and returned
print('Run SLK-581 blocking:')
print(' Number of records to be blocked: '+str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = '' # Initialise the blocking key value for this record
# Get family name value
#
fam_name = rec_values[fam_name_attr_ind]
if (fam_name == ''):
rec_bkv += '999'
else:
fam_nam = fam_name.replace('-','') # Remove non letter characters
fam_nam = fam_name.replace(",",'')
fam_nam = fam_name.replace('_','')
if (len(fam_name) >= 5):
rec_bkv += (fam_name[1]+fam_name[2]+fam_name[4])
elif (len(fam_name) >= 3):
rec_bkv += (fam_name[1]+fam_name[2]+'2')
elif (len(fam_name) >= 2):
rec_bkv += (fam_name[1]+'22')
# Get given name value
#
giv_name = rec_values[giv_name_attr_ind]
if (giv_name == ''):
rec_bkv += '99'
else:
giv_nam = giv_name.replace('-','') # Remove non letter characters
giv_nam = giv_name.replace(",",'')
giv_nam = giv_name.replace('_','')
if (len(giv_name) >= 3):
rec_bkv += (giv_name[1]+giv_name[2])
elif (len(giv_name) >= 2):
rec_bkv += (giv_name[1]+'2')
# DoB structure we use: dd/mm/yyyy
# Get date of birth
#
dob = rec_values[dob_attr_ind]
dob_list = rec_values[dob_attr_ind].split('/')
# Add some checks
#
if (len(dob_list[0]) < 2):
dob_list[0] = '0' + dob_list[0] # Add leading zero for days < 10
if (len(dob_list[1]) < 2):
dob_list[1] = '0' + dob_list[1] # Add leading zero for months < 10
dob = ''.join(dob_list) # Create: ddmmyyyy
assert len(dob) == 8, dob
rec_bkv += dob
# Get gender
#
gender = rec_values[gender_attr_ind].lower()
if (gender == 'm'):
rec_bkv += '1'
elif (gender == 'f'):
rec_bkv += '2'
else:
rec_bkv += '9'
# Insert the blocking key value and record into blocking dictionary
#
if (rec_bkv in block_dict): # Block key value in block index
# Only need to add the record
#
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else: # Block key value not in block index
# Create a new block and add the record identifier
#
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list # Store the new block
return block_dict
# -----------------------------------------------------------------------------
# Extra task: Implement canopy clustering based blocking as described in
# the Data Matching book
# -----------------------------------------------------------------------------
def printBlockStatistics(blockA_dict, blockB_dict):
"""Calculate and print some basic statistics about the generated blocks
"""
print('Statistics of the generated blocks:')
numA_blocks = len(blockA_dict)
numB_blocks = len(blockB_dict)
block_sizeA_list = []
for rec_id_list in blockA_dict.values(): # Loop over all blocks
block_sizeA_list.append(len(rec_id_list))
block_sizeB_list = []
for rec_id_list in blockB_dict.values(): # Loop over all blocks
block_sizeB_list.append(len(rec_id_list))
print('Dataset A number of blocks generated: %d' % (numA_blocks))
print(' Minimum block size: %d' % (min(block_sizeA_list)))
print(' Average block size: %.2f' % \
(float(sum(block_sizeA_list)) / len(block_sizeA_list)))
print(' Maximum block size: %d' % (max(block_sizeA_list)))
print('')
print('Dataset B number of blocks generated: %d' % (numB_blocks))
print(' Minimum block size: %d' % (min(block_sizeB_list)))
print(' Average block size: %.2f' % \
(float(sum(block_sizeB_list)) / len(block_sizeB_list)))
print(' Maximum block size: %d' % (max(block_sizeB_list)))
print('')
# -----------------------------------------------------------------------------
# End of program.
| """ Module with functionalities for blocking based on a dictionary of records,
where a blocking function must return a dictionary with block identifiers
as keys and values being sets or lists of record identifiers in that block.
"""
def no_blocking(rec_dict):
"""A function which does no blocking but simply puts all records from the
given dictionary into one block.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys and
corresponding list of record values
"""
print("Run 'no' blocking:")
print(' Number of records to be blocked: ' + str(len(rec_dict)))
print('')
rec_id_list = list(rec_dict.keys())
block_dict = {'all_rec': rec_id_list}
return block_dict
def simple_blocking(rec_dict, blk_attr_list):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
A blocking is implemented that simply concatenates attribute values.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys
and corresponding list of record values
blk_attr_list : List of blocking key attributes to use
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
Examples:
If the blocking is based on 'postcode' then:
block_dict = {'2000': [rec1_id, rec2_id, rec3_id, ...],
'2600': [rec4_id, rec5_id, ...],
...
}
while if the blocking is based on 'postcode' and 'gender' then:
block_dict = {'2000f': [rec1_id, rec3_id, ...],
'2000m': [rec2_id, ...],
'2600f': [rec5_id, ...],
'2600m': [rec4_id, ...],
...
}
"""
block_dict = {}
print('Run simple blocking:')
print(' List of blocking key attributes: ' + str(blk_attr_list))
print(' Number of records to be blocked: ' + str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = ''
for attr in blk_attr_list:
attr_val = rec_values[attr]
rec_bkv += attr_val
if rec_bkv in block_dict:
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else:
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list
return block_dict
def phonetic_blocking(rec_dict, blk_attr_list):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
A blocking is implemented that concatenates Soundex encoded values of
attribute values.
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as keys
and corresponding list of record values
blk_attr_list : List of blocking key attributes to use
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
"""
block_dict = {}
print('Run phonetic blocking:')
print(' List of blocking key attributes: ' + str(blk_attr_list))
print(' Number of records to be blocked: ' + str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = ''
for attr in blk_attr_list:
attr_val = rec_values[attr]
if attr_val == '':
rec_bkv += 'z000'
else:
attr_val = attr_val.lower()
sndx_val = attr_val[0]
for c in attr_val[1:]:
if c in 'aehiouwy':
pass
elif c in 'bfpv':
if sndx_val[-1] != '1':
sndx_val += '1'
elif c in 'cgjkqsxz':
if sndx_val[-1] != '2':
sndx_val += '2'
elif c in 'dt':
if sndx_val[-1] != '3':
sndx_val += '3'
elif c in 'l':
if sndx_val[-1] != '4':
sndx_val += '4'
elif c in 'mn':
if sndx_val[-1] != '5':
sndx_val += '5'
elif c in 'r':
if sndx_val[-1] != '6':
sndx_val += '6'
if len(sndx_val) < 4:
sndx_val += '000'
sndx_val = sndx_val[:4]
rec_bkv += sndx_val
if rec_bkv in block_dict:
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else:
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list
return block_dict
def slk_blocking(rec_dict, fam_name_attr_ind, giv_name_attr_ind, dob_attr_ind, gender_attr_ind):
"""Build the blocking index data structure (dictionary) to store blocking
key values (BKV) as keys and the corresponding list of record identifiers.
This function should implement the statistical linkage key (SLK-581)
blocking approach as used in real-world linkage applications:
http://www.aihw.gov.au/WorkArea/DownloadAsset.aspx?id=60129551915
A SLK-581 blocking key is the based on the concatenation of:
- 3 letters of family name
- 2 letters of given name
- Date of birth
- Sex
Parameter Description:
rec_dict : Dictionary that holds the record identifiers as
keys and corresponding list of record values
fam_name_attr_ind : The number (index) of the attribute that contains
family name (last name)
giv_name_attr_ind : The number (index) of the attribute that contains
given name (first name)
dob_attr_ind : The number (index) of the attribute that contains
date of birth
gender_attr_ind : The number (index) of the attribute that contains
gender (sex)
This method returns a dictionary with blocking key values as its keys and
list of record identifiers as its values (one list for each block).
"""
block_dict = {}
print('Run SLK-581 blocking:')
print(' Number of records to be blocked: ' + str(len(rec_dict)))
print('')
for (rec_id, rec_values) in rec_dict.items():
rec_bkv = ''
fam_name = rec_values[fam_name_attr_ind]
if fam_name == '':
rec_bkv += '999'
else:
fam_nam = fam_name.replace('-', '')
fam_nam = fam_name.replace(',', '')
fam_nam = fam_name.replace('_', '')
if len(fam_name) >= 5:
rec_bkv += fam_name[1] + fam_name[2] + fam_name[4]
elif len(fam_name) >= 3:
rec_bkv += fam_name[1] + fam_name[2] + '2'
elif len(fam_name) >= 2:
rec_bkv += fam_name[1] + '22'
giv_name = rec_values[giv_name_attr_ind]
if giv_name == '':
rec_bkv += '99'
else:
giv_nam = giv_name.replace('-', '')
giv_nam = giv_name.replace(',', '')
giv_nam = giv_name.replace('_', '')
if len(giv_name) >= 3:
rec_bkv += giv_name[1] + giv_name[2]
elif len(giv_name) >= 2:
rec_bkv += giv_name[1] + '2'
dob = rec_values[dob_attr_ind]
dob_list = rec_values[dob_attr_ind].split('/')
if len(dob_list[0]) < 2:
dob_list[0] = '0' + dob_list[0]
if len(dob_list[1]) < 2:
dob_list[1] = '0' + dob_list[1]
dob = ''.join(dob_list)
assert len(dob) == 8, dob
rec_bkv += dob
gender = rec_values[gender_attr_ind].lower()
if gender == 'm':
rec_bkv += '1'
elif gender == 'f':
rec_bkv += '2'
else:
rec_bkv += '9'
if rec_bkv in block_dict:
rec_id_list = block_dict[rec_bkv]
rec_id_list.append(rec_id)
else:
rec_id_list = [rec_id]
block_dict[rec_bkv] = rec_id_list
return block_dict
def print_block_statistics(blockA_dict, blockB_dict):
"""Calculate and print some basic statistics about the generated blocks
"""
print('Statistics of the generated blocks:')
num_a_blocks = len(blockA_dict)
num_b_blocks = len(blockB_dict)
block_size_a_list = []
for rec_id_list in blockA_dict.values():
block_sizeA_list.append(len(rec_id_list))
block_size_b_list = []
for rec_id_list in blockB_dict.values():
block_sizeB_list.append(len(rec_id_list))
print('Dataset A number of blocks generated: %d' % numA_blocks)
print(' Minimum block size: %d' % min(block_sizeA_list))
print(' Average block size: %.2f' % (float(sum(block_sizeA_list)) / len(block_sizeA_list)))
print(' Maximum block size: %d' % max(block_sizeA_list))
print('')
print('Dataset B number of blocks generated: %d' % numB_blocks)
print(' Minimum block size: %d' % min(block_sizeB_list))
print(' Average block size: %.2f' % (float(sum(block_sizeB_list)) / len(block_sizeB_list)))
print(' Maximum block size: %d' % max(block_sizeB_list))
print('') |
def is_valid(row, col, size):
return 0 <= row < size and 0 <= col < size
# returns True or False
def explode(row, col, size, matrix_in):
bomb = matrix[row][col]
for r in range(row - 1, row + 2):
for c in range(col - 1, col + 2):
if is_valid(r, c, size) and matrix_in[r][c] > 0:
matrix[r][c] -= bomb
n = int(input())
matrix = []
for _ in range(n):
matrix.append([int(x) for x in input().split()])
bomb_numbers = input().split()
for bomb in bomb_numbers:
tokens = [int(x) for x in bomb.split(',')]
bomb_row = tokens[0]
bomb_col = tokens[1]
if matrix[bomb_row][bomb_col] > 0:
explode(bomb_row, bomb_col, n, matrix)
matrix[bomb_row][bomb_col] = 0
alive_count = 0
alive_sum = 0
for row in range(n):
for col in range(n):
number = matrix[row][col]
if number > 0:
alive_count += 1
alive_sum += number
print(f'Alive cells: {alive_count}')
print(f'Sum: {alive_sum}')
for row in matrix:
print(' '.join([str(x) for x in row]))
| def is_valid(row, col, size):
return 0 <= row < size and 0 <= col < size
def explode(row, col, size, matrix_in):
bomb = matrix[row][col]
for r in range(row - 1, row + 2):
for c in range(col - 1, col + 2):
if is_valid(r, c, size) and matrix_in[r][c] > 0:
matrix[r][c] -= bomb
n = int(input())
matrix = []
for _ in range(n):
matrix.append([int(x) for x in input().split()])
bomb_numbers = input().split()
for bomb in bomb_numbers:
tokens = [int(x) for x in bomb.split(',')]
bomb_row = tokens[0]
bomb_col = tokens[1]
if matrix[bomb_row][bomb_col] > 0:
explode(bomb_row, bomb_col, n, matrix)
matrix[bomb_row][bomb_col] = 0
alive_count = 0
alive_sum = 0
for row in range(n):
for col in range(n):
number = matrix[row][col]
if number > 0:
alive_count += 1
alive_sum += number
print(f'Alive cells: {alive_count}')
print(f'Sum: {alive_sum}')
for row in matrix:
print(' '.join([str(x) for x in row])) |
# -*- coding: utf-8 -*-
""" Train models module. """
#from modules.models.pytorch.alex_net import AlexNet
#__all__ = ['AlexNet']
| """ Train models module. """ |
test_cases = int(input())
for i in range(test_cases):
text = input()
new_text = ''
for l in text:
if l.isalpha():
new_text += chr(ord(l) + 3)
else:
new_text += l
new_text = new_text[::-1]
half = int((len(new_text) / 2))
first_part = new_text[0:half]
second_part = new_text[half:]
third_part = ''
for l in second_part:
third_part += chr(ord(l) - 1)
encrypted_text = first_part + third_part
print(encrypted_text)
| test_cases = int(input())
for i in range(test_cases):
text = input()
new_text = ''
for l in text:
if l.isalpha():
new_text += chr(ord(l) + 3)
else:
new_text += l
new_text = new_text[::-1]
half = int(len(new_text) / 2)
first_part = new_text[0:half]
second_part = new_text[half:]
third_part = ''
for l in second_part:
third_part += chr(ord(l) - 1)
encrypted_text = first_part + third_part
print(encrypted_text) |
# Ported from python 3.7 contextlib.py
class nullcontext(object):
"""Context manager that does no additional processing.
Used as a stand-in for a normal context manager, when a particular
block of code is only sometimes used with a normal context manager:
cm = optional_cm if condition else nullcontext()
with cm:
# Perform operation, using optional_cm if condition is True
"""
def __init__(self, enter_result=None):
self.enter_result = enter_result
def __enter__(self):
return self.enter_result
def __exit__(self, *excinfo):
pass
| class Nullcontext(object):
"""Context manager that does no additional processing.
Used as a stand-in for a normal context manager, when a particular
block of code is only sometimes used with a normal context manager:
cm = optional_cm if condition else nullcontext()
with cm:
# Perform operation, using optional_cm if condition is True
"""
def __init__(self, enter_result=None):
self.enter_result = enter_result
def __enter__(self):
return self.enter_result
def __exit__(self, *excinfo):
pass |
name = "signalfx-azure-function-python"
version = "1.0.1"
user_agent = f"signalfx_azure_function/{version}"
| name = 'signalfx-azure-function-python'
version = '1.0.1'
user_agent = f'signalfx_azure_function/{version}' |
# Implement the singleton pattern with a twist. First, instead of storing one
# instance, store two instances. And in every even call of getInstance(), return
# the first instance and in every odd call of getInstance(), return the second
# instance.
class Singleton(type):
_instance = []
odd = True
def __call__(cls, *args, **kwargs):
if len(cls._instance) < 2:
instance = super(Singleton, cls).__call__(*args, **kwargs)
cls._instance.append(instance)
cls.odd = True if not cls.odd else False
return cls._instance[cls.odd]
class Twist(metaclass=Singleton):
def __init__(self, name):
self.name = name
if __name__ == '__main__':
twists = [Twist(i) for i in range(5)]
for t in twists:
print(t.name)
| class Singleton(type):
_instance = []
odd = True
def __call__(cls, *args, **kwargs):
if len(cls._instance) < 2:
instance = super(Singleton, cls).__call__(*args, **kwargs)
cls._instance.append(instance)
cls.odd = True if not cls.odd else False
return cls._instance[cls.odd]
class Twist(metaclass=Singleton):
def __init__(self, name):
self.name = name
if __name__ == '__main__':
twists = [twist(i) for i in range(5)]
for t in twists:
print(t.name) |
# Messages issued by the bot to the user
REMOVAL_MESSAGE = """
The message by {username} was deleted as it violated the channel's moral guidelines.
Multiple such violations may lead to a temporary or even a permanent ban
"""
PERSONAL_MESSAGE_AFTER_REMOVAL = """
We deleted your message because it was found to be toxic. Please refrain from using such messages in the channels. \
Repeated violations will result in stricter actions.
"""
INFO_MESSAGE = """
Hi {username}, this is Mr Toxic Bot. \
I'm responsible for ensuring a friendly environment that promotes social well being in the channel. \
Any message that I deem toxic, insulting, threatening etc will be removed and the author will be given a warning. \
"""
HELP_MESSAGE = """
Hi {username}, this is Mr Toxic Bot. \n
Here's a list of commands you can use to chat with me: \n
- /report - With this command you'll receive the link of the GitHub page of the project to report bugs and issues \n
- /info - You'll receive some info about me :D \n
- /help - You'll receive the list of commands
"""
WELCOME_MESSAGE = """
Hi {0.mention}, Welcome to {1.name} Server.
"""
WELCOME_DM_MESSAGE = """
Hi {0.mention}, Welcome to {1.name} Server.
Use /help to see various commands you can use to chat with me
"""
REPORT_MESSAGE = """
Need something wrong? Here's a link to report that!
"""
ONLY_PRIVATE_DMS = """
Hey {user}, the command can only be invoked by a private DM to ToxicBot.
"""
ADMIN_MESSAGE_AFTER_BOT_JOIN = """
Howdy, I am Mr. Toxic Bot. I am responsible for ensuring that the server is family friendly. \
Any obscene or toxic message sent to the server will be immediately deleted and the author will be warned. \
Here's some configuration that is applied to the server:\n
- Toxic Count before suspending an user : 20\n
- Number of days before previous toxic count history is erased for an user : 14 days
The above configurations can be modified by the server administrator.
"""
REQUISITE_PERMISSION = """
Sorry {user}, you do not have the requisite priviledges to execute the above command.
"""
NOT_BOT_OWNER = """
Sorry {user}, only the owner of the bot can run the above command.
"""
ADMIN_REQUEST_SERVER_ID = """
It seems that you are part of multiple servers. Please select which server you want to get the information about.
"""
ADMIN_CONFIG = """
Here are the current configurations for the server {guild}:\n
- Toxic Count before suspending an user : {count}\n
- Number of days before previous toxic count history is erased for an user : {time} days
"""
REQUIRE_NUMERICAL_VALUE = """
{entity} must be a numerical value
"""
SUCCESSFUL_UPDATE = """
{entity} updated for server {server}
"""
ADMIN_HELP_MESSAGE = """
Hi {username}, this is Mr Toxic Bot. \n
Here's a list of commands you can use to chat with me: \n
- /report - With this command you'll receive the link of the GitHub page of the project to report bugs and issues \n
- /info - You'll receive some info about me :D \n
- /help - You'll receive the list of commands \n
Here's a list of admin commands which only server owners can use: \n
- /config - Current configurations of toxic bot \n
- /setcount 10 - Set the toxic comment count before suspending an user ( 10 is just an arbitrary number ) \n
- /setdays 15 - Set the number of days before resetting toxic count for an user ( 15 is just an arbitrary number ) \n
- /toptoxic 5 - Get the top toxic comments by users for a server ( 5 is just an arbitrary number )
"""
BAD_ARGUMENT = """
Improper arguments passed.
"""
REQUEST_TIMEOUT = """
Oopsies. Looks like the request timed out. Please try again
"""
| removal_message = "\nThe message by {username} was deleted as it violated the channel's moral guidelines.\nMultiple such violations may lead to a temporary or even a permanent ban\n"
personal_message_after_removal = '\nWe deleted your message because it was found to be toxic. Please refrain from using such messages in the channels. Repeated violations will result in stricter actions.\n'
info_message = "\nHi {username}, this is Mr Toxic Bot. I'm responsible for ensuring a friendly environment that promotes social well being in the channel. Any message that I deem toxic, insulting, threatening etc will be removed and the author will be given a warning. "
help_message = "\nHi {username}, this is Mr Toxic Bot. \n\nHere's a list of commands you can use to chat with me: \n\n- /report - With this command you'll receive the link of the GitHub page of the project to report bugs and issues \n\n- /info - You'll receive some info about me :D \n\n- /help - You'll receive the list of commands\n"
welcome_message = '\nHi {0.mention}, Welcome to {1.name} Server.\n'
welcome_dm_message = '\nHi {0.mention}, Welcome to {1.name} Server.\nUse /help to see various commands you can use to chat with me\n'
report_message = "\nNeed something wrong? Here's a link to report that!\n"
only_private_dms = '\nHey {user}, the command can only be invoked by a private DM to ToxicBot.\n'
admin_message_after_bot_join = "\nHowdy, I am Mr. Toxic Bot. I am responsible for ensuring that the server is family friendly. Any obscene or toxic message sent to the server will be immediately deleted and the author will be warned. Here's some configuration that is applied to the server:\n\n- Toxic Count before suspending an user : 20\n\n- Number of days before previous toxic count history is erased for an user : 14 days\n\nThe above configurations can be modified by the server administrator.\n"
requisite_permission = '\nSorry {user}, you do not have the requisite priviledges to execute the above command.\n'
not_bot_owner = '\nSorry {user}, only the owner of the bot can run the above command.\n'
admin_request_server_id = '\nIt seems that you are part of multiple servers. Please select which server you want to get the information about.\n'
admin_config = '\nHere are the current configurations for the server {guild}:\n\n- Toxic Count before suspending an user : {count}\n\n- Number of days before previous toxic count history is erased for an user : {time} days\n'
require_numerical_value = '\n{entity} must be a numerical value\n'
successful_update = '\n{entity} updated for server {server}\n'
admin_help_message = "\nHi {username}, this is Mr Toxic Bot. \n\nHere's a list of commands you can use to chat with me: \n\n- /report - With this command you'll receive the link of the GitHub page of the project to report bugs and issues \n\n- /info - You'll receive some info about me :D \n\n- /help - You'll receive the list of commands \n\nHere's a list of admin commands which only server owners can use: \n\n- /config - Current configurations of toxic bot \n\n- /setcount 10 - Set the toxic comment count before suspending an user ( 10 is just an arbitrary number ) \n\n- /setdays 15 - Set the number of days before resetting toxic count for an user ( 15 is just an arbitrary number ) \n\n- /toptoxic 5 - Get the top toxic comments by users for a server ( 5 is just an arbitrary number )\n"
bad_argument = '\nImproper arguments passed.\n'
request_timeout = '\nOopsies. Looks like the request timed out. Please try again\n' |
def _get_ratio(ratio):
if isinstance(ratio, str):
if ':' in ratio:
r_w, r_h = ratio.split(':')
try:
ratio = float(r_w) / float(r_h)
except:
raise
if not isinstance(ratio, float):
ratio = float(ratio)
return ratio
def centerratio(size, center, ratio=1.0):
def _calc_max_size(size, center):
w, h = size
x_l, y_t = center
x_r, y_b = w-x_l, h-y_t
return min(x_l, x_r)*2, min(y_t, y_b)*2
ratio = _get_ratio(ratio)
width_max, height_max = _calc_max_size(size, center)
width_ratio = round(height_max * ratio)
width = min(width_max, width_ratio)
height = round(width / ratio)
return center[0] - round(width/2), \
center[1] - round(height/2), \
width, height
def expandratio(size, ratio=1.0):
ratio = _get_ratio(ratio)
w, h = size
w_r = round(h * ratio)
if w > w_r:
h_r = round(w / ratio)
return w, h_r, 0, int((h_r-h)/2)
elif w < w_r:
return w_r, h, int((w_r-w)/2), 0
else:
return w, h, 0, 0
| def _get_ratio(ratio):
if isinstance(ratio, str):
if ':' in ratio:
(r_w, r_h) = ratio.split(':')
try:
ratio = float(r_w) / float(r_h)
except:
raise
if not isinstance(ratio, float):
ratio = float(ratio)
return ratio
def centerratio(size, center, ratio=1.0):
def _calc_max_size(size, center):
(w, h) = size
(x_l, y_t) = center
(x_r, y_b) = (w - x_l, h - y_t)
return (min(x_l, x_r) * 2, min(y_t, y_b) * 2)
ratio = _get_ratio(ratio)
(width_max, height_max) = _calc_max_size(size, center)
width_ratio = round(height_max * ratio)
width = min(width_max, width_ratio)
height = round(width / ratio)
return (center[0] - round(width / 2), center[1] - round(height / 2), width, height)
def expandratio(size, ratio=1.0):
ratio = _get_ratio(ratio)
(w, h) = size
w_r = round(h * ratio)
if w > w_r:
h_r = round(w / ratio)
return (w, h_r, 0, int((h_r - h) / 2))
elif w < w_r:
return (w_r, h, int((w_r - w) / 2), 0)
else:
return (w, h, 0, 0) |
# Copyright (c) 2015, Sofiat Olaosebikan. All Rights Reserved
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class HopcroftKarp(object):
def __init__(self, graph):
"""
:param graph: an unweighted bipartite graph represented as a dictionary.
Vertices in the left and right vertex set must have different labelling
:return: a maximum matching of the given graph represented as a dictionary.
"""
self._matching = {}
self._dfs_paths = []
self._dfs_parent = {}
self._left = set(graph.keys())
self._right = set()
for value in graph.values():
self._right.update(value)
for vertex in self._left:
for neighbour in graph[vertex]:
if neighbour not in graph:
graph[neighbour] = set()
graph[neighbour].add(vertex)
else:
graph[neighbour].add(vertex)
self._graph = graph
def __bfs(self):
layers = []
layer = set()
for vertex in self._left: # for each free vertex in the left vertex set
if vertex not in self._matching: # confirms that the vertex is free
layer.add(vertex)
layers.append(layer)
visited = set() # to keep track of the visited vertices
while True:
# we take the most recent layer in the partitioning on every repeat
layer = layers[-1]
new_layer = set() # new list for subsequent layers
for vertex in layer:
if vertex in self._left: # if true, we traverse unmatched edges to vertices in right
visited.add(vertex)
for neighbour in self._graph[vertex]:
# check if the neighbour is not already visited
# check if vertex is matched or the edge between neighbour and vertex is not matched
if neighbour not in visited and (vertex not in self._matching or neighbour != self._matching[vertex]):
new_layer.add(neighbour)
else: # we traverse matched edges to vertices in left
visited.add(vertex) # we don't want to traverse the vertex again
for neighbour in self._graph[vertex]:
# check if the neighbour is not already visited
# check if vertex is in the matching and if the edge between vertex and neighbour is matched
if neighbour not in visited and (vertex in self._matching and neighbour == self._matching[vertex]):
new_layer.add(neighbour)
layers.append(new_layer) # we add the new layer to the set of layers
# if new_layer is empty, we have to break the BFS while loop....
if len(new_layer) == 0:
return layers # break
# else, we terminate search at the first layer k where one or more free vertices in V are reached
if any(vertex in self._right and vertex not in self._matching for vertex in new_layer):
return layers # break
# break
# --------------------------------------------------------------------------------------------------------------
# if we are able to collate these free vertices, we run DFS recursively on each of them
# this algorithm finds a maximal set of vertex disjoint augmenting paths of length k (shortest path),
# stores them in P and increments M...
# --------------------------------------------------------------------------------------------------------------
def __dfs(self, v, index, layers):
"""
we recursively run dfs on each vertices in free_vertex,
:param v: vertices in free_vertex
:return: True if P is not empty (i.e., the maximal set of vertex-disjoint alternating path of length k)
and false otherwise.
"""
if index == 0:
path = [v]
while self._dfs_parent[v] != v:
path.append(self._dfs_parent[v])
v = self._dfs_parent[v]
self._dfs_paths.append(path)
return True
for neighbour in self._graph[v]: # check the neighbours of vertex
if neighbour in layers[index - 1]:
# if neighbour is in left, we are traversing unmatched edges..
if neighbour in self._dfs_parent:
continue
if (neighbour in self._left and (v not in self._matching or neighbour != self._matching[v])) or \
(neighbour in self._right and (v in self._matching and neighbour == self._matching[v])):
self._dfs_parent[neighbour] = v
if self.__dfs(neighbour, index-1, layers):
return True
return False
def maximum_matching(self):
while True:
layers = self.__bfs()
# we break out of the whole while loop if the most recent layer added to layers is empty
# since if there are no vertices in the recent layer, then there is no way augmenting paths can be found
if len(layers[-1]) == 0:
break
free_vertex = set([vertex for vertex in layers[-1] if vertex not in self._matching])
# the maximal set of vertex-disjoint augmenting path and parent dictionary
# has to be cleared each time the while loop runs
# self._dfs_paths.clear() - .clear() and .copy() attribute works for python 3.3 and above
del self._dfs_paths[:]
self._dfs_parent.clear()
for vertex in free_vertex: # O(m) - every vertex considered once, each edge considered once
# this creates a loop of the vertex to itself in the parent dictionary,
self._dfs_parent[vertex] = vertex
self.__dfs(vertex, len(layers)-1, layers)
# if the set of paths is empty, nothing to add to the matching...break
if len(self._dfs_paths) == 0:
break
# if not, we swap the matched and unmatched edges in the paths formed and add them to the existing matching.
# the paths are augmenting implies the first and start vertices are free. Edges 1, 3, 5, .. are thus matched
for path in self._dfs_paths:
for i in range(len(path)):
if i % 2 == 0:
self._matching[path[i]] = path[i+1]
self._matching[path[i+1]] = path[i]
return self._matching
if __name__ == "__main__":
#graph = {'a': {1}, 'b': {1, 2}, 'c': {1, 2}, 'd': {2, 3, 4}, 'e': {3, 4}, 'f': {4, 5, 6}, 'g': {5, 6, 7}, 'h': {8}}
#print(HopcroftKarp(graph).maximum_matching())
graph = {'a': {2,3}, 'b': {1, 2,4,5}, 'c': {2, 3}, 'd': {2, 3}, 'e': {4, 5}}
print(HopcroftKarp(graph).maximum_matching())
| class Hopcroftkarp(object):
def __init__(self, graph):
"""
:param graph: an unweighted bipartite graph represented as a dictionary.
Vertices in the left and right vertex set must have different labelling
:return: a maximum matching of the given graph represented as a dictionary.
"""
self._matching = {}
self._dfs_paths = []
self._dfs_parent = {}
self._left = set(graph.keys())
self._right = set()
for value in graph.values():
self._right.update(value)
for vertex in self._left:
for neighbour in graph[vertex]:
if neighbour not in graph:
graph[neighbour] = set()
graph[neighbour].add(vertex)
else:
graph[neighbour].add(vertex)
self._graph = graph
def __bfs(self):
layers = []
layer = set()
for vertex in self._left:
if vertex not in self._matching:
layer.add(vertex)
layers.append(layer)
visited = set()
while True:
layer = layers[-1]
new_layer = set()
for vertex in layer:
if vertex in self._left:
visited.add(vertex)
for neighbour in self._graph[vertex]:
if neighbour not in visited and (vertex not in self._matching or neighbour != self._matching[vertex]):
new_layer.add(neighbour)
else:
visited.add(vertex)
for neighbour in self._graph[vertex]:
if neighbour not in visited and (vertex in self._matching and neighbour == self._matching[vertex]):
new_layer.add(neighbour)
layers.append(new_layer)
if len(new_layer) == 0:
return layers
if any((vertex in self._right and vertex not in self._matching for vertex in new_layer)):
return layers
def __dfs(self, v, index, layers):
"""
we recursively run dfs on each vertices in free_vertex,
:param v: vertices in free_vertex
:return: True if P is not empty (i.e., the maximal set of vertex-disjoint alternating path of length k)
and false otherwise.
"""
if index == 0:
path = [v]
while self._dfs_parent[v] != v:
path.append(self._dfs_parent[v])
v = self._dfs_parent[v]
self._dfs_paths.append(path)
return True
for neighbour in self._graph[v]:
if neighbour in layers[index - 1]:
if neighbour in self._dfs_parent:
continue
if neighbour in self._left and (v not in self._matching or neighbour != self._matching[v]) or (neighbour in self._right and (v in self._matching and neighbour == self._matching[v])):
self._dfs_parent[neighbour] = v
if self.__dfs(neighbour, index - 1, layers):
return True
return False
def maximum_matching(self):
while True:
layers = self.__bfs()
if len(layers[-1]) == 0:
break
free_vertex = set([vertex for vertex in layers[-1] if vertex not in self._matching])
del self._dfs_paths[:]
self._dfs_parent.clear()
for vertex in free_vertex:
self._dfs_parent[vertex] = vertex
self.__dfs(vertex, len(layers) - 1, layers)
if len(self._dfs_paths) == 0:
break
for path in self._dfs_paths:
for i in range(len(path)):
if i % 2 == 0:
self._matching[path[i]] = path[i + 1]
self._matching[path[i + 1]] = path[i]
return self._matching
if __name__ == '__main__':
graph = {'a': {2, 3}, 'b': {1, 2, 4, 5}, 'c': {2, 3}, 'd': {2, 3}, 'e': {4, 5}}
print(hopcroft_karp(graph).maximum_matching()) |
class DefaultAlias(object):
''' unless explicitly assigned, this attribute aliases to another. '''
def __init__(self, name):
self.name = name
def __get__(self, inst, cls):
if inst is None:
# attribute accessed on class, return `self' descriptor
return self
return getattr(inst, self.name)
class Alias(DefaultAlias):
''' this attribute unconditionally aliases to another. '''
def __set__(self, inst, value):
setattr(inst, self.name, value)
def __delete__(self, inst):
delattr(inst, self.name)
| class Defaultalias(object):
""" unless explicitly assigned, this attribute aliases to another. """
def __init__(self, name):
self.name = name
def __get__(self, inst, cls):
if inst is None:
return self
return getattr(inst, self.name)
class Alias(DefaultAlias):
""" this attribute unconditionally aliases to another. """
def __set__(self, inst, value):
setattr(inst, self.name, value)
def __delete__(self, inst):
delattr(inst, self.name) |
f1 = open("unprocessed/Cit-HepTh-dates.csv", "w")
f2 = open("unprocessed/Cit-HepTh.csv", "w")
with open("unprocessed/Cit-HepTh-dates.txt", "r") as file:
c = 0
for line in file:
if not c == 0:
l = line.split()
nl = l[0] + "," + l[1] + '\n'
f1.write(nl)
else:
c += 1
with open("unprocessed/Cit-HepTh.txt", "r") as file:
for line in file:
if c > 4:
l = line.split()
nl = l[0] + "," + l[1] + '\n'
f2.write(nl)
else:
c += 1
class Paper:
def __init__(self, id, date, citates):
self.id = id
self.date = date
| f1 = open('unprocessed/Cit-HepTh-dates.csv', 'w')
f2 = open('unprocessed/Cit-HepTh.csv', 'w')
with open('unprocessed/Cit-HepTh-dates.txt', 'r') as file:
c = 0
for line in file:
if not c == 0:
l = line.split()
nl = l[0] + ',' + l[1] + '\n'
f1.write(nl)
else:
c += 1
with open('unprocessed/Cit-HepTh.txt', 'r') as file:
for line in file:
if c > 4:
l = line.split()
nl = l[0] + ',' + l[1] + '\n'
f2.write(nl)
else:
c += 1
class Paper:
def __init__(self, id, date, citates):
self.id = id
self.date = date |
def color_analysis(img):
# obtain the color palatte of the image
palatte = defaultdict(int)
for pixel in img.getdata():
palatte[pixel] += 1
# sort the colors present in the image
sorted_x = sorted(palatte.items(), key=operator.itemgetter(1), reverse = True)
light_shade, dark_shade, shade_count, pixel_limit = 0, 0, 0, 25
for i, x in enumerate(sorted_x[:pixel_limit]):
if all(xx <= 20 for xx in x[0][:3]): ## dull : too much darkness
dark_shade += x[1]
if all(xx >= 240 for xx in x[0][:3]): ## bright : too much whiteness
light_shade += x[1]
shade_count += x[1]
light_percent = round((float(light_shade)/shade_count)*100, 2)
dark_percent = round((float(dark_shade)/shade_count)*100, 2)
return light_percent, dark_percent
| def color_analysis(img):
palatte = defaultdict(int)
for pixel in img.getdata():
palatte[pixel] += 1
sorted_x = sorted(palatte.items(), key=operator.itemgetter(1), reverse=True)
(light_shade, dark_shade, shade_count, pixel_limit) = (0, 0, 0, 25)
for (i, x) in enumerate(sorted_x[:pixel_limit]):
if all((xx <= 20 for xx in x[0][:3])):
dark_shade += x[1]
if all((xx >= 240 for xx in x[0][:3])):
light_shade += x[1]
shade_count += x[1]
light_percent = round(float(light_shade) / shade_count * 100, 2)
dark_percent = round(float(dark_shade) / shade_count * 100, 2)
return (light_percent, dark_percent) |
#Write a program using while loops that asks the user for a positive integer 'n' and prints
#a triangle using numbers from 1 to 'n'.
number = int(input("Give me a number: "))
count = 0
for x in range(1, number+1):
count += 1
dibujar = str(x)
print (dibujar*count)
| number = int(input('Give me a number: '))
count = 0
for x in range(1, number + 1):
count += 1
dibujar = str(x)
print(dibujar * count) |
"""
factorial() is function factorial(number),
take the number parameter been passed and
return the factorial of it
"""
def factorial(number):
if number == 0:
return 1
else:
ans = number * factorial(number - 1)
return ans
| """
factorial() is function factorial(number),
take the number parameter been passed and
return the factorial of it
"""
def factorial(number):
if number == 0:
return 1
else:
ans = number * factorial(number - 1)
return ans |
class Solution:
def getFactors(self, n):
"""
:type n: int
:rtype: List[List[int]]
"""
ans = []
self.helper(n, [], n, ans)
return ans
def helper(self, n, factors, left, ans):
if left == 1:
if factors:
ans.append(factors)
else:
lo = 2 if not factors else factors[-1]
for i in range(lo, min(n, left + 1)):
if left % i == 0:
self.helper(n, factors + [i], left // i, ans)
| class Solution:
def get_factors(self, n):
"""
:type n: int
:rtype: List[List[int]]
"""
ans = []
self.helper(n, [], n, ans)
return ans
def helper(self, n, factors, left, ans):
if left == 1:
if factors:
ans.append(factors)
else:
lo = 2 if not factors else factors[-1]
for i in range(lo, min(n, left + 1)):
if left % i == 0:
self.helper(n, factors + [i], left // i, ans) |
class Board(object):
def __init__(self, boards):
if len(boards) != 25:
raise Exception(f"Invalid board : {len(boards)}")
self.boards = boards
self.marked = [False] * 25
@staticmethod
def parse(lines):
boards = []
for line in lines:
boards.extend([int(x) for x in line.split()])
return Board(boards)
def mark(self, nb):
for i, n in enumerate(self.boards):
if n == nb:
self.marked[i] = True
def won(self):
# row
for i in range(0, 25, 5):
if all(self.marked[i: i + 5]):
return True
# col
for i in range(5):
if all(self.marked[i::5]):
return True
return False
def unmarked(self):
u = []
for i, m in enumerate(self.marked):
if not m:
u.append(self.boards[i])
return u
class Bingo(object):
def __init__(self, order, boards):
self.order = order
self.boards = boards
@staticmethod
def parse(text: str):
lines = text.splitlines()
order = [int(x) for x in lines[0].split(",")]
boards = []
l = 2
while True:
boards.append(Board.parse(lines[l: l + 5]))
l += 6
if l >= len(lines):
break
return Bingo(order, boards)
def play(self):
for o in self.order:
for board in self.boards:
board.mark(o)
if board.won():
return sum(board.unmarked()) * o
if __name__ == "__main__":
little = """7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1
22 13 17 11 0
8 2 23 4 24
21 9 14 16 7
6 10 3 18 5
1 12 20 15 19
3 15 0 2 22
9 18 13 17 5
19 8 7 25 23
20 11 10 24 4
14 21 16 12 6
14 21 17 24 4
10 16 15 9 19
18 8 23 26 20
22 11 13 6 5
2 0 12 3 7
"""
bingo = Bingo.parse(little)
print(bingo.play())
with open("../input", "r") as f:
bingo = Bingo.parse(f.read())
print(bingo.play())
| class Board(object):
def __init__(self, boards):
if len(boards) != 25:
raise exception(f'Invalid board : {len(boards)}')
self.boards = boards
self.marked = [False] * 25
@staticmethod
def parse(lines):
boards = []
for line in lines:
boards.extend([int(x) for x in line.split()])
return board(boards)
def mark(self, nb):
for (i, n) in enumerate(self.boards):
if n == nb:
self.marked[i] = True
def won(self):
for i in range(0, 25, 5):
if all(self.marked[i:i + 5]):
return True
for i in range(5):
if all(self.marked[i::5]):
return True
return False
def unmarked(self):
u = []
for (i, m) in enumerate(self.marked):
if not m:
u.append(self.boards[i])
return u
class Bingo(object):
def __init__(self, order, boards):
self.order = order
self.boards = boards
@staticmethod
def parse(text: str):
lines = text.splitlines()
order = [int(x) for x in lines[0].split(',')]
boards = []
l = 2
while True:
boards.append(Board.parse(lines[l:l + 5]))
l += 6
if l >= len(lines):
break
return bingo(order, boards)
def play(self):
for o in self.order:
for board in self.boards:
board.mark(o)
if board.won():
return sum(board.unmarked()) * o
if __name__ == '__main__':
little = '7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1\n\n22 13 17 11 0\n 8 2 23 4 24\n21 9 14 16 7\n 6 10 3 18 5\n 1 12 20 15 19\n\n 3 15 0 2 22\n 9 18 13 17 5\n19 8 7 25 23\n20 11 10 24 4\n14 21 16 12 6\n\n14 21 17 24 4\n10 16 15 9 19\n18 8 23 26 20\n22 11 13 6 5\n 2 0 12 3 7\n '
bingo = Bingo.parse(little)
print(bingo.play())
with open('../input', 'r') as f:
bingo = Bingo.parse(f.read())
print(bingo.play()) |
{
"format_version": "1.16.0",
"minecraft:entity": {
"description": {
"identifier": f"{namespace}:pig_{color}",
"is_spawnable": true,
"is_summonable": true,
"is_experimental": false
},
"components": {
"minecraft:type_family": {
"family": [
f"pig_{color}",
"pig",
"mob"
]
},
"minecraft:breathable": {
"total_supply": 15,
"suffocate_time": 0
},
"minecraft:health": {
"value": 10,
"max": 10
},
"minecraft:hurt_on_condition": {
"damage_conditions": [
{
"filters": {
"test": "in_lava",
"subject": "self",
"operator": "==",
"value": true
},
"cause": "lava",
"damage_per_tick": 4
}
]
},
"minecraft:movement": {
"value": 0.25
},
"minecraft:navigation.walk": {
"can_path_over_water": true,
"avoid_water": true,
"avoid_damage_blocks": true
},
"minecraft:movement.basic": {},
"minecraft:jump.static": {},
"minecraft:can_climb": {},
"minecraft:collision_box": {
"width": 0.9,
"height": 0.9
},
"minecraft:despawn": {
"despawn_from_distance": {}
},
"minecraft:behavior.float": {
"priority": 2
},
"minecraft:behavior.panic": {
"priority": 3,
"speed_multiplier": 1.25
},
"minecraft:behavior.random_stroll": {
"priority": 7,
"speed_multiplier": 1.0
},
"minecraft:behavior.look_at_player": {
"priority": 8,
"look_distance": 6.0,
"probability": 0.02
},
"minecraft:behavior.random_look_around": {
"priority": 9
},
"minecraft:physics": {},
"minecraft:pushable": {
"is_pushable": true,
"is_pushable_by_piston": true
},
"minecraft:conditional_bandwidth_optimization": {}
}
}
} | {'format_version': '1.16.0', 'minecraft:entity': {'description': {'identifier': f'{namespace}:pig_{color}', 'is_spawnable': true, 'is_summonable': true, 'is_experimental': false}, 'components': {'minecraft:type_family': {'family': [f'pig_{color}', 'pig', 'mob']}, 'minecraft:breathable': {'total_supply': 15, 'suffocate_time': 0}, 'minecraft:health': {'value': 10, 'max': 10}, 'minecraft:hurt_on_condition': {'damage_conditions': [{'filters': {'test': 'in_lava', 'subject': 'self', 'operator': '==', 'value': true}, 'cause': 'lava', 'damage_per_tick': 4}]}, 'minecraft:movement': {'value': 0.25}, 'minecraft:navigation.walk': {'can_path_over_water': true, 'avoid_water': true, 'avoid_damage_blocks': true}, 'minecraft:movement.basic': {}, 'minecraft:jump.static': {}, 'minecraft:can_climb': {}, 'minecraft:collision_box': {'width': 0.9, 'height': 0.9}, 'minecraft:despawn': {'despawn_from_distance': {}}, 'minecraft:behavior.float': {'priority': 2}, 'minecraft:behavior.panic': {'priority': 3, 'speed_multiplier': 1.25}, 'minecraft:behavior.random_stroll': {'priority': 7, 'speed_multiplier': 1.0}, 'minecraft:behavior.look_at_player': {'priority': 8, 'look_distance': 6.0, 'probability': 0.02}, 'minecraft:behavior.random_look_around': {'priority': 9}, 'minecraft:physics': {}, 'minecraft:pushable': {'is_pushable': true, 'is_pushable_by_piston': true}, 'minecraft:conditional_bandwidth_optimization': {}}}} |
"""
The :mod:`stan.proc_functions.merge` module is the proc merge function
"""
def merge(dt_left, dt_right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True):
return dt_left.merge(dt_right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True)
| """
The :mod:`stan.proc_functions.merge` module is the proc merge function
"""
def merge(dt_left, dt_right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True):
return dt_left.merge(dt_right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True) |
"""
Constants
Author: Brady Volkmann
Date: 6/21/2019
Constants file for the Tektroniks TTR506 VNA
"""
# INITIALIZE CONSTANTS ======================================================
# configure acquisition parameters
startFreqSweep = '50 MHz'
stopFreqSweep = '6 GHz'
sweepDelay = '1s'
snpFilename = 'test.s1p'
sParam = 'S21'
| """
Constants
Author: Brady Volkmann
Date: 6/21/2019
Constants file for the Tektroniks TTR506 VNA
"""
start_freq_sweep = '50 MHz'
stop_freq_sweep = '6 GHz'
sweep_delay = '1s'
snp_filename = 'test.s1p'
s_param = 'S21' |
description = 'The outside temperature on the campus'
group = 'lowlevel'
devices = dict(
OutsideTemp = device('nicos.devices.entangle.Sensor',
description = 'Outdoor air temperature',
tangodevice = 'tango://ictrlfs.ictrl.frm2:10000/frm2/meteo/temp',
),
)
| description = 'The outside temperature on the campus'
group = 'lowlevel'
devices = dict(OutsideTemp=device('nicos.devices.entangle.Sensor', description='Outdoor air temperature', tangodevice='tango://ictrlfs.ictrl.frm2:10000/frm2/meteo/temp')) |
#This is just a demo server file to demonstrate the working of Telopy Backend
#This program consist the last line of Telopy
#import time
#import sys
#cursor = ['|','/','-','\\']
print('Telopy Server is Live ',end="")
#while True:
# for i in cursor:
# print(i+"\x08",end="")
# sys.stdout.flush()
# time.sleep(0.1) | print('Telopy Server is Live ', end='') |
class VersioningError(Exception):
pass
class ClassNotVersioned(VersioningError):
pass
class ImproperlyConfigured(VersioningError):
pass
| class Versioningerror(Exception):
pass
class Classnotversioned(VersioningError):
pass
class Improperlyconfigured(VersioningError):
pass |
def create_groups(items, n):
"""Splits items into n groups of equal size, although the last one may be shorter."""
# determine the size each group should be
try:
# this line could cause a ZeroDivisionError exception
size = len(items) // n
except ZeroDivisionError:
print('WARNING: Returning empty list. Please use a nonzero number.')
return []
else:
# create each group and append to a new list
groups = []
for i in range(0, len(items), size):
groups.append(items[i:i + size])
return groups
finally:
# print the number of groups and return groups
print("{} groups returned.".format(n))
print("Creating 6 groups...")
for group in create_groups(range(32), 6):
print(list(group))
print("\nCreating 0 groups...")
for group in create_groups(range(32), 0):
print(list(group))
| def create_groups(items, n):
"""Splits items into n groups of equal size, although the last one may be shorter."""
try:
size = len(items) // n
except ZeroDivisionError:
print('WARNING: Returning empty list. Please use a nonzero number.')
return []
else:
groups = []
for i in range(0, len(items), size):
groups.append(items[i:i + size])
return groups
finally:
print('{} groups returned.'.format(n))
print('Creating 6 groups...')
for group in create_groups(range(32), 6):
print(list(group))
print('\nCreating 0 groups...')
for group in create_groups(range(32), 0):
print(list(group)) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@created: 22.01.20
@author: felix
"""
NORMALIZE = False
class EasyDict(dict):
def __init__(self, *args, **kwargs):
global NORMALIZE
NORMALIZE = kwargs.pop('normalize', False)
super(EasyDict, self).__init__(*args, **kwargs)
def __getattr__(self, item):
if NORMALIZE and item.replace('_', ' ') in self.keys():
return self.get(item)
elif item not in self.keys():
raise AttributeError()
else:
return self.get(item)
def __setattr__(self, key, value):
if NORMALIZE:
key = key.replace('_', ' ')
self.__setitem__(key, value)
def get(self, k, *args):
if NORMALIZE:
k = k.replace('_', ' ')
if k not in self.keys() and len(args) == 1:
return args[0]
else:
return super().get(k)
| """
@created: 22.01.20
@author: felix
"""
normalize = False
class Easydict(dict):
def __init__(self, *args, **kwargs):
global NORMALIZE
normalize = kwargs.pop('normalize', False)
super(EasyDict, self).__init__(*args, **kwargs)
def __getattr__(self, item):
if NORMALIZE and item.replace('_', ' ') in self.keys():
return self.get(item)
elif item not in self.keys():
raise attribute_error()
else:
return self.get(item)
def __setattr__(self, key, value):
if NORMALIZE:
key = key.replace('_', ' ')
self.__setitem__(key, value)
def get(self, k, *args):
if NORMALIZE:
k = k.replace('_', ' ')
if k not in self.keys() and len(args) == 1:
return args[0]
else:
return super().get(k) |
fig, axs = plt.subplots(1, 2, figsize=(20,5))
p1=boroughs4.plot(column='Controlled drugs',ax=axs[0],cmap='Blues',legend=True);
p2=boroughs4.plot(column='Stolen goods',ax=axs[1], cmap='Reds',legend=True);
axs[0].set_title('Controlled drugs', fontdict={'fontsize': '12', 'fontweight' : '5'});
axs[1].set_title('Stolen goods', fontdict={'fontsize': '12', 'fontweight' : '5'});
| (fig, axs) = plt.subplots(1, 2, figsize=(20, 5))
p1 = boroughs4.plot(column='Controlled drugs', ax=axs[0], cmap='Blues', legend=True)
p2 = boroughs4.plot(column='Stolen goods', ax=axs[1], cmap='Reds', legend=True)
axs[0].set_title('Controlled drugs', fontdict={'fontsize': '12', 'fontweight': '5'})
axs[1].set_title('Stolen goods', fontdict={'fontsize': '12', 'fontweight': '5'}) |
class Reaction:
def __init__(self):
pass
def from_json(json):
reaction = Reaction()
reaction.reaction = json["reaction"].encode("unicode-escape")
reaction.actor = json["actor"]
return reaction
def list_from_json(json):
reactions = []
for child in json:
reactions.append(Reaction.from_json(child))
return reactions | class Reaction:
def __init__(self):
pass
def from_json(json):
reaction = reaction()
reaction.reaction = json['reaction'].encode('unicode-escape')
reaction.actor = json['actor']
return reaction
def list_from_json(json):
reactions = []
for child in json:
reactions.append(Reaction.from_json(child))
return reactions |
class Triangulo():
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
def semelhantes(self, triangulo):
a, b, c = triangulo.a, triangulo.b, triangulo.c
if ((a % self.a) == 0 ) and ((b % self.b) == 0) and ((c % self.c) == 0):
return True
elif ((a // self.a) == 0 ) and ((b // self.b) == 0) and ((c // self.c) == 0):
return True
return False
# t1 = Triangulo(2, 2, 2)
# t2 = Triangulo(4, 4, 4)
# print(t1.semelhantes(t2))
# t3 = Triangulo(3, 4, 5)
# t4 = Triangulo(3, 4, 5)
# print(t3.semelhantes(t4))
# t5 = Triangulo(6, 8, 10)
# t6 = Triangulo(3, 4, 5)
# print(t5.semelhantes(t6)) | class Triangulo:
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
def semelhantes(self, triangulo):
(a, b, c) = (triangulo.a, triangulo.b, triangulo.c)
if a % self.a == 0 and b % self.b == 0 and (c % self.c == 0):
return True
elif a // self.a == 0 and b // self.b == 0 and (c // self.c == 0):
return True
return False |
age = input("Please enter your age: ")
if age.isdigit():
print(age)
age = int(input("Please enter your age: "))
while(True):
try:
age = int(input("Please enter your age: "))
except ValueError:
print("Sorry, I didn't understand that.")
continue
else:
break
raise Exception("arguments") | age = input('Please enter your age: ')
if age.isdigit():
print(age)
age = int(input('Please enter your age: '))
while True:
try:
age = int(input('Please enter your age: '))
except ValueError:
print("Sorry, I didn't understand that.")
continue
else:
break
raise exception('arguments') |
def fasttsq(M,psi,Y,y,m,c,o,plm):
#TODO
raise NotImplementedError
def fasttsq3d(M,psi,Y,y,m,c,o,plm):
#TODO
raise NotImplementedError
def fasttsqp(M,psi,Y,y,m,c,o,plm):
#TODO
raise NotImplementedError
def fastq(M,psi,Y,y,m,c,o,plm):
#TODO
raise NotImplementedError
def fastq3d(M,psi,Y,y,m,c,o,plm):
#TODO
raise NotImplementedError | def fasttsq(M, psi, Y, y, m, c, o, plm):
raise NotImplementedError
def fasttsq3d(M, psi, Y, y, m, c, o, plm):
raise NotImplementedError
def fasttsqp(M, psi, Y, y, m, c, o, plm):
raise NotImplementedError
def fastq(M, psi, Y, y, m, c, o, plm):
raise NotImplementedError
def fastq3d(M, psi, Y, y, m, c, o, plm):
raise NotImplementedError |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.