r = s = t = 1  #--- I1
print(r + s + t)
r = s = t = '1'  #--- I2
print(r + s + t)

"""Genetic Programming in Python, with a scikit-learn inspired API

``gplearn`` is a set of algorithms for learning genetic programming models.
"""

__version__ = '0.4.dev0'

__all__ = ['genetic', 'functions', 'fitness']

print("GPLEARN MOD")
n = int(input('Enter the number whose multiplication table you want: '))
for c in range(1, 11):
    print('{} X {} = {}'.format(n, c, n * c))
_base_ = '../../base.py'

# model settings
model = dict(
    type='Classification',
    pretrained=None,
    backbone=dict(
        type='ResNet',
        depth=50,
        out_indices=[4],  # 4: stage-4
        norm_cfg=dict(type='BN')),
    head=dict(
        type='ClsHead',
        with_avg_pool=True,
        in_channels=2048,
        num_classes=10))

# dataset settings
data_source_cfg = dict(type='Cifar10', root='/root/data/zq/data/cifar/')
dataset_type = 'ClassificationDataset'
img_norm_cfg = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.2023, 0.1994, 0.201])
train_pipeline = [
    dict(type='RandomCrop', size=32, padding=4),
    dict(type='RandomHorizontalFlip'),
    dict(type='ToTensor'),
    dict(type='Normalize', **img_norm_cfg),
]
test_pipeline = [
    dict(type='ToTensor'),
    dict(type='Normalize', **img_norm_cfg),
]
data = dict(
    imgs_per_gpu=128,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        data_source=dict(split='train', **data_source_cfg),
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        data_source=dict(split='test', **data_source_cfg),
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        data_source=dict(split='test', **data_source_cfg),
        pipeline=test_pipeline))

# additional hooks
custom_hooks = [
    dict(
        type='ValidateHook',
        dataset=data['val'],
        initial=True,
        interval=10,
        imgs_per_gpu=128,
        workers_per_gpu=8,
        eval_param=dict(topk=(1, 5)))
]

# optimizer
optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0005)

# learning policy
lr_config = dict(policy='step', step=[150, 250])
checkpoint_config = dict(interval=50)

# runtime settings
total_epochs = 350
class BruteForceProtectionException(Exception):
    pass


class BruteForceProtectionBanException(BruteForceProtectionException):
    pass


class BruteForceProtectionCaptchaException(BruteForceProtectionException):
    pass
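# A minimal usage sketch, not from the original source: because both specific
# exceptions inherit from BruteForceProtectionException, callers can catch the
# base class to handle either a ban or a captcha challenge uniformly. The
# check_attempts helper and its thresholds are illustrative assumptions.
def check_attempts(failed_attempts):
    if failed_attempts > 10:
        raise BruteForceProtectionBanException("too many failed logins")
    if failed_attempts > 3:
        raise BruteForceProtectionCaptchaException("captcha required")


try:
    check_attempts(5)
except BruteForceProtectionException as exc:
    print("brute force protection triggered:", exc)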
import re

string = "John Doe lives at 221B Baker Street."
pattern = re.compile(r"""
    ([a-zA-Z ]+)      # Save as many letters and spaces as possible to group 1
    \ lives\ at\      # Match " lives at "
    (?P<address>.*)   # Save everything in between as a group named `address`
    \.                # Match the period at the end
    """, re.VERBOSE)
new_string = re.sub(pattern, r"\g<address> is occupied by \1.", string)
print("New string is '{0}'".format(new_string))
class LanguageModel:
    def infer(self, x):
        """Run language model on input x

        Args:
            x (str): Prompt to run inference on

        Returns:
            (str) Output of inference
        """
        # Placeholder implementation: echo the prompt back until a real model is wired in.
        return x
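# A hedged usage sketch, not part of the original snippet: a subclass that
# overrides infer() with a toy "model", plus a call site. EchoUppercaseModel
# and the example prompt are names invented here for illustration.
class EchoUppercaseModel(LanguageModel):
    def infer(self, x):
        # Toy behaviour standing in for a real language model.
        return x.upper()


model = EchoUppercaseModel()
print(model.infer("hello, world"))  # -> "HELLO, WORLD"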
# Copyright 2016 x620 <https://github.com/x620>
# Copyright 2016,2020 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
# Copyright 2018 Ruslan Ronzhin
# Copyright 2019 Artem Rafailov <https://it-projects.info/team/Ommo73/>
# License LGPL-3.0 (https://www.gnu.org/licenses/lgpl.html).
{
    "name": """Show message recipients""",
    "summary": """Allows you to be sure that all discussion participants were notified""",
    "category": "Discuss",
    "images": ["images/1.png"],
    "version": "12.0.1.1.1",
    "author": "IT-Projects LLC, Pavel Romanchenko",
    "support": "apps@itpp.dev",
    "website": "https://itpp.dev",
    "license": "LGPL-3",
    "price": 40.00,
    "currency": "EUR",
    "depends": ["mail"],
    "external_dependencies": {"python": [], "bin": []},
    "data": ["templates.xml"],
    "qweb": ["static/src/xml/recipient.xml"],
    "demo": [],
    "installable": True,
    "auto_install": False,
}

"""Config file tools for edx_lint."""


def merge_configs(main, tweaks):
    """Merge tweaks into a main config file."""
    for section in tweaks.sections():
        for option in tweaks.options(section):
            value = tweaks.get(section, option)
            if option.endswith("+"):
                option = option[:-1]
                value = main.get(section, option) + value
            main.set(section, option, value)
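# A small usage sketch added here for illustration (not from edx_lint itself):
# merge a tweaks config into a main config, where an option ending in "+" is
# appended to the main value instead of replacing it. The sample section and
# option values below are assumptions chosen for the demo.
import configparser

main = configparser.RawConfigParser()
main.read_string("[pylint]\ndisable = C0111\n")

tweaks = configparser.RawConfigParser()
tweaks.read_string("[pylint]\ndisable+ = ,W0613\n")

merge_configs(main, tweaks)
print(main.get("pylint", "disable"))  # -> "C0111,W0613"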
model = Model()
i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}")
axis = Parameter("axis", "TENSOR_INT32", "{1}", [2])
keepDims = False
output = Output("output", "TENSOR_FLOAT32", "{1, 2, 1}")

model = model.Operation("REDUCE_MIN", i1, axis, keepDims).To(output)

# Example 1. Input in operand 0,
input0 = {i1:  # input 0
          [2.0, 1.0, 3.0, 4.0]}

output0 = {output:  # output 0
           [1.0, 3.0]}

# Instantiate an example
Example((input0, output0))
# https://leetcode.com/problems/subrectangle-queries
class SubrectangleQueries:
    def __init__(self, rectangle):
        self.rectangle = rectangle

    def updateSubrectangle(self, row1, col1, row2, col2, newValue):
        for row in range(row1, row2 + 1):
            for col in range(col1, col2 + 1):
                self.rectangle[row][col] = newValue

    def getValue(self, row, col):
        return self.rectangle[row][col]
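# Illustrative usage (the grid values are chosen here, not taken from the
# original file): update a sub-rectangle in place and read cells back.
grid = [
    [1, 2, 1],
    [4, 3, 4],
    [3, 2, 1],
    [1, 1, 1],
]
queries = SubrectangleQueries(grid)
print(queries.getValue(0, 2))              # -> 1
queries.updateSubrectangle(0, 0, 3, 2, 5)  # fill rows 0..3, cols 0..2 with 5
print(queries.getValue(0, 2))              # -> 5
print(queries.getValue(3, 1))              # -> 5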
entries = [ { "env-title": "atari-alien", "env-variant": "No-op start", "score": 6482.10, }, { "env-title": "atari-amidar", "env-variant": "No-op start", "score": 833, }, { "env-title": "atari-assault", "env-variant": "No-op start", "score": 11013.50, }, { "env-title": "atari-asterix", "env-variant": "No-op start", "score": 36238.50, }, { "env-title": "atari-asteroids", "env-variant": "No-op start", "score": 2780.40, }, { "env-title": "atari-atlantis", "env-variant": "No-op start", "score": 308258, }, { "env-title": "atari-bank-heist", "env-variant": "No-op start", "score": 988.70, }, { "env-title": "atari-battle-zone", "env-variant": "No-op start", "score": 61220, }, { "env-title": "atari-beam-rider", "env-variant": "No-op start", "score": 8566.50, }, { "env-title": "atari-berzerk", "env-variant": "No-op start", "score": 1641.40, }, { "env-title": "atari-bowling", "env-variant": "No-op start", "score": 75.40, }, { "env-title": "atari-boxing", "env-variant": "No-op start", "score": 99.40, }, { "env-title": "atari-breakout", "env-variant": "No-op start", "score": 518.40, }, { "env-title": "atari-centipede", "env-variant": "No-op start", "score": 3402.80, }, { "env-title": "atari-chopper-command", "env-variant": "No-op start", "score": 37568, }, { "env-title": "atari-crazy-climber", "env-variant": "No-op start", "score": 194347, }, { "env-title": "atari-defender", "env-variant": "No-op start", "score": 113128, }, { "env-title": "atari-demon-attack", "env-variant": "No-op start", "score": 100189, }, { "env-title": "atari-double-dunk", "env-variant": "No-op start", "score": 11.40, }, { "env-title": "atari-enduro", "env-variant": "No-op start", "score": 2230.10, }, { "env-title": "atari-fishing-derby", "env-variant": "No-op start", "score": 23.20, }, { "env-title": "atari-freeway", "env-variant": "No-op start", "score": 31.40, }, { "env-title": "atari-frostbite", "env-variant": "No-op start", "score": 8042.10, }, { "env-title": "atari-gopher", "env-variant": "No-op start", "score": 69135.10, }, { "env-title": "atari-gravitar", "env-variant": "No-op start", "score": 1073.80, }, { "env-title": "atari-hero", "env-variant": "No-op start", "score": 35542.20, }, { "env-title": "atari-ice-hockey", "env-variant": "No-op start", "score": 3.40, }, { "env-title": "atari-jamesbond", "env-variant": "No-op start", "score": 7869.20, }, { "env-title": "atari-kangaroo", "env-variant": "No-op start", "score": 10484.50, }, { "env-title": "atari-krull", "env-variant": "No-op start", "score": 9930.80, }, { "env-title": "atari-kung-fu-master", "env-variant": "No-op start", "score": 59799.50, }, { "env-title": "atari-montezuma-revenge", "env-variant": "No-op start", "score": 2643.50, }, { "env-title": "atari-ms-pacman", "env-variant": "No-op start", "score": 2724.30, }, { "env-title": "atari-name-this-game", "env-variant": "No-op start", "score": 9907.20, }, { "env-title": "atari-phoenix", "env-variant": "No-op start", "score": 40092.20, }, { "env-title": "atari-pitfall", "env-variant": "No-op start", "score": -3.50, }, { "env-title": "atari-pong", "env-variant": "No-op start", "score": 20.70, }, { "env-title": "atari-private-eye", "env-variant": "No-op start", "score": 15177.10, }, { "env-title": "atari-qbert", "env-variant": "No-op start", "score": 22956.50, }, { "env-title": "atari-riverraid", "env-variant": "No-op start", "score": 16608.30, }, { "env-title": "atari-road-runner", "env-variant": "No-op start", "score": 71168, }, { "env-title": "atari-robotank", "env-variant": "No-op start", "score": 68.50, }, { 
"env-title": "atari-seaquest", "env-variant": "No-op start", "score": 8425.80, }, { "env-title": "atari-skiing", "env-variant": "No-op start", "score": -10753.40, }, { "env-title": "atari-solaris", "env-variant": "No-op start", "score": 2760, }, { "env-title": "atari-space-invaders", "env-variant": "No-op start", "score": 2448.60, }, { "env-title": "atari-star-gunner", "env-variant": "No-op start", "score": 70038, }, { "env-title": "atari-surround", "env-variant": "No-op start", "score": 6.70, }, { "env-title": "atari-tennis", "env-variant": "No-op start", "score": 23.30, }, { "env-title": "atari-time-pilot", "env-variant": "No-op start", "score": 19401, }, { "env-title": "atari-tutankham", "env-variant": "No-op start", "score": 272.60, }, { "env-title": "atari-up-n-down", "env-variant": "No-op start", "score": 64354.20, }, { "env-title": "atari-venture", "env-variant": "No-op start", "score": 1597.50, }, { "env-title": "atari-video-pinball", "env-variant": "No-op start", "score": 469366, }, { "env-title": "atari-wizard-of-wor", "env-variant": "No-op start", "score": 13170.50, }, { "env-title": "atari-yars-revenge", "env-variant": "No-op start", "score": 102760, }, { "env-title": "atari-zaxxon", "env-variant": "No-op start", "score": 25215.50, }, ]
class BaseViewTemplate():
    def get_template(self):
        if self.request.user.is_authenticated:
            template = "core/base.html"
        else:
            template = "core/base-nav.html"
        return template
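# A minimal sketch of how a mixin like this could be exercised, invented for
# illustration only; the original project presumably uses Django's real
# request/user objects rather than the FakeView stand-in below.
from types import SimpleNamespace


class FakeView(BaseViewTemplate):
    def __init__(self, is_authenticated):
        self.request = SimpleNamespace(
            user=SimpleNamespace(is_authenticated=is_authenticated)
        )


print(FakeView(is_authenticated=True).get_template())   # -> core/base.html
print(FakeView(is_authenticated=False).get_template())  # -> core/base-nav.html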
class Parent(object):
    """A simple example class"""  # docstring ("""...""") at the start of the class definition

    def __init__(self):  # constructor (initializer)
        self.name = "Kim"

    def override(self):  # override() method
        print("PARENT override()")

    def implicit(self):  # implicit() method
        print("PARENT implicit()")

    def altered(self):  # altered() method
        print("PARENT altered()")


class Child(Parent):
    def __init__(self):  # child class constructor
        super().__init__()  # call the parent class constructor
        self.blood = "O"  # add a blood attribute

    def override(self):
        print("CHILD override()")  # override() method

    def altered(self):  # altered() method
        print("CHILD, BEFORE PARENT altered()")
        super(Child, self).altered()  # call the parent class method via super(ChildClass, self)
        print("CHILD, AFTER PARENT altered()")


dad = Parent()  # create an instance of the class
son = Child()   # create an instance of the class

dad.implicit()
son.implicit()

dad.override()
son.override()

dad.altered()
son.altered()
{
    "roadMapId" : "2",
    "mapIds" : {
        "1" : {
            "parameter" : [ "-l" ],
            "code" : "import json\ndef eventHandler(event, context, callback):\n\tjsonString = json.dumps(event)\n\tprint(jsonString)\n\tif event[\"present\"] == \"person\":\n\t\tprint(\"OK\")\n\telse:\n\t\tprint(\"None\")",
            "deviceId" : "deviceId1",
            "serverId" : "serverId1",
            "brokerId" : "brokerId1"
        }
    }
}
# program to convert API text file to markdown for wiki.
all_apis = []

with open("stepspy_api.txt", "rt") as fid_api:
    api = []
    line = fid_api.readline().strip()
    api.append(line)
    while True:
        line = fid_api.readline()
        if len(line) == 0:
            if len(api) != 0:
                all_apis.append(tuple(api))
            break
        else:
            line = line.strip()
            if not line.startswith("API "):
                api.append(line)
            else:
                all_apis.append(tuple(api))
                api = []
                api.append(line)

with open("stepspy_api.md", "wt") as fid_md:
    for api in all_apis:
        current_cat = "count"
        for line in api:
            if line.startswith("API "):
                # fid_md.write("# "+line+": ")
                fid_md.write("# ")
                current_cat = "format"
                continue
            if current_cat == "format":
                api_name = line.strip("Format:").strip()
                api_name = api_name[:api_name.find("(")]
                fid_md.write(api_name + "\n")
                fid_md.write(line + " \n")
                current_cat = "description"
                continue
            if current_cat == "description":
                fid_md.write(line + " \n")
                if line.startswith("Args:"):
                    current_cat = "args"
                continue
            if current_cat == "args":
                if not line.startswith("Rets:"):
                    fid_md.write("> " + line + " \n")
                else:
                    fid_md.write("\n" + line + " \n")
                    current_cat = "rets"
                continue
            if current_cat == "rets":
                if not line.startswith("Example:") and not line.startswith("Tips:"):
                    fid_md.write("> " + line + " \n")
                else:
                    fid_md.write("\n" + line + " \n")
                    if line.startswith("Tips:"):
                        current_cat = "tips"
                    else:
                        fid_md.write("```python\n")
                        current_cat = "example"
                continue
            if current_cat == "tips":
                if not line.startswith("Example:"):
                    fid_md.write("> " + line + " \n")
                else:
                    fid_md.write("\n" + line + " \n")
                    fid_md.write("```python\n")
                    current_cat = "example"
                continue
            if current_cat == "example":
                if len(line) != 0:
                    fid_md.write(line + " \n")
                continue
        if current_cat == "example":
            fid_md.write("```\n\n")
        else:
            fid_md.write("\n\n")
type_input = input()
symbol = input()


def int_type(num):
    number = int(num)
    result = number * 2
    print(result)


def real_type(num):
    number = float(num)
    result = number * 1.5
    print(f"{result:.2f}")


def string_type(text):
    string = "$" + text + "$"
    print(string)


if type_input == "int":
    int_type(symbol)
elif type_input == "real":
    real_type(symbol)
else:
    string_type(symbol)
class Settings:
    '''A class to store all of the game's settings.'''

    def __init__(self):
        # Screen settings
        self.screen_width = 1200
        self.screen_height = 800
        self.bg_color = (230, 230, 230)
        self.score_scale = 1.5

        # Bullet settings
        self.bullet_width = 300
        self.bullet_height = 15
        self.bullet_color = (60, 60, 60)
        self.bullets_allowed = 3

        # Alien settings
        self.fleet_drop_speed = 10

        # Ship settings
        self.ship_limit = 3

        # Dynamic settings
        self.speedup_scale_dict = {"Easy": 1.05, "Medium": 1.2, "Hard": 1.5}
        self.score_scale_dict = {"Easy": 1.5, "Medium": 2.0, "Hard": 3.0}
        self.alien_points_dict = {"Easy": 50, "Medium": 75, "Hard": 125}
        self.alien_speed_dict = {"Easy": 1.0, "Medium": 1.2, "Hard": 1.5}

        self.initialize_dynamic_settings()

    def initialize_dynamic_settings(self, mode="Easy"):
        self.ship_speed = 1.5
        self.bullet_speed = 3.0
        self.alien_speed = self.alien_speed_dict[mode]
        # 1 means the fleet moves right; -1 means it moves left
        self.fleet_direction = 1
        self.alien_points = self.alien_points_dict[mode]
        self.score_scale = self.score_scale_dict[mode]
        self.speedup_scale = self.speedup_scale_dict[mode]

    def increase_speed(self):
        self.ship_speed *= self.speedup_scale
        self.bullet_speed *= self.speedup_scale
        self.alien_speed *= self.speedup_scale
        self.alien_points = int(self.alien_points * self.score_scale)
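# An illustrative usage sketch, not from the original game code: pick a
# difficulty, then apply a speed-up as the player clears a wave.
settings = Settings()
settings.initialize_dynamic_settings(mode="Hard")
print(settings.alien_speed, settings.alien_points)            # -> 1.5 125

settings.increase_speed()
print(round(settings.alien_speed, 3), settings.alien_points)  # -> 2.25 375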
class Wrapper:
    "Wrapper to disable commit in sqla"

    def __init__(self, obj):
        self.obj = obj

    def __getattr__(self, attr):
        if attr in ["commit", "rollback"]:
            return lambda *args, **kwargs: None
        obj = getattr(self.obj, attr)
        if attr not in ["cursor", "execute"]:
            return obj
        if attr == "cursor":
            return type(self)(obj)
        return self.wrapper(obj)

    def wrapper(self, obj):
        "Implement if you need to make your customized wrapper"
        return obj

    def __call__(self, *args, **kwargs):
        self.obj = self.obj(*args, **kwargs)
        return self
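# A hedged usage sketch: the docstring mentions SQLAlchemy, but the same idea
# can be demonstrated with the standard-library sqlite3 connection API; using
# sqlite3 here is an assumption made only to keep the example self-contained.
import sqlite3

conn = Wrapper(sqlite3.connect(":memory:"))
cur = conn.cursor()  # cursor() returns another Wrapper around the real cursor
cur.execute("CREATE TABLE items (name TEXT)")
cur.execute("INSERT INTO items VALUES ('apple')")
conn.commit()    # silently ignored by the wrapper
conn.rollback()  # also a no-op

print(cur.execute("SELECT COUNT(*) FROM items").fetchone())  # -> (1,)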
load("@bazel_skylib//lib:paths.bzl", "paths") load("@bazel_skylib//lib:shell.bzl", "shell") load("@fbcode_macros//build_defs/config:read_configs.bzl", "read_choice") load("@fbcode_macros//build_defs/lib:allocators.bzl", "allocators") load("@fbcode_macros//build_defs/lib:build_info.bzl", "build_info") load("@fbcode_macros//build_defs/lib:cpp_common.bzl", "cpp_common") load("@fbcode_macros//build_defs/lib:label_utils.bzl", "label_utils") load("@fbcode_macros//build_defs/lib:python_typing.bzl", "gen_typing_config", "get_typing_config_target") load("@fbcode_macros//build_defs/lib:python_versioning.bzl", "python_versioning") load("@fbcode_macros//build_defs/lib:src_and_dep_helpers.bzl", "src_and_dep_helpers") load("@fbcode_macros//build_defs/lib:string_macros.bzl", "string_macros") load("@fbcode_macros//build_defs/lib:target_utils.bzl", "target_utils") load("@fbcode_macros//build_defs/lib:third_party.bzl", "third_party") load("@fbcode_macros//build_defs:compiler.bzl", "compiler") load("@fbcode_macros//build_defs:config.bzl", "config") load("@fbcode_macros//build_defs:coverage.bzl", "coverage") load("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils") load("@fbcode_macros//build_defs:sanitizers.bzl", "sanitizers") load("@fbsource//tools/build_defs:buckconfig.bzl", "read_bool") load("@fbsource//tools/build_defs:fb_native_wrapper.bzl", "fb_native") load("@fbsource//tools/build_defs:type_defs.bzl", "is_dict", "is_list") _INTERPRETERS = [ # name suffix, main module, dependencies ("interp", "libfb.py.python_interp", "//libfb/py:python_interp"), ("ipython", "libfb.py.ipython_interp", "//libfb/py:ipython_interp"), ("vs_debugger", "libfb.py.vs_debugger", "//libfb/py:vs_debugger"), ] _MANIFEST_TEMPLATE = """\ import sys class Manifest(object): def __init__(self): self._modules = None self.__file__ = __file__ self.__name__ = __name__ @property def modules(self): if self._modules is None: import os, sys modules = set() for root, dirs, files in os.walk(sys.path[0]): rel_root = os.path.relpath(root, sys.path[0]) if rel_root == '.': package_prefix = '' else: package_prefix = rel_root.replace(os.sep, '.') + '.' for name in files: base, ext = os.path.splitext(name) # Note that this loop includes all *.so files, regardless # of whether they are actually python modules or just # regular dynamic libraries if ext in ('.py', '.pyc', '.pyo', '.so'): if rel_root == "." and base == "__manifest__": # The manifest generation logic for normal pars # does not include the __manifest__ module itself continue modules.add(package_prefix + base) # Skip __pycache__ directories try: dirs.remove("__pycache__") except ValueError: pass self._modules = sorted(modules) return self._modules fbmake = {{ {fbmake} }} sys.modules[__name__] = Manifest() """ def _get_version_universe(python_version): """ Get the version universe for a specific python version Args: python_version: A `PythonVersion` that the universe should be fetched for Returns: The first third-party version universe string that corresponds to the python version """ return third_party.get_version_universe([("python", python_version.version_string)]) def _interpreter_binaries( name, buck_cxx_platform, python_version, python_platform, deps, platform_deps, preload_deps, visibility): """ Generate rules to build intepreter helpers. 
Args: name: The base name for the interpreter rules buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck visibility: The visibilty of the rule Returns: The list of names of all generated rules """ rule_names = [] for interp, interp_main_module, interp_dep in _INTERPRETERS: rule_name = name + "-" + interp fb_native.python_binary( name = rule_name, visibility = visibility, main_module = interp_main_module, cxx_platform = buck_cxx_platform, platform = python_platform, version_universe = _get_version_universe(python_version), deps = [interp_dep] + deps, platform_deps = platform_deps, preload_deps = preload_deps, package_style = "inplace", ) rule_names.append(rule_name) return rule_names def _get_interpreter_for_platform(python_platform): """ Get the interpreter to use for a buck-native python platform """ return native.read_config("python#" + python_platform, "interpreter") def _get_build_info( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform): """ Return the build info attributes to install for python rules. Args: base_path: The package for the current build file name: The name of the rule being built fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The python main module of the binary/test fbcode_platform: The fbcode platform used for the binary/test python_platform: The buck-compatible python_platform that is being used Returns: A dictionary of key/value strings to put into a build manifest """ interpreter = _get_interpreter_for_platform(python_platform) # Iteration order is deterministic for dictionaries in buck/skylark py_build_info = { "build_tool": "buck", "main_module": main_module, "par_style": "live", "python_command": interpreter, "python_home": paths.dirname(paths.dirname(interpreter)), } # Include the standard build info, converting the keys to the names we # use for python. key_mappings = { "package_name": "package", "package_version": "version", "rule": "build_rule", "rule_type": "build_rule_type", } info = build_info.get_build_info( base_path, name, fbconfig_rule_type, fbcode_platform, ) for key in build_info.BUILD_INFO_KEYS: py_build_info[key_mappings.get(key, key)] = getattr(info, key) return py_build_info def _manifest_library( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform, visibility): """ Build the rules that create the `__manifest__` module. Args: base_path: The package of this rule name: The name of the primary rule that was generated fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The main module of the python binary/test fbcode_platform: The fbcode platform to use in build info python_platform: The buck-compatible python platform to use visibility: The visiblity for the main python_library Returns: The name of a library that contains a __mainfest__.py with build information in it. 
""" build_info = _get_build_info( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform, ) fbmake = "\n ".join([ "{}: {},".format(repr(k), repr(v)) for k, v in build_info.items() ]) manifest = _MANIFEST_TEMPLATE.format(fbmake = fbmake) manifest_name = name + "-manifest" manifest_lib_name = name + "-manifest-lib" fb_native.genrule( name = manifest_name, labels = ["generated"], visibility = None, out = name + "-__manifest__.py", cmd = "echo -n {} > $OUT".format(shell.quote(manifest)), ) fb_native.python_library( name = manifest_lib_name, labels = ["generated"], visibility = visibility, base_module = "", srcs = {"__manifest__.py": ":" + manifest_name}, ) return manifest_lib_name def _file_to_python_module(src, base_module): """Python implementation of Buck's toModuleName(). Original in com.facebook.buck.python.PythonUtil.toModuleName. """ src = paths.join(base_module, src) src, _ext = paths.split_extension(src) return src.replace("/", ".") # sic, not os.sep def _test_modules_library( base_path, library_name, library_srcs, library_base_module, visibility, generate_test_modules): """" Create the rule that generates a __test_modules__.py file for a library Args: base_path: The package for the current build file library_name: The name of the original library that was built library_srcs: The list of srcs (files or labels) that were given to the original library that this test_modules_library is for library_base_module: The base_module of the original library visibility: The visibility for this rule generate_test_modules: Whether to actually materialize the rule. If False, just return the name of the rule Returns: The name of the generated python library that contains __test_modules__.py """ testmodules_library_name = library_name + "-testmodules-lib" # If we don't actually want to generate the library (generate_test_modules), # at least return the name if not generate_test_modules: return testmodules_library_name lines = ["TEST_MODULES = ["] for src in sorted(library_srcs): lines.append( ' "{}",'.format( _file_to_python_module(src, library_base_module or base_path), ), ) lines.append("]") genrule_name = library_name + "-testmodules" fb_native.genrule( name = genrule_name, visibility = None, out = library_name + "-__test_modules__.py", cmd = " && ".join([ "echo {} >> $OUT".format(shell.quote(line)) for line in lines ]), ) fb_native.python_library( name = testmodules_library_name, visibility = visibility, base_module = "", deps = ["//python:fbtestmain", ":" + library_name], srcs = {"__test_modules__.py": ":" + genrule_name}, ) return testmodules_library_name def _typecheck_test( name, main_module, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, typing_options, visibility, emails, library_target, library_versioned_srcs, library_srcs, library_resources, library_base_module): """ Create a test and associated libraries for running typechecking Args: name: The name of the original binary/test to run typechecks on main_module: The main module of hte binary/test buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck typing_options: A comma delimited list of strings that configure typing for this binary/library visibility: 
The visibilty of the rule library_target: The fully qualified target for the original library used in the binary/test. This is used to determine whether the following library_* properties are used in the final test rule library_versioned_srcs: The versioned_srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_srcs: The srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_resources: The resources property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_base_module: The base_module property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations Returns: The name of the test library that was created """ typing_config = get_typing_config_target() typecheck_deps = deps[:] if ":python_typecheck-library" not in typecheck_deps: # Buck doesn't like duplicate dependencies. typecheck_deps.append("//libfb/py:python_typecheck-library") if not typing_config: typecheck_deps.append("//python/typeshed_internal:global_mypy_ini") env = {} # If the passed library is not a dependency, add its sources here. # This enables python_unittest targets to be type-checked, too. add_library_attrs = library_target not in typecheck_deps if not add_library_attrs: library_versioned_srcs = None library_srcs = None library_resources = None library_base_module = None if main_module not in ("__fb_test_main__", "libfb.py.testslide.unittest"): # Tests are properly enumerated from passed sources (see above). # For binary targets, we need this subtle hack to let # python_typecheck know where to start type checking the program. env["PYTHON_TYPECHECK_ENTRY_POINT"] = main_module typing_options_list = [ option.strip() for option in typing_options.split(",") ] if typing_options else [] use_pyre = typing_options and "pyre" in typing_options_list if use_pyre: typing_options_list.remove("pyre") typing_options = ",".join(typing_options_list) env["PYRE_ENABLED"] = "1" if typing_config: cmd = "$(exe {}) gather ".format(typing_config) if use_pyre: genrule_name = name + "-typing=pyre.json" genrule_out = "pyre.json" cmd += "--pyre=True " else: genrule_name = name + "-typing=mypy.ini" genrule_out = "mypy.ini" if typing_options: cmd += '--options="{}" '.format(typing_options) cmd += "$(location {}-typing) $OUT".format(library_target) fb_native.genrule( name = genrule_name, out = genrule_out, cmd = cmd, visibility = visibility, ) if use_pyre: typing_library_name = name + "-pyre_json" else: typing_library_name = name + "-mypy_ini" fb_native.python_library( name = typing_library_name, visibility = visibility, base_module = "", srcs = [":" + genrule_name], ) typecheck_deps.append(":" + typing_library_name) typecheck_rule_name = name + "-typecheck" fb_native.python_test( name = typecheck_rule_name, main_module = "python_typecheck", cxx_platform = buck_cxx_platform, platform = python_platform, deps = typecheck_deps, platform_deps = platform_deps, preload_deps = preload_deps, package_style = "inplace", # TODO(ambv): labels here shouldn't be hard-coded. 
labels = ["buck", "python"], version_universe = _get_version_universe(python_version), contacts = emails, visibility = visibility, env = env, versioned_srcs = library_versioned_srcs, srcs = library_srcs, resources = library_resources, base_module = library_base_module, ) return typecheck_rule_name def _monkeytype_binary( rule_type, attributes, library_name): """ Create a python binary/test that enables monkeytype but otherwise looks like another binary/test Args: rule_type: The type of rule to create (python_binary or python_test) attributes: The attributes of the original binary/test that we are enabling monkeytype for. These should be final values passed to buck, not intermediaries, as they are copied directly into a library_name: The name of the implicit library created for the binary/test """ name = attributes["name"] visibility = attributes.get("visibility") lib_main_module_attrs_name = None if "main_module" in attributes: # we need to preserve the original main_module, so we inject a # library with a module for it that the main wrapper picks up main_module_name = name + "-monkeytype_main_module" script = ( "#!/usr/bin/env python3\n\n" + "def monkeytype_main_module() -> str:\n" + " return '{}'\n".format(attributes["main_module"]) ) fb_native.genrule( name = main_module_name, visibility = visibility, out = name + "-__monkeytype_main_module__.py", cmd = "echo {} > $OUT".format(shell.quote(script)), ) lib_main_module_attrs_name = name + "-monkeytype_main_module-lib" fb_native.python_library( name = lib_main_module_attrs_name, visibility = visibility, base_module = "", deps = ["//python:fbtestmain", ":" + name], srcs = { "__monkeytype_main_module__.py": ":" + main_module_name, }, ) # Create a variant of the target that is running with monkeytype if rule_type == "python_binary": wrapper_rule_constructor = fb_native.python_binary elif rule_type == "python_test": wrapper_rule_constructor = fb_native.python_test else: fail("Invalid rule type specified: " + rule_type) wrapper_attrs = dict(attributes) wrapper_attrs["name"] = name + "-monkeytype" wrapper_attrs["visibility"] = visibility if "deps" in wrapper_attrs: wrapper_deps = list(wrapper_attrs["deps"]) else: wrapper_deps = [] library_target = ":" + library_name if library_target not in wrapper_deps: wrapper_deps.append(library_target) stub_gen_deps = list(wrapper_deps) if "//python/monkeytype:main_wrapper" not in wrapper_deps: wrapper_deps.append("//python/monkeytype/tools:main_wrapper") if lib_main_module_attrs_name != None: wrapper_deps.append(":" + lib_main_module_attrs_name) wrapper_attrs["deps"] = wrapper_deps wrapper_attrs["base_module"] = "" wrapper_attrs["main_module"] = "python.monkeytype.tools.main_wrapper" wrapper_rule_constructor(**wrapper_attrs) if "//python/monkeytype/tools:stubs_lib" not in wrapper_deps: stub_gen_deps.append("//python/monkeytype/tools:stubs_lib") # And create a target that can be used for stub creation fb_native.python_binary( name = name + "-monkeytype-gen-stubs", visibility = visibility, main_module = "python.monkeytype.tools.get_stub", cxx_platform = attributes["cxx_platform"], platform = attributes["platform"], deps = stub_gen_deps, platform_deps = attributes["platform_deps"], preload_deps = attributes["preload_deps"], package_style = "inplace", version_universe = attributes["version_universe"], ) def _analyze_import_binary( name, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, visibility): """ Generate a binary to analyze the imports of a given python library """ 
generate_imports_deps = list(deps) if ":generate_par_imports" not in generate_imports_deps: generate_imports_deps.append("//libfb/py:generate_par_imports") if ":parutil" not in generate_imports_deps: generate_imports_deps.append("//libfb/py:parutil") version_universe = _get_version_universe(python_version) generate_par_name = name + "-generate-imports" fb_native.python_binary( name = generate_par_name, main_module = "libfb.py.generate_par_imports", cxx_platform = buck_cxx_platform, platform = python_platform, deps = generate_imports_deps, platform_deps = platform_deps, preload_deps = preload_deps, # TODO(ambv): labels here shouldn't be hard-coded. labels = ["buck", "python"], version_universe = version_universe, visibility = visibility, ) genrule_name = name + "-gen-rule" fb_native.genrule( name = genrule_name, srcs = [":" + generate_par_name], out = "{}-imports_file.py".format(name), cmd = '$(exe :{}) >"$OUT"'.format(generate_par_name), ) lib_name = name + "-analyze-lib" fb_native.python_library( name = lib_name, srcs = {"imports_file.py": ":" + genrule_name}, base_module = "", deps = [":" + genrule_name], ) analyze_deps = list(deps) analyze_deps.append(":" + lib_name) if ":analyze_par_imports" not in analyze_deps: analyze_deps.append("//libfb/py:analyze_par_imports") fb_native.python_binary( name = name + "-analyze-imports", main_module = "libfb.py.analyze_par_imports", cxx_platform = buck_cxx_platform, platform = python_platform, deps = analyze_deps, platform_deps = platform_deps, preload_deps = preload_deps, # TODO(ambv): labels here shouldn't be hard-coded. labels = ["buck", "python"], version_universe = version_universe, visibility = visibility, ) _GEN_SRCS_LINK = "https://fburl.com/203312823" def _parse_srcs(base_path, param, srcs): # type: (str, str, Union[List[str], Dict[str, str]]) -> Dict[str, Union[str, RuleTarget]] """ Converts `srcs` to a `srcs` dictionary for use in python_* rule Fails if a RuleTarget object is passed in, but a source file name cannot be determined Args: base_path: The package for the rule param: The name of the parameter being parsed. Used in error messages srcs: Either a dictionary of file/target -> destination in the library, or a list of source files or RuleTarget objects that the source named can be divined from. Returns: A mapping of destination filename -> file str / RuleTarget """ # Parse sources in dict form. if is_dict(srcs) or hasattr(srcs, "items"): out_srcs = ( src_and_dep_helpers.parse_source_map( base_path, {v: k for k, v in srcs.items()}, ) ) # Parse sources in list form. else: out_srcs = {} # Format sources into a dict of logical name of value. for src in src_and_dep_helpers.parse_source_list(base_path, srcs): # Path names are the same as path values. if not target_utils.is_rule_target(src): out_srcs[src] = src continue # If the source comes from a `custom_rule`/`genrule`, and the # user used the `=` notation which encodes the source's "name", # we can extract and use that. if "=" in src.name: name = src.name.rsplit("=", 1)[1] out_srcs[name] = src continue # Otherwise, we don't have a good way of deducing the name. # This actually looks to be pretty rare, so just throw a useful # error prompting the user to use the `=` notation above, or # switch to an explicit `dict`. fail( 'parameter `{}`: cannot infer a "name" to use for ' + "`{}`. If this is an output from a `custom_rule`, " + "consider using the `<rule-name>=<out>` notation instead. 
" + "Otherwise, please specify this parameter as `dict` " + 'mapping sources to explicit "names" (see {} for details).' .format(param, target_utils.target_to_label(src), _GEN_SRCS_LINK), ) return out_srcs def _parse_gen_srcs(base_path, srcs): # type: (str, Union[List[str], Dict[str, str]]) -> Dict[str, Union[str, RuleTarget]] """ Parse the given sources as input to the `gen_srcs` parameter. """ out_srcs = _parse_srcs(base_path, "gen_srcs", srcs) # Do a final pass to verify that all sources in `gen_srcs` are rule # references. for src in out_srcs.values(): if not target_utils.is_rule_target(src): fail( "parameter `gen_srcs`: `{}` must be a reference to rule " + "that generates a source (e.g. `//foo:bar`, `:bar`) " + " (see {} for details)." .format(src, GEN_SRCS_LINK), ) return out_srcs def _get_par_build_args( base_path, name, rule_type, platform, argcomplete = None, strict_tabs = None, compile = None, par_style = None, strip_libpar = None, needed_coverage = None, python = None): """ Return the arguments we need to pass to the PAR builder wrapper. """ build_args = [] build_mode = config.get_build_mode() if config.get_use_custom_par_args(): # Arguments that we wanted directly threaded into `make_par`. passthrough_args = [] if argcomplete == True: passthrough_args.append("--argcomplete") if strict_tabs == False: passthrough_args.append("--no-strict-tabs") if compile == False: passthrough_args.append("--no-compile") passthrough_args.append("--store-source") elif compile == "with-source": passthrough_args.append("--store-source") elif compile != True and compile != None: fail( ( "Invalid value {} for `compile`, must be True, False, " + '"with-source", or None (default)' ).format(compile), ) if par_style != None: passthrough_args.append("--par-style=" + par_style) if needed_coverage != None or coverage.get_coverage(): passthrough_args.append("--store-source") if build_mode.startswith("opt"): passthrough_args.append("--optimize") # Add arguments to populate build info. mode = build_info.get_build_info_mode(base_path, name) if mode == "none": fail("Invalid build info mode specified") info = ( build_info.get_explicit_build_info( base_path, name, mode, rule_type, platform, compiler.get_compiler_for_current_buildfile(), ) ) passthrough_args.append( "--build-info-build-mode=" + info.build_mode, ) passthrough_args.append("--build-info-build-tool=buck") if info.package_name != None: passthrough_args.append( "--build-info-package-name=" + info.package_name, ) if info.package_release != None: passthrough_args.append( "--build-info-package-release=" + info.package_release, ) if info.package_version != None: passthrough_args.append( "--build-info-package-version=" + info.package_version, ) passthrough_args.append("--build-info-platform=" + info.platform) passthrough_args.append("--build-info-rule-name=" + info.rule) passthrough_args.append("--build-info-rule-type=" + info.rule_type) build_args.extend(["--passthrough=" + a for a in passthrough_args]) # Arguments for stripping libomnibus. dbg builds should never strip. if not build_mode.startswith("dbg"): if strip_libpar == True: build_args.append("--omnibus-debug-info=strip") elif strip_libpar == "extract": build_args.append("--omnibus-debug-info=extract") else: build_args.append("--omnibus-debug-info=separate") # Set an explicit python interpreter. 
if python != None: build_args.append("--python-override=" + python) return build_args def _associated_targets_library(base_path, name, deps, visibility): """ Associated Targets are buck rules that need to be built, when This target is built, but are not a code dependency. Which is why we wrap them in a cxx_library so they could never be a code dependency TODO: Python just needs the concept of runtime deps if it doesn't have it. Also, what is the actual use case for this? """ rule_name = name + "-build_also" buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) fb_native.cxx_library( name = rule_name, visibility = visibility, deps = deps, default_platform = buck_platform, defaults = {"platform": buck_platform}, ) return rule_name def _jemalloc_malloc_conf_library(base_path, name, malloc_conf, deps, visibility): """ Build a rule which wraps the JEMalloc allocator and links default configuration via the `jemalloc_conf` variable. """ buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) jemalloc_config_line = ",".join([ "{}:{}".format(k, v) for k, v in sorted(malloc_conf.items()) ]) src_rule_name = "__{}_jemalloc_conf_src__".format(name) fb_native.genrule( name = src_rule_name, visibility = visibility, out = "jemalloc_conf.c", cmd = 'echo \'const char* malloc_conf = "{}";\' > "$OUT"'.format(jemalloc_config_line), ) deps, platform_deps = src_and_dep_helpers.format_all_deps(deps) lib_rule_name = "__{}_jemalloc_conf_lib__".format(name) fb_native.cxx_library( name = lib_rule_name, visibility = visibility, srcs = [":" + src_rule_name], default_platform = buck_platform, defaults = {"platform": buck_platform}, deps = deps, platform_deps = platform_deps, ) return target_utils.RootRuleTarget(base_path, lib_rule_name) def _convert_needed_coverage_spec(base_path, spec): """ Converts `needed_coverage` from fbcode's spec into the buck native spec Args: base_path: The base path for this rule; used to get fully qualified targets spec: A tuple of (<needed percentage as int>, <target as a string>) Returns: A buck-compatible spec. This is a tuple of two elements if no source name is detected in the target name (with an =) or three elements if it is detected in the form of (<percentage as int>, <full target as string>, <file as string>?) """ if len(spec) != 2: fail(( "parameter `needed_coverage`: `{}` must have exactly 2 " + "elements, a ratio and a target." ).format(spec)) ratio, target = spec if "=" not in target: return ( ratio, src_and_dep_helpers.convert_build_target(base_path, target), ) target, path = target.rsplit("=", 1) return (ratio, src_and_dep_helpers.convert_build_target(base_path, target), path) def _should_generate_interp_rules(helper_deps): """ Return whether we should generate the interp helpers. This is controlled by both the mode, the property, and buckconfig settings Args: helper_deps: The value of the `helper_deps` attribute on the users rule. Should be True or False """ # We can only work in @mode/dev if not config.get_build_mode().startswith("dev"): return False # Our current implementation of the interp helpers is costly when using # omnibus linking, only generate these if explicitly set via config or TARGETS config_setting = read_bool("python", "helpers", required = False) if config_setting == None: # No CLI option is set, respect the TARGETS file option. return helper_deps return config_setting def _preload_deps(base_path, name, allocator, jemalloc_conf = None, visibility = None): """ Add C/C++ deps which need to preloaded by Python binaries. 
Returns: A list of additional dependencies (as strings) which should be added to the python binary """ deps = [] sanitizer = sanitizers.get_sanitizer() # If we're using sanitizers, add the dep on the sanitizer-specific # support library. if sanitizer != None: sanitizer = sanitizers.get_short_name(sanitizer) deps.append( target_utils.RootRuleTarget( "tools/build/sanitizers", "{}-py".format(sanitizer), ), ) # Generate sanitizer configuration even if sanitizers are not used deps.append( cpp_common.create_sanitizer_configuration( base_path, name, enable_lsan = False, ), ) # If we're using an allocator, and not a sanitizer, add the allocator- # specific deps. if allocator != None and sanitizer == None: allocator_deps = allocators.get_allocator_deps(allocator) if allocator.startswith("jemalloc") and jemalloc_conf != None: conf_dep = _jemalloc_malloc_conf_library( base_path, name, jemalloc_conf, allocator_deps, visibility, ) allocator_deps = [conf_dep] deps.extend(allocator_deps) return deps def _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar = True): """ Return ldflags to use when linking omnibus libraries in python binaries. """ # We override stripping for python binaries unless we're in debug mode # (which doesn't get stripped by default). If either `strip_libpar` # is set or any level of stripping is enabled via config, we do full # stripping. strip_mode = cpp_common.get_strip_mode(base_path, name) if (not config.get_build_mode().startswith("dbg") and (strip_mode != "none" or strip_libpar == True)): strip_mode = "full" return cpp_common.get_ldflags( base_path, name, fbconfig_rule_type, strip_mode = strip_mode, ) def _get_package_style(): """ Get the package_style to use for binary rules from the configuration See https://buckbuild.com/rule/python_binary.html#package_style """ return read_choice( "python", "package_style", ("inplace", "standalone"), "standalone", ) def _implicit_python_library( name, is_test_companion, base_module = None, srcs = (), versioned_srcs = (), gen_srcs = (), deps = (), tests = (), tags = (), external_deps = (), visibility = None, resources = (), cpp_deps = (), py_flavor = "", version_subdirs = None): # Not used for now, will be used in a subsequent diff """ Creates a python_library and all supporting libraries This library may or may not be consumed as a companion library to a python_binary, or a python_test. The attributes returned vary based on how it will be used. Args: name: The name of this library is_test_companion: Whether this library is being created and consumed directly by a test rule base_module: The basemodule for the library (https://buckbuild.com/rule/python_library.html#base_module) srcs: A sequence of sources/targets to use as srcs. Note that only files ending in .py are considered sources. All other srcs are added as resources. Note if this is a dictionary, the key and value are swapped from the official buck implementation. That is,this rule expects {<src>: <destination in the library>} versioned_srcs: If provided, a list of tuples of (<python version constraint string>, <srcs as above>) These sources are then added to the versioned_srcs attribute in the library gen_srcs: DEPRECATED A list of srcs that come from `custom_rule`s to be merged into the final srcs list. deps: A sequence of dependencies for the library. These should only be python libraries, as python's typing support assumes that dependencies also have a companion -typing rule tests: The targets that test this library tags: Arbitrary metadata to attach to this library. 
See https://buckbuild.com/rule/python_library.html#labels external_deps: A sequence of tuples of external dependencies visibility: The visibility of the library resources: A sequence of sources/targets that should be explicitly added as resoruces. Note that if a dictionary is used, the key and value are swapped from the official buck implementation. That is, this rule expects {<src>: <destination in the library>} cpp_deps: A sequence of C++ library depenencies that will be loaded at runtime py_flavor: The flavor of python to use. By default ("") this is cpython version_subdirs: A sequence of tuples of (<buck version constring>, <version subdir>). This points to the subdirectory (or "") that each version constraint uses. This helps us rewrite things like versioned_srcs for third-party2 targets. Returns: The kwargs to pass to a native.python_library rule """ base_path = native.package_name() attributes = {} attributes["name"] = name # Normalize all the sources from the various parameters. parsed_srcs = {} # type: Dict[str, Union[str, RuleTarget]] parsed_srcs.update(_parse_srcs(base_path, "srcs", srcs)) parsed_srcs.update(_parse_gen_srcs(base_path, gen_srcs)) # Parse the version constraints and normalize all source paths in # `versioned_srcs`: parsed_versioned_srcs = [ ( python_versioning.python_version_constraint(pvc), _parse_srcs(base_path, "versioned_srcs", vs), ) for pvc, vs in versioned_srcs ] # Contains a mapping of platform name to sources to use for that # platform. all_versioned_srcs = [] # If we're TP project, install all sources via the `versioned_srcs` # parameter. `py_flavor` is ignored since flavored Pythons are only # intended for use by internal projects. if third_party.is_tp2(base_path): if version_subdirs == None: fail("`version_subdirs` must be specified on third-party projects") # TP2 projects have multiple "pre-built" source dirs, so we install # them via the `versioned_srcs` parameter along with the versions # of deps that was used to build them, so that Buck can select the # correct one based on version resolution. for constraints, subdir in version_subdirs: build_srcs = [parsed_srcs] if parsed_versioned_srcs: py_vers = None for target, constraint_version in constraints.items(): if target.endswith("/python:__project__"): py_vers = python_versioning.python_version(constraint_version) # 'is None' can become == None when the custom version classes # go away if py_vers == None: fail("Could not get python version for versioned_srcs") build_srcs.extend([ dict(vs) for vc, vs in parsed_versioned_srcs if python_versioning.constraint_matches(vc, py_vers, check_minor = True) ]) vsrc = {} for build_src in build_srcs: for name, src in build_src.items(): if target_utils.is_rule_target(src): vsrc[name] = src else: vsrc[name] = paths.join(subdir, src) all_versioned_srcs.append((constraints, vsrc)) # Reset `srcs`, since we're using `versioned_srcs`. 
parsed_srcs = {} # If we're an fbcode project, and `py_flavor` is not specified, then # keep the regular sources parameter and only use the `versioned_srcs` # parameter for the input parameter of the same name; if `py_flavor` is # specified, then we have to install all sources via `versioned_srcs` else: pytarget = third_party.get_tp2_project_target("python") platforms = platform_utils.get_platforms_for_host_architecture() # Iterate over all potential Python versions and collect srcs for # each version: for pyversion in python_versioning.get_all_versions(): if not python_versioning.version_supports_flavor(pyversion, py_flavor): continue ver_srcs = {} if py_flavor: ver_srcs.update(parsed_srcs) for constraint, pvsrcs in parsed_versioned_srcs: constraint = python_versioning.normalize_constraint(constraint) if python_versioning.constraint_matches(constraint, pyversion): ver_srcs.update(pvsrcs) if ver_srcs: all_versioned_srcs.append( ( { target_utils.target_to_label(pytarget, fbcode_platform = p): pyversion.version_string for p in platforms if python_versioning.platform_has_version(p, pyversion) }, ver_srcs, ), ) if py_flavor: parsed_srcs = {} attributes["base_module"] = base_module if parsed_srcs: # Need to split the srcs into srcs & resources as Buck # expects all test srcs to be python modules. if is_test_companion: formatted_srcs = src_and_dep_helpers.format_source_map({ k: v for k, v in parsed_srcs.items() if k.endswith(".py") }) formatted_resources = src_and_dep_helpers.format_source_map({ k: v for k, v in parsed_srcs.items() if not k.endswith(".py") }) attributes["resources"] = formatted_resources.value attributes["platform_resources"] = formatted_resources.platform_value else: formatted_srcs = src_and_dep_helpers.format_source_map(parsed_srcs) attributes["srcs"] = formatted_srcs.value attributes["platform_srcs"] = formatted_srcs.platform_value # Emit platform-specific sources. We split them between the # `platform_srcs` and `platform_resources` parameter based on their # extension, so that directories with only resources don't end up # creating stray `__init__.py` files for in-place binaries. 
out_versioned_srcs = [] out_versioned_resources = [] for vcollection, ver_srcs in all_versioned_srcs: out_srcs = {} out_resources = {} non_platform_ver_srcs = src_and_dep_helpers.without_platforms( src_and_dep_helpers.format_source_map(ver_srcs), ) for dst, src in non_platform_ver_srcs.items(): if dst.endswith(".py") or dst.endswith(".so"): out_srcs[dst] = src else: out_resources[dst] = src out_versioned_srcs.append((vcollection, out_srcs)) out_versioned_resources.append((vcollection, out_resources)) if out_versioned_srcs: attributes["versioned_srcs"] = \ python_versioning.add_flavored_versions(out_versioned_srcs) if out_versioned_resources: attributes["versioned_resources"] = \ python_versioning.add_flavored_versions(out_versioned_resources) dependencies = [] if third_party.is_tp2(base_path): dependencies.append( target_utils.target_to_label( third_party.get_tp2_project_target( third_party.get_tp2_project_name(base_path), ), fbcode_platform = third_party.get_tp2_platform(base_path), ), ) for target in deps: dependencies.append( src_and_dep_helpers.convert_build_target(base_path, target), ) if cpp_deps: dependencies.extend(cpp_deps) if dependencies: attributes["deps"] = dependencies attributes["tests"] = tests if visibility != None: attributes["visibility"] = visibility if external_deps: attributes["platform_deps"] = ( src_and_dep_helpers.format_platform_deps( [ src_and_dep_helpers.normalize_external_dep( dep, lang_suffix = "-py", parse_version = True, ) for dep in external_deps ], # We support the auxiliary versions hack for neteng/Django. deprecated_auxiliary_deps = True, ) ) attributes["labels"] = tags # The above code does a magical dance to split `gen_srcs`, `srcs`, # and `versioned_srcs` into pure-Python `srcs` and "everything else" # `resources`. In practice, it drops `__init__.py` into non-Python # data included with Python libraries, whereas `resources` does not. attributes.setdefault("resources", {}).update({ # For resources of the form {":target": "dest/path"}, we have to # format the parsed `RuleTarget` struct as a string before # passing it to Buck. k: src_and_dep_helpers.format_source(v) for k, v in _parse_srcs( base_path, "resources", resources, ).items() }) return attributes def _convert_library( is_test, is_library, base_path, name, base_module, check_types, cpp_deps, deps, external_deps, gen_srcs, py_flavor, resources, runtime_deps, srcs, tags, tests, typing, typing_options, version_subdirs, versioned_srcs, visibility): """ Gathers the attributes implicit python_library and creates associated rules This is suitable for usage by either python_binary, python_unittest or python_library. 
See `implicit_python_library` for more details Returns: Attributes for a native.python_library, """ # for binary we need a separate library if is_library: library_name = name else: library_name = name + "-library" if is_library and check_types: fail( "parameter `check_types` is not supported for libraries, did you " + "mean to specify `typing`?", ) if get_typing_config_target(): gen_typing_config( library_name, base_module if base_module != None else base_path, srcs, [src_and_dep_helpers.convert_build_target(base_path, dep) for dep in deps], typing or check_types, typing_options, visibility, ) if runtime_deps: associated_targets_name = _associated_targets_library( base_path, library_name, runtime_deps, visibility, ) deps = list(deps) + [":" + associated_targets_name] extra_tags = [] if not is_library: extra_tags.append("generated") if is_test: extra_tags.append("unittest-library") return _implicit_python_library( library_name, is_test_companion = is_test, base_module = base_module, srcs = srcs, versioned_srcs = versioned_srcs, gen_srcs = gen_srcs, deps = deps, tests = tests, tags = list(tags) + extra_tags, external_deps = external_deps, visibility = visibility, resources = resources, cpp_deps = cpp_deps, py_flavor = py_flavor, version_subdirs = version_subdirs, ) def _single_binary_or_unittest( base_path, name, implicit_library_target, implicit_library_attributes, fbconfig_rule_type, buck_rule_type, is_test, tests, py_version, py_flavor, main_module, strip_libpar, tags, par_style, emails, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, jemalloc_conf, # TODO: This does not appear to be used anywhere typing_options, helper_deps, visibility, analyze_imports, additional_coverage_targets, generate_test_modules): if is_test and par_style == None: par_style = "xar" dependencies = [] platform_deps = [] out_preload_deps = [] platform = platform_utils.get_platform_for_base_path(base_path) python_version = python_versioning.get_default_version( platform = platform, constraint = py_version, flavor = py_flavor, ) if python_version == None: fail( ( "Unable to find Python version matching constraint" + "'{}' and flavor '{}' on '{}'." ).format(py_version, py_flavor, platform), ) python_platform = platform_utils.get_buck_python_platform( platform, major_version = python_version.major, flavor = py_flavor, ) if allocator == None: allocator = allocators.normalize_allocator(allocator) attributes = {} attributes["name"] = name if is_test and additional_coverage_targets: attributes["additional_coverage_targets"] = additional_coverage_targets if visibility != None: attributes["visibility"] = visibility # If this is a test, we need to merge the library rule into this # one and inherit its deps. if is_test: for param in ("versioned_srcs", "srcs", "resources", "base_module"): val = implicit_library_attributes.get(param) if val != None: attributes[param] = val dependencies.extend(implicit_library_attributes.get("deps", [])) platform_deps.extend(implicit_library_attributes.get("platform_deps", [])) # Add the "coverage" library as a dependency for all python tests. platform_deps.extend( src_and_dep_helpers.format_platform_deps( [target_utils.ThirdPartyRuleTarget("coverage", "coverage-py")], ), ) # Otherwise, this is a binary, so just the library portion as a dep. else: dependencies.append(":" + implicit_library_attributes["name"]) # Sanitize the main module, so that it's a proper module reference. 
if main_module != None: main_module = main_module.replace("/", ".") if main_module.endswith(".py"): main_module = main_module[:-3] attributes["main_module"] = main_module elif is_test: main_module = "__fb_test_main__" attributes["main_module"] = main_module # Add in the PAR build args. if _get_package_style() == "standalone": build_args = ( _get_par_build_args( base_path, name, buck_rule_type, platform, argcomplete = argcomplete, strict_tabs = strict_tabs, compile = compile, par_style = par_style, strip_libpar = strip_libpar, needed_coverage = needed_coverage, python = python, ) ) if build_args: attributes["build_args"] = build_args # Add any special preload deps. default_preload_deps = ( _preload_deps(base_path, name, allocator, jemalloc_conf, visibility) ) out_preload_deps.extend(src_and_dep_helpers.format_deps(default_preload_deps)) # Add user-provided preloaded deps. for dep in preload_deps: out_preload_deps.append(src_and_dep_helpers.convert_build_target(base_path, dep)) # Add the C/C++ build info lib to preload deps. cxx_build_info = cpp_common.cxx_build_info_rule( base_path, name, fbconfig_rule_type, platform, static = False, visibility = visibility, ) out_preload_deps.append(target_utils.target_to_label(cxx_build_info)) # Provide a standard set of backport deps to all binaries platform_deps.extend( src_and_dep_helpers.format_platform_deps( [ target_utils.ThirdPartyRuleTarget("typing", "typing-py"), target_utils.ThirdPartyRuleTarget("python-future", "python-future-py"), ], ), ) # Provide a hook for the nuclide debugger in @mode/dev builds, so # that one can have `PYTHONBREAKPOINT=nuclide.set_trace` in their # environment (eg .bashrc) and then simply write `breakpoint()` # to launch a debugger with no fuss if _get_package_style() == "inplace": dependencies.append("//nuclide:debugger-hook") # Add in a specialized manifest when building inplace binaries. # # TODO(#11765906): We shouldn't need to create this manifest rule for # standalone binaries. However, since target determinator runs in dev # mode, we sometimes pass these manifest targets in the explicit target # list into `opt` builds, which then fails with a missing build target # error. So, for now, just always generate the manifest library, but # only use it when building inplace binaries. manifest_name = _manifest_library( base_path, name, fbconfig_rule_type, main_module, platform, python_platform, visibility, ) if _get_package_style() == "inplace": dependencies.append(":" + manifest_name) buck_cxx_platform = platform_utils.get_buck_platform_for_base_path(base_path) attributes["cxx_platform"] = buck_cxx_platform attributes["platform"] = python_platform attributes["version_universe"] = _get_version_universe(python_version) attributes["linker_flags"] = ( _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar = strip_libpar) ) attributes["labels"] = list(tags) if is_test: attributes["labels"].extend(label_utils.convert_labels(platform, "python")) attributes["tests"] = tests if args: attributes["args"] = ( string_macros.convert_args_with_macros( base_path, args, platform = platform, ) ) if env: attributes["env"] = ( string_macros.convert_env_with_macros( env, platform = platform, ) ) if emails: attributes["contacts"] = emails if out_preload_deps: attributes["preload_deps"] = out_preload_deps if needed_coverage: attributes["needed_coverage"] = [ _convert_needed_coverage_spec(base_path, s) for s in needed_coverage ] # Generate the interpreter helpers, and add them to our deps. 
Note that # we must do this last, so that the interp rules get the same deps as # the main binary which we've built up to this point. # We also do this based on an attribute so that we don't have to dedupe # rule creation. We'll revisit this in the near future. # TODO: Better way to not generate duplicates if _should_generate_interp_rules(helper_deps): interp_deps = list(dependencies) if is_test: testmodules_library_name = _test_modules_library( base_path, implicit_library_attributes["name"], implicit_library_attributes.get("srcs") or (), implicit_library_attributes.get("base_module"), visibility, generate_test_modules = generate_test_modules, ) interp_deps.append(":" + testmodules_library_name) interp_rules = _interpreter_binaries( name, buck_cxx_platform, python_version, python_platform, interp_deps, platform_deps, out_preload_deps, visibility, ) dependencies.extend([":" + interp_rule for interp_rule in interp_rules]) if check_types: if python_version.major != 3: fail("parameter `check_types` is only supported on Python 3.") typecheck_rule_name = _typecheck_test( name, main_module, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, typing_options, visibility, emails, implicit_library_target, implicit_library_attributes.get("versioned_srcs"), implicit_library_attributes.get("srcs"), implicit_library_attributes.get("resources"), implicit_library_attributes.get("base_module"), ) attributes["tests"] = ( list(attributes["tests"]) + [":" + typecheck_rule_name] ) if analyze_imports: _analyze_import_binary( name, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, visibility, ) if is_test: if not dependencies: dependencies = [] dependencies.append("//python:fbtestmain") if dependencies: attributes["deps"] = dependencies if platform_deps: attributes["platform_deps"] = platform_deps if ( read_bool("fbcode", "monkeytype", False) and python_version.major == 3 ): _monkeytype_binary(buck_rule_type, attributes, implicit_library_attributes["name"]) return attributes def _convert_binary( is_test, fbconfig_rule_type, buck_rule_type, base_path, name, py_version, py_flavor, base_module, main_module, strip_libpar, srcs, versioned_srcs, tags, gen_srcs, deps, tests, par_style, emails, external_deps, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, visibility, resources, jemalloc_conf, typing, typing_options, check_types_options, runtime_deps, cpp_deps, helper_deps, analyze_imports, additional_coverage_targets, version_subdirs): """ Generate binary rules and library rules for a python_binary or python_unittest Returns: A list of kwargs for all unittests/binaries that need to be created """ library_attributes = _convert_library( is_test = is_test, is_library = False, base_path = base_path, name = name, base_module = base_module, check_types = check_types, cpp_deps = cpp_deps, deps = deps, external_deps = external_deps, gen_srcs = gen_srcs, py_flavor = py_flavor, resources = resources, runtime_deps = runtime_deps, srcs = srcs, tags = tags, tests = tests, typing = typing, typing_options = typing_options, version_subdirs = version_subdirs, versioned_srcs = versioned_srcs, visibility = visibility, ) # People use -library of unittests fb_native.python_library(**library_attributes) # For binary rules, create a separate library containing the sources. # This will be added as a dep for python binaries and merged in for # python tests. 
if is_list(py_version) and len(py_version) == 1: py_version = py_version[0] if not is_list(py_version): versions = {py_version: name} else: versions = {} platform = platform_utils.get_platform_for_base_path(base_path) for py_ver in py_version: python_version = python_versioning.get_default_version(platform, py_ver) new_name = name + "-" + python_version.version_string versions[py_ver] = new_name # There are some sub-libraries that get generated based on the # name of the original library, not the binary. Make sure they're only # generated once. is_first_binary = True all_binary_attributes = [] for py_ver, py_name in sorted(versions.items()): # Turn off check types for py2 targets when py3 is in versions # so we can have the py3 parts type check without a separate target if ( check_types and python_versioning.constraint_matches_major(py_ver, version = 2) and any([ python_versioning.constraint_matches_major(v, version = 3) for v in versions ]) ): _check_types = False print( base_path + ":" + py_name, "will not be typechecked because it is the python 2 part", ) else: _check_types = check_types binary_attributes = _single_binary_or_unittest( base_path, py_name, implicit_library_target = ":" + library_attributes["name"], implicit_library_attributes = library_attributes, fbconfig_rule_type = fbconfig_rule_type, buck_rule_type = buck_rule_type, is_test = is_test, tests = tests, py_version = py_ver, py_flavor = py_flavor, main_module = main_module, strip_libpar = strip_libpar, tags = tags, par_style = par_style, emails = emails, needed_coverage = needed_coverage, argcomplete = argcomplete, strict_tabs = strict_tabs, compile = compile, args = args, env = env, python = python, allocator = allocator, check_types = _check_types, preload_deps = preload_deps, jemalloc_conf = jemalloc_conf, typing_options = check_types_options, helper_deps = helper_deps, visibility = visibility, analyze_imports = analyze_imports, additional_coverage_targets = additional_coverage_targets, generate_test_modules = is_first_binary, ) is_first_binary = False all_binary_attributes.append(binary_attributes) return all_binary_attributes python_common = struct( convert_binary = _convert_binary, convert_library = _convert_library, )
'''
Given a list of non-negative integers, arrange them such that they form the
largest number.

Example 1:
Input: [10,2]
Output: "210"

Example 2:
Input: [3,30,34,5,9]
Output: "9534330"

Note: The result may be very large, so you need to return a string instead
of an integer.
'''


class Solution(object):
    def largestNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: str
        """
        # Order the numbers so that, for every pair, the concatenation
        # str(a) + str(b) is the larger of the two possible orderings
        # (range is used instead of the Python 2-only xrange).
        for i in range(len(nums)):
            for j in range(i + 1, len(nums)):
                if str(nums[i]) + str(nums[j]) < str(nums[j]) + str(nums[i]):
                    nums[i], nums[j] = nums[j], nums[i]
        res = [str(x) for x in nums]
        # Collapse redundant leading zeros, e.g. [0, 0] should give "0", not "00".
        while len(res) > 1 and res[0] == '0':
            res.pop(0)
        return ''.join(res)
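# Quick check of the solution above, using the two examples from the problem
# statement plus the all-zero edge case handled by the final while loop.
if __name__ == "__main__":
    solver = Solution()
    print(solver.largestNumber([10, 2]))            # expected "210"
    print(solver.largestNumber([3, 30, 34, 5, 9]))  # expected "9534330"
    print(solver.largestNumber([0, 0]))             # expected "0", not "00"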
class Pessoa:
    olhos = 2  # class attribute

    def __init__(self, *filhos, nome=None, idade=35):
        # instance attributes
        self.idade = idade
        self.nome = nome
        self.filhos = list(filhos)

    def cumprimentar(self):
        return f'Olá {id(self)}.'

    @staticmethod
    def metodo_estatico():
        return 42

    @classmethod
    def metodo_de_classe(cls):
        return f'{cls} - olhos: {cls.olhos}'


class Homem(Pessoa):
    def cumprimentar(self):
        return f'{super().cumprimentar()} Aperto de mão'


class Mutante(Pessoa):
    olhos = 3


if __name__ == "__main__":
    luciano = Homem(nome="Luciano")
    renzo = Mutante("Luciano", "Luciana", "Pedro", nome="Renzo")
    print(f"{renzo.nome} tem os filhos {', '.join(str(x) for x in renzo.filhos[:-1])} e {renzo.filhos[-1]}.")
    print(luciano.cumprimentar())
    print(Pessoa.metodo_estatico())
    print(luciano.metodo_de_classe())
from typing import List


class Solution:
    def findAndReplacePattern(self, words: List[str], pattern: str) -> List[str]:
        result = []
        if not words or not pattern:
            return result
        for word in words:
            # Map each pattern character to the word character it must represent.
            mapping = {}
            isMapped = True
            for i, c in enumerate(word):
                p = pattern[i]
                if p in mapping and mapping[p] != c:
                    isMapped = False
                    break
                mapping[p] = c
            # The mapping must also be injective: two different pattern
            # characters may not map to the same word character.
            values = mapping.values()
            if len(values) != len(set(values)):
                isMapped = False
            if isMapped:
                result.append(word)
        return result
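# A small check of the matcher above (example constructed here): with pattern
# "abb", "mee" and "aqq" match (two distinct letters, second repeated), while
# "abc", "deq", "dkd" break the repeat and "ccc" is not injective.
if __name__ == "__main__":
    words = ["abc", "deq", "mee", "aqq", "dkd", "ccc"]
    print(Solution().findAndReplacePattern(words, "abb"))  # expected ['mee', 'aqq']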
CONSUMER_KEY = 'WVQrIJcorH11hQoP6mHKvXIZJ' CONSUMER_SECRET = 'Ui3V1dEsa5owJnhu3nLNyqdz2hFf6HmvICPObiShmkzBszKnah' ACCESS_TOKEN = '218405160-0iabe9XqpwAJ4z4BYsaXwH3ydKpFZhnzj5xpHxpI' ACCESS_SECRET = 'PdPNfcgkc5x7TO54cxVjGOjSrqY2jbcaayV46ys9IkLj3'
var.nexus_allowAllDigitNames = True # put it somewhere else var.doCheckForDuplicateSequences = False t = var.trees[0] a = var.alignments[0] t.data = Data() t.model.dump() print('\nAfter optimizing, the composition of the model for the non-root nodes is:') print(t.model.parts[0].comps[0].val) print('...and:') print(t.model.parts[0].comps[1].val) print('and ...') print(t.model.parts[0].comps[2].val) print('and root comp...') print(t.model.parts[0].comps[3].val) t.write() t.draw() print(dir(t.model.parts[0])) print(t.model.parts[0].ndch2_writeComps) func.reseedCRandomizer(os.getpid()) print (t.model.parts[0]) # The char "symbols", AAs in this case, are available as a.symbols; that is why # I gave a name to var.alignments[0]. Also available as # d.parts[partNum].symbols, so d.parts[0].symbols are also 'arndcqeghilkmfpstwyv' print(a.symbols) counts = [0] * 2 for rep in range(1000): ancSt = t.ancestralStateDraw() for i in range(2): ch = a.symbols[i] # '01' cnt = ancSt.count(ch) counts[i] += cnt mySum = float(sum(counts)) print("\nsymbol optimized draws") for i in range(2): print(" %s %.5f %.4f" % (a.symbols[i], t.model.parts[0].comps[2].val[i], counts[i]/mySum)) #calculate predicted OGT according to Zeldovich for i in range(4): print("For composition " + str(i)) print(t.model.parts[0].comps[i].nNodes) f_ivywrel = 0 f_ivywrel = t.model.parts[0].comps[i].val[1] print("F(IVYWREL) = " + str(f_ivywrel)) print("T_opt estimate according to Zeldovich: " + str(937.0*float(f_ivywrel) - 335.0))
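# Worked example of the Zeldovich-style estimate printed above (the two
# composition values here are illustrative, not taken from this analysis):
# T_opt is modelled as a linear function of F(IVYWREL).
def zeldovich_ogt(f_ivywrel):
    return 937.0 * f_ivywrel - 335.0

print("illustrative F(IVYWREL)=0.39 ->", zeldovich_ogt(0.39))  # about 30.4
print("illustrative F(IVYWREL)=0.45 ->", zeldovich_ogt(0.45))  # about 86.7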
""" A very basic study on variables, their types and some operators """ # defining variables and values integer_value, floatValue, boolean_value = 36, 5.3, True adition = integer_value + floatValue division = integer_value / floatValue exponention1 = 3 ** 3 exponention2 = 27 ** (1 / 3) floor = integer_value // floatValue modulo = 50 % 9 string1 = 'I never liked the song "Nothing Else Matters" by Metallica.' string2 = "I've always loved the song \"Until It Sleeps\", also by Metallica." quote = string1[23] substring1 = string2[:17] substring2 = string1[24:44] substring3 = string2[51:] # printing variables print(integer_value, floatValue, boolean_value) print(adition, division, exponention1, exponention2, floor, modulo) print(string1, string2) print(substring1, quote ,substring2, quote, substring3) print(len(string1), str(floatValue)[1], string1.lower(), string2.upper()) print(type(string1), type(integer_value), type(floatValue)) print(id(string1), id(integer_value), id(floatValue)) # input values from user name = input('What is your name? ') age = input('How old are you? ') occupation = input('What do yo do for living? ') print('User %s is %s years old and works like a %s.' %(name, age, occupation)) # older = age * 1.5 - this is WRONG # print('older =', older)
'''
Given a string s consisting only of upper/lowercase letters and spaces ' ',
return the length of its last word. Reading the string from left to right,
the last word is the last one that appears.

If there is no last word, return 0.

Note: a word is a maximal substring made up of letters only, containing no
space characters.

Example:

Input: "Hello World"
Output: 5

Source: LeetCode (力扣)
Link: https://leetcode-cn.com/problems/length-of-last-word
'''


class Solution:
    def lengthOfLastWord(self, s: str) -> int:
        if not s:
            return 0
        ans = len(s.strip().split(" ")[-1])
        return ans
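# Quick check against the docstring example.
if __name__ == "__main__":
    print(Solution().lengthOfLastWord("Hello World"))  # expected 5
    print(Solution().lengthOfLastWord(""))             # expected 0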
# # PySNMP MIB module HUAWEI-DATASYNC-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-DATASYNC-MIB # Produced by pysmi-0.3.4 at Wed May 1 13:43:58 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection") hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm") NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup") Integer32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32, Counter64, MibIdentifier, NotificationType, Gauge32, IpAddress, ModuleIdentity, iso, Bits, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Unsigned32", "Counter64", "MibIdentifier", "NotificationType", "Gauge32", "IpAddress", "ModuleIdentity", "iso", "Bits", "ObjectIdentity") RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString") hwDataSync = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191)) hwDataSync.setRevisions(('2015-07-16 13:49', '2014-09-04 17:10', '2009-03-17 10:27',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: hwDataSync.setRevisionsDescriptions(('Add hwCfgLastSaveFailNotify .', 'The MIB module for Data sync between host and netmanager.', 'The initial revision of this MIB module .',)) if mibBuilder.loadTexts: hwDataSync.setLastUpdated('201507161349Z') if mibBuilder.loadTexts: hwDataSync.setOrganization('Huawei Technologies Co.,Ltd.') if mibBuilder.loadTexts: hwDataSync.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com ") if mibBuilder.loadTexts: hwDataSync.setDescription('Modified hwCfgChgTerminalID.') class DateAndTime(TextualConvention, OctetString): description = "A date-time specification. field octets contents range ----- ------ -------- ----- 1 1-2 year* 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minutes 0..59 6 7 seconds 0..60 (use 60 for leap-second) 7 8 deci-seconds 0..9 8 9 direction from UTC '+' / '-' 9 10 hours from UTC* 0..13 10 11 minutes from UTC 0..59 * Notes: - the value of year is in network-byte order - daylight saving time in New Zealand is +13 For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be displayed as: 1992-5-26,13:30:15.0,-4:0 Note that if only local time is known, then timezone information (fields 8-10) is not present." 
status = 'current' displayHint = '2d-1d-1d,1d:1d:1d.1d,1a1d:1d' subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(8, 8), ValueSizeConstraint(11, 11), ) hwDataSyncScalarObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1)) hwDataSyncTableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2)) hwDataSyncNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3)) hwDataSyncConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4)) hwCurrentCfgChgSeqID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 1), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCurrentCfgChgSeqID.setStatus('current') if mibBuilder.loadTexts: hwCurrentCfgChgSeqID.setDescription('The value of this object identifies the ID of the current configuration change. The value ranges from 0 to 65535. After the ID of the configuration change reaches the maximum value, the value of the ID starts from 1 again. After the device is restarted, the value of the ID becomes 0.') hwCfgChgSeqIDReveralCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgSeqIDReveralCount.setStatus('current') if mibBuilder.loadTexts: hwCfgChgSeqIDReveralCount.setDescription('The value of this object identifies the cycle count of the index of configuration change table.') hwCfgChgTableMaxItem = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgTableMaxItem.setStatus('current') if mibBuilder.loadTexts: hwCfgChgTableMaxItem.setDescription('The value of this object identifies the maximum number of entries in hwCfgChgTable. ') hwCfgBaselineTime = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgBaselineTime.setStatus('current') if mibBuilder.loadTexts: hwCfgBaselineTime.setDescription('Specifies the time of system confiuration was baseline.') hwDataSyncGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1)) hwDataSyncScalarObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 1)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCurrentCfgChgSeqID"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgSeqIDReveralCount"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgTableMaxItem"), ("HUAWEI-DATASYNC-MIB", "hwCfgBaselineTime")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwDataSyncScalarObjectsGroup = hwDataSyncScalarObjectsGroup.setStatus('current') if mibBuilder.loadTexts: hwDataSyncScalarObjectsGroup.setDescription('A collection of objects on DataSync ScalarObjects Information.') hwCfgChgNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 2)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCfgChgNotify")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwCfgChgNotifyGroup = hwCfgChgNotifyGroup.setStatus('current') if mibBuilder.loadTexts: hwCfgChgNotifyGroup.setDescription('A collection of objects on Configuration Change Information.') hwDataSyncNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 3)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCfgLastSaveFailNotify")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwDataSyncNotifyGroup = hwDataSyncNotifyGroup.setStatus('current') if mibBuilder.loadTexts: hwDataSyncNotifyGroup.setDescription('A collection of objects on synchronization Configuration Notify Information.') hwDataSyncCompliances = 
MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2)) hwDataSyncCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2, 1)).setObjects() if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwDataSyncCompliance = hwDataSyncCompliance.setStatus('current') if mibBuilder.loadTexts: hwDataSyncCompliance.setDescription('The compliance statement for entities that support the huawei DataSync MIB.') hwCfgChgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1), ) if mibBuilder.loadTexts: hwCfgChgTable.setStatus('current') if mibBuilder.loadTexts: hwCfgChgTable.setDescription('This table is used to record configuration changes. In this table, you can find the configuration change based on the specific index.') hwCfgChgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1), ).setIndexNames((0, "HUAWEI-DATASYNC-MIB", "hwCfgChgSeqID")) if mibBuilder.loadTexts: hwCfgChgEntry.setStatus('current') if mibBuilder.loadTexts: hwCfgChgEntry.setDescription('Entry of hwCfgChgTable.') hwCfgChgSeqID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgSeqID.setStatus('current') if mibBuilder.loadTexts: hwCfgChgSeqID.setDescription('The value of this object identifies the configuration change ID. When configuration is changed, the sequence id will plus 1.') hwCfgChgTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 2), DateAndTime()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgTime.setStatus('current') if mibBuilder.loadTexts: hwCfgChgTime.setDescription('This object indicates the configuration change time.') hwCfgChgTerminalType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("snmp", 1), ("telnet", 2), ("netconf", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgTerminalType.setStatus('current') if mibBuilder.loadTexts: hwCfgChgTerminalType.setDescription('This object indicates the type of the terminal.') hwCfgChgTerminalID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgTerminalID.setStatus('current') if mibBuilder.loadTexts: hwCfgChgTerminalID.setDescription('The value of this object identifies the terminal ID.') hwCfgChgType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("create", 1), ("modify", 2), ("delete", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgType.setStatus('current') if mibBuilder.loadTexts: hwCfgChgType.setDescription('This object indicates the configuration change type.') hwCfgChgViewName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 6), OctetString()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgViewName.setStatus('current') if mibBuilder.loadTexts: hwCfgChgViewName.setDescription('This object indicates the name of the view in which the configuration change occurs. For the command operation, the object is the name of the view in which the command is run. 
For the SNMP operation, the object is the OID of the MIB table or the scalar object.') hwCfgChgCmdID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgCmdID.setStatus('current') if mibBuilder.loadTexts: hwCfgChgCmdID.setDescription('The value of this object identifies the ID of the configuration change command. For the SNMP operation, the value is 0.') hwCfgChgDetailInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 8), OctetString()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCfgChgDetailInfo.setStatus('current') if mibBuilder.loadTexts: hwCfgChgDetailInfo.setDescription('This object indicates detailed configuration change information. For the command operation, the object is the command line. For the SNMP operation, the object is the index of the MIB table. When there are multiple indexes, the format of index1.index2.index3 is adopted.') hwCollectTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2), ) if mibBuilder.loadTexts: hwCollectTable.setStatus('current') if mibBuilder.loadTexts: hwCollectTable.setDescription('This table is used to enable the NMS to send the collecting script to the device to trigger the collection, and then monitor the collection status.') hwCollectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1), ).setIndexNames((0, "HUAWEI-DATASYNC-MIB", "hwCollectIndex")) if mibBuilder.loadTexts: hwCollectEntry.setStatus('current') if mibBuilder.loadTexts: hwCollectEntry.setDescription('Entry of hwCollectTable.') hwCollectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 1), Integer32()) if mibBuilder.loadTexts: hwCollectIndex.setStatus('current') if mibBuilder.loadTexts: hwCollectIndex.setDescription('The value of this object identifies the collection index.') hwCollectNetManageId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 2), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: hwCollectNetManageId.setStatus('current') if mibBuilder.loadTexts: hwCollectNetManageId.setDescription('The value of this object identifies the NMS ID.') hwCollectOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("begin", 1), ("stop", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: hwCollectOperation.setStatus('current') if mibBuilder.loadTexts: hwCollectOperation.setDescription('This object indicates the instruction for the collection operation. Default value is stop.') hwCollectInScriptFile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readwrite") if mibBuilder.loadTexts: hwCollectInScriptFile.setStatus('current') if mibBuilder.loadTexts: hwCollectInScriptFile.setDescription('This object indicates the name of the script file. T he length of the file name ranges from 1 character to 255 characters.') hwCollectInResultFile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readwrite") if mibBuilder.loadTexts: hwCollectInResultFile.setStatus('current') if mibBuilder.loadTexts: hwCollectInResultFile.setDescription('This object indicates the name of the result file. 
The length of the file name ranges from 1 character to 255 characters.') hwCollectState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("idle", 1), ("collecting", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwCollectState.setStatus('current') if mibBuilder.loadTexts: hwCollectState.setDescription('This object indicates the collection status.') hwCollectRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 7), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwCollectRowStatus.setStatus('current') if mibBuilder.loadTexts: hwCollectRowStatus.setDescription('This object indicates the row status.') hwCfgChgNotify = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 1)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCurrentCfgChgSeqID"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgSeqIDReveralCount"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgTableMaxItem"), ("HUAWEI-DATASYNC-MIB", "hwCfgBaselineTime")) if mibBuilder.loadTexts: hwCfgChgNotify.setStatus('current') if mibBuilder.loadTexts: hwCfgChgNotify.setDescription('This trap is generated when a configuration change occurs on the device within a specified period.') hwCfgLastSaveFailNotify = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 2)) if mibBuilder.loadTexts: hwCfgLastSaveFailNotify.setStatus('current') if mibBuilder.loadTexts: hwCfgLastSaveFailNotify.setDescription('The last save operation failed, please check the configuration.') mibBuilder.exportSymbols("HUAWEI-DATASYNC-MIB", hwDataSyncNotifications=hwDataSyncNotifications, hwCfgChgNotifyGroup=hwCfgChgNotifyGroup, DateAndTime=DateAndTime, hwDataSync=hwDataSync, hwCollectIndex=hwCollectIndex, hwDataSyncScalarObjectsGroup=hwDataSyncScalarObjectsGroup, hwCfgChgSeqID=hwCfgChgSeqID, hwCollectState=hwCollectState, hwCfgBaselineTime=hwCfgBaselineTime, hwCfgChgViewName=hwCfgChgViewName, hwDataSyncScalarObjects=hwDataSyncScalarObjects, hwCfgChgType=hwCfgChgType, hwCfgChgCmdID=hwCfgChgCmdID, hwCfgChgEntry=hwCfgChgEntry, hwCollectTable=hwCollectTable, hwDataSyncConformance=hwDataSyncConformance, hwCfgChgTerminalID=hwCfgChgTerminalID, hwCurrentCfgChgSeqID=hwCurrentCfgChgSeqID, hwCollectEntry=hwCollectEntry, hwCollectNetManageId=hwCollectNetManageId, hwCfgChgDetailInfo=hwCfgChgDetailInfo, hwCollectInScriptFile=hwCollectInScriptFile, hwCfgChgTable=hwCfgChgTable, hwCollectInResultFile=hwCollectInResultFile, PYSNMP_MODULE_ID=hwDataSync, hwDataSyncNotifyGroup=hwDataSyncNotifyGroup, hwDataSyncTableObjects=hwDataSyncTableObjects, hwCfgChgNotify=hwCfgChgNotify, hwCollectOperation=hwCollectOperation, hwCfgChgSeqIDReveralCount=hwCfgChgSeqIDReveralCount, hwCfgLastSaveFailNotify=hwCfgLastSaveFailNotify, hwCfgChgTableMaxItem=hwCfgChgTableMaxItem, hwDataSyncCompliance=hwDataSyncCompliance, hwDataSyncCompliances=hwDataSyncCompliances, hwCfgChgTime=hwCfgChgTime, hwCfgChgTerminalType=hwCfgChgTerminalType, hwDataSyncGroups=hwDataSyncGroups, hwCollectRowStatus=hwCollectRowStatus)
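# A minimal sketch (defined but never called here) of how a compiled MIB module
# like this one is typically loaded with pysnmp; the directory path is an
# assumption and should point at wherever the compiled .py MIB files live.
def _example_load_huawei_datasync_mib(mib_dir='/path/to/compiled-mibs'):
    from pysnmp.smi import builder
    mb = builder.MibBuilder()
    mb.addMibSources(builder.DirMibSource(mib_dir))
    mb.loadModules('HUAWEI-DATASYNC-MIB')
    # e.g. look up the configuration-change table defined above
    hwCfgChgTable, = mb.importSymbols('HUAWEI-DATASYNC-MIB', 'hwCfgChgTable')
    return hwCfgChgTable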
def main(name="User", name2="Your Pal"): print(f"Hello, {name}! I am {name2}!") if __name__=="__main__": main()
# Variable declaration
name = 1234

# Integer, string and floating-point types
123
'hello'
"world"
"I can't drive."
1.23

# Comments
# They describe what the program means
# Useful as debugging notes - they do not affect how the code runs

# The print() function
# A function is a mini-program
def _dup(file,mode,checked=True): """Replacement for perl built-in open function when the mode contains '&'.""" global OS_ERROR, TRACEBACK, AUTODIE try: if isinstance(file, io.IOBase): # file handle file.flush() return os.fdopen(os.dup(file.fileno()), mode, encoding=file.encoding, errors=file.errors) if (_m:=re.match(r'=?(\d+)', file)): file = int(_m.group(1)) elif file in _DUP_MAP: file = _DUP_MAP[file] return _create_fh_methods(os.fdopen(os.dup(file), mode)) except Exception as _e: OS_ERROR = str(_e) if TRACEBACK: _cluck(f"dup failed: {OS_ERROR}",skip=2) if AUTODIE: raise if checked: return None fh = io.StringIO() fh.close() return _create_fh_methods(fh)
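# A standalone illustration (guarded so it only runs when this file is executed
# directly) of the os.dup()/os.fdopen() pattern that _dup() builds on; it does
# not use the module's _DUP_MAP or _create_fh_methods helpers.
if __name__ == "__main__":
    import os
    import sys

    dup_out = os.fdopen(os.dup(sys.stdout.fileno()), 'w')  # like perl's open(..., '>&STDOUT')
    dup_out.write("written through a duplicated stdout descriptor\n")
    dup_out.close()  # closing the copy leaves the original sys.stdout open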
# -*- coding: utf-8 -*- """ @Time : 2020/12/27 23:45 @Auth : 高冷Aloof @File :__init__.py @IDE :PyCharm @Motto:ABC(Always Be Coding) """
# -*- coding: utf-8 -*- """ Created on Tue Jul 14 16:15:05 2020 Logical condition: If statement @author: Ashish """ day_of_week = input("What day of the week is it today? ") if day_of_week == "Monday": print("Have a great start to your week!") elif day_of_week == "Friday": print("It's ok to finish a bit early!") else: print("Full speed ahead!") # -- Problem: user not entering what we expect -- day_of_week = input("What day of the week is it today? ").lower() if day_of_week == "monday": print("Have a great start to your week!") elif day_of_week == "friday": print("It's ok to finish a bit early!") else: print("Full speed ahead!")
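# Same idea taken one step further (illustrative): strip() also guards against
# stray leading/trailing whitespace in the typed answer, so " Monday " matches.
day_of_week = input("What day of the week is it today? ").strip().lower()
if day_of_week == "monday":
    print("Have a great start to your week!")
elif day_of_week == "friday":
    print("It's ok to finish a bit early!")
else:
    print("Full speed ahead!")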
class PeekableIterator: def __init__(self, nums): self.nums = nums self.i = 0 def peek(self): return self.nums[self.i] def next(self): self.i += 1 return self.nums[self.i-1] def hasnext(self): return self.i < len(self.nums)
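# Quick demonstration of the iterator above.
if __name__ == "__main__":
    it = PeekableIterator([1, 2, 3])
    print(it.peek())     # 1 (does not advance)
    print(it.next())     # 1
    print(it.next())     # 2
    print(it.hasnext())  # True, 3 is still pending
    print(it.next())     # 3
    print(it.hasnext())  # False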
# DADSA - Assignment 1
# Reece Benson

class Player():
    _id = None
    _name = None
    _gender = None
    _score = None
    _points = None

    def __init__(self, _name, _gender, _id):
        self._id = _id
        self._name = _name
        self._gender = _gender
        self._score = { }
        self._points = 0

    def __cmp__(self, other):
        """Compare Override"""
        if(self._points < other._points):
            return -1
        elif(self._points > other._points):
            return 1
        else:
            return 0

    # Comparison Overrides
    def __eq__(self, other):
        return not self._points < other._points and not other._points < self._points

    def __ne__(self, other):
        return self._points < other._points or other._points < self._points

    def __gt__(self, other):
        return other._points < self._points

    def __ge__(self, other):
        return not self._points < other._points

    def __le__(self, other):
        return not other._points < self._points

    def get_name(self):
        return self._name

    def get_gender(self):
        return self._gender

    def get_score(self, _match):
        return self._score[_match]

    def set_score(self, _match, _score):
        # Fixed: write to the instance's _score dict (was `self.score`, which
        # does not exist and would raise AttributeError).
        self._score[_match] = _score
        return self._score[_match]

    def get_points(self):
        return self._points

    def set_points(self, _points, append = False):
        if(append):
            self._points += _points
        else:
            self._points = _points
        return self._points
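# Brief usage check for the comparison overrides above (player names invented
# for the example).
if __name__ == "__main__":
    p1 = Player("Alice", "F", 1)
    p2 = Player("Bob", "M", 2)
    p1.set_points(3)
    p2.set_points(1)
    p2.set_points(1, append=True)   # Bob now has 2 points
    print(p1 > p2)                  # True (3 > 2)
    print(sorted([p1, p2], key=lambda p: p.get_points())[0].get_name())  # 'Bob'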
_champernownes_constant = "" def _calculate_champernownes_nth_decimal(length): res = [] curr_length = 0 i = 1 while curr_length < length: res += [str(i)] curr_length += len(res[-1]) i += 1 return "".join(res) def champernownes_nth_decimal(n): global _champernownes_constant if len(_champernownes_constant) >= n: return int(_champernownes_constant[n - 1]) _champernownes_constant = _calculate_champernownes_nth_decimal(2 * n) return champernownes_nth_decimal(n)
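# Sanity check: Champernowne's constant is 0.123456789101112..., so the 1st
# digit is 1, the 10th is 1 (start of "10") and the 11th is 0.
if __name__ == "__main__":
    print(champernownes_nth_decimal(1))   # 1
    print(champernownes_nth_decimal(10))  # 1
    print(champernownes_nth_decimal(11))  # 0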
from typing import List


# Definition for a binary tree node (the class LeetCode normally supplies).
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    solution = []

    def inorderTraversal(self, root: TreeNode) -> List[int]:
        # Return an empty list (not None) for an empty tree.
        if root is None:
            return []
        self.solution = []
        self.inorderHelper(root)
        return self.solution

    def inorderHelper(self, root: TreeNode):
        if root is None:
            return
        self.inorderHelper(root.left)
        self.solution.append(root.val)
        self.inorderHelper(root.right)
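# Small check of the traversal above, using the tree 1 -> right 2 -> left 3.
if __name__ == "__main__":
    root = TreeNode(1, None, TreeNode(2, TreeNode(3)))
    print(Solution().inorderTraversal(root))  # expected [1, 3, 2]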
# -*- coding: utf-8 -*-
"""
Small helpers for validating numeric strings.
"""


# Function that checks whether a string can be converted into a float
def isfloat(value):
    try:
        float(value)
        return True
    except ValueError:
        return False


# Function that checks whether a string can be converted into an integer
def isint(value):
    try:
        int(value)
        return True
    except ValueError:
        return False
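# Quick demonstration of the two helpers.
if __name__ == "__main__":
    print(isfloat("3.14"), isint("3.14"))  # True False
    print(isfloat("42"), isint("42"))      # True True
    print(isfloat("abc"), isint("abc"))    # False False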
winClass = window.get_active_class()

if winClass not in ("code.Code", "emacs.Emacs"):
    # Regular window
    keyboard.send_keys('<home>')
else:
    # VS Code or Emacs
    keyboard.send_keys('<alt>+a')
""" Module to manage affiliate marketing links Purpose: allow managing affiliate marketing links in one place Author: Tom W. Hartung Date: Winter, 2019 Copyright: (c) 2019 Tom W. Hartung, Groja.com, and JooMoo Websites LLC. Reference: """ class AffiliateLinks: """ Use python dictionaries to make it easier to update affiliate links """ # # Source Link Dictionaries: # afl_default = { 'ah_by_ax': 'https://groja.com/conversion/afl_default', 'ah_by_chernow': 'https://groja.com/conversion/afl_default', 'bf_by_isaacson': 'https://groja.com/conversion/afl_default', 'big_sleep': 'https://groja.com/conversion/afl_default', 'chinatown': 'https://groja.com/conversion/afl_default', 'fawlty_towers': 'https://groja.com/conversion/afl_default', 'fn_encyclopedia': 'https://groja.com/conversion/afl_default', 'gw_by_chernow': 'https://groja.com/conversion/afl_default', 'ja_by_ax': 'https://groja.com/conversion/afl_default', 'ja_by_hbo': 'https://groja.com/conversion/afl_default', 'ja_by_mccullough': 'https://groja.com/conversion/afl_default', 'kiss_me_deadly': 'https://groja.com/conversion/afl_default', 'maltese_falcon': 'https://groja.com/conversion/afl_default', 'ph_by_kukla': 'https://groja.com/conversion/afl_default', 'star_trek_tos': 'https://groja.com/conversion/afl_default', 'star_wars_psych': 'https://groja.com/conversion/afl_default', 'tj_himself': 'https://groja.com/conversion/afl_default', 'tj_by_burns': 'https://groja.com/conversion/afl_default', 'white_heat': 'https://groja.com/conversion/afl_default', 'x_files': 'https://groja.com/conversion/afl_default', 'xxx': 'https://groja.com/conversion/afl_default', } afl_none = { 'ah_by_ax': '', 'ah_by_chernow': '', 'bf_by_isaacson': '', 'big_sleep': '', 'chinatown': '', 'fawlty_towers': '', 'fn_encyclopedia': '', 'gw_by_chernow': '', 'ja_by_ax': '', 'ja_by_hbo': '', 'ja_by_mccullough': '', 'kiss_me_deadly': '', 'maltese_falcon': '', 'ph_by_kukla': '', 'star_trek_tos': '', 'star_wars_psych': '', 'tj_by_burns': '', 'tj_himself': '', 'white_heat': '', 'x_files': '', 'xxx': '', } # # Active Link Dictionaries: # afl_content = {} afl_button = {} def __init__(self): """ Assign source links to active links Use expressions like this to reference the values in templates {{ afl_content.xxx }} {{ afl_button.xxx }} """ self.afl_content['ah_by_ax'] = self.afl_default['ah_by_ax'] self.afl_button['ah_by_ax'] = self.afl_default['ah_by_ax'] self.afl_content['ah_by_chernow'] = self.afl_default['ah_by_chernow'] self.afl_button['ah_by_chernow'] = self.afl_default['ah_by_chernow'] self.afl_content['bf_by_isaacson'] = self.afl_default['bf_by_isaacson'] self.afl_button['bf_by_isaacson'] = self.afl_default['bf_by_isaacson'] self.afl_content['big_sleep'] = self.afl_default['big_sleep'] self.afl_button['big_sleep'] = self.afl_default['big_sleep'] self.afl_content['chinatown'] = self.afl_default['chinatown'] self.afl_button['chinatown'] = self.afl_default['chinatown'] self.afl_content['fawlty_towers'] = self.afl_default['fawlty_towers'] self.afl_button['fawlty_towers'] = self.afl_default['fawlty_towers'] self.afl_content['fn_encyclopedia'] = self.afl_default['fn_encyclopedia'] self.afl_button['fn_encyclopedia'] = self.afl_default['fn_encyclopedia'] self.afl_content['gw_by_chernow'] = self.afl_default['gw_by_chernow'] self.afl_button['gw_by_chernow'] = self.afl_default['gw_by_chernow'] self.afl_content['ja_by_ax'] = self.afl_default['ja_by_ax'] self.afl_button['ja_by_ax'] = self.afl_default['ja_by_ax'] self.afl_content['ja_by_hbo'] = self.afl_default['ja_by_hbo'] 
self.afl_button['ja_by_hbo'] = self.afl_default['ja_by_hbo'] self.afl_content['ja_by_mccullough'] = self.afl_default['ja_by_mccullough'] self.afl_button['ja_by_mccullough'] = self.afl_default['ja_by_mccullough'] self.afl_content['kiss_me_deadly'] = self.afl_default['kiss_me_deadly'] self.afl_button['kiss_me_deadly'] = self.afl_default['kiss_me_deadly'] self.afl_content['maltese_falcon'] = self.afl_default['maltese_falcon'] self.afl_button['maltese_falcon'] = self.afl_default['maltese_falcon'] self.afl_content['ph_by_kukla'] = self.afl_default['ph_by_kukla'] self.afl_button['ph_by_kukla'] = self.afl_default['ph_by_kukla'] self.afl_content['star_trek_tos'] = self.afl_default['star_trek_tos'] self.afl_button['star_trek_tos'] = self.afl_default['star_trek_tos'] self.afl_content['star_wars_psych'] = self.afl_default['star_wars_psych'] self.afl_button['star_wars_psych'] = self.afl_default['star_wars_psych'] self.afl_content['tj_by_burns'] = self.afl_default['tj_by_burns'] self.afl_button['tj_by_burns'] = self.afl_default['tj_by_burns'] self.afl_content['tj_himself'] = self.afl_default['tj_himself'] self.afl_button['tj_himself'] = self.afl_default['tj_himself'] self.afl_content['white_heat'] = self.afl_default['white_heat'] self.afl_button['white_heat'] = self.afl_default['white_heat'] self.afl_content['x_files'] = self.afl_default['x_files'] self.afl_button['x_files'] = self.afl_default['x_files'] self.afl_content['xxx'] = self.afl_default['xxx'] self.afl_button['xxx'] = self.afl_default['xxx']
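# Minimal usage sketch for the class above; templates would reference the same
# values via expressions like {{ afl_content.big_sleep }}.
if __name__ == "__main__":
    links = AffiliateLinks()
    print(links.afl_content['big_sleep'])
    print(links.afl_button['ja_by_hbo'])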
# GYP project file for TDesktop { 'targets': [ { 'target_name': 'libtgvoip', 'type': 'static_library', 'dependencies': [], 'defines': [ 'WEBRTC_APM_DEBUG_DUMP=0', 'TGVOIP_USE_DESKTOP_DSP', 'WEBRTC_NS_FLOAT', ], 'variables': { 'tgvoip_src_loc': '.', 'official_build_target%': '', 'linux_path_opus_include%': '<(DEPTH)/../../../Libraries/opus/include', }, 'include_dirs': [ '<(tgvoip_src_loc)/webrtc_dsp', '<(linux_path_opus_include)', ], 'direct_dependent_settings': { 'include_dirs': [ '<(tgvoip_src_loc)', ], }, 'export_dependent_settings': [], 'sources': [ '<(tgvoip_src_loc)/BlockingQueue.cpp', '<(tgvoip_src_loc)/BlockingQueue.h', '<(tgvoip_src_loc)/Buffers.cpp', '<(tgvoip_src_loc)/Buffers.h', '<(tgvoip_src_loc)/CongestionControl.cpp', '<(tgvoip_src_loc)/CongestionControl.h', '<(tgvoip_src_loc)/EchoCanceller.cpp', '<(tgvoip_src_loc)/EchoCanceller.h', '<(tgvoip_src_loc)/JitterBuffer.cpp', '<(tgvoip_src_loc)/JitterBuffer.h', '<(tgvoip_src_loc)/logging.cpp', '<(tgvoip_src_loc)/logging.h', '<(tgvoip_src_loc)/MediaStreamItf.cpp', '<(tgvoip_src_loc)/MediaStreamItf.h', '<(tgvoip_src_loc)/OpusDecoder.cpp', '<(tgvoip_src_loc)/OpusDecoder.h', '<(tgvoip_src_loc)/OpusEncoder.cpp', '<(tgvoip_src_loc)/OpusEncoder.h', '<(tgvoip_src_loc)/threading.h', '<(tgvoip_src_loc)/VoIPController.cpp', '<(tgvoip_src_loc)/VoIPGroupController.cpp', '<(tgvoip_src_loc)/VoIPController.h', '<(tgvoip_src_loc)/PrivateDefines.h', '<(tgvoip_src_loc)/VoIPServerConfig.cpp', '<(tgvoip_src_loc)/VoIPServerConfig.h', '<(tgvoip_src_loc)/audio/AudioInput.cpp', '<(tgvoip_src_loc)/audio/AudioInput.h', '<(tgvoip_src_loc)/audio/AudioOutput.cpp', '<(tgvoip_src_loc)/audio/AudioOutput.h', '<(tgvoip_src_loc)/audio/Resampler.cpp', '<(tgvoip_src_loc)/audio/Resampler.h', '<(tgvoip_src_loc)/NetworkSocket.cpp', '<(tgvoip_src_loc)/NetworkSocket.h', '<(tgvoip_src_loc)/PacketReassembler.cpp', '<(tgvoip_src_loc)/PacketReassembler.h', '<(tgvoip_src_loc)/MessageThread.cpp', '<(tgvoip_src_loc)/MessageThread.h', '<(tgvoip_src_loc)/audio/AudioIO.cpp', '<(tgvoip_src_loc)/audio/AudioIO.h', '<(tgvoip_src_loc)/video/ScreamCongestionController.cpp', '<(tgvoip_src_loc)/video/ScreamCongestionController.h', '<(tgvoip_src_loc)/video/VideoSource.cpp', '<(tgvoip_src_loc)/video/VideoSource.h', '<(tgvoip_src_loc)/video/VideoRenderer.cpp', '<(tgvoip_src_loc)/video/VideoRenderer.h', '<(tgvoip_src_loc)/video/VideoPacketSender.cpp', '<(tgvoip_src_loc)/video/VideoPacketSender.h', '<(tgvoip_src_loc)/video/VideoFEC.cpp', '<(tgvoip_src_loc)/video/VideoFEC.h', '<(tgvoip_src_loc)/json11.cpp', '<(tgvoip_src_loc)/json11.hpp', # Windows '<(tgvoip_src_loc)/os/windows/NetworkSocketWinsock.cpp', '<(tgvoip_src_loc)/os/windows/NetworkSocketWinsock.h', '<(tgvoip_src_loc)/os/windows/AudioInputWave.cpp', '<(tgvoip_src_loc)/os/windows/AudioInputWave.h', '<(tgvoip_src_loc)/os/windows/AudioOutputWave.cpp', '<(tgvoip_src_loc)/os/windows/AudioOutputWave.h', '<(tgvoip_src_loc)/os/windows/AudioOutputWASAPI.cpp', '<(tgvoip_src_loc)/os/windows/AudioOutputWASAPI.h', '<(tgvoip_src_loc)/os/windows/AudioInputWASAPI.cpp', '<(tgvoip_src_loc)/os/windows/AudioInputWASAPI.h', '<(tgvoip_src_loc)/os/windows/WindowsSpecific.cpp', '<(tgvoip_src_loc)/os/windows/WindowsSpecific.h', # macOS '<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnit.cpp', '<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnit.h', '<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnit.cpp', '<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnit.h', '<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnitOSX.cpp', '<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnitOSX.h', 
'<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnitOSX.cpp', '<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnitOSX.h', '<(tgvoip_src_loc)/os/darwin/AudioUnitIO.cpp', '<(tgvoip_src_loc)/os/darwin/AudioUnitIO.h', '<(tgvoip_src_loc)/os/darwin/DarwinSpecific.mm', '<(tgvoip_src_loc)/os/darwin/DarwinSpecific.h', # Linux '<(tgvoip_src_loc)/os/linux/AudioInputALSA.cpp', '<(tgvoip_src_loc)/os/linux/AudioInputALSA.h', '<(tgvoip_src_loc)/os/linux/AudioOutputALSA.cpp', '<(tgvoip_src_loc)/os/linux/AudioOutputALSA.h', '<(tgvoip_src_loc)/os/linux/AudioOutputPulse.cpp', '<(tgvoip_src_loc)/os/linux/AudioOutputPulse.h', '<(tgvoip_src_loc)/os/linux/AudioInputPulse.cpp', '<(tgvoip_src_loc)/os/linux/AudioInputPulse.h', '<(tgvoip_src_loc)/os/linux/AudioPulse.cpp', '<(tgvoip_src_loc)/os/linux/AudioPulse.h', # POSIX '<(tgvoip_src_loc)/os/posix/NetworkSocketPosix.cpp', '<(tgvoip_src_loc)/os/posix/NetworkSocketPosix.h', # WebRTC APM '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/field_trial.h', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/cpu_features_wrapper.h', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/asm_defines.h', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/metrics.h', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/compile_assert_c.h', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/field_trial.cc', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/metrics.cc', '<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/cpu_features.cc', '<(tgvoip_src_loc)/webrtc_dsp/typedefs.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/internal/memutil.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/internal/memutil.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/string_view.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/ascii.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/ascii.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/strings/string_view.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/types/optional.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/types/bad_optional_access.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/types/bad_optional_access.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/types/optional.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/memory/memory.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/meta/type_traits.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/algorithm/algorithm.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/container/inlined_vector.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/policy_checks.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/port.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/config.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/raw_logging.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/throw_delegate.cc', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/invoke.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/inline_variable.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/atomic_hook.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/identity.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/raw_logging.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/throw_delegate.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/attributes.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/macros.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/optimization.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/base/log_severity.h', '<(tgvoip_src_loc)/webrtc_dsp/absl/utility/utility.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/string_to_number.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/constructormagic.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/race_checker.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/strings/string_builder.h', 
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/strings/string_builder.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event_tracer.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringencode.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/memory/aligned_malloc.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/memory/aligned_malloc.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/timeutils.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/ignore_wundef.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringutils.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/arraysize.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_file.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/swap_queue.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/string_to_number.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/trace_event.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/checks.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/deprecation.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/sanitizer.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/scoped_ref_ptr.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/timeutils.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/atomicops.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringencode.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringutils.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/checks.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_minmax.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_conversions.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_conversions_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_compare.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/unused.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/inline.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/ignore_warnings.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/asm_defines.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/rtc_export.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/arch.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread_types.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/protobuf_utils.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_annotations.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/gtest_prod_util.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/function_view.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/criticalsection.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/criticalsection.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread_types.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcount.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event_tracer.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/compile_assert_c.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_webrtc.cc', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/type_traits.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_file.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcounter.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/race_checker.h', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcountedobject.h', '<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_vad_weights.cc', '<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_activations.h', 
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/kiss_fft.h', '<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/kiss_fft.cc', '<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_vad_weights.h', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/audio_frame.cc', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_config.h', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_control.h', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/audio_frame.h', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_config.cc', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_factory.h', '<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_factory.cc', '<(tgvoip_src_loc)/webrtc_dsp/api/array_view.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/third_party/fft/fft.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/third_party/fft/fft.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/bandwidth_info.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/include/isac.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filterbanks.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/entropy_coding.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_vad.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/settings.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/transform.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/crc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_filter.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filter_functions.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/decode.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lattice.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/intialize.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_float_type.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/codec.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/entropy_coding.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_vad.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/structs.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filter_functions.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/crc.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/decode_bwe.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/rms_level.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/moving_max.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/circular_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/normalized_covariance_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/normalized_covariance_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/moving_max.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/circular_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/mean_variance_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/mean_variance_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_for_experimental_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/splitting_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/rms_level.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/ns_core.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression_x.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core_c.c', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/defines.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/ns_core.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/windows_private.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression_x.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_defines.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/residual_echo_detector.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_processing_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/typing_detection.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/render_queue_item_verifier.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/config.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_frame_view.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/mock_audio_processing.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/gain_control.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator_factory.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing_statistics.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator_factory.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/aec_dump.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/aec_dump.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing_statistics.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/config.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/interpolated_gain_curve.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/biquad_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/interpolated_gain_curve.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_common.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_testing_common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/gain_applier.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/signal_classifier.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_agc.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/saturation_protector.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vector_float_frame.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/rnn.h', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/rnn.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/test_utils.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_info.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/lp_residual.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/ring_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/features_extraction.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/fft_util.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/features_extraction.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/fft_util.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/lp_residual.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_gain_controller.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vector_float_frame.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/down_sampler.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_level_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_testing_common.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_digital_level_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_gain_controller.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/saturation_protector.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vad_with_level.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter_db_gain_curve.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_digital_gain_applier.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vad_with_level.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter_db_gain_curve.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_digital_level_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/gain_applier.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/down_sampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_level_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/signal_classifier.cc', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_spectrum_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/compute_interpolated_gain_curve.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/biquad_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_spectrum_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/moving_moments.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_detector.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_tree.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_suppressor.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_node.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/moving_moments.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_tree.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_node.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_suppressor.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_detector.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/dyadic_decimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/low_cut_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/noise_suppression_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/level_estimator_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/three_band_filter_bank.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/echo_cancellation.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_resampler.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_resampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/echo_cancellation.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_optimized_methods.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_sse2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/voice_detection_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/voice_detection_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_cancellation_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_for_experimental_agc.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/loudness_histogram.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc_manager_direct.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/analog_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/gain_control.h', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/digital_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/analog_agc.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/digital_agc.c', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/utility.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/mock_agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/loudness_histogram.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/gain_map_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/utility.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc_manager_direct.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_processing_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_control_mobile_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/splitting_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/low_cut_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_generator/file_audio_generator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_generator/file_audio_generator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_controller2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/three_band_filter_bank.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/residual_echo_detector.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_cancellation_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/noise_suppression_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/level_estimator_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_controller2.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_defines.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core_c.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/echo_control_mobile.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/echo_control_mobile.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_reverb_model.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/downsampled_render_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output_analyzer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_fallback.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/residual_echo_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/shadow_filter_update_gain.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover_metrics.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec_state.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_variability.cc', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/frame_blocker.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_delay_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/adaptive_fir_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/cascaded_biquad_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_signal_analyzer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_fft.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_fft.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover_metrics.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fullband_erle_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/filter_analyzer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_delay_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subband_erle_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller_metrics.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor_metrics.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erl_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec_state.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/adaptive_fir_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_data.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/skew_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller_metrics.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/comfort_noise_generator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_delay_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erl_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_framer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erle_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/cascaded_biquad_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matrix_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/stationarity_estimator.cc', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_signal_analyzer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_variability.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/moving_average.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_reverb_model.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output_analyzer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_audibility.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor_metrics.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/moving_average.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erle_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subband_erle_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_common.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/residual_echo_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fullband_erle_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/stationarity_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_canceller3.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/skew_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_decay_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain_limiter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/main_filter_update_gain.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_fallback.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/downsampled_render_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matrix_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_frequency_response.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_audibility.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_canceller3.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_delay_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_math.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/decimator.h', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/frame_blocker.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_framer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain_limiter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/delay_estimate.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/comfort_noise_generator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/main_filter_update_gain.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter_lag_aggregator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/shadow_filter_update_gain.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/filter_analyzer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_decay_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_frequency_response.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/decimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_control_mobile_impl.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_impl.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/typing_detection.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/logging/apm_data_dumper.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/logging/apm_data_dumper.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_activity_detector.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/standalone_vad.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_internal.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_circular_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_circular_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_based_vad.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pole_zero_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pole_zero_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_based_vad.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/gmm.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_gmm_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/noise_gmm_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/gmm.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/standalone_vad.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_activity_detector.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_wrapper.cc', 
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_sse2.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/block_mean_calculator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/block_mean_calculator.cc', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_tables_common.h', '<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_wrapper.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/mocks/mock_smoothing_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_file.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/window_generator.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/channel_buffer.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_factory.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/sparse_fir_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_sse.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/window_generator.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/ring_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/include/audio_util.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_header.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier_ooura.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_util.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier_ooura.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_sse.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/smoothing_filter.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_sinc_resampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/resampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler_sse.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/include/push_resampler.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/include/resampler.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_sinc_resampler.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_resampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinusoidal_linear_chirp_source.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinusoidal_linear_chirp_source.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_factory.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_converter.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_file.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/fft4g/fft4g.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/fft4g/fft4g.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_converter.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/channel_buffer.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/sparse_fir_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/smoothing_filter.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_c.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/ring_buffer.c', 
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_c.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_fft_tables.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_fft.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ma_fast_q12.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/levinson_durbin.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/dot_product_with_scale.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/auto_corr_to_refl_coef.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2_internal.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/energy.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/downsample_fast.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/splitting_filter1.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar_fast_q12.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_init.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/lpc_to_refl_coef.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/cross_correlation.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/signal_processing_library.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/real_fft.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/spl_inl.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/division_operations.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/auto_correlation.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/get_scaling_square.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/dot_product_with_scale.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2_internal.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/min_max_operations.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/refl_coef_to_lpc.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/vector_scaling_operations.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_fractional.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/real_fft.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/ilbc_specific_functions.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_bit_reverse.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/randomization_functions.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/copy_set_operations.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/get_hanning_window.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_48khz.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_inl.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_sqrt.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_header.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_sp.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad.cc', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/webrtc_vad.c', 
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_core.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/include/vad.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/include/webrtc_vad.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_gmm.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_filterbank.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_core.c', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_sp.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_filterbank.h', '<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_gmm.c', # ARM/NEON sources # TODO check if there's a good way to make these compile with ARM ports of TDesktop #'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core_neon.c', #'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_neon.cc', #'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core_neon.cc', #'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h', #'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_neon.cc', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_neon.cc', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler_neon.cc', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor_arm.S', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_neon.h', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/downsample_fast_neon.c', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_bit_reverse_arm.S', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/spl_inl_armv7.h', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/min_max_operations_neon.c', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/cross_correlation_neon.c', #'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar_fast_q12_armv7.S', ], 'libraries': [], 'configurations': { 'Debug': {}, 'Release': {}, }, 'conditions': [ [ '"<(OS)" != "win"', { 'sources/': [['exclude', '<(tgvoip_src_loc)/os/windows/']], }, { 'sources/': [['exclude', '<(tgvoip_src_loc)/os/posix/']], }, ], [ '"<(OS)" != "mac"', { 'sources/': [['exclude', '<(tgvoip_src_loc)/os/darwin/']], }, ], [ '"<(OS)" != "linux"', { 'sources/': [['exclude', '<(tgvoip_src_loc)/os/linux/']], }, ], [ '"<(OS)" == "mac"', { 'xcode_settings': { 'CLANG_CXX_LANGUAGE_STANDARD': 'c++11', 'ALWAYS_SEARCH_USER_PATHS': 'NO', }, 'defines': [ 'WEBRTC_POSIX', 'WEBRTC_MAC', 'TARGET_OS_OSX', ], 'sources': [ '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.mm', '<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.h', ], 'conditions': [ [ '"<(official_build_target)" == "mac32"', { 'xcode_settings': { 'MACOSX_DEPLOYMENT_TARGET': '10.6', 'OTHER_CPLUSPLUSFLAGS': [ '-nostdinc++' ], }, 'include_dirs': [ '/usr/local/macold/include/c++/v1', '<(DEPTH)/../../../Libraries/macold/openssl/include', ], 'defines': [ 'TARGET_OSX32', ], }, { 'xcode_settings': { 'MACOSX_DEPLOYMENT_TARGET': '10.8', 'CLANG_CXX_LIBRARY': 'libc++', }, 'include_dirs': [ '<(DEPTH)/../../../Libraries/openssl/include', ], 'direct_dependent_settings': { 'linkflags': [ '-framework VideoToolbox', ], }, 'sources': [ '<(tgvoip_src_loc)/os/darwin/TGVVideoRenderer.mm', '<(tgvoip_src_loc)/os/darwin/TGVVideoRenderer.h', '<(tgvoip_src_loc)/os/darwin/TGVVideoSource.mm', '<(tgvoip_src_loc)/os/darwin/TGVVideoSource.h', '<(tgvoip_src_loc)/os/darwin/VideoToolboxEncoderSource.mm', '<(tgvoip_src_loc)/os/darwin/VideoToolboxEncoderSource.h', 
'<(tgvoip_src_loc)/os/darwin/SampleBufferDisplayLayerRenderer.mm', '<(tgvoip_src_loc)/os/darwin/SampleBufferDisplayLayerRenderer.h', ], }], ['"<(official_build_target)" == "macstore"', { 'defines': [ 'TGVOIP_NO_OSX_PRIVATE_API', ], }], ], }, ], [ '"<(OS)" == "win"', { 'msbuild_toolset': 'v141', 'defines': [ 'NOMINMAX', '_USING_V110_SDK71_', 'TGVOIP_WINXP_COMPAT', 'WEBRTC_WIN', ], 'libraries': [ 'winmm', 'ws2_32', 'kernel32', 'user32', ], 'msvs_cygwin_shell': 0, 'msvs_settings': { 'VCCLCompilerTool': { 'ProgramDataBaseFileName': '$(OutDir)\\$(ProjectName).pdb', 'DebugInformationFormat': '3', # Program Database (/Zi) 'AdditionalOptions': [ '/MP', # Enable multi process build. '/EHsc', # Catch C++ exceptions only, extern C functions never throw a C++ exception. '/wd4068', # Disable "warning C4068: unknown pragma" ], 'TreatWChar_tAsBuiltInType': 'false', }, }, 'msvs_external_builder_build_cmd': [ 'ninja.exe', '-C', '$(OutDir)', '-k0', '$(ProjectName)', ], 'configurations': { 'Debug': { 'defines': [ '_DEBUG', ], 'include_dirs': [ '<(DEPTH)/../../../Libraries/openssl/Debug/include', ], 'msvs_settings': { 'VCCLCompilerTool': { 'Optimization': '0', # Disabled (/Od) 'RuntimeLibrary': '1', # Multi-threaded Debug (/MTd) 'RuntimeTypeInfo': 'true', }, 'VCLibrarianTool': { 'AdditionalOptions': [ '/NODEFAULTLIB:LIBCMT' ] } }, }, 'Release': { 'defines': [ 'NDEBUG', ], 'include_dirs': [ '<(DEPTH)/../../../Libraries/openssl/Release/include', ], 'msvs_settings': { 'VCCLCompilerTool': { 'Optimization': '2', # Maximize Speed (/O2) 'InlineFunctionExpansion': '2', # Any suitable (/Ob2) 'EnableIntrinsicFunctions': 'true', # Yes (/Oi) 'FavorSizeOrSpeed': '1', # Favor fast code (/Ot) 'RuntimeLibrary': '0', # Multi-threaded (/MT) 'EnableEnhancedInstructionSet': '2', # Streaming SIMD Extensions 2 (/arch:SSE2) 'WholeProgramOptimization': 'true', # /GL }, 'VCLibrarianTool': { 'AdditionalOptions': [ '/LTCG', ] }, }, }, }, }, ], [ '"<(OS)" == "linux"', { 'defines': [ 'WEBRTC_POSIX', 'WEBRTC_LINUX', ], 'conditions': [ [ '"<!(uname -m)" == "i686"', { 'cflags_cc': [ '-msse2', ], }] ], 'direct_dependent_settings': { 'libraries': [ ], }, }, ], ], }, ], }
#!/usr/bin/env python

GLOBAL_ARGUMENTS = [
    'property-id',
    'start-date',
    'end-date',
    'ndays',
    'domain',
    'prefix',
]


def format_comma(d):
    """
    Format a comma separated number.
    """
    return '{:,d}'.format(int(d))


def format_duration(secs):
    """
    Format a duration in seconds as minutes and seconds.
    """
    secs = int(secs)

    if abs(secs) > 60:
        # Capture the sign before `secs` is replaced by the (non-negative) remainder,
        # otherwise negative durations would lose their minus sign.
        sign = '-' if secs < 0 else ''
        mins = abs(secs) // 60
        secs = abs(secs) - (mins * 60)

        return '%s%im %02is' % (sign, mins, secs)

    return '%is' % secs


def format_percent(d, t):
    """
    Format a value as a percent of a total.
    """
    return '{:.1%}'.format(float(d) / t)
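# Hedged usage sketch (not part of the original script): quick sanity checks for
# the three formatters above, with the expected strings noted in the comments.
if __name__ == '__main__':
    print(format_comma(1234567))     # '1,234,567'
    print(format_duration(125))      # '2m 05s'
    print(format_duration(-125))     # '-2m 05s'
    print(format_duration(45))       # '45s'
    print(format_percent(25, 200))   # '12.5%'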
# Time: O(n * l^2) # Space: O(n * l) # Given a list of words, please write a program that returns # all concatenated words in the given list of words. # # A concatenated word is defined as a string that is comprised entirely of # at least two shorter words in the given array. # # Example: # Input: ["cat","cats","catsdogcats","dog","dogcatsdog","hippopotamuses","rat","ratcatdogcat"] # # Output: ["catsdogcats","dogcatsdog","ratcatdogcat"] # # Explanation: "catsdogcats" can be concatenated by "cats", "dog" and "cats"; # "dogcatsdog" can be concatenated by "dog", "cats" and "dog"; # "ratcatdogcat" can be concatenated by "rat", "cat", "dog" and "cat". # Note: # The number of elements of the given array will not exceed 10,000 # The length sum of elements in the given array will not exceed 600,000. # All the input string will only include lower case letters. # The returned elements order does not matter. class Solution(object): def findAllConcatenatedWordsInADict(self, words): """ :type words: List[str] :rtype: List[str] """ lookup = set(words) result = [] for word in words: dp = [False] * (len(word)+1) dp[0] = True for i in xrange(len(word)): if not dp[i]: continue for j in xrange(i+1, len(word)+1): if j - i < len(word) and word[i:j] in lookup: dp[j] = True if dp[len(word)]: result.append(word) break return result
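# Hedged usage sketch (not part of the original solution): runs the example from
# the problem statement above. Note the solution uses xrange, so this is Python 2.
if __name__ == '__main__':
    words = ["cat", "cats", "catsdogcats", "dog", "dogcatsdog",
             "hippopotamuses", "rat", "ratcatdogcat"]
    print(Solution().findAllConcatenatedWordsInADict(words))
    # Expected (order may vary): ['catsdogcats', 'dogcatsdog', 'ratcatdogcat']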
def get_chunks(l, n, max_chunks=None):
    """
    Returns a chunked version of list l with a maximum of n items in each chunk
    :param iterable[T] l: list of items of type T
    :param int n: max size of each chunk
    :param int max_chunks: maximum number of chunks that can be returned. Pass None (the default) for unbounded
    :return: list of chunks
    :rtype: list[list[T]]
    """
    if n is None:
        return [l]
    if n <= 0:
        raise ValueError('get_chunks: n must be a positive value. Received {}'.format(n))
    if max_chunks is not None and max_chunks > 0:
        # Grow the chunk size if necessary so that no more than max_chunks chunks are returned.
        n = max(n, int(float(len(l)) / float(max_chunks)) + 1)
    return [l[i:i+n] for i in range(0, len(l), n)]
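# Hedged usage sketch (not part of the original helper): chunk a list of 10 items
# into chunks of at most 3, then again with max_chunks capping the chunk count.
if __name__ == '__main__':
    items = list(range(10))
    print(get_chunks(items, 3))
    # [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
    print(get_chunks(items, 3, max_chunks=2))
    # at most 2 chunks, e.g. [[0, 1, 2, 3, 4, 5], [6, 7, 8, 9]]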
print("####################################################") print("#FILENAME:\t\ta1p3.py\t\t\t #") print("#ASSIGNMENT:\t\tHomework Assignment 1 Pt. 3#") print("#COURSE/SECTION:\tCIS 3389.251\t\t #") print("#DUE DATE:\t\tWednesday, 12.February 2020#") print("####################################################\n\n\n") sum = 0 for i in range(1,6): print("\nMONTH ", i, "\n--------") sum += float(input("Enter rainfall for month: ")) print("\n\n") if sum >= 20: print(sum, " inches:\nPLENTY RAINFALL") elif sum >= 15: print(sum, " inches:\nMODERATELY HIGH RAINFALL") elif sum >= 10: print(sum, " inches:\nMODERATELY LOW RAINFALL") else: print(sum, " inches:\nLOW RAINFALL") print("\n\n") #add blank line before return to prompt
description = '' pages = ['header', 'checkout'] def setup(data): pass def test(data): navigate('http://store.demoqa.com/') click(header.go_to_checkout) verify_text_in_element(checkout.title, 'Checkout') capture('Checkout page is displayed') def teardown(data): pass
# ENUM definitions # Symbol type SYMBOL_TYPE_SPOT = 'SPOT' # Order status ORDER_STATUS_NEW = 'NEW' ORDER_STATUS_PARTIALLY_FILLED = 'PARTIALLY_FILLED' ORDER_STATUS_FILLED = 'FILLED' ORDER_STATUS_CANCELED = 'CANCELED' ORDER_STATUS_PENDING_CANCEL = 'PENDING_CANCEL' ORDER_STATUS_REJECTED = 'REJECTED' ORDER_STATUS_EXPIRED = 'EXPIRED' # Order types ORDER_TYPE_LIMIT = 'LIMIT' ORDER_TYPE_MARKET = 'MARKET' # Order side ORDER_SIDE_BUY = 'BUY' ORDER_SIDE_SELL = 'SELL' # Time in force TIME_IN_FORCE_GTC = 'GTC' TIME_IN_FORCE_IOC = 'IOC' # Kline intervals # m -> minutes; h -> hours; d -> days; w -> weeks; M -> months KLINE_INTERVAL_1MINUTE = '1m' KLINE_INTERVAL_3MINUTE = '3m' KLINE_INTERVAL_5MINUTE = '5m' KLINE_INTERVAL_15MINUTE = '15m' KLINE_INTERVAL_30MINUTE = '30m' KLINE_INTERVAL_1HOUR = '1h' KLINE_INTERVAL_2HOUR = '2h' KLINE_INTERVAL_4HOUR = '4h' KLINE_INTERVAL_6HOUR = '6h' KLINE_INTERVAL_8HOUR = '8h' KLINE_INTERVAL_12HOUR = '12h' KLINE_INTERVAL_1DAY = '1d' KLINE_INTERVAL_3DAY = '3d' KLINE_INTERVAL_1WEEK = '1w' KLINE_INTERVAL_1MONTH = '1M'
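# Hedged usage sketch (not part of the original constants module): shows how these
# enum-style constants might be combined into an order payload for an exchange-style
# REST API. The symbol and field names below are assumptions for illustration only,
# not a documented API.
example_order = {
    'symbol': 'BTCUSDT',            # assumed trading pair, for illustration
    'side': ORDER_SIDE_BUY,
    'type': ORDER_TYPE_LIMIT,
    'timeInForce': TIME_IN_FORCE_GTC,
    'quantity': '0.001',
    'price': '20000.00',
}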
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 28 19:34:48 2019

@author: wenbin
"""

"""
Implement a queue data structure that supports enqueue, dequeue, peeking at the
front and rear elements, and querying the queue size.
The queue is backed by a Python list (array implementation).
"""

class MyQueue:
    def __init__(self):
        self.arr = []
        self.front = 0      # index of the front element
        self.rear = 0       # index one past the rear element

    # Check whether the queue is empty
    def isEmpty(self):
        return self.front == self.rear

    # Return the size of the queue
    def size(self):
        return self.rear - self.front

    # Return the front element of the queue
    def getFront(self):
        if self.isEmpty():
            return None
        else:
            return self.arr[self.front]

    # Return the rear element of the queue
    def getBack(self):
        if self.isEmpty():
            return None
        else:
            return self.arr[self.rear - 1]

    # Remove the element at the front of the queue
    def deQueue(self):
        if self.rear > self.front:
            self.front += 1
        else:
            print("The queue is already empty!")

    # Append a new element at the rear of the queue
    def enQueue(self, item):
        self.arr.append(item)
        self.rear += 1


if __name__ == "__main__":
    queue = MyQueue()
    queue.enQueue(5)
    queue.enQueue(3)
    print("The first element of the queue is : ", queue.getFront())
    print("The last element of the queue is : ", queue.getBack())
    print("The size of queue is : ", queue.size())
# cases where DictAchievement should unlock # >> CASE {'name': 'John Doe', 'age': 24} # >> CASE { 'name': 'John Doe', 'age': 24 } # >> CASE func({'name': 'John Doe', 'age': 24})
km = float(input('Number of km driven: '))
dias = float(input('Number of days: '))
q = km * 0.15
d = dias * 60
print('You will pay R${:.2f} for the car rental.'.format(q + d))
#!/usr/bin/env python3 # default arguments, assume a default value if one is not provided def display_info(name, age='42'): print('Name: ', name, 'Age', age) display_info(age='56', name='Marc Wilson') display_info(name='Marc Wilson')
# Develop a program that reads a person's weight and height, computes their
# Body Mass Index (BMI), and shows their status according to the table below:
#
# - BMI below 18.5: underweight
#
# - between 18.5 and 25: ideal weight
#
# - 25 up to 30: overweight
#
# - 30 up to 40: obesity

peso = float(input('What is your weight? (kg) '))
altura = float(input('What is your height? (m) '))
imc = peso / (altura ** 2)
print("This person's BMI is {:.1f}".format(imc))
if imc < 18.5:
    print('You are UNDERWEIGHT')
elif imc >= 18.5 and imc < 25:
    print('CONGRATULATIONS, you are in the IDEAL WEIGHT range')
elif imc >= 25 and imc < 30:
    print('You are OVERWEIGHT')
elif imc >= 30 and imc < 40:
    print('You are in the OBESITY range')
else:
    print('You are MORBIDLY OBESE, be careful!')
#!/usr/bin/env python ''' Copyright (C) 2019, WAFW00F Developers. See the LICENSE file for copying permission. ''' NAME = 'Malcare (Inactiv)' def is_waf(self): schemes = [ self.matchContent(r'firewall.{0,15}?powered.by.{0,15}?malcare.{0,15}?pro'), self.matchContent('blocked because of malicious activities') ] if any(i for i in schemes): return True return False
n,k=map(int,input().split()) if k<n//2 or (n==1 and k!=0): print(-1) else: if n==1: print(1) else: x=r=k-(n-2)//2 while r<=x+n: r+=x ans=[r,x] for i in range(2,n): ans.append(ans[1]+i-1) print(*ans)
class news_source: ''' News Source Class to define News Source Objects ''' def __init__(self, id, name, homepage_url, description): self.id = id self.name = name self.homepage_url = homepage_url self.description = description # self.logo = logo class Articles: ''' Articles class to define Source's Articles Objects ''' def __init__(self, author, title, description, article_url, logo, publishedAt): # self.source_id = source self.author = author self.title = title self.description = description self.article_url = article_url self.logo = logo self.publishedAt = publishedAt
# Time: O(n) # Space: O(1) class Solution(object): def numberOfLines(self, widths, S): """ :type widths: List[int] :type S: str :rtype: List[int] """ result = [1, 0] for c in S: w = widths[ord(c)-ord('a')] result[1] += w if result[1] > 100: result[0] += 1 result[1] = w return result
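# Hedged usage sketch (not part of the original solution): the canonical example
# where every letter is 10 units wide, so the full alphabet needs 3 lines and the
# last line is 60 units wide.
if __name__ == '__main__':
    widths = [10] * 26
    print(Solution().numberOfLines(widths, "abcdefghijklmnopqrstuvwxyz"))
    # [3, 60]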
class MarkerPosition: def __init__(self, markers_points, rotation_vector, translation_vector): self.markers_points = markers_points self.rotation_vector = rotation_vector self.translation_vector = translation_vector def set_markers_points(self, markers_points): self.markers_points = markers_points def set_rotation_vector(self, rotation_vector): self.rotation_vector = rotation_vector def get_markers_points(self): return self.markers_points def get_rotation_vector(self): return self.rotation_vector def get_translation_vector(self): return self.translation_vector
def _ghc_paths_module_impl(ctx): tools = ctx.toolchains["@rules_haskell//haskell:toolchain"].tools ghc = tools.ghc ctx.actions.run_shell( inputs = [ghc], outputs = [ctx.outputs.out], command = """ cat > {out} << EOM module GHC.Paths ( ghc, ghc_pkg, libdir, docdir ) where ghc, ghc_pkg, docdir, libdir :: FilePath ghc = "{ghc}" ghc_pkg = "{ghc_pkg}" docdir = "DOCDIR_IS_NOT_SET" EOM echo -n 'libdir = "' >> {out} {ghc} --print-libdir | tr '\\' '/' | tr -d '[:space:]' >> {out} echo '"' >> {out} """.format( ghc = ghc.path, ghc_pkg = tools.ghc_pkg.path, out = ctx.outputs.out.path, ), ) return [DefaultInfo(runfiles = ctx.runfiles(collect_data = True))] ghc_paths_module = rule( _ghc_paths_module_impl, toolchains = ["@rules_haskell//haskell:toolchain"], outputs = {"out": "GHC/Paths.hs"}, )
# Python 2 script: uses urllib.urlopen and the legacy email.Encoders module.
import urllib
import smtplib

from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import formatdate
from email import Encoders

#import formatter
#import htmllib

url = "http://www.cnn.com"
filehandle = urllib.urlopen(url)

#w = formatter.DumbWriter()  # plain text
#f = formatter.AbstractFormatter(w)
#p = htmllib.HTMLParser(f)
#p.feed(filehandle.read())
#p.close()
#filehandle.close()

fromaddr = "ahouman2@hatswitch.crhc.illinois.edu"

msg = MIMEMultipart()
msg['From'] = "amir"
msg['To'] = "asdsadad"
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = "salam"
text = "saaalaaam"
msg.attach(MIMEText(text))

part = MIMEBase('application', "octet-stream")
part.set_payload(filehandle.read())
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="file.html"')
msg.attach(part)

smtp = smtplib.SMTP("localhost")
smtp.sendmail("amir@localhost", "freeman77200@gmail.com", msg.as_string())
smtp.close()
def factI(n): """ Assumes that n is an int > 0 Returns n!""" result = 1 while n > 1: result = result * n n -= 1 return result def factR(n): """ Assumes that n is an int > 0 Returns n! """ if n == 1: return n else: return n*factR(n-1)
class WebPageCssSelect: def __init__(self, url, ua_type, selector_name, value): self.url = url self.ua_type = ua_type self.selector_name = selector_name self.value = value def output(self): return self.url + ',' + self.ua_type + ',' + self.selector_name + ',' + self.value
num = 10 num1 = 10 num2 = 20 num3 = 30
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

class Context:

    def __init__(self):
        self.containers = []

    def push(self, o):
        self.containers.append(o)

    def pop(self):
        return self.containers.pop()


class Values:

    def __init__(self, *values):
        self.values = values

    def validate(self, o, ctx):
        if not o in self.values:
            return "%s not in %s" % (o, self.values)

    def __str__(self):
        return str(self.values)


class Types:

    def __init__(self, *types):
        self.types = types

    def validate(self, o, ctx):
        for t in self.types:
            if isinstance(o, t):
                return

        if len(self.types) == 1:
            return "%s is not a %s" % (o, self.types[0].__name__)
        else:
            return "%s is not one of: %s" % (o, ", ".join([t.__name__ for t in self.types]))


class List:

    def __init__(self, condition):
        self.condition = condition

    def validate(self, o, ctx):
        if not isinstance(o, list):
            return "%s is not a list" % o
        ctx.push(o)
        for v in o:
            err = self.condition.validate(v, ctx)
            if err:
                ctx.pop()
                return err
        ctx.pop()


class Map:

    def __init__(self, map, restricted=True):
        self.map = map
        self.restricted = restricted

    def validate(self, o, ctx):
        errors = []

        if not hasattr(o, "get"):
            return "%s is not a map" % o

        ctx.push(o)
        for k, t in self.map.items():
            v = o.get(k)
            if v is not None:
                err = t.validate(v, ctx)
                if err:
                    errors.append("%s: %s" % (k, err))
        if self.restricted:
            for k in o:
                if not k in self.map:
                    errors.append("%s: illegal key" % k)
        ctx.pop()
        if errors:
            return ", ".join(errors)


class And:

    def __init__(self, *conditions):
        self.conditions = conditions

    def validate(self, o, ctx):
        for c in self.conditions:
            err = c.validate(o, ctx)
            if err:
                return err
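# Hedged usage sketch (not part of the original module): composing the validators
# above into a small schema and checking values against it. The schema itself is
# made up for illustration.
if __name__ == '__main__':
    schema = Map({
        "name": Types(str),
        "ports": List(Types(int)),
        "mode": Values("client", "server"),
    })
    ctx = Context()
    print(schema.validate({"name": "demo", "ports": [5672], "mode": "server"}, ctx))
    # None (no error)
    print(schema.validate({"name": "demo", "mode": "proxy"}, ctx))
    # mode: proxy not in ('client', 'server')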
# Classes & Objects class Person: # __ means private __name = '' __email = '' def __init__(self, name, email): self.__name = name self.__email = email def set_name(self, name): self.__name = name def get_name(self): return self.__name def set_email(self, email): self.__email = email def get_email(self): return self.__email def to_string(self): return '{} can be contacted at {}'.format(self.__name, self.__email) person = Person('vuk samardžić', 'samardzic.vuk@gmail.com') # person.set_name('vuk') # person.set_email('samardzic.vuk@gmail.com') # print(person.get_name(), person.get_email()) # print(person.to_string()) class Customer(Person): __balance = 0 def __init__(self, name, email, balance): super().__init__(name, email) self.__balance = balance def set_balance(self, balance): self.__balance = balance def get_balance(self): return self.__balance def to_string(self): return '{} has the balance of {} and can be contacted at {}'.format(self.get_name(), self.__balance, self.get_email()) customer = Customer('John Doe', 'jdoe@gmail.com', 100) print(customer.to_string())
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class PyTensorflow(Package):
    """TensorFlow is an Open Source Software Library for Machine Intelligence

    This is a wheel based recipe as opposed to source-based installation
    in the upstream spack."""

    homepage = "https://www.tensorflow.org"
    url = "https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-2.5.0-cp38-cp38-manylinux2010_x86_64.whl"

    maintainers = ['pramodk', 'matz-e']

    import_modules = ['tensorflow']

    # For now, only Python 3.8 GPU wheels for Linux are supported. The macOS
    # URLs on the documentation page are broken. The dict below is set up so
    # that this can easily be extended.
    tensorflow_sha = {
        ('2.4.2', 'gpu-2.4.2-cp38-cp38-manylinux2010_x86_64'):
            'a33acffb4816c5456eb0cbc1654e3f270d17245322aa3d7bfdd22a610c862e0a',
    }

    def wheel_url(version_id):
        return (
            'https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_{0}.whl'  # noqa: E501
        ).format(version_id)

    # add all versions
    for key in tensorflow_sha.keys():
        version(key[0],
                url=wheel_url(key[1]),
                sha256=tensorflow_sha[key],
                expand=False)

    extends('python')
    depends_on('cudnn@8:')
    depends_on('cuda@11:')
    depends_on('python@3:', type=('build', 'run'))
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-pip', type='build')

    # compatible versions of py-h5py and py-six need to be added,
    # otherwise setup.py tries to uninstall them
    depends_on('py-h5py@2.10:2.99', when='@:2.4.99', type=('build', 'run'))
    depends_on('py-h5py@3:', when='@2.5:', type=('build', 'run'))
    depends_on('py-six@1.15.0', when='@:2.4.99', type=('build', 'run'))
    depends_on('py-six@1.16:', when='@2.5:', type=('build', 'run'))

    # no versions for macOS added
    conflicts('platform=darwin', msg='macOS is not supported')

    def install(self, spec, prefix):
        pip = which('pip')
        pip('install', self.stage.archive_file, '--prefix={0}'.format(prefix))

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def import_module_test(self):
        with working_dir('spack-test', create=True):
            for module in self.import_modules:
                python('-c', 'import {0}'.format(module))

    def setup_run_environment(self, env):
        env.prepend_path('LD_LIBRARY_PATH', self.spec['cuda'].prefix.lib64)
        env.prepend_path('LD_LIBRARY_PATH', self.spec['cuda'].prefix.extras.CUPTI.lib64)  # noqa: E501
        env.prepend_path('LD_LIBRARY_PATH', self.spec['cudnn'].prefix.lib64)
''' https://leetcode.com/problems/longest-valid-parentheses/ ''' class Solution: def longestValidParentheses(self, s: str) -> int: stack=[-1] maxValid=0 for i in range(0,len(s)): if s[i]=="(": stack.append(i) else: stack.pop() if stack==[]: stack.append(i) else: maxValid=max(maxValid,i-stack[-1]) return maxValid
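# Hedged usage sketch (not part of the original solution): the two examples from
# the LeetCode problem statement.
if __name__ == '__main__':
    print(Solution().longestValidParentheses("(()"))     # 2
    print(Solution().longestValidParentheses(")()())"))  # 4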
class Response:

    def __init__(self, inst):
        self.instance = inst

    @property
    def id(self):
        return self.instance['id']

    @property
    def status(self):
        return self.instance['status']
NUM_ROWS = 10 NUM_COLS = 10 with open('input.txt') as file: octopi = [] num_flashes = 0 for row in range(NUM_ROWS): line = file.readline() octopi.append([]) for col in range(NUM_COLS): octopi[row].append(int(line[col])) for step in range(100): flashes = [] for row in range(NUM_ROWS): for col in range(NUM_COLS): octopi[row][col] += 1 check = [(row, col)] while (len(check) > 0): this = check.pop() r, c = this[0], this[1] if this in flashes: continue if octopi[r][c] > 9: # flashes flashes.append(this) # check clearances north = r > 0 west = c > 0 south = r < (NUM_ROWS - 1) east = c < (NUM_COLS - 1) # add 1 to all nearby and check them too if north: check.append((r-1,c)) octopi[r-1][c] += 1 if west: check.append((r-1,c-1)) octopi[r-1][c-1] += 1 if east: check.append((r-1,c+1)) octopi[r-1][c+1] += 1 if south: check.append((r+1,c)) octopi[r+1][c] += 1 if west: check.append((r+1,c-1)) octopi[r+1][c-1] += 1 if east: check.append((r+1,c+1)) octopi[r+1][c+1] += 1 if west: check.append((r,c-1)) octopi[r][c-1] += 1 if east: check.append((r,c+1)) octopi[r][c+1] += 1 for point in flashes: row, col = point[0], point[1] octopi[row][col] = 0 num_flashes += len(flashes) print(num_flashes)
FILTERS_KEY = 'FILTERS' SAMPLE_RATE_METRIC_KEY = '_sample_rate' SAMPLING_PRIORITY_KEY = '_sampling_priority_v1' ANALYTICS_SAMPLE_RATE_KEY = '_dd1.sr.eausr' ORIGIN_KEY = '_dd.origin' HOSTNAME_KEY = '_dd.hostname' ENV_KEY = 'env' NUMERIC_TAGS = (ANALYTICS_SAMPLE_RATE_KEY, ) MANUAL_DROP_KEY = 'manual.drop' MANUAL_KEEP_KEY = 'manual.keep'
def init(bot, data): @bot.command() async def add(ctx): await ctx.send("Add Ludus to your server: <https://discordapp.com/api/oauth2/authorize?client_id=593828724001079297&permissions=124992&scope=bot>") @bot.command() async def github(ctx): await ctx.send("Ludus is open source! You can find the source code here: https://github.com/mninc/ludus") @bot.command() async def server(ctx): await ctx.send("You can join the official Ludus discord server here: https://discord.gg/qZQN53p") @bot.command() async def contributors(ctx): await ctx.send("Ludus was developed by <@156895789795246081> and <@197059070740398080>, with art from <@253584079113551873>.") @bot.command() async def website(ctx): await ctx.send("Visit our website: https://mninc.github.io/ludus/")
"""User provided customizations. Here one changes the default arguments for compiling _gpaw.so (serial) and gpaw-python (parallel). Here are all the lists that can be modified: * libraries * library_dirs * include_dirs * extra_link_args * extra_compile_args * runtime_library_dirs * extra_objects * define_macros * mpi_libraries * mpi_library_dirs * mpi_include_dirs * mpi_runtime_library_dirs * mpi_define_macros To override use the form: libraries = ['somelib', 'otherlib'] To append use the form libraries += ['somelib', 'otherlib'] """ parallel_python_interpreter = True # compiler compiler = os.environ['CC'] mpicompiler = 'mpicc' mpilinker = 'mpicc' extra_compile_args = ['-std=c99', '-O3', '-fopenmp-simd', '-march=native', '-mtune=native', '-mavx2'] #extra_link_args = ['-fno-lto'] # libz libraries = ['z'] # libxc library_dirs += [os.environ['LIBXCDIR'] + '/lib'] include_dirs += [os.environ['LIBXCDIR'] + '/include'] libraries += ['xc'] # MKL # libraries += ['mkl_core', 'mkl_intel_lp64' ,'mkl_sequential'] libraries += os.environ['BLAS_LIBS'].split() # use ScaLAPACK and HDF5 scalapack = True if scalapack: libraries += os.environ['SCALAPACK_LIBS'].split() # hdf5 = True # GPAW defines define_macros += [('GPAW_NO_UNDERSCORE_CBLACS', '1')] define_macros += [('GPAW_NO_UNDERSCORE_CSCALAPACK', '1')] define_macros += [("GPAW_ASYNC",1)] define_macros += [("GPAW_MPI2",1)]
# -*- coding:utf-8 -*-
# @Time : 2019/12/31 16:59
# @Author : Dg
# A quick review of a few higher-order functions
from functools import reduce


def big_than_10(x):
    if x > 10:
        return True
    else:
        return False


result = map(big_than_10, (1, 2, 3, 18,))  # in Python 3, map returns a map object (an iterator), not a list
print(result, type(result))


def fn(x, y):
    return x * 10 + y


r_ = reduce(fn, [1, 2, 3])
print(r_, type(r_))


def is_o(x):
    return int(x) % 2 == 0


f_ = filter(is_o, "1234")  # in Python 3, filter returns a filter object regardless of the iterable's type
print(f_, type(f_))

lt = [1, 3, 2, 5]
lt2 = sorted(lt)
print(lt, lt2)
lt3 = sorted(lt, reverse=True)
print(lt3)

dict_ = {1: 2, 3: 1, 2: 3, 4: 0}
print(dict_.items())
dict2 = sorted(dict_.items(), key=lambda x: x[0])
print(dict2)
# -*- coding: utf-8 -*- """Top-level package for Spectrify.""" __author__ = """The Narrativ Company, Inc.""" __email__ = 'engineering@narrativ.com' __version__ = '1.0.1'
#! /usr/bin/env zxpy ~'echo Hello world!' def print_file_count(): file_count = ~'ls -1 | wc -l' ~"echo -n 'file count is: '" print(file_count) print_file_count()
import random


class Solution(object):
    def __init__(self, nums):
        """
        :type nums: List[int]
        :type numsSize: int
        """
        self.nums = nums

    def pick(self, target):
        """
        :type target: int
        :rtype: int
        """
        count = 0
        ans = -1
        for i in xrange(0, len(self.nums)):
            if self.nums[i] == target:
                count += 1
                if random.randrange(0, count) == 0:
                    ans = i
        return ans


# Your Solution object will be instantiated and called as such:
# obj = Solution(nums)
# param_1 = obj.pick(target)
#!/usr/bin/env python
# _*_coding:utf-8 _*_
# @Time    : 2019/4/24 0024 9:36 PM
# @Author  : 喜欢二福的沧月君 (necydcy@gmail.com)
# @FileName: YanghuiTriangle.py
# @Software: PyCharm
"""
(9) Yang Hui's triangle (Pascal's triangle)

Problem description:
Print n (0 < n) rows of Yang Hui's triangle, where n is entered by the user.
"""


def YangHui(num=10):
    LL = [[1]]
    for i in range(1, num):
        LL.append([(0 if j == 0 else LL[i-1][j-1]) + (0 if j == len(LL[i-1]) else LL[i-1][j])
                   for j in range(i+1)])
    return LL


a = int(input())
for i in YangHui(a):
    for j in i:
        print("%5d" % j, end="")
    print()
def extractLasciviousImouto(item): """ """ vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'].replace('-', '.')) if not (chp or vol) or 'preview' in item['title'].lower(): return None if 'The Beast of the 17th District' in item['tags'] or 'the beast of the 17th district' in item['title'].lower(): return buildReleaseMessageWithType(item, 'The Beast of the 17th District', vol, chp, frag=frag, postfix=postfix, tl_type='oel') if 'Le Festin de Vampire' in item['tags']: return buildReleaseMessageWithType(item, 'Le Festin de Vampire', vol, chp, frag=frag, postfix=postfix) return False
## Capitalizes the first letter of a string.
## Capitalizes the first letter of the string and then joins it with the rest of the string. Omit the lower_rest parameter to keep the rest of the string intact, or set it to True to convert the rest to lowercase.


def capitalize(string, lower_rest=False):
    return string[:1].upper() + (string[1:].lower() if lower_rest else string[1:])


# capitalize('fooBar') # 'FooBar'
# capitalize('fooBar', True) # 'Foobar'
# Topological sorting of a directed ascylic graph # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Copyright (c) 2014-2019, Lars Asplund lars.anders.asplund@gmail.com """ Functionality to compute a dependency graph """ class DependencyGraph(object): """ A dependency graph """ def __init__(self): self._forward = {} self._backward = {} self._nodes = [] def toposort(self): """ Perform a topological sort returning a list of nodes such that every node is located after its dependency nodes """ sorted_nodes = [] self._visit(sorted(self._nodes), dict((key, sorted(values)) for key, values in self._forward.items()), sorted_nodes.append) sorted_nodes = list(reversed(sorted_nodes)) return sorted_nodes def add_node(self, node): self._nodes.append(node) def add_dependency(self, start, end): """ Add a dependency edge between the start and end node such that end node depends on the start node """ new_dependency = (start not in self._forward or end not in self._forward[start]) if start not in self._forward: self._forward[start] = set() if end not in self._backward: self._backward[end] = set() self._forward[start].add(end) self._backward[end].add(start) return new_dependency @staticmethod def _visit(nodes, graph, callback): """ Follow graph edges starting from the nodes iteratively returning all the nodes visited """ def visit(node): """ Visit a single node and all following nodes in the graph that have not already been visisted. Detects circular dependencies """ if node in path: start = path_ordered.index(node) raise CircularDependencyException(path_ordered[start:] + [node, ]) path.add(node) path_ordered.append(node) if node in graph: for other_node in graph[node]: if other_node not in visited: visit(other_node) path.remove(node) path_ordered.pop() visited.add(node) callback(node) visited = set() for node in nodes: if node not in visited: path = set() path_ordered = [] visit(node) def get_dependent(self, nodes): """ Get all nodes which are directly or indirectly dependent on the input nodes """ result = set() self._visit(nodes, self._forward, result.add) return result def get_dependencies(self, nodes): """ Get all nodes which are directly or indirectly dependencies of the input nodes """ result = set() self._visit(nodes, self._backward, result.add) return result def get_direct_dependencies(self, node): """ Get the direct dependencies of node """ return self._backward.get(node, set()) class CircularDependencyException(Exception): """ Raised when there are circular dependencies """ def __init__(self, path): Exception.__init__(self) self.path = path def __repr__(self): return "CircularDependencyException(%r)" % self.path
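# A minimal usage sketch for the DependencyGraph class above; it assumes the
# class is in scope (e.g. this is appended to the same module) and the node
# names are made up for illustration. add_dependency(start, end) means that
# `end` depends on `start`, so toposort() lists dependencies first.
graph = DependencyGraph()
for node in ("pkg", "entity", "arch"):
    graph.add_node(node)
graph.add_dependency("pkg", "entity")    # entity depends on pkg
graph.add_dependency("entity", "arch")   # arch depends on entity
print(graph.toposort())                  # ['pkg', 'entity', 'arch']
print(sorted(graph.get_dependent(["pkg"])))  # ['arch', 'entity', 'pkg'] (includes the start node)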
# Rock, paper, scissors game print("--------------------------------") print(" Rock, Paper, Scissors v1") print("--------------------------------") player1 = input("Player 1, enter your name: ") player2 = input("Player 2, enter your name: ") rolls = ["rock", "paper", "scissors"] roll1 = input(f"{player1}, enter your roll [rock, paper, scissors]: ") roll1 = roll1.lower().strip() if roll1 not in rolls: print(f"Sorry {player1}, {roll1} is not a valid roll.") roll2 = input(f"{player2}, enter your roll [rock, paper, scissors]: ") roll2 = roll2.lower().strip() if roll2 not in rolls: print(f"Sorry {player2}, {roll2} is not a valid roll.") print(f"{player1} rolls {roll1}.") print(f"{player2} rolls {roll2}.") # Win conditions winner = None if roll1 == roll2: winner = None elif roll1 == 'rock': if roll2 == 'paper': winner = player2 elif roll2 == 'scissors': winner = player1 elif roll1 == 'paper': if roll2 == 'rock': winner = player1 elif roll2 == 'scissors': winner = player2 elif roll1 == 'scissors': if roll2 == 'paper': winner = player1 elif roll2 == 'rock': winner = player2 print("The game is over!") if winner is None: print("It was a tie!") else: print(f"{winner} takes the game!")
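# An alternative way to express the win conditions of the game above as a
# small lookup table. This is only a sketch of the same logic, not part of
# the original game; player and roll values below are made up.
beats = {"rock": "scissors", "paper": "rock", "scissors": "paper"}


def decide(roll1, roll2, player1, player2):
    if roll1 == roll2:
        return None  # tie
    return player1 if beats[roll1] == roll2 else player2


print(decide("rock", "scissors", "Alice", "Bob"))  # Alice
print(decide("paper", "paper", "Alice", "Bob"))    # None (tie)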
"""Constants for the Carson integration.""" DOMAIN = "carson" UNLOCKED_TIMESPAN_SEC = 5 ATTRIBUTION = "provided by Eagle Eye" CONF_LIST_FROM_EAGLE_EYE = "list_from_eagle_eye" DEFAULT_CONF_LIST_FROM_EAGLE_EYE = False
# Copyright 2016 IBM All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class ScorerConfigurationException(Exception): """ Define exceptions to be used by various services. ScorerConfigurationException: Raised if a Scorer is improperly configured ScorerRuntimeException: Raised if a Scorer has a runtime error """ def __init__(self, message): """ Wrapper exception for configuration issues. Should be raised if: 1) Inputs to the constructor of a scorer are bad, 2) Invariant to scorer is violated etc. """ super(ScorerConfigurationException, self).__init__(message) # endclass ScorerConfigurationException class ScorerRuntimeException(Exception): def __init__(self, message): """ Wrapper exception for general runtime issues for a scorer. Should be raised if: 1) Input to an api method (likely .score) is invalid 2) Unforeseen problems prevent properly scoring a query, document, query/document pair """ super(ScorerRuntimeException, self).__init__(message) # endclass ScorerRuntimeException class ScorerTimeoutException(ScorerRuntimeException): def __init__(self, message, args, kwargs): """ Should be raised if a scorer times out """ super(ScorerTimeoutException, self).__init__(message) self._args = args self._kwargs = kwargs #endclass ScorerTimeoutException
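# A hedged sketch of how the exceptions above might be used by a scorer. The
# WordCountScorer class, its constructor argument and its score() signature
# are made up for illustration; only the exception classes come from the
# module above and are assumed to be in scope.
class WordCountScorer(object):
    def __init__(self, weight):
        # constructor input is invalid -> configuration problem
        if weight < 0:
            raise ScorerConfigurationException("weight must be non-negative")
        self.weight = weight

    def score(self, document):
        # bad input to an api method -> runtime problem
        if not isinstance(document, str):
            raise ScorerRuntimeException("score() expects a string document")
        return self.weight * len(document.split())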
class VkError(Exception): def __init__(self, code, message, request_params): super(VkError, self).__init__() self.code = code self.message = message self.request_params = request_params def __str__(self): return 'VkError {}: {} (request_params: {})'.format(self.code, self.message, self.request_params) class VkWallAccessDeniedError(VkError): def __init__(self, code, message, request_params): super(VkWallAccessDeniedError, self).__init__(code, message, request_params)
#!/usr/bin/env python # -*- coding: utf-8 -*- print(''' .intel_syntax noprefix .extern isr_common ''') print('// Interrupt Service Routines') for i in range(255): print('''isr{0}: cli {1} push {0} jmp isr_common '''.format(i, 'push 0' if i not in [8, 10, 11, 12, 13, 14, 17] else 'nop')) print(''' // Vector table .section .data .global isr_table isr_table: ''') for i in range(255): print(' .quad isr{}'.format(i))
"""Find first and last position of elements in sorted arrays.""" """First and last position in sorted arrays.""" def searchRange(nums, target): """Find first and last position.""" def midpoint(x, y): """Find mid point.""" return x + (y - x) // 2 lo, hi = 0, len(nums)-1 _max = -1 _min = float('inf') while lo <= hi: mid = midpoint(lo, hi) if nums[mid] == target: _max = max(_max, mid) _min = min(_min, mid) if nums[mid] <= target: lo = mid+1 else: hi = mid-1 if _max == -1: return [-1, _max] lo, hi = 0, _min while lo <= hi: mid = midpoint(lo, hi) if nums[mid] == target: _min = min(_min, mid) if nums[mid] >= target: hi = mid-1 else: lo = mid+1 return [_min, _max]
# This sample tests the logic that infers parameter types based on # default argument values or annotated base class methods. class Parent: def func1(self, a: int, b: str) -> float: ... class Child(Parent): def func1(self, a, b): reveal_type(self, expected_text="Self@Child") reveal_type(a, expected_text="int") reveal_type(b, expected_text="str") return a def func2(a, b=0, c=None): reveal_type(a, expected_text="Unknown") reveal_type(b, expected_text="int") reveal_type(c, expected_text="Unknown | None") def func3(a=(1, 2), b=[1,2], c={1: 2}): reveal_type(a, expected_text="Unknown") reveal_type(b, expected_text="Unknown") reveal_type(c, expected_text="Unknown")
#!/usr/bin/python3 class Solution: def isPalindrome(self, x: int) -> bool: if x < 0: return False s = str(x) for i in range(int(len(s)/2)): last = len(s) - i - 1 if s[i] != s[last]: return False return True if __name__ == "__main__": solution = Solution() print(solution.isPalindrome(121))
# *** DEFINE CONSTANTS *** # DO NOT ADD TO VERSION CONTROL AFTER ENTERING PERSONAL INFO # LOCATION WHERE BLOTTER FILE IS LOCATED (INPUT) SRCPATH = "D:/financial/" SRCFILE = "blotter.xlsx" # OUTPUT DATA DESTINATION (WINDOWS FORMAT) outpath = "D://financial//" outpath_linux = "/mnt/d/financial/" # OUTPUT FILE NAMES outfile = "stock_data_output.xlsx" out_screener = "screener_output.xlsx" # **ROBOADVISOR DEFAULTS (OVERRIDDEN IN APP) # DATABASE FOR HISTORICAL STOCK INFO (USED BY stock_analysisPME) HIST_DB_SERVER = "sqlite" HIST_DB_NAME = "hist.db" HIST_DB_SCHEMA = "db_schema.sql" # RULES FOR DIVERSIFICATION & RISK (AS PERCENTAGE OF TOTAL PORTFOLIO) max_sector_pct = 0.2 max_stock_pct = 0.1 # INTERNAL DISCOUNT RATE (WHAT OPPORTUNITY COST OF INVESTED FUNDS) discount_rate = 0.12
""" https://leetcode.com/problems/reverse-integer/ """ class Solution: def reverse(self, x: int) -> int: multiply = 1 upper_bound = 2**31-1 lower_bound = -2**31 if x < 0: multiply = -1 value = int(str(abs(x))[::-1]) print(f"mult={multiply}, value={value}, final={multiply*value}") if lower_bound < value < upper_bound: # prin(f"{lower_bound} < {value} < {upper_bound}") return multiply * value return 0 def test_1(): assert Solution().reverse(-123) == -321 def test_outOfScope(): assert Solution().reverse(1534236469) == 0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/7/25 0025 11:51 AM
# @Author  : Exchris Tsai
# @Site    : 
# @File    : example55.py
# @Software: PyCharm

"""
Task: learn to use bitwise NOT (~).
Analysis: ~ flips every bit (0 becomes 1, 1 becomes 0).
    (1) Shift a right by 4 bits.
    (2) Build a number whose low 4 bits are all 1 and the rest 0, e.g. ~(~0 << 4).
    (3) AND the two results together.
"""

__author__ = 'Exchris Tsai'

if __name__ == "__main__":
    a = 234
    b = ~a
    print('The a\'s 1 complement is %d' % b)
    a = ~a
    print('The a\'s 2 complement is %d' % a)
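# A short sketch of the procedure that the problem statement above actually
# describes (shift right by 4, build a low-4-bit mask with ~(~0 << 4), then
# AND the two). The value of a is just the example value used above.
a = 234                # 0b11101010
shifted = a >> 4       # 0b1110 (drop the low 4 bits)
mask = ~(~0 << 4)      # 0b1111 (low 4 bits set, everything else 0)
print(shifted & mask)  # 14, i.e. bits 4..7 of a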
def get_num(capacity, p): cell = [] for i in range(len(p) + 1): cell.append([]) for j in range(capacity + 1): cell[i].append(0) for i in range(1, len(p) + 1): for j in range(1, capacity + 1): if p[i - 1] <= i: cell[i][j] = max(p[i -1], cell[i][j-1]) if j - p[i - 1] >= 0: cell[i][j] = max(cell[i-1][j], p[i - 1] + cell[i - 1][j - p[i - 1]]) else: cell[i][j] = cell[i - 1][j] # for i in range(len(p) + 1): # for j in range(capacity + 1): # print(cell[i][j], end=' ') # print() # print() # print() if cell[len(p)][capacity] != capacity: return [] k = capacity t = len(p) ans = [] while k > 0: for el in p[::-1]: if k - el >= 0 and t - 1 >= 0 : if cell[t][k] - el == cell[t][k - el]: ans.append(el) k -= el elif cell[t][k] - el == cell[t - 1][k - el]: ans.append(el) t -= 1 k -= el return ans n =int(input()) p = list(map(int, input().split())) p.sort() if sum(p) % 3 == 0: capacity = sum(p) // 3 for i in range(3): k = get_num(capacity, p) if sum(k) != capacity: break for el in k: p.pop(p.index(el)) else: print(1) exit() print(0) # 11 # 17 59 34 57 17 23 67 1 18 2 59 # 11 2 # 21 10 1
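# A minimal, self-contained sketch of the boolean subset-sum question that the
# reconstruction above is built around: can some subset of p reach exactly
# `capacity`? This is an illustration only, not a drop-in replacement for
# get_num; the sample list is the test case from the comments above.
def subset_sum_exists(items, target):
    reachable = {0}  # sums reachable with the items seen so far
    for x in items:
        reachable |= {s + x for s in reachable if s + x <= target}
    return target in reachable


print(subset_sum_exists([17, 59, 34, 57, 17, 23, 67, 1, 18, 2, 59], 118))  # True (59 + 59)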
#!/usr/bin/env python # examples of Church numerals zero = lambda f: lambda x: x one = lambda f: lambda x: f(x) two = lambda f: lambda x: f(f(x)) three = lambda f: lambda x: f(f(f(x))) four = lambda f: lambda x: f(f(f(f(x)))) five = lambda f: lambda x: f(f(f(f(f(x))))) def to_intp(f): print (f(lambda x: x + 1)(0)) def to_int(f): return (f(lambda x: x + 1)(0)) # boolean TRUE = lambda x: lambda y: x FALSE = lambda x: lambda y: y # logic operators AND = lambda x: lambda y: x(y)(x) OR = lambda x: lambda y: x(x)(y) NOT = lambda p: p(FALSE)(TRUE) XOR = lambda x: lambda y: x(NOT(y))(y) def to_boolp(f): print (f(True)(False)) def to_bool(f): return (f(True)(False)) # incrementation n+1 inc = lambda n: lambda f: lambda x: f(n(f)(x)) # decrementation n-1 dec = lambda n: lambda f: lambda x: n(lambda g: lambda h: h(g(f)))(lambda y: x)(lambda y: y) # addition n+m add = lambda n: lambda m: m(inc)(n) # subtraction n-m sub = lambda n: lambda m: m(dec)(n) # multiplying n*m mul = lambda n: lambda m: (m(add(n))(zero)) # exponentiation n^m pow = lambda n: lambda m: (m(mul(n))(one)) # if f.e. IF(TRUE)(one)(two)->one, IF(FALSE)(one)(two) -> two IF = lambda n: lambda x: lambda y: n(x)(y) # checking if the numeral equals ZERO is_zero = lambda n: n(lambda x: (FALSE))(TRUE) # checking if m<=n ? less_or_equal = lambda m: lambda n: is_zero(sub(m)(n)) # checking if m==n equal = lambda m: lambda n: AND(less_or_equal(m)(n))(less_or_equal(n)(m)) # Z_combinator is needed for recursion Z = lambda f: (lambda x: (f(lambda y: x(x)(y))))(lambda x: f(lambda y: (x(x)(y)))) # modulo m%n mod = Z(lambda f: lambda m: lambda n: IF(less_or_equal(n)(m))(lambda x: f(sub(m)(n))(n)(x))(m)) # pairs PAIR = lambda x: lambda y: lambda f: f(x)(y) LEFT = lambda p: p(lambda x: lambda y: x) RIGHT = lambda p: p(lambda x: lambda y: y) # list EMPTY = PAIR(TRUE)(TRUE) NEW = lambda l: lambda x: PAIR(FALSE)(PAIR(x)(l)) IS_EMPTY = LEFT FIRST = lambda l: (LEFT(RIGHT(l))) REST = lambda l: (RIGHT(RIGHT(l))) def to_int_array(k): array = [] while not to_bool(IS_EMPTY(k)): array.append(to_int(FIRST(k))) k = REST(k) return array # range(making list of range [m,n]) RANGE = Z(lambda f: lambda m: lambda n: IF(less_or_equal(m)(n))(lambda x: NEW(f(inc(m))(n))(m)(x))(EMPTY)) # fold FOLD = Z(lambda f: lambda l: lambda x: lambda g: IF(IS_EMPTY(l))(x)(lambda y: g(f(REST(l))(x)(g))(FIRST(l))(y))) # mapping function on the list MAP = lambda k: lambda f: FOLD(k)(EMPTY)(lambda l: lambda x: NEW(l)(f(x))) # gcd gcd = Z(lambda f: lambda m: lambda n: IF(is_zero(n))(m)(lambda x: f(n)(mod(m)(n))(x))) # program which for numbers from 1 to 50 puts gcd of this number and 54 in array ten = mul(two)(five) fifty = mul(ten)(five) fifty_four = add(fifty)(four) list = MAP(RANGE(one)(fifty))(lambda n: gcd(fifty_four)(n)) print(to_int_array(list))
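#!/usr/bin/env python
# A tiny usage sketch for the Church-numeral helpers defined above; it assumes
# the definitions (to_intp, to_boolp, add, mul, pow, equal, gcd, ...) are in
# scope, e.g. by appending these lines to that file.
to_intp(add(two)(three))               # 5
to_intp(mul(three)(four))              # 12
to_intp(pow(two)(three))               # 8
to_boolp(equal(add(two)(two))(four))   # True
to_intp(gcd(mul(two)(three))(four))    # gcd(6, 4) == 2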
#!/usr/bin/env python3 # https://arc098.contest.atcoder.jp/tasks/arc098_a n = int(input()) s = input() a = [0] * n if s[0] == 'W': a[0] = 1 for i in range(1, n): c = s[i] if c == 'E': a[i] = a[i - 1] else: a[i] = a[i - 1] + 1 b = [0] * n if s[n - 1] == 'E': b[n - 1] = 1 for i in range(n - 2, -1, -1): c = s[i] if c == 'W': b[i] = b[i + 1] else: b[i] = b[i + 1] + 1 m = n for i in range(n): m = min(m, a[i] + b[i]) print(m - 1)
class SystemUnsupported(Exception):
    def __init__(self):
        message = "Your operating system is not supported"
        super().__init__(message)


class SubClassInvaild(Exception):
    def __init__(self):
        message = "Subclass did not provide the required function"
        super().__init__(message)


class InvalidInputUrl(Exception):
    def __init__(self):
        message = "The product URL is invalid, please check it and try again"
        super().__init__(message)


class InvalidInputTime(Exception):
    def __init__(self):
        message = "The purchase time is invalid, please re-enter it in the required format"
        super().__init__(message)
# Copyright (c) Jeremías Casteglione <jrmsdev@gmail.com> # See LICENSE file. __all__ = ['configure'] def configure(env, cfg): env.settings.merge(cfg, 'os', ( 'hostname', 'hostname.file', 'users.home.dir', ))
# # Copyright (c) 2013,2014, Oracle and/or its affiliates. All rights reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # """Errors raised within the MySQL Fabric library. """ class Error(Exception): """Base exception for all errors in the package. """ pass class NotCallableError(Error): """Exception raised when a callable was expected but not provided. """ pass class NotEventError(Error): """Exception raised when a non-event instance was passed where an event instance was expected. """ pass class UnknownCallableError(Error): """Exception raised when trying to use a callable that was not known when a known callable was expected. """ pass class ExecutorError(Error): """Exception raised when the one tries to access the executor that is not properly configured. """ pass class InvalidGtidError(Error): """Exception raised when the one tries to use and make operations with invalid GTID(s). """ pass class InternalError(Error): """Exception raised when an internal error occurs. Typically it is raised when an extension added does not honor the internal interfaces. """ pass class UuidError(Error): """Exception raised when there are problems with uuids. For example, if the expected uuid does not match the server's uuid. """ pass class TimeoutError(Error): """Exception raised when there is a timeout. """ class DatabaseError(Error): """Exception raised when something bad happens while accessing a database. """ def __init__(self, msg, errno=None): """Constructor for DatabaseError object. """ super(DatabaseError, self).__init__(msg) self.errno = errno class ProgrammingError(Error): """Exception raised when a developer tries to use the interfaces and executes an invalid operation. """ pass class ConfigurationError(ProgrammingError): """Exception raised when configuration options are not properly set. """ pass class LockManagerError(Error): """Exception raised when an invalid operation is attempted on the lock manager or locks are broken. """ pass class ServiceError(Error): """Exception raised when one tries to use the service interface and executes an invalid operation. """ pass class GroupError(ServiceError): """Exception raised when one tries to execute an invalid operation on a group. For example, it is not possible to create two groups with the same id or remove a group that has associated servers. """ pass class ServerError(ServiceError): """Exception raised when one tries to execute an invalid operation on a server. For example, it is not possible to create two servers with the same uuid. """ pass class ProcedureError(ServiceError): """Exception raised when a procedure is not found. """ pass class ShardingError(ServiceError): """Exception raised when an invalid operation is attempted on the sharding system. """ pass class BackupError(Error): """Exception raised when a error occurs in the backup restore framework of Fabric. 
""" pass class CredentialError(Error): """Exception raised when something is wrong with credentials""" pass class CommandResultError(Error): """Exception raised for incorrect command result """ pass class ProviderError(ServiceError): """Exception raised when something is wrong while accessing a cloud provider. """ pass class MachineError(ServiceError): """Exception while processing a request that requires access to provider's machine. """ pass
def reverse_string(string): reverse = '' for char in range(len(string) - 1, -1, -1): reverse += string[char] print(reverse)
class RestException(Exception): pass class ResourceException(RestException): pass class RestServerException(RestException): pass
class CreateAuth: def __init__(self, repository): self.auth_repository = repository async def create(self): return await self.auth_repository.create()
length = int(input("Enter the length of the rectangle: "))
width = int(input("Enter the width of the rectangle: "))

perimeter = 2 * (length + width)
area = length * width

print("Area:", area, "square cm")
print("Perimeter:", perimeter, "cm")