blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3540dfbcdedda9bb8b66e444ddf8e9f925b5eb30 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Tensorflow_LightGBM_Scipy_nightly/source/numpy/distutils/ccompiler.py | b03fb96b28519ed7c2f2a0b827651c40c399f598 | [
"GPL-3.0-or-later",
"BSD-3-Clause",
"Python-2.0",
"GCC-exception-3.1",
"LicenseRef-scancode-unknown-license-reference",
"GPL-3.0-only",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause-Open-MPI",
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 28,548 | py | from __future__ import division, absolute_import, print_function
import os
import re
import sys
import types
import shlex
import time
from copy import copy
from distutils import ccompiler
from distutils.ccompiler import *
from distutils.errors import DistutilsExecError, DistutilsModuleError, \
DistutilsPlatformError, CompileError
from distutils.sysconfig import customize_compiler
from distutils.version import LooseVersion
from numpy.distutils import log
from numpy.distutils.compat import get_exception
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \
quote_args, get_num_build_jobs, \
_commandline_dep_string
# globals for parallel build management
try:
    import threading
except ImportError:
    # fall back to the no-op threading shim on builds without thread support
    import dummy_threading as threading
# Semaphore bounding the number of concurrent compile jobs; created lazily
# in CCompiler_compile once the job count is known.
_job_semaphore = None
# Protects creation of _job_semaphore and all access to _processing_files.
_global_lock = threading.Lock()
# Object files currently being compiled (possibly by another thread); used
# to avoid compiling the same object twice when a source is shared between
# extensions.
_processing_files = set()
def _needs_build(obj, cc_args, extra_postargs, pp_opts):
    """
    Check if an object needs to be rebuilt based on its dependencies

    Parameters
    ----------
    obj : str
        object file

    Returns
    -------
    bool
    """
    # defined in unixcompiler.py
    dep_file = obj + '.d'
    if not os.path.exists(dep_file):
        # no dependency information recorded yet, so we must build
        return True

    # dep_file is a makefile containing 'object: dependencies'
    # formatted like posix shell (spaces escaped, \ line continuations)
    # the last line contains the compiler commandline arguments as some
    # projects may compile an extension multiple times with different
    # arguments
    with open(dep_file, "r") as f:
        lines = f.readlines()

    cmdline = _commandline_dep_string(cc_args, extra_postargs, pp_opts)
    last_cmdline = lines[-1]
    if last_cmdline != cmdline:
        # the compile flags changed since the object was built: rebuild
        return True

    contents = ''.join(lines[:-1])
    deps = [x for x in shlex.split(contents, posix=True)
            if x != "\n" and not x.endswith(":")]

    try:
        t_obj = os.stat(obj).st_mtime

        # check if any of the dependencies is newer than the object
        # the dependencies includes the source used to create the object
        for f in deps:
            if os.stat(f).st_mtime > t_obj:
                return True
    except OSError:
        # no object counts as newer (shouldn't happen if dep_file exists)
        return True

    return False
def replace_method(klass, method_name, func):
    """Install *func* on *klass* under the name *method_name*.

    On Python 2 an explicit unbound method object is created; on Python 3,
    where unbound methods no longer exist (and ``types.MethodType`` cannot
    produce one), a plain forwarding function is attached instead.
    """
    if sys.version_info[0] >= 3:
        # Py3k: a simple wrapper that forwards every argument behaves
        # exactly like the unbound method would have.
        method = lambda self, *args, **kw: func(self, *args, **kw)
    else:
        method = types.MethodType(func, None, klass)
    setattr(klass, method_name, method)
######################################################################
## Method that subclasses may redefine. But don't call this method,
## it is private to the CCompiler class and may return unexpected
## results if used elsewhere. So, you have been warned.
def CCompiler_find_executables(self):
    """
    Does nothing here, but is called by the get_version method and can be
    overridden by subclasses. In particular it is redefined in the `FCompiler`
    class where more documentation can be found.
    """
    # Intentionally a no-op hook for subclasses.
    pass

# Install the hook so CCompiler.get_version() can always call it.
replace_method(CCompiler, 'find_executables', CCompiler_find_executables)
# Using customized CCompiler.spawn.
def CCompiler_spawn(self, cmd, display=None):
    """
    Execute a command in a sub-process.

    Parameters
    ----------
    cmd : str
        The command to execute.
    display : str or sequence of str, optional
        The text to add to the log file kept by `numpy.distutils`.
        If not given, `display` is equal to `cmd`.

    Returns
    -------
    None

    Raises
    ------
    DistutilsExecError
        If the command failed, i.e. the exit status was not 0.
    """
    if display is None:
        display = cmd
        if is_sequence(display):
            display = ' '.join(list(display))
    log.info(display)
    # exec_command returns (exit_status, combined_output)
    s, o = exec_command(cmd)
    if s:
        # non-zero exit status: report the output and raise
        if is_sequence(cmd):
            cmd = ' '.join(list(cmd))
        try:
            print(o)
        except UnicodeError:
            # When installing through pip, `o` can contain non-ascii chars
            pass
        if re.search('Too many open files', o):
            msg = '\nTry rerunning setup command until build succeeds.'
        else:
            msg = ''
        raise DistutilsExecError('Command "%s" failed with exit status %d%s' % (cmd, s, msg))

replace_method(CCompiler, 'spawn', CCompiler_spawn)
def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
    """
    Return the name of the object files for the given source files.

    Parameters
    ----------
    source_filenames : list of str
        The list of paths to source files. Paths can be either relative or
        absolute, this is handled transparently.
    strip_dir : bool, optional
        Whether to strip the directory from the returned paths. If True,
        the file name prepended by `output_dir` is returned. Default is False.
    output_dir : str, optional
        If given, this path is prepended to the returned paths to the
        object files.

    Returns
    -------
    obj_names : list of str
        The list of paths to the object files corresponding to the source
        files in `source_filenames`.
    """
    if output_dir is None:
        output_dir = ''
    result = []
    for source in source_filenames:
        stem, ext = os.path.splitext(os.path.normpath(source))
        # Drop any Windows drive prefix, then any leading separator, so
        # the object path is always relative to output_dir.
        stem = os.path.splitdrive(stem)[1]
        if os.path.isabs(stem):
            stem = stem[1:]
        if stem.startswith('..'):
            # Fold leading '..' components (interior ones were already
            # collapsed by normpath) into the directory they resolve to.
            cut = stem.rfind('..') + 2
            resolved = os.path.basename(os.path.abspath(stem[:cut]))
            stem = resolved + stem[cut:]
        if ext not in self.src_extensions:
            raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, source))
        if strip_dir:
            stem = os.path.basename(stem)
        result.append(os.path.join(output_dir, stem + self.obj_extension))
    return result
replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames)
def CCompiler_compile(self, sources, output_dir=None, macros=None,
                      include_dirs=None, debug=0, extra_preargs=None,
                      extra_postargs=None, depends=None):
    """
    Compile one or more source files.

    Please refer to the Python distutils API reference for more details.

    Parameters
    ----------
    sources : list of str
        A list of filenames
    output_dir : str, optional
        Path to the output directory.
    macros : list of tuples
        A list of macro definitions.
    include_dirs : list of str, optional
        The directories to add to the default include file search path for
        this compilation only.
    debug : bool, optional
        Whether or not to output debug symbols in or alongside the object
        file(s).
    extra_preargs, extra_postargs : ?
        Extra pre- and post-arguments.
    depends : list of str, optional
        A list of file names that all targets depend on.

    Returns
    -------
    objects : list of str
        A list of object file names, one per source file `sources`.

    Raises
    ------
    CompileError
        If compilation fails.
    """
    # This method is effective only with Python >=2.3 distutils.
    # Any changes here should be applied also to fcompiler.compile
    # method to support pre Python 2.3 distutils.
    global _job_semaphore

    jobs = get_num_build_jobs()

    # setup semaphore to not exceed number of compile jobs when parallelized at
    # extension level (python >= 3.5)
    with _global_lock:
        if _job_semaphore is None:
            _job_semaphore = threading.Semaphore(jobs)

    if not sources:
        return []
    # FIXME:RELATIVE_IMPORT
    if sys.version_info[0] < 3:
        from .fcompiler import FCompiler, is_f_file, has_f90_header
    else:
        from numpy.distutils.fcompiler import (FCompiler, is_f_file,
                                               has_f90_header)
    # Log which compiler(s) will be used for this batch.
    if isinstance(self, FCompiler):
        display = []
        for fc in ['f77', 'f90', 'fix']:
            fcomp = getattr(self, 'compiler_'+fc)
            if fcomp is None:
                continue
            display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp)))
        display = '\n'.join(display)
    else:
        ccomp = self.compiler_so
        display = "C compiler: %s\n" % (' '.join(ccomp),)
    log.info(display)
    macros, objects, extra_postargs, pp_opts, build = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
    display = "compile options: '%s'" % (' '.join(cc_args))
    if extra_postargs:
        display += "\nextra options: '%s'" % (' '.join(extra_postargs))
    log.info(display)

    def single_compile(args):
        # Compile one (obj, (src, ext)) pair; safe to call from worker
        # threads -- coordination happens via the module-level lock/set.
        obj, (src, ext) = args
        if not _needs_build(obj, cc_args, extra_postargs, pp_opts):
            return

        # check if we are currently already processing the same object
        # happens when using the same source in multiple extensions
        while True:
            # need explicit lock as there is no atomic check and add with GIL
            with _global_lock:
                # file not being worked on, start working
                if obj not in _processing_files:
                    _processing_files.add(obj)
                    break
            # wait for the processing to end
            time.sleep(0.1)

        try:
            # retrieve slot from our #job semaphore and build
            with _job_semaphore:
                self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
        finally:
            # register being done processing
            with _global_lock:
                _processing_files.remove(obj)

    if isinstance(self, FCompiler):
        objects_to_build = list(build.keys())
        f77_objects, other_objects = [], []
        for obj in objects:
            if obj in objects_to_build:
                src, ext = build[obj]
                if self.compiler_type=='absoft':
                    obj = cyg2win32(obj)
                    src = cyg2win32(src)
                if is_f_file(src) and not has_f90_header(src):
                    f77_objects.append((obj, (src, ext)))
                else:
                    other_objects.append((obj, (src, ext)))

        # f77 objects can be built in parallel
        build_items = f77_objects
        # build f90 modules serial, module files are generated during
        # compilation and may be used by files later in the list so the
        # ordering is important
        for o in other_objects:
            single_compile(o)
    else:
        build_items = build.items()

    if len(build) > 1 and jobs > 1:
        # build parallel using a thread pool (the heavy lifting happens in
        # the spawned compiler subprocesses, so the GIL is not a bottleneck)
        import multiprocessing.pool
        pool = multiprocessing.pool.ThreadPool(jobs)
        pool.map(single_compile, build_items)
        pool.close()
    else:
        # build serial
        for o in build_items:
            single_compile(o)

    # Return *all* object filenames, not just the ones we just built.
    return objects

replace_method(CCompiler, 'compile', CCompiler_compile)
def CCompiler_customize_cmd(self, cmd, ignore=()):
    """
    Customize compiler using distutils command.

    Parameters
    ----------
    cmd : class instance
        An instance inheriting from `distutils.cmd.Command`.
    ignore : sequence of str, optional
        List of `CCompiler` commands (without ``'set_'``) that should not be
        altered. Strings that are checked for are:
        ``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs',
        'rpath', 'link_objects')``.

    Returns
    -------
    None
    """
    log.info('customize %s using %s' % (self.__class__.__name__,
                                        cmd.__class__.__name__))

    def allow(attr):
        # Copy a setting only if the command actually defines it and the
        # caller did not explicitly exclude it.
        return getattr(cmd, attr, None) is not None and attr not in ignore

    if allow('include_dirs'):
        self.set_include_dirs(cmd.include_dirs)
    if allow('define'):
        for (name, value) in cmd.define:
            self.define_macro(name, value)
    if allow('undef'):
        for macro in cmd.undef:
            self.undefine_macro(macro)
    if allow('libraries'):
        # extend rather than replace the compiler's own library list
        self.set_libraries(self.libraries + cmd.libraries)
    if allow('library_dirs'):
        self.set_library_dirs(self.library_dirs + cmd.library_dirs)
    if allow('rpath'):
        self.set_runtime_library_dirs(cmd.rpath)
    if allow('link_objects'):
        self.set_link_objects(cmd.link_objects)

replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd)
def _compiler_to_string(compiler):
props = []
mx = 0
keys = list(compiler.executables.keys())
for key in ['version', 'libraries', 'library_dirs',
'object_switch', 'compile_switch',
'include_dirs', 'define', 'undef', 'rpath', 'link_objects']:
if key not in keys:
keys.append(key)
for key in keys:
if hasattr(compiler, key):
v = getattr(compiler, key)
mx = max(mx, len(key))
props.append((key, repr(v)))
lines = []
format = '%-' + repr(mx+1) + 's = %s'
for prop in props:
lines.append(format % prop)
return '\n'.join(lines)
def CCompiler_show_customization(self):
    """
    Print the compiler customizations to stdout.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Notes
    -----
    Printing is only done if the distutils log threshold is < 2.
    """
    # Disabled debug output, kept for reference; flip to `if 1:` to log
    # the individual customization attributes.
    if 0:
        for attrname in ['include_dirs', 'define', 'undef',
                         'libraries', 'library_dirs',
                         'rpath', 'link_objects']:
            attr = getattr(self, attrname, None)
            if not attr:
                continue
            log.info("compiler '%s' is set to %s" % (attrname, attr))
    try:
        self.get_version()
    except Exception:
        # version detection is best-effort only here
        pass
    if log._global_log.threshold<2:
        print('*'*80)
        print(self.__class__)
        print(_compiler_to_string(self))
        print('*'*80)

replace_method(CCompiler, 'show_customization', CCompiler_show_customization)
def CCompiler_customize(self, dist, need_cxx=0):
    """
    Do any platform-specific customization of a compiler instance.

    This method calls `distutils.sysconfig.customize_compiler` for
    platform-specific customization, as well as optionally remove a flag
    to suppress spurious warnings in case C++ code is being compiled.

    Parameters
    ----------
    dist : object
        This parameter is not used for anything.
    need_cxx : bool, optional
        Whether or not C++ has to be compiled. If so (True), the
        ``"-Wstrict-prototypes"`` option is removed to prevent spurious
        warnings. Default is False.

    Returns
    -------
    None

    Notes
    -----
    All the default options used by distutils can be extracted with::

      from distutils import sysconfig
      sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                'CCSHARED', 'LDSHARED', 'SO')
    """
    # See FCompiler.customize for suggested usage.
    log.info('customize %s' % (self.__class__.__name__))
    customize_compiler(self)
    if need_cxx:
        # In general, distutils uses -Wstrict-prototypes, but this option is
        # not valid for C++ code, only for C.  Remove it if it's there to
        # avoid a spurious warning on every compilation.
        try:
            self.compiler_so.remove('-Wstrict-prototypes')
        except (AttributeError, ValueError):
            pass

        if hasattr(self, 'compiler') and 'cc' in self.compiler[0]:
            if not self.compiler_cxx:
                # Derive a C++ driver name from the C driver name
                # ('gcc' -> 'g++', otherwise 'cc' -> 'c++').
                if self.compiler[0].startswith('gcc'):
                    a, b = 'gcc', 'g++'
                else:
                    a, b = 'cc', 'c++'
                self.compiler_cxx = [self.compiler[0].replace(a, b)]\
                                    + self.compiler[1:]
        else:
            if hasattr(self, 'compiler'):
                log.warn("#### %s #######" % (self.compiler,))
            if not hasattr(self, 'compiler_cxx'):
                log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__)

    # check if compiler supports gcc style automatic dependencies
    # run on every extension so skip for known good compilers
    if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or
                                      'g++' in self.compiler[0] or
                                      'clang' in self.compiler[0]):
        self._auto_depends = True
    elif os.name == 'posix':
        # Unknown posix compiler: probe it by compiling a tiny file with
        # -MMD/-MF and see whether that succeeds.
        import tempfile
        import shutil
        tmpdir = tempfile.mkdtemp()
        try:
            fn = os.path.join(tmpdir, "file.c")
            with open(fn, "w") as f:
                f.write("int a;\n")
            self.compile([fn], output_dir=tmpdir,
                         extra_preargs=['-MMD', '-MF', fn + '.d'])
            self._auto_depends = True
        except CompileError:
            self._auto_depends = False
        finally:
            shutil.rmtree(tmpdir)

    return

replace_method(CCompiler, 'customize', CCompiler_customize)
def simple_version_match(pat=r'[-.\d]+', ignore='', start=''):
    """
    Simple matching of version numbers, for use in CCompiler and FCompiler.

    Parameters
    ----------
    pat : str, optional
        A regular expression matching version numbers.
        Default is ``r'[-.\\d]+'``.
    ignore : str, optional
        A regular expression matching patterns to skip.
        Default is ``''``, in which case nothing is skipped.
    start : str, optional
        A regular expression matching the start of where to start looking
        for version numbers.
        Default is ``''``, in which case searching is started at the
        beginning of the version string given to `matcher`.

    Returns
    -------
    matcher : callable
        A function that is appropriate to use as the ``.version_match``
        attribute of a `CCompiler` class. `matcher` takes a single parameter,
        a version string.
    """
    def matcher(self, version_string):
        # The version may appear on the second line of the tool's output,
        # so flatten the string before searching.
        version_string = version_string.replace('\n', ' ')
        pos = 0
        if start:
            head = re.match(start, version_string)
            if not head:
                return None
            pos = head.end()
        while True:
            found = re.search(pat, version_string[pos:])
            if not found:
                return None
            if not (ignore and re.match(ignore, found.group(0))):
                return found.group(0)
            # Skip this (ignored) candidate and keep searching.
            # NOTE(review): this offset is relative to the slice that was
            # searched, not to the full string; kept as-is to preserve the
            # long-standing behavior -- confirm before "fixing".
            pos = found.end()

    return matcher
def CCompiler_get_version(self, force=False, ok_status=[0]):
    """
    Return compiler version, or None if compiler is not available.

    Parameters
    ----------
    force : bool, optional
        If True, force a new determination of the version, even if the
        compiler already has a version attribute. Default is False.
    ok_status : list of int, optional
        The list of status values returned by the version look-up process
        for which a version string is returned. If the status value is not
        in `ok_status`, None is returned. Default is ``[0]``.

    Returns
    -------
    version : str or None
        Version string, in the format of `distutils.version.LooseVersion`.
    """
    # NOTE: the mutable default for ok_status is safe because it is only
    # read, never modified.
    if not force and hasattr(self, 'version'):
        # cached from a previous call
        return self.version
    self.find_executables()
    try:
        version_cmd = self.version_cmd
    except AttributeError:
        return None
    if not version_cmd or not version_cmd[0]:
        return None
    try:
        matcher = self.version_match
    except AttributeError:
        # Fall back to a named-group regular expression, if provided.
        try:
            pat = self.version_pattern
        except AttributeError:
            return None
        def matcher(version_string):
            m = re.match(pat, version_string)
            if not m:
                return None
            version = m.group('version')
            return version

    status, output = exec_command(version_cmd, use_tee=0)

    version = None
    if status in ok_status:
        version = matcher(output)
        if version:
            version = LooseVersion(version)
    # cache the result (possibly None) for subsequent calls
    self.version = version
    return version

replace_method(CCompiler, 'get_version', CCompiler_get_version)
def CCompiler_cxx_compiler(self):
    """
    Return the C++ compiler.

    Parameters
    ----------
    None

    Returns
    -------
    cxx : class instance
        The C++ compiler, as a `CCompiler` instance.
    """
    # These compiler families drive C++ with the same executable, so the
    # instance can be returned unchanged.
    if self.compiler_type in ('msvc', 'intelw', 'intelemw'):
        return self

    # Shallow-copy the compiler and swap the C driver for the C++ driver
    # in both the compile and link command lines.
    cxx = copy(self)
    cxx.compiler_so = [cxx.compiler_cxx[0]] + cxx.compiler_so[1:]
    if sys.platform.startswith('aix') and 'ld_so_aix' in cxx.linker_so[0]:
        # AIX needs the ld_so_aix script included with Python
        cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \
                        + cxx.linker_so[2:]
    else:
        cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:]
    return cxx

replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler)
# Register numpy.distutils' extra C compiler classes with distutils'
# compiler_class registry so they can be selected via --compiler=<name>.
compiler_class['intel'] = ('intelccompiler', 'IntelCCompiler',
                           "Intel C Compiler for 32-bit applications")
compiler_class['intele'] = ('intelccompiler', 'IntelItaniumCCompiler',
                            "Intel C Itanium Compiler for Itanium-based applications")
compiler_class['intelem'] = ('intelccompiler', 'IntelEM64TCCompiler',
                             "Intel C Compiler for 64-bit applications")
compiler_class['intelw'] = ('intelccompiler', 'IntelCCompilerW',
                            "Intel C Compiler for 32-bit applications on Windows")
compiler_class['intelemw'] = ('intelccompiler', 'IntelEM64TCCompilerW',
                              "Intel C Compiler for 64-bit applications on Windows")
compiler_class['pathcc'] = ('pathccompiler', 'PathScaleCCompiler',
                            "PathScale Compiler for SiCortex-based applications")
# Make the extra compilers candidates for default selection on the
# matching platforms.
ccompiler._default_compilers += (('linux.*', 'intel'),
                                 ('linux.*', 'intele'),
                                 ('linux.*', 'intelem'),
                                 ('linux.*', 'pathcc'),
                                 ('nt', 'intelw'),
                                 ('nt', 'intelemw'))

if sys.platform == 'win32':
    compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
                                 "Mingw32 port of GNU C Compiler for Win32"\
                                 "(for MSC built Python)")
    if mingw32():
        # On windows platforms, we want to default to mingw32 (gcc)
        # because msvc can't build blitz stuff.
        log.info('Setting mingw32 as default compiler for nt.')
        ccompiler._default_compilers = (('nt', 'mingw32'),) \
                                       + ccompiler._default_compilers
# Keep a reference to distutils' factory before shadowing it below.
_distutils_new_compiler = new_compiler
def new_compiler (plat=None,
                  compiler=None,
                  verbose=0,
                  dry_run=0,
                  force=0):
    """Return a compiler instance for `plat`/`compiler`, preferring the
    classes registered by numpy.distutils and falling back to the plain
    distutils implementations when the numpy module cannot be imported.

    Raises DistutilsPlatformError for an unknown platform/compiler pair
    and DistutilsModuleError when the implementing module or class cannot
    be loaded."""
    # Try first C compilers from numpy.distutils.
    if plat is None:
        plat = os.name
    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)
    module_name = "numpy.distutils." + module_name
    try:
        __import__ (module_name)
    except ImportError:
        msg = str(get_exception())
        log.info('%s in numpy.distutils; trying from distutils',
                 str(msg))
        # strip the 'numpy.' prefix and retry with the distutils module
        module_name = module_name[6:]
        try:
            __import__(module_name)
        except ImportError:
            msg = str(get_exception())
            raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \
                  module_name)
    try:
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except KeyError:
        raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " +
                                    "in module '%s'") % (class_name, module_name))
    compiler = klass(None, dry_run, force)
    log.debug('new_compiler returns %s' % (klass))
    return compiler

# Replace the factory module-wide so distutils picks up numpy's compilers.
ccompiler.new_compiler = new_compiler
# Keep a reference to the stock implementation before wrapping it.
_distutils_gen_lib_options = gen_lib_options
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Wrap distutils' gen_lib_options: quote directory arguments (they may
    contain spaces) and flatten any nested sequences in the result into a
    single flat list of option strings."""
    library_dirs = quote_args(library_dirs)
    runtime_library_dirs = quote_args(runtime_library_dirs)
    r = _distutils_gen_lib_options(compiler, library_dirs,
                                   runtime_library_dirs, libraries)
    lib_opts = []
    for i in r:
        if is_sequence(i):
            lib_opts.extend(list(i))
        else:
            lib_opts.append(i)
    return lib_opts

ccompiler.gen_lib_options = gen_lib_options

# Also fix up the various compiler modules, which do
# from distutils.ccompiler import gen_lib_options
# Don't bother with mwerks, as we don't support Classic Mac.
for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']:
    _m = sys.modules.get('distutils.' + _cc + 'compiler')
    if _m is not None:
        # only patch modules that have actually been imported
        setattr(_m, 'gen_lib_options', gen_lib_options)
# Keep a reference to the stock implementation before wrapping it.
_distutils_gen_preprocess_options = gen_preprocess_options
def gen_preprocess_options (macros, include_dirs):
    # Quote include directories (they may contain spaces) before handing
    # off to the stock distutils implementation.
    include_dirs = quote_args(include_dirs)
    return _distutils_gen_preprocess_options(macros, include_dirs)
ccompiler.gen_preprocess_options = gen_preprocess_options
##Fix distutils.util.split_quoted:
# NOTE: I removed this fix in revision 4481 (see ticket #619), but it appears
# that removing this fix causes f2py problems on Windows XP (see ticket #723).
# Specifically, on WinXP when gfortran is installed in a directory path, which
# contains spaces, then f2py is unable to find it.
import string
# Pre-compiled helpers for split_quoted below.
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
_has_white_re = re.compile(r'\s')
def split_quoted(s):
    """Split *s* on whitespace, honoring backslash escapes and single or
    double quotes.

    Quoted substrings that contain whitespace have their quote characters
    removed; quoted words *without* whitespace keep their quotes.  The
    latter is a deliberate deviation from distutils.util.split_quoted,
    needed so that f2py can find compilers installed in paths containing
    spaces (see tickets #619 and #723).

    Raises ValueError on mismatched quotes.
    """
    s = s.strip()
    tokens = []
    pos = 0

    while s:
        cut = _wordchars_re.match(s, pos).end()

        if cut == len(s):
            # ran off the end of the string: the remainder is one word
            tokens.append(s[:cut])
            break

        ch = s[cut]
        if ch in string.whitespace:
            # unescaped, unquoted whitespace: a definite word delimiter
            tokens.append(s[:cut])
            s = s[cut:].lstrip()
            pos = 0
        elif ch == '\\':
            # drop the backslash, keep the escaped character in the word
            s = s[:cut] + s[cut+1:]
            pos = cut + 1
        else:
            if ch == "'":
                quoted = _squote_re.match(s, cut)
            elif ch == '"':
                quoted = _dquote_re.match(s, cut)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % ch)
            if quoted is None:
                raise ValueError("bad string (mismatched %s quotes?)" % ch)

            beg, stop = quoted.span()
            if _has_white_re.search(s[beg+1:stop-1]):
                # whitespace inside the quotes: strip the quote characters
                s = s[:beg] + s[beg+1:stop-1] + s[stop:]
                pos = stop - 2
            else:
                # Keeping quotes when a quoted word does not contain
                # white-space. XXX: send a patch to distutils
                pos = stop

        if pos >= len(s):
            tokens.append(s)
            break

    return tokens
ccompiler.split_quoted = split_quoted
## End of fix for distutils.util.split_quoted.
| [
"ryfeus@gmail.com"
] | ryfeus@gmail.com |
d1bc5e61fa3c11ca862dc136ea3dbb0de9ae534f | bf049dd5150794070fb816b665e626559b29d5ed | /code/docmodel/metadata_parser.py | 4cce43ffcdfd329bfbd82d879b8e3eeb39fa42ed | [] | no_license | mnscholz/ttk | af8cbaeb7e7a15e00757a1e65c4d8e36402fd372 | 07291e45512ad9a819016f8891a3bfa6f462eef0 | refs/heads/master | 2021-01-18T06:22:46.658707 | 2016-05-16T18:35:43 | 2016-05-16T18:35:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,675 | py | """Metadata Parsers.
This module contains metadata parsers, that is, parsers that pull out the
metadata and add it to a TarsqiDocument. The only requirement on each parser is
that it defines an __init__() method that takes a dictionary of options and a
parse() method that takes a TarsqiDocument instance.
Current parsers only deal with the DCT.
"""
import re, time, os, sqlite3
from docmodel.document import TarsqiDocument
import utilities.logger as logger
class MetadataParser:
    """This is the minimal metadata parser that is used as a default. It sets the
    DCT to today's date and provides some common functionality to subclasses."""

    def __init__(self, options):
        """At the moment, initialization does not use any of the options,
        but this could change."""
        self.options = options
        self.tarsqidoc = None  # added in by the parse() method

    def parse(self, tarsqidoc):
        """Adds metadata to the TarsqiDocument. The only thing it adds to the
        metadata dictionary is the DCT, which is set to today."""
        self.tarsqidoc = tarsqidoc
        self.tarsqidoc.metadata['dct'] = self.get_dct()

    def get_dct(self):
        """Return today's date in YYYYMMDD format."""
        # Subclasses override this with source-specific DCT extraction.
        return get_today()

    def get_source(self):
        """A convenience method to lift the SourceDoc out of the tarsqi
        instance."""
        return self.tarsqidoc.source

    def _get_tag_content(self, tagname):
        """Return the text content of the first tag with name tagname, return
        None if there is no such tag."""
        try:
            # find_tags returns a list; IndexError below means "no such tag"
            tag = self.get_source().tags.find_tags(tagname)[0]
            content = self.get_source().text[tag.begin:tag.end].strip()
            return content
        except IndexError:
            logger.warn("Cannot get the %s tag in this document" % tagname)
            return None
class MetadataParserTTK(MetadataParser):
    """Metadata parser for the ttk format. The DCT is not computed but
    simply copied over from the metadata already stored on the source
    document."""

    def parse(self, tarsqidoc):
        """Store the TarsqiDocument and copy the DCT from the metadata of
        its SourceDoc into the TarsqiDocument's metadata dictionary."""
        self.tarsqidoc = tarsqidoc
        dct = self.get_dct(tarsqidoc.source)
        self.tarsqidoc.metadata['dct'] = dct

    def get_dct(self, sourcedoc):
        """Return the DCT recorded in the SourceDoc's metadata, or None if
        the SourceDoc has no 'dct' entry."""
        return sourcedoc.metadata.get('dct')
# Parser for plain-text input; currently identical to the default parser.
class MetadataParserText(MetadataParser):
    """For now this one adds nothing to the default metadata parser."""
class MetadataParserTimebank(MetadataParser):
    """The parser for Timebank documents. All it does is overwriting the
    get_dct() method."""

    def get_dct(self):
        """Extracts the document creation time, and returns it as a string of
        the form YYYYMMDD. Depending on the source, the DCT can be found in one
        of the following tags: DOCNO, DATE_TIME, PUBDATE or FILEID."""
        result = self._get_doc_source()
        if result is None:
            # dct defaults to today if we cannot find the DOCNO tag in the
            # document
            return get_today()
        source_identifier, content = result
        if source_identifier in ('ABC', 'CNN', 'PRI', 'VOA'):
            # e.g. "ABC19980304.1830.1636": date is characters 3-10
            return content[3:11]
        elif source_identifier == 'AP':
            dct = self._parse_tag_content("(?:AP-NR-)?(\d+)-(\d+)-(\d+)",
                                          'FILEID')
            # the DCT format is YYYYMMDD or YYMMDD
            return dct if len(dct) == 8 else '19' + dct
        elif source_identifier in ('APW', 'NYT'):
            return self._parse_tag_content("(\d+)/(\d+)/(\d+)", 'DATE_TIME')
        elif source_identifier == 'SJMN':
            # PUBDATE carries a two-digit year; all documents are 19xx
            pubdate_content = self._get_tag_content('PUBDATE')
            return '19' + pubdate_content
        elif source_identifier == 'WSJ':
            return '19' + content[3:9]
        elif source_identifier in ('ea', 'ed'):
            return '19' + content[2:8]

    def _get_doc_source(self):
        """Return the name of the content provider as well as the content of the DOCNO
        tag that has that information."""
        content = self._get_tag_content('DOCNO')
        content = str(content)  # in case the above returned None
        # NOTE: 'APW' must be tested before 'AP' since 'AP' is a prefix of it
        for source_identifier in ('ABC', 'APW', 'AP', 'CNN', 'NYT', 'PRI',
                                  'SJMN', 'VOA', 'WSJ', 'ea', 'ed'):
            if content.startswith(source_identifier):
                return (source_identifier, content)
        logger.warn("Could not determine document source from DOCNO tag")
        return None

    def _parse_tag_content(self, regexpr, tagname):
        """Return the DCT part of the tag content of tagname, requires a regular
        expression as one of the arguments."""
        content_string = self._get_tag_content(tagname)
        result = re.compile(regexpr).match(content_string)
        if result:
            # the regular expression captures (month, day, year) in order
            (month, day, year) = result.groups()
            return "%s%s%s" % (year, month, day)
        else:
            logger.warn("Could not get date from %s tag" % tagname)
            return get_today()
class MetadataParserATEE(MetadataParser):
    """The parser for ATEE documents."""

    def get_dct(self):
        """All ATEE documents have a DATE tag with a value attribute, the value
        of that attribute is returned."""
        # Bug fix: the original read self.sourcedoc, an attribute that is
        # never set anywhere in this module (parse() stores the document on
        # self.tarsqidoc); reach the SourceDoc through the base class
        # accessor instead.
        date_tag = self.get_source().tags.find_tag('DATE')
        return date_tag.attrs['value']
class MetadataParserRTE3(MetadataParser):
    """The parser for RTE3 documents, no differences with the default parser."""

    def get_dct(self):
        # RTE3 documents carry no creation time, so default to today.
        return get_today()
class MetadataParserVA(MetadataParser):
    """A minimal example parser for VA data. It is identical to MetadataParser
    except for how it gets the DCT. This is done by lookup in a database. This
    here is the simplest possible case, and it is quite inefficient. It assumes
    there is an sqlite database at 'TTK_ROOT/code/data/in/va/dct.sqlite' which
    was created as follows:

    $ sqlite3 dct.sqlite
    sqlite> create table dct (filename TEXT, dct TEXT)
    sqlite> insert into dct values ("test.xml", "1999-12-31");

    The get_dct method uses this database. """

    def get_dct(self):
        """Look up the DCT for the current document's base file name in the
        dct.sqlite database and return it as a string."""
        # Bug fix: the original read self.sourcedoc, an attribute that is
        # never set anywhere in this module (parse() stores the document on
        # self.tarsqidoc); use the base class accessor instead.
        fname = os.path.basename(self.get_source().filename)
        db_connection = sqlite3.connect('data/in/va/dct.sqlite')
        try:
            db_cursor = db_connection.cursor()
            db_cursor.execute('SELECT dct FROM dct WHERE filename=?',
                              (fname,))
            dct = db_cursor.fetchone()[0]
        finally:
            # The original leaked the connection; always close it.
            db_connection.close()
        return dct
def get_today():
    """Return the current local date formatted as a YYYYMMDD string."""
    now = time.localtime()
    return time.strftime("%Y%m%d", now)
| [
"marc@cs.brandeis.edu"
] | marc@cs.brandeis.edu |
8f2dd07fdb0e0134084b3f4071f28442126b838a | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/TauES_test/nom/emb/DoubleMu/StoreResults-Run2012A_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/PAT_CMG_V5_16_0_1374658146/HTT_24Jul_newTES_manzoni_Nom_Jobs/Job_20/run_cfg.py | 7235989e5727134dd60b495ee3459b3b573b074b | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69,005 | py | import FWCore.ParameterSet.Config as cms
# Make the job directory importable, then pull in the shared base
# configuration for this CMSSW job.
import os
import sys

sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/TauES_test/nom/emb/DoubleMu/StoreResults-Run2012A_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/PAT_CMG_V5_16_0_1374658146/HTT_24Jul_newTES_manzoni_Nom_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
lumisToProcess = cms.untracked.VLuminosityBlockRange( ("190645:10-190645:110", "190646:1-190646:111", "190659:33-190659:167", "190679:1-190679:55", "190688:69-190688:249",
"190702:51-190702:53", "190702:55-190702:122", "190702:124-190702:169", "190703:1-190703:252", "190704:1-190704:3",
"190705:1-190705:5", "190705:7-190705:65", "190705:81-190705:336", "190705:338-190705:350", "190705:353-190705:383",
"190706:1-190706:126", "190707:1-190707:237", "190707:239-190707:257", "190708:1-190708:189", "190733:71-190733:96",
"190733:99-190733:389", "190733:392-190733:460", "190736:1-190736:80", "190736:83-190736:185", "190738:1-190738:130",
"190738:133-190738:226", "190738:229-190738:349", "190782:55-190782:181", "190782:184-190782:233", "190782:236-190782:399",
"190782:401-190782:409", "190895:64-190895:202", "190895:210-190895:302", "190895:305-190895:584", "190895:587-190895:948",
"190906:73-190906:256", "190906:259-190906:354", "190906:356-190906:496", "190945:124-190945:207", "190949:1-190949:81",
"191043:45-191043:46", "191046:1-191046:21", "191046:24-191046:82", "191046:84-191046:88", "191046:92-191046:116",
"191046:119-191046:180", "191046:183", "191046:185-191046:239", "191056:1", "191056:4-191056:9",
"191056:16-191056:17", "191056:19", "191057:1", "191057:4-191057:40", "191062:1",
"191062:3", "191062:5-191062:214", "191062:216-191062:541", "191090:1-191090:55", "191201:38-191201:49",
"191201:52-191201:79", "191202:1-191202:64", "191202:66-191202:68", "191202:87-191202:105", "191202:108-191202:118",
"191226:77-191226:78", "191226:81-191226:831", "191226:833-191226:1454", "191226:1456-191226:1466", "191226:1469-191226:1507",
"191226:1510-191226:1686", "191247:1-191247:153", "191247:156-191247:280", "191247:283-191247:606", "191247:608-191247:620",
"191247:622-191247:818", "191247:821-191247:834", "191247:837-191247:1031", "191247:1034-191247:1046", "191247:1049-191247:1140",
"191247:1143-191247:1187", "191247:1190-191247:1214", "191247:1217-191247:1224", "191248:1-191248:103", "191264:59-191264:79",
"191264:82-191264:152", "191264:155-191264:189", "191271:56-191271:223", "191271:225-191271:363", "191276:1-191276:16",
"191277:1-191277:28", "191277:30-191277:164", "191277:167-191277:253", "191277:255-191277:457", "191277:460-191277:535",
"191277:537-191277:576", "191277:579-191277:775", "191277:778-191277:811", "191277:813-191277:849", "191367:1-191367:2",
"191411:1-191411:23", "191695:1", "191718:43-191718:95", "191718:98-191718:207", "191720:1",
"191720:3-191720:15", "191720:17-191720:181", "191721:1", "191721:3-191721:34", "191721:36-191721:183",
"191721:186-191721:189", "191726:1-191726:13", "191810:15", "191810:22-191810:49", "191810:52-191810:92",
"191830:54-191830:242", "191830:245-191830:301", "191830:304-191830:393", "191833:1", "191833:3-191833:103",
"191834:1-191834:30", "191834:33-191834:74", "191834:77-191834:299", "191834:302-191834:352", "191837:1-191837:44",
"191837:47-191837:53", "191837:56-191837:65", "191856:1-191856:133", "191859:1-191859:28", "191859:31-191859:126",
"193093:1-193093:33", "193123:1-193123:27", "193124:1-193124:52", "193192:58-193192:86", "193193:1-193193:6",
"193193:8", "193193:11-193193:83", "193193:86-193193:120", "193193:122-193193:160", "193193:162-193193:274",
"193193:276-193193:495", "193193:497-193193:506", "193207:54-193207:182", "193334:29-193334:172", "193336:1-193336:264",
"193336:267-193336:492", "193336:495-193336:684", "193336:687-193336:729", "193336:732-193336:951", "193541:77-193541:101",
"193541:103-193541:413", "193541:416-193541:575", "193541:578-193541:619", "193556:41-193556:83", "193557:1-193557:84",
"193575:48-193575:173", "193575:176-193575:349", "193575:351-193575:394", "193575:397-193575:415", "193575:417-193575:658",
"193575:660-193575:752", "193621:60-193621:570", "193621:573-193621:769", "193621:772-193621:976", "193621:979-193621:1053",
"193621:1056-193621:1137", "193621:1139-193621:1193", "193621:1195-193621:1371", "193621:1373-193621:1654", "193834:1-193834:35",
"193835:1-193835:20", "193835:22-193835:26", "193836:1-193836:2", "193998:66-193998:113", "193998:115-193998:278",
"193999:1-193999:45", "194027:57-194027:113", "194050:53-194050:113", "194050:116-194050:273", "194050:275-194050:355",
"194050:357-194050:369", "194050:372-194050:391", "194050:394-194050:490", "194050:492-194050:814", "194050:816-194050:1435",
"194050:1437-194050:1735", "194050:1760-194050:1888", "194051:1-194051:12", "194052:1-194052:99", "194052:102-194052:166",
"194075:48-194075:101", "194075:103", "194075:105-194075:107", "194075:109", "194075:111",
"194076:1-194076:9", "194076:11-194076:55", "194076:58-194076:163", "194076:165-194076:228", "194076:230-194076:264",
"194076:267-194076:507", "194076:509-194076:527", "194076:530-194076:538", "194076:541-194076:562", "194076:565-194076:748",
"194108:81-194108:161", "194108:164-194108:264", "194108:266-194108:373", "194108:376-194108:396", "194108:398-194108:433",
"194108:436-194108:452", "194108:454-194108:577", "194108:579-194108:590", "194108:593-194108:668", "194108:671-194108:872",
"194115:66-194115:184", "194115:186-194115:338", "194115:340-194115:346", "194115:348-194115:493", "194115:496-194115:731",
"194115:819-194115:857", "194117:1-194117:38", "194119:1-194119:229", "194119:232-194119:261", "194120:1-194120:162",
"194120:165-194120:406", "194150:42-194150:127", "194150:129-194150:261", "194150:264-194150:311", "194151:47-194151:72",
"194151:75-194151:191", "194151:193-194151:238", "194151:240-194151:617", "194151:619", "194151:621",
"194151:623", "194153:1-194153:115", "194199:96-194199:227", "194199:229-194199:336", "194199:339-194199:402",
"194210:3-194210:195", "194210:198-194210:217", "194210:220-194210:359", "194210:361-194210:555", "194223:61-194223:112",
"194224:1-194224:126", "194224:129-194224:206", "194224:208-194224:250", "194224:253-194224:309", "194224:312-194224:386",
"194224:389-194224:412", "194225:1-194225:23", "194225:26-194225:47", "194225:49-194225:85", "194225:88-194225:149",
"194270:56-194270:68", "194303:56-194303:66", "194303:69-194303:102", "194304:1-194304:43", "194304:46",
"194305:1-194305:84", "194314:52-194314:130", "194314:133-194314:300", "194315:1-194315:10", "194315:13-194315:314",
"194315:317-194315:428", "194315:431-194315:452", "194315:455-194315:467", "194317:1-194317:20", "194424:63-194424:141",
"194424:144-194424:195", "194424:198-194424:266", "194424:268-194424:421", "194424:424-194424:478", "194424:481-194424:531",
"194424:534-194424:553", "194424:556-194424:706", "194424:708", "194428:1-194428:85", "194428:87-194428:122",
"194428:125-194428:294", "194428:296-194428:465", "194429:1-194429:4", "194429:7-194429:54", "194429:57-194429:147",
"194429:150-194429:411", "194429:413-194429:742", "194429:745-194429:986", "194429:988-194429:1019", "194439:46-194439:77",
"194439:79-194439:106", "194455:45-194455:64", "194455:67-194455:140", "194455:142-194455:255", "194455:293-194455:303",
"194464:1-194464:127", "194464:130-194464:142", "194464:145-194464:210", "194479:1-194479:44", "194479:165-194479:232",
"194479:235-194479:262", "194479:265-194479:374", "194479:377-194479:431", "194479:434-194479:489", "194479:492-194479:529",
"194479:531-194479:566", "194480:1-194480:32", "194480:34-194480:205", "194480:207-194480:375", "194480:377-194480:387",
"194480:389-194480:759", "194480:762-194480:956", "194480:959-194480:1402", "194533:46-194533:379", "194533:382-194533:415",
"194533:417-194533:618", "194533:620-194533:872", "194619:31-194619:110", "194631:1-194631:42", "194631:44-194631:100",
"194631:102-194631:169", "194631:171-194631:222", "194643:1-194643:287", "194644:1-194644:168", "194644:171-194644:181",
"194644:184-194644:185", "194644:187-194644:319", "194644:321-194644:421", "194691:61-194691:104", "194691:107-194691:155",
"194691:158-194691:251", "194691:254-194691:268", "194691:271-194691:272", "194691:275-194691:289", "194691:292-194691:313",
"194699:1-194699:30", "194699:32-194699:52", "194699:55-194699:64", "194699:67-194699:71", "194699:73-194699:154",
"194699:157-194699:215", "194699:218-194699:238", "194699:241-194699:259", "194702:1-194702:138", "194702:141-194702:191",
"194704:1-194704:41", "194704:44-194704:545", "194704:548-194704:592", "194711:1-194711:7", "194711:9-194711:619",
"194712:1-194712:56", "194712:61-194712:418", "194712:420-194712:625", "194712:627-194712:759", "194735:44-194735:71",
"194735:74-194735:101", "194735:104-194735:130", "194778:60-194778:118", "194778:120-194778:219", "194789:1-194789:18",
"194789:21-194789:32", "194789:34-194789:80", "194789:82-194789:166", "194789:168-194789:269", "194789:272-194789:405",
"194789:409-194789:414", "194789:417-194789:427", "194789:430-194789:566", "194790:1-194790:45", "194825:72-194825:117",
"194825:120-194825:221", "194896:34-194896:55", "194896:58-194896:79", "194896:82-194896:103", "194897:1-194897:6",
"194897:8-194897:78", "194897:80-194897:96", "194897:98-194897:102", "194912:53-194912:70", "194912:72-194912:96",
"194912:98-194912:444", "194912:446-194912:450", "194912:453-194912:467", "194912:470-194912:561", "194912:564-194912:660",
"194912:663-194912:813", "194912:815-194912:840", "194912:843-194912:864", "194912:866-194912:1004", "194912:1007-194912:1025",
"194912:1027-194912:1067", "194912:1069-194912:1137", "194912:1140-194912:1166", "194912:1168-194912:1249", "194912:1251-194912:1304",
"194912:1307-194912:1444", "194912:1447-194912:1487", "194912:1489-194912:1503", "194912:1506-194912:1662", "194914:1-194914:38",
"194915:1-194915:74", "195013:94-195013:144", "195013:146-195013:185", "195013:187-195013:206", "195013:208-195013:299",
"195013:302-195013:324", "195013:326-195013:366", "195013:369-195013:447", "195013:450-195013:526", "195013:528-195013:541",
"195014:1-195014:6", "195014:9-195014:119", "195014:121-195014:148", "195015:1-195015:13", "195016:1-195016:21",
"195016:23-195016:55", "195016:58-195016:63", "195016:65-195016:174", "195016:177-195016:184", "195016:186-195016:241",
"195016:243-195016:246", "195016:248-195016:251", "195016:254-195016:367", "195016:370-195016:422", "195016:425-195016:560",
"195016:563-195016:569", "195099:70-195099:144", "195099:147-195099:186", "195099:189-195099:208", "195099:211-195099:224",
"195099:227-195099:248", "195109:98-195109:241", "195112:1-195112:12", "195112:15-195112:26", "195113:1-195113:209",
"195113:212-195113:388", "195113:391-195113:403", "195113:406-195113:419", "195113:422-195113:492", "195113:495-195113:579",
"195114:1-195114:69", "195114:72-195114:103", "195115:1-195115:7", "195115:10-195115:22", "195147:132-195147:282",
"195147:285-195147:294", "195147:297-195147:331", "195147:334-195147:363", "195147:366-195147:442", "195147:445-195147:536",
"195147:539-195147:559", "195163:72-195163:138", "195163:140-195163:224", "195163:227-195163:240", "195163:243",
"195163:246-195163:347", "195164:1-195164:64", "195165:1-195165:4", "195165:7-195165:41", "195165:44-195165:54",
"195165:56-195165:153", "195165:156-195165:260", "195165:263-195165:266", "195251:1-195251:131", "195251:134-195251:137",
"195251:140-195251:152", "195251:154-195251:165", "195251:167-195251:242", "195303:109-195303:191", "195303:194-195303:277",
"195303:280-195303:310", "195303:312-195303:316", "195303:318-195303:409", "195304:1-195304:3", "195304:6-195304:22",
"195304:27-195304:80", "195304:83-195304:100", "195304:103-195304:154", "195304:157-195304:341", "195304:344-195304:588",
"195304:590-195304:727", "195304:729-195304:1003", "195304:1006-195304:1079", "195304:1083-195304:1140", "195304:1143-195304:1229",
"195378:90-195378:117", "195378:120-195378:127", "195378:130-195378:185", "195378:187-195378:204", "195378:206-195378:302",
"195378:305-195378:542", "195378:544-195378:565", "195378:567-195378:645", "195378:647-195378:701", "195378:703-195378:734",
"195378:737-195378:1120", "195378:1122-195378:1133", "195390:1", "195390:4-195390:27", "195390:30-195390:145",
"195390:147-195390:183", "195390:186-195390:187", "195390:190-195390:208", "195390:210-195390:213", "195390:215-195390:400",
"195396:49-195396:55", "195396:58-195396:63", "195396:66-195396:131", "195397:1-195397:10", "195397:12-195397:89",
"195397:92-195397:120", "195397:123-195397:141", "195397:143-195397:251", "195397:253", "195397:256-195397:475",
"195397:478-195397:525", "195397:527-195397:608", "195397:611-195397:776", "195397:779-195397:970", "195397:972-195397:1121",
"195397:1123-195397:1181", "195397:1184-195397:1198", "195397:1200-195397:1209", "195398:3-195398:137", "195398:139-195398:494",
"195398:497-195398:585", "195398:587-195398:817", "195398:820-195398:824", "195398:827-195398:1225", "195398:1228-195398:1307",
"195398:1309-195398:1712", "195398:1721-195398:1736", "195398:1741-195398:1752", "195398:1767-195398:1795", "195399:1-195399:192",
"195399:194-195399:382", "195530:1-195530:80", "195530:82-195530:104", "195530:107-195530:156", "195530:159-195530:300",
"195530:302-195530:405", "195540:68-195540:123", "195540:126-195540:137", "195540:140-195540:283", "195540:286-195540:319",
"195551:91-195551:106", "195552:1-195552:21", "195552:23-195552:27", "195552:30-195552:147", "195552:149-195552:155",
"195552:158-195552:182", "195552:185-195552:287", "195552:290-195552:349", "195552:352-195552:469", "195552:472-195552:815",
"195552:818-195552:823", "195552:825-195552:883", "195552:885-195552:1152", "195552:1154-195552:1300", "195552:1303-195552:1789",
"195633:40-195633:42", "195647:1-195647:41", "195649:1-195649:69", "195649:72-195649:151", "195649:154-195649:181",
"195649:183-195649:247", "195655:1-195655:129", "195655:131-195655:184", "195655:186-195655:260", "195655:263-195655:350",
"195655:353-195655:446", "195655:448-195655:483", "195655:485-195655:498", "195656:1-195656:362", "195658:1-195658:37",
"195658:40-195658:362", "195658:364-195658:382", "195658:384-195658:386", "195749:1-195749:8", "195749:10-195749:33",
"195749:36-195749:131", "195757:1-195757:82", "195757:85-195757:115", "195757:118-195757:161", "195757:163-195757:206",
"195758:1-195758:18", "195774:1-195774:13", "195774:16-195774:137", "195774:139-195774:151", "195774:154-195774:162",
"195774:164-195774:256", "195774:258-195774:276", "195774:279-195774:362", "195774:365-195774:466", "195774:469-195774:618",
"195774:620-195774:649", "195774:651-195774:830", "195775:1-195775:57", "195775:60-195775:100", "195775:103-195775:170",
"195776:1-195776:63", "195776:66-195776:283", "195776:286-195776:337", "195776:340-195776:399", "195776:401-195776:409",
"195776:411-195776:477", "195841:74-195841:85", "195868:1-195868:88", "195868:90-195868:107", "195868:110-195868:205",
"195915:1-195915:109", "195915:111-195915:275", "195915:278-195915:390", "195915:393-195915:417", "195915:419-195915:429",
"195915:432-195915:505", "195915:507-195915:747", "195915:749-195915:785", "195915:787-195915:828", "195915:830-195915:850",
"195916:1-195916:16", "195916:19-195916:68", "195916:71-195916:212", "195917:1-195917:4", "195918:1-195918:44",
"195918:46", "195918:49-195918:64", "195919:1-195919:15", "195923:1-195923:14", "195925:1-195925:12",
"195926:1", "195926:3-195926:19", "195926:21-195926:34", "195929:1-195929:29", "195930:1-195930:77",
"195930:80-195930:176", "195930:179-195930:526", "195930:529-195930:596", "195937:1-195937:28", "195937:31-195937:186",
"195937:188-195937:396", "195947:23-195947:62", "195947:64-195947:88", "195948:51-195948:116", "195948:119-195948:144",
"195948:147", "195948:150-195948:352", "195948:355-195948:369", "195948:372-195948:402", "195948:404-195948:500",
"195948:503-195948:540", "195948:543-195948:565", "195948:567-195948:602", "195948:605-195948:615", "195950:1-195950:71",
"195950:73-195950:138", "195950:141-195950:169", "195950:172-195950:332", "195950:335-195950:350", "195950:353-195950:382",
"195950:385-195950:421", "195950:424-195950:450", "195950:453-195950:483", "195950:485-195950:616", "195950:619-195950:715",
"195950:718-195950:787", "195950:789-195950:800", "195950:803-195950:829", "195950:831", "195950:833-195950:1587",
"195963:54-195963:58", "195970:44-195970:49", "195970:51-195970:85", "196019:54-196019:68", "196027:1-196027:55",
"196027:58-196027:119", "196027:121-196027:155", "196027:158-196027:186", "196046:12-196046:40", "196047:1-196047:64",
"196047:70-196047:75", "196048:1-196048:44", "196048:46-196048:48", "196197:58-196197:122", "196197:125-196197:179",
"196197:181-196197:311", "196197:313-196197:516", "196197:519-196197:562", "196199:1-196199:33", "196199:36-196199:83",
"196199:86-196199:118", "196199:121-196199:147", "196199:150-196199:237", "196199:239-196199:285", "196199:287-196199:534",
"196200:1-196200:68", "196202:3-196202:61", "196202:64-196202:108", "196203:1-196203:102", "196203:107-196203:117",
"196218:55-196218:199", "196218:201-196218:224", "196218:226-196218:393", "196218:396-196218:494", "196218:496-196218:741",
"196218:744-196218:752", "196218:754-196218:757", "196218:759-196218:820", "196239:1-196239:59", "196239:62-196239:154",
"196239:157-196239:272", "196239:274-196239:373", "196239:375-196239:432", "196239:435-196239:465", "196239:468-196239:647",
"196239:650-196239:706", "196239:709-196239:1025", "196249:63-196249:77", "196249:80-196249:99", "196250:1-196250:2",
"196250:5-196250:265", "196250:267-196250:426", "196252:1-196252:35", "196334:59-196334:111", "196334:113-196334:123",
"196334:126-196334:132", "196334:135-196334:167", "196334:170-196334:193", "196334:196-196334:257", "196334:259-196334:267",
"196334:270-196334:289", "196334:292-196334:342", "196349:65-196349:84", "196349:86-196349:154", "196349:157-196349:244",
"196349:246-196349:258", "196357:1-196357:4", "196359:1-196359:2", "196362:1-196362:88", "196363:1-196363:8",
"196363:11-196363:34", "196364:1-196364:93", "196364:96-196364:136", "196364:139-196364:365", "196364:368-196364:380",
"196364:382-196364:601", "196364:603-196364:795", "196364:798-196364:884", "196364:887-196364:1196", "196364:1199-196364:1200",
"196364:1203-196364:1299", "196437:1", "196437:3-196437:74", "196437:77-196437:169", "196438:1-196438:181",
"196438:184-196438:699", "196438:701-196438:1269", "196452:82-196452:112", "196452:114-196452:490", "196452:493-196452:586",
"196452:589-196452:618", "196452:622-196452:668", "196452:671-196452:716", "196452:718-196452:726", "196452:728-196452:956",
"196452:958-196452:1004", "196452:1007-196452:1091", "196453:1-196453:74", "196453:77-196453:145", "196453:147-196453:669",
"196453:673-196453:714", "196453:717-196453:799", "196453:802-196453:988", "196453:991-196453:1178", "196453:1180",
"196453:1182-196453:1248", "196453:1250-196453:1528", "196453:1531-196453:1647", "196495:114-196495:180", "196495:182-196495:272",
"196509:1-196509:68", "196531:62-196531:150", "196531:152-196531:253", "196531:256-196531:285", "196531:288-196531:302",
"196531:305-196531:422", "196531:425-196531:440", "198049:1-198049:11", "198049:14-198049:57", "198050:2-198050:155",
"198063:1-198063:37", "198063:40-198063:72", "198063:74-198063:124", "198063:127-198063:294", "198116:36-198116:52",
"198116:54-198116:55", "198116:58-198116:96", "198116:98-198116:112", "198207:1-198207:97", "198208:1-198208:92",
"198208:94-198208:134", "198208:137-198208:147", "198208:150-198208:209", "198210:1-198210:221", "198212:1-198212:574",
"198213:1-198213:107", "198215:1-198215:12", "198230:1-198230:33", "198230:36-198230:57", "198230:60-198230:235",
"198230:237-198230:324", "198230:326-198230:388", "198230:390-198230:459", "198230:462-198230:625", "198230:627-198230:651",
"198230:653-198230:805", "198230:808-198230:811", "198230:814-198230:948", "198230:950-198230:1090", "198230:1093-198230:1103",
"198230:1106-198230:1332", "198230:1335-198230:1380", "198249:1-198249:7", "198269:3-198269:198", "198271:1-198271:91",
"198271:93-198271:170", "198271:173-198271:299", "198271:301-198271:450", "198271:453-198271:513", "198271:516-198271:616",
"198271:619-198271:628", "198271:631-198271:791", "198271:793-198271:797", "198272:1-198272:185", "198272:188-198272:245",
"198272:248-198272:314", "198272:317-198272:433", "198272:436-198272:444", "198272:454-198272:620", "198346:44-198346:47",
"198372:57-198372:110", "198485:68-198485:109", "198485:112-198485:134", "198485:136-198485:181", "198485:184-198485:239",
"198487:1-198487:145", "198487:147-198487:514", "198487:517-198487:668", "198487:671-198487:733", "198487:736-198487:757",
"198487:760-198487:852", "198487:854-198487:994", "198487:997-198487:1434", "198487:1437-198487:1610", "198522:65-198522:144",
"198522:147-198522:208", "198941:102-198941:189", "198941:191-198941:220", "198941:222-198941:241", "198941:243-198941:249",
"198941:252-198941:284", "198954:108-198954:156", "198954:159-198954:277", "198955:1-198955:45", "198955:47-198955:50",
"198955:53-198955:220", "198955:223-198955:269", "198955:271-198955:284", "198955:286-198955:338", "198955:340-198955:580",
"198955:583-198955:742", "198955:744-198955:910", "198955:913-198955:946", "198955:949-198955:1162", "198955:1165-198955:1169",
"198955:1172-198955:1182", "198955:1185-198955:1188", "198955:1190-198955:1246", "198955:1249-198955:1304", "198955:1306-198955:1467",
"198955:1470-198955:1485", "198955:1487-198955:1552", "198969:58-198969:81", "198969:84-198969:247", "198969:249-198969:323",
"198969:325-198969:365", "198969:367-198969:413", "198969:416-198969:466", "198969:468-198969:643", "198969:646-198969:918",
"198969:920-198969:1011", "198969:1013-198969:1175", "198969:1178-198969:1236", "198969:1239-198969:1253", "199008:75-199008:93",
"199008:95-199008:121", "199008:124-199008:208", "199008:211-199008:331", "199008:333-199008:373", "199008:376-199008:482",
"199008:485-199008:605", "199008:608-199008:644", "199011:1-199011:11", "199011:13-199011:24", "199021:59-199021:88",
"199021:91-199021:128", "199021:130-199021:133", "199021:136-199021:309", "199021:311-199021:333", "199021:335-199021:410",
"199021:414-199021:469", "199021:471-199021:533", "199021:535-199021:563", "199021:565-199021:1223", "199021:1226-199021:1479",
"199021:1481-199021:1494", "199318:65-199318:138", "199319:1-199319:7", "199319:9-199319:223", "199319:226-199319:277",
"199319:280-199319:348", "199319:351-199319:358", "199319:360-199319:422", "199319:424-199319:490", "199319:492-199319:493",
"199319:496-199319:612", "199319:615-199319:642", "199319:645-199319:720", "199319:723-199319:728", "199319:730-199319:731",
"199319:734-199319:741", "199319:744-199319:752", "199319:754-199319:943", "199319:945-199319:997", "199336:1-199336:33",
"199336:36-199336:122", "199336:125-199336:231", "199336:234-199336:614", "199336:617-199336:789", "199336:791-199336:977",
"199356:95-199356:121", "199356:123-199356:168", "199356:171-199356:205", "199356:208-199356:231", "199409:25-199409:54",
"199409:56-199409:89", "199409:91-199409:204", "199409:206-199409:290", "199409:293-199409:583", "199409:586-199409:602",
"199409:604-199409:1014", "199409:1016-199409:1300", "199428:61-199428:197", "199428:200-199428:210", "199428:212-199428:382",
"199428:387-199428:414", "199428:417-199428:436", "199428:439-199428:530", "199428:533-199428:648", "199429:1-199429:28",
"199429:30-199429:36", "199429:39-199429:55", "199429:58-199429:101", "199429:103-199429:148", "199429:151-199429:154",
"199435:63-199435:106", "199435:109-199435:261", "199435:263-199435:579", "199435:582-199435:654", "199435:656-199435:696",
"199435:699-199435:1034", "199435:1037-199435:1144", "199435:1147-199435:1327", "199435:1330-199435:1411", "199435:1414-199435:1431",
"199435:1434-199435:1441", "199435:1444-199435:1487", "199435:1489-199435:1610", "199436:1-199436:113", "199436:116-199436:254",
"199436:257-199436:675", "199436:678-199436:748", "199564:1-199564:3", "199569:1-199569:2", "199569:5-199569:136",
"199569:139-199569:367", "199570:1-199570:17", "199571:1-199571:184", "199571:186-199571:360", "199571:363-199571:561",
"199572:1-199572:317", "199573:1-199573:22", "199574:1-199574:53", "199574:56-199574:153", "199574:156-199574:246",
"199608:60-199608:157", "199608:159-199608:209", "199608:211-199608:341", "199608:344-199608:390", "199608:392-199608:461",
"199608:464-199608:800", "199608:802-199608:1064", "199608:1067-199608:1392", "199608:1395-199608:1630", "199608:1633-199608:1904",
"199608:1907-199608:1962", "199608:1965-199608:2252", "199608:2255-199608:2422", "199698:72-199698:94", "199698:96-199698:127",
"199699:1-199699:154", "199699:157-199699:169", "199699:172-199699:410", "199699:412-199699:756", "199703:1-199703:94",
"199703:97-199703:482", "199703:485-199703:529", "199739:66-199739:133", "199751:103-199751:119", "199751:121-199751:127",
"199752:1-199752:141", "199752:144-199752:180", "199752:182-199752:186", "199752:188-199752:211", "199752:214-199752:322",
"199753:1-199753:59", "199754:1-199754:203", "199754:205-199754:325", "199754:328-199754:457", "199754:459-199754:607",
"199754:610-199754:613", "199754:615-199754:806", "199754:808-199754:998", "199804:78-199804:88", "199804:90-199804:181",
"199804:183-199804:235", "199804:238-199804:278", "199804:281-199804:290", "199804:292-199804:519", "199804:522-199804:575",
"199804:577-199804:628", "199804:631-199804:632", "199812:70-199812:141", "199812:144-199812:163", "199812:182-199812:211",
"199812:214-199812:471", "199812:474-199812:505", "199812:508-199812:557", "199812:560-199812:571", "199812:574-199812:623",
"199812:626-199812:751", "199812:754-199812:796", "199832:58-199832:62", "199832:65-199832:118", "199832:121-199832:139",
"199832:142-199832:286", "199833:1-199833:13", "199833:16-199833:103", "199833:105-199833:250", "199833:253-199833:493",
"199833:496-199833:794", "199833:797-199833:1032", "199833:1034-199833:1185", "199833:1188-199833:1239", "199834:1-199834:9",
"199834:11", "199834:14-199834:18", "199834:21-199834:54", "199834:56-199834:57", "199834:62-199834:65",
"199834:69-199834:284", "199834:286-199834:503", "199834:505-199834:942", "199862:59-199862:141", "199864:1-199864:87",
"199864:89", "199864:92-199864:103", "199864:106-199864:372", "199864:374-199864:385", "199864:388-199864:486",
"199867:1-199867:134", "199867:136-199867:172", "199867:174-199867:218", "199867:221-199867:320", "199868:1-199868:21",
"199875:70-199875:150", "199875:152-199875:334", "199876:1-199876:19", "199876:22-199876:95", "199876:97-199876:249",
"199876:252-199876:272", "199876:274-199876:340", "199876:343-199876:362", "199876:365-199876:376", "199877:1-199877:173",
"199877:175-199877:605", "199877:607-199877:701", "199877:703-199877:871", "199960:72-199960:139", "199960:141-199960:197",
"199960:204-199960:232", "199960:235-199960:363", "199960:365-199960:367", "199960:370-199960:380", "199960:383-199960:459",
"199960:461-199960:466", "199960:469-199960:485", "199961:1-199961:211", "199961:213-199961:287", "199967:60-199967:120",
"199967:122-199967:170", "199967:172-199967:198", "199973:73-199973:89", "200041:62-200041:83", "200041:85-200041:157",
"200041:162-200041:274", "200041:277-200041:318", "200041:321-200041:335", "200041:337-200041:386", "200041:388-200041:389",
"200041:392-200041:400", "200041:402-200041:568", "200041:571-200041:593", "200041:595-200041:646", "200041:649-200041:728",
"200041:731-200041:860", "200041:862-200041:930", "200041:932-200041:1096", "200042:1-200042:110", "200042:112-200042:536",
"200049:1-200049:177", "200075:76-200075:139", "200075:142-200075:232", "200075:256-200075:326", "200075:329-200075:422",
"200075:425-200075:431", "200075:434-200075:500", "200075:502-200075:605", "200091:67", "200091:70-200091:151",
"200091:154-200091:172", "200091:174-200091:187", "200091:190-200091:196", "200091:199-200091:201", "200091:204-200091:425",
"200091:428-200091:535", "200091:537-200091:607", "200091:610-200091:879", "200091:881-200091:943", "200091:946-200091:999",
"200091:1001-200091:1025", "200091:1027-200091:1132", "200091:1135-200091:1339", "200091:1341-200091:1433", "200091:1435-200091:1450",
"200091:1453-200091:1523", "200091:1526-200091:1664", "200091:1667-200091:1680", "200091:1683-200091:1710", "200152:74-200152:116",
"200160:52-200160:68", "200161:1-200161:97", "200161:100-200161:112", "200174:81-200174:84", "200177:1-200177:56",
"200178:1-200178:38", "200180:1-200180:18", "200186:1-200186:3", "200186:6-200186:24", "200188:1-200188:24",
"200188:27-200188:28", "200188:31-200188:76", "200188:79-200188:271", "200188:274-200188:352", "200190:1-200190:4",
"200190:6-200190:76", "200190:79-200190:143", "200190:146-200190:159", "200190:162-200190:256", "200190:258-200190:321",
"200190:324-200190:401", "200190:403-200190:453", "200190:456-200190:457", "200190:460-200190:565", "200190:567-200190:588",
"200190:591", "200190:593-200190:595", "200190:597-200190:646", "200190:649-200190:878", "200229:1-200229:33",
"200229:41-200229:219", "200229:222-200229:244", "200229:247-200229:290", "200229:293-200229:624", "200229:627-200229:629",
"200243:69-200243:103", "200243:106-200243:139", "200244:3-200244:304", "200244:307-200244:442", "200244:445-200244:507",
"200244:510-200244:619", "200245:1-200245:103", "200245:105-200245:128", "200245:131-200245:248", "200245:251-200245:357",
"200368:72-200368:180", "200369:1-200369:5", "200369:8-200369:61", "200369:64-200369:360", "200369:363-200369:439",
"200369:441-200369:578", "200369:580-200369:603", "200369:606-200369:684", "200369:686", "200381:8-200381:15",
"200381:18-200381:36", "200381:38-200381:89", "200381:91-200381:195", "200466:134-200466:274", "200473:96-200473:157",
"200473:159-200473:224", "200473:226-200473:304", "200473:306-200473:469", "200473:472-200473:524", "200473:527-200473:542",
"200473:545-200473:619", "200473:622-200473:688", "200473:691-200473:730", "200473:733-200473:738", "200473:740-200473:1324",
"200491:87-200491:107", "200491:110-200491:149", "200491:152-200491:157", "200491:160-200491:197", "200491:199-200491:237",
"200491:240-200491:270", "200491:273", "200491:276-200491:334", "200491:336-200491:360", "200491:363-200491:419",
"200515:97-200515:183", "200519:1-200519:111", "200519:114-200519:126", "200519:129-200519:136", "200519:138-200519:224",
"200519:227-200519:258", "200519:261-200519:350", "200519:353-200519:611", "200519:613-200519:747", "200525:77-200525:149",
"200525:151-200525:164", "200525:166-200525:190", "200525:193-200525:276", "200525:278-200525:311", "200525:314-200525:464",
"200525:467-200525:488", "200525:491-200525:674", "200525:676-200525:704", "200525:707-200525:755", "200525:757-200525:895",
"200525:898-200525:937", "200525:939-200525:990", "200532:1-200532:37", "200599:75-200599:129", "200599:132-200599:137",
"200600:1-200600:183", "200600:186-200600:299", "200600:302-200600:313", "200600:316-200600:324", "200600:327-200600:334",
"200600:336-200600:397", "200600:399-200600:417", "200600:420-200600:526", "200600:529-200600:591", "200600:594-200600:596",
"200600:598-200600:609", "200600:611-200600:660", "200600:663-200600:823", "200600:826-200600:900", "200600:902-200600:943",
"200600:945-200600:1139", "200961:1-200961:115", "200976:94-200976:164", "200990:75-200990:143", "200991:1-200991:42",
"200991:44", "200991:47-200991:80", "200991:83-200991:175", "200991:178-200991:181", "200991:184-200991:252",
"200991:255-200991:632", "200991:635-200991:916", "200991:918-200991:1017", "200991:1019-200991:1048", "200992:1-200992:405",
"200992:408-200992:434", "200992:436-200992:581", "201062:78-201062:268", "201097:83-201097:136", "201097:138-201097:245",
"201097:248-201097:300", "201097:303-201097:370", "201097:372-201097:429", "201097:432-201097:497", "201114:1-201114:14",
"201115:1-201115:73", "201159:70-201159:211", "201164:1-201164:8", "201164:10-201164:94", "201164:96-201164:125",
"201164:128-201164:178", "201164:180-201164:198", "201164:200-201164:271", "201164:274-201164:416", "201164:418",
"201168:1-201168:37", "201168:39-201168:275", "201168:278-201168:481", "201168:483-201168:558", "201168:560-201168:730",
"201173:1-201173:194", "201173:197-201173:586", "201174:1-201174:214", "201174:216-201174:263", "201174:265-201174:339",
"201174:342-201174:451", "201191:75-201191:98", "201191:100-201191:216", "201191:218-201191:389", "201191:392-201191:492",
"201191:494-201191:506", "201191:509-201191:585", "201191:587-201191:594", "201191:597-201191:607", "201191:609-201191:794",
"201191:796-201191:838", "201191:841-201191:974", "201191:977-201191:1105", "201191:1108-201191:1117", "201191:1120-201191:1382",
"201191:1385-201191:1386", "201193:1-201193:19", "201196:1-201196:238", "201196:241-201196:278", "201196:286-201196:299",
"201196:302-201196:338", "201196:341-201196:515", "201196:518-201196:720", "201196:723-201196:789", "201196:803-201196:841",
"201197:1-201197:23", "201202:1-201202:437", "201229:1-201229:5", "201229:8-201229:26", "201229:29-201229:73",
"201278:62-201278:163", "201278:166-201278:229", "201278:232-201278:256", "201278:259-201278:316", "201278:318-201278:595",
"201278:598-201278:938", "201278:942-201278:974", "201278:976-201278:1160", "201278:1163-201278:1304", "201278:1306-201278:1793",
"201278:1796-201278:1802", "201278:1805-201278:1906", "201278:1909-201278:1929", "201278:1932-201278:2174", "201554:70-201554:86",
"201554:88-201554:114", "201554:116-201554:126", "201602:76-201602:81", "201602:83-201602:194", "201602:196-201602:494",
"201602:496-201602:614", "201602:617-201602:635", "201611:87-201611:145", "201611:149-201611:182", "201611:184-201611:186",
"201613:1-201613:42", "201613:44-201613:49", "201613:53-201613:210", "201613:213-201613:215", "201613:218-201613:225",
"201613:228-201613:646", "201624:83-201624:92", "201624:95-201624:240", "201624:270", "201625:211-201625:312",
"201625:315-201625:348", "201625:351-201625:416", "201625:418-201625:588", "201625:591-201625:671", "201625:673-201625:758",
"201625:760-201625:791", "201625:793-201625:944", "201657:77-201657:93", "201657:95-201657:108", "201657:110-201657:118",
"201658:1-201658:19", "201658:21-201658:118", "201658:121-201658:136", "201658:139-201658:288", "201668:78-201668:157",
"201669:1-201669:9", "201669:12-201669:136", "201669:139-201669:141", "201669:143-201669:165", "201671:1-201671:120",
"201671:122-201671:174", "201671:177-201671:462", "201671:464-201671:482", "201671:485-201671:499", "201671:501-201671:545",
"201671:547-201671:571", "201671:574-201671:614", "201671:617-201671:766", "201671:768-201671:896", "201671:899-201671:911",
"201671:914-201671:1007", "201678:1-201678:120", "201679:1-201679:110", "201679:112-201679:241", "201679:244-201679:298",
"201679:302-201679:321", "201679:324-201679:461", "201679:463-201679:483", "201692:78-201692:81", "201692:83-201692:179",
"201705:65-201705:73", "201705:75-201705:109", "201705:111-201705:187", "201706:1-201706:62", "201707:1-201707:23",
"201707:26-201707:42", "201707:45-201707:115", "201707:118-201707:130", "201707:133-201707:160", "201707:163-201707:276",
"201707:279-201707:471", "201707:473-201707:511", "201707:514-201707:545", "201707:547-201707:570", "201707:572-201707:622",
"201707:625-201707:735", "201707:738-201707:806", "201707:809-201707:876", "201707:879-201707:964", "201708:1-201708:79",
"201718:58-201718:108", "201727:67-201727:185", "201729:6-201729:20", "201729:22-201729:75", "201729:77-201729:126",
"201729:129-201729:154", "201729:156-201729:216", "201729:219-201729:244", "201794:58-201794:94", "201802:68-201802:209",
"201802:211-201802:214", "201802:216-201802:220", "201802:223-201802:288", "201802:290-201802:296", "201816:1-201816:72",
"201816:74-201816:105", "201816:107-201816:157", "201817:1-201817:274", "201818:1", "201819:1-201819:94",
"201819:96-201819:241", "201824:1-201824:139", "201824:141-201824:176", "201824:179-201824:286", "201824:289-201824:492",
"202012:98-202012:121", "202012:126-202012:131", "202013:1-202013:2", "202013:5-202013:35", "202013:38-202013:57",
"202014:1-202014:5", "202014:8-202014:14", "202014:16-202014:18", "202014:20-202014:77", "202014:79-202014:102",
"202014:104-202014:174", "202014:177-202014:190", "202014:192-202014:196", "202016:1-202016:48", "202016:51-202016:134",
"202016:137-202016:177", "202016:179-202016:743", "202016:745-202016:831", "202016:834-202016:890", "202016:893-202016:896",
"202016:898-202016:932", "202016:934-202016:1010", "202044:84-202044:101", "202044:104-202044:266", "202044:268-202044:461",
"202044:463-202044:466", "202045:1-202045:30", "202045:33-202045:72", "202045:75-202045:528", "202045:531-202045:601",
"202045:603-202045:785", "202045:788-202045:809", "202045:822-202045:823", "202054:6-202054:266", "202054:268-202054:489",
"202054:492-202054:605", "202054:608-202054:631", "202060:76-202060:142", "202060:144-202060:154", "202060:156-202060:244",
"202060:246-202060:497", "202060:499-202060:642", "202060:644-202060:682", "202060:684-202060:743", "202060:746-202060:936",
"202074:66-202074:174", "202075:1-202075:18", "202075:21-202075:187", "202075:189-202075:214", "202075:217-202075:247",
"202075:250-202075:342", "202075:345-202075:406", "202075:409-202075:497", "202075:500-202075:537", "202075:539",
"202075:542-202075:560", "202075:562-202075:615", "202075:618-202075:628", "202084:83-202084:156", "202084:159-202084:177",
"202084:179-202084:180", "202084:182-202084:239", "202087:1-202087:25", "202087:28-202087:208", "202087:210-202087:357",
"202087:359-202087:652", "202087:655-202087:853", "202087:856-202087:1093", "202088:1-202088:286", "202093:1-202093:104",
"202093:107-202093:320", "202093:322-202093:360", "202116:59-202116:60", "202178:67-202178:78", "202178:80-202178:88",
"202178:91-202178:177", "202178:180-202178:186", "202178:188-202178:337", "202178:340-202178:377", "202178:379-202178:425",
"202178:428-202178:475", "202178:478-202178:548", "202178:551-202178:717", "202178:720-202178:965", "202178:967-202178:1444",
"202178:1447-202178:1505", "202178:1508-202178:1519", "202178:1522-202178:1555", "202205:94-202205:114", "202209:1-202209:48",
"202209:51-202209:142", "202237:39-202237:128", "202237:131", "202237:134-202237:219", "202237:222-202237:235",
"202237:238-202237:275", "202237:277-202237:289", "202237:291-202237:316", "202237:319-202237:419", "202237:422-202237:538",
"202237:540-202237:936", "202237:939-202237:950", "202237:952-202237:976", "202237:979-202237:1079", "202272:76-202272:112",
"202272:115-202272:141", "202272:144-202272:185", "202272:188-202272:205", "202272:208-202272:305", "202272:307-202272:313",
"202272:315-202272:371", "202272:436-202272:480", "202272:483-202272:555", "202272:558-202272:577", "202272:579-202272:683",
"202272:686-202272:705", "202272:707-202272:740", "202272:742-202272:890", "202272:937-202272:1295", "202272:1299-202272:1481",
"202299:68-202299:84", "202299:87-202299:141", "202299:143-202299:193", "202299:196-202299:358", "202299:361-202299:379",
"202299:382-202299:414", "202299:416-202299:452", "202299:455-202299:555", "202305:1-202305:89", "202305:92-202305:130",
"202305:133-202305:323", "202314:67-202314:104", "202314:107-202314:265", "202314:268-202314:278", "202328:46-202328:89",
"202328:92-202328:156", "202328:158-202328:276", "202328:278-202328:291", "202328:294-202328:434", "202328:437-202328:460",
"202328:463-202328:586", "202328:588-202328:610", "202328:612-202328:614", "202333:1-202333:235", "202389:81-202389:182",
"202389:185-202389:190", "202389:192-202389:199", "202469:87-202469:158", "202469:160-202469:174", "202469:177-202469:352",
"202472:1-202472:96", "202472:99-202472:112", "202477:1-202477:129", "202477:131-202477:150", "202478:1-202478:177",
"202478:180-202478:183", "202478:186-202478:219", "202478:222-202478:360", "202478:362-202478:506", "202478:509-202478:531",
"202478:534-202478:718", "202478:720-202478:927", "202478:929-202478:973", "202478:975-202478:1029", "202478:1031-202478:1186",
"202478:1189-202478:1212", "202478:1215-202478:1248", "202504:77-202504:96", "202504:99-202504:133", "202504:135-202504:182",
"202504:184-202504:211", "202504:213-202504:241", "202504:243-202504:392", "202504:395-202504:527", "202504:529-202504:617",
"202504:620-202504:715", "202504:718-202504:763", "202504:766-202504:1172", "202504:1174-202504:1247", "202504:1250-202504:1471",
"202504:1474-202504:1679", "202504:1682-202504:1704", "202972:1-202972:30", "202972:33-202972:184", "202972:186-202972:290",
"202972:292-202972:295", "202972:298-202972:371", "202972:374-202972:429", "202972:431-202972:544", "202973:1-202973:234",
"202973:237-202973:305", "202973:308-202973:437", "202973:439-202973:530", "202973:532-202973:541", "202973:544-202973:552",
"202973:555-202973:851", "202973:853-202973:1408", "203002:77-203002:128", "203002:130-203002:141", "203002:144-203002:207",
"203002:209-203002:267", "203002:270-203002:360", "203002:362-203002:501", "203002:504-203002:641", "203002:643-203002:669",
"203002:671", "203002:674-203002:717", "203002:720-203002:1034", "203002:1037-203002:1070", "203002:1073-203002:1370",
"203002:1372-203002:1392", "203002:1395-203002:1410", "203002:1413-203002:1596", "203709:1-203709:121", "203742:1-203742:29",
"203777:103-203777:113", "203830:82-203830:182", "203832:1-203832:11", "203833:1-203833:70", "203833:73-203833:128",
"203834:1-203834:40", "203835:1-203835:70", "203835:73-203835:358", "203853:122-203853:222", "203894:82-203894:272",
"203894:275-203894:477", "203894:480-203894:902", "203894:905-203894:1319", "203909:79-203909:113", "203909:116-203909:117",
"203909:120-203909:140", "203909:143-203909:382", "203912:1-203912:306", "203912:308-203912:566", "203912:569-203912:609",
"203912:611-203912:698", "203912:701-203912:820", "203912:823-203912:865", "203912:867-203912:1033", "203912:1035-203912:1321",
"203987:1-203987:9", "203987:12-203987:241", "203987:243-203987:339", "203987:342-203987:781", "203987:784-203987:1014",
"203992:1-203992:15", "203994:1-203994:56", "203994:59-203994:136", "203994:139-203994:304", "203994:306-203994:342",
"203994:344-203994:425", "204100:117-204100:139", "204101:1-204101:74", "204113:82-204113:96", "204113:98-204113:102",
"204113:105-204113:127", "204113:129-204113:191", "204113:194-204113:258", "204113:261-204113:327", "204113:329-204113:388",
"204113:390-204113:400", "204113:402-204113:583", "204113:585-204113:690", "204114:1-204114:358", "204238:23-204238:52",
"204238:55", "204250:92-204250:118", "204250:121-204250:177", "204250:179-204250:285", "204250:287-204250:336",
"204250:339-204250:400", "204250:403-204250:521", "204250:524-204250:543", "204250:546-204250:682", "204250:684-204250:801",
"204511:1-204511:56", "204541:5-204541:39", "204541:42", "204541:44-204541:139", "204541:142-204541:149",
"204541:151-204541:204", "204544:1-204544:11", "204544:13-204544:93", "204544:96-204544:195", "204544:197-204544:224",
"204544:226-204544:334", "204544:337-204544:426", "204552:1-204552:9", "204553:1-204553:51", "204553:53-204553:60",
"204553:63-204553:101", "204554:1-204554:5", "204554:7-204554:221", "204554:224-204554:455", "204554:458-204554:470",
"204554:472-204554:481", "204554:483-204554:514", "204555:1-204555:329", "204555:331-204555:334", "204563:91-204563:99",
"204563:102-204563:178", "204563:180-204563:219", "204563:222-204563:229", "204563:231-204563:364", "204563:366",
"204563:369-204563:470", "204563:473-204563:524", "204563:527-204563:571", "204564:1-204564:84", "204564:87-204564:89",
"204564:92-204564:159", "204564:161-204564:187", "204564:190-204564:191", "204564:193-204564:293", "204564:296-204564:315",
"204564:317-204564:340", "204564:343-204564:427", "204564:429-204564:434", "204564:437-204564:735", "204564:737-204564:855",
"204564:858-204564:1206", "204564:1209-204564:1248", "204564:1251-204564:1284", "204565:1-204565:48", "204566:1-204566:12",
"204567:1-204567:38", "204576:49-204576:192", "204576:195-204576:301", "204577:1-204577:46", "204577:49-204577:64",
"204577:67-204577:105", "204577:107-204577:170", "204577:173-204577:181", "204577:183-204577:193", "204577:196-204577:653",
"204577:656-204577:669", "204577:671-204577:740", "204577:742-204577:913", "204577:915-204577:1057", "204577:1059-204577:1115",
"204577:1117-204577:1282", "204599:73-204599:83", "204599:85-204599:94", "204599:97-204599:121", "204599:124-204599:125",
"204599:128-204599:173", "204599:175-204599:240", "204599:243-204599:245", "204599:248-204599:264", "204599:266-204599:292",
"204599:294-204599:334", "204601:1-204601:25", "204601:28-204601:62", "204601:65-204601:80", "204601:83-204601:89",
"204601:92-204601:290", "204601:292-204601:563", "204601:565-204601:591", "204601:593-204601:652", "204601:655-204601:780",
"204601:783-204601:812", "204601:814-204601:892", "204601:894-204601:984", "204601:986-204601:1003", "204601:1006-204601:1038",
"204601:1040-204601:1088", "204601:1091-204601:1102", "204601:1105-204601:1161", "204601:1164-204601:1250", "205086:95-205086:149",
"205111:88-205111:390", "205111:392-205111:441", "205111:444-205111:446", "205158:81-205158:289", "205158:292-205158:313",
"205158:315-205158:473", "205158:476-205158:591", "205158:594-205158:595", "205158:597-205158:612", "205158:615-205158:663",
"205158:665-205158:667", "205158:672-205158:685", "205158:687-205158:733", "205193:80-205193:109", "205193:111-205193:349",
"205193:352-205193:486", "205193:488-205193:650", "205193:652-205193:712", "205193:714-205193:902", "205217:1-205217:12",
"205217:16-205217:111", "205217:113-205217:171", "205217:174-205217:250", "205217:253-205217:318", "205233:94-205233:153",
"205236:1-205236:190", "205236:193-205236:207", "205236:209-205236:260", "205236:263-205236:331", "205236:334-205236:352",
"205238:1-205238:6", "205238:9-205238:199", "205238:202-205238:254", "205238:256-205238:304", "205238:306-205238:355",
"205238:358-205238:381", "205238:384-205238:596", "205238:598-205238:617", "205303:35-205303:54", "205303:90-205303:132",
"205303:135-205303:144", "205310:76-205310:306", "205310:309-205310:313", "205310:316", "205310:319-205310:321",
"205310:324-205310:457", "205310:460-205310:559", "205311:1-205311:85", "205311:88-205311:92", "205311:95-205311:183",
"205311:186-205311:395", "205311:397-205311:592", "205311:595-205311:910", "205311:913-205311:1260", "205339:71-205339:175",
"205339:178-205339:213", "205339:216-205339:230", "205339:233-205339:262", "205339:265-205339:404", "205344:1-205344:83",
"205344:86-205344:104", "205344:106-205344:359", "205344:362-205344:431", "205344:433-205344:949", "205344:951-205344:967",
"205344:969-205344:1127", "205344:1129-205344:1346", "205344:1348-205344:1586", "205515:82-205515:201", "205515:203-205515:216",
"205519:1-205519:47", "205519:50-205519:172", "205519:175-205519:367", "205519:370-205519:386", "205519:389-205519:472",
"205526:1-205526:269", "205526:272-205526:277", "205526:280-205526:332", "205614:1-205614:4", "205614:7-205614:40",
"205617:1-205617:29", "205617:32-205617:102", "205617:105-205617:123", "205617:125-205617:140", "205617:143-205617:264",
"205617:266-205617:448", "205617:451-205617:532", "205617:534-205617:547", "205618:1-205618:12", "205620:1-205620:175",
"205666:60-205666:119", "205666:122-205666:165", "205666:168-205666:259", "205666:261-205666:322", "205666:325-205666:578",
"205666:580-205666:594", "205666:597-205666:721", "205666:724-205666:739", "205667:1-205667:165", "205667:168-205667:282",
"205667:285-205667:318", "205667:321-205667:412", "205667:415-205667:689", "205667:692-205667:751", "205667:754-205667:774",
"205667:777-205667:1109", "205683:76-205683:82", "205683:85-205683:178", "205683:181-205683:198", "205683:201-205683:305",
"205690:1-205690:40", "205694:1-205694:205", "205694:208-205694:230", "205694:233-205694:347", "205694:350-205694:452",
"205694:455-205694:593", "205694:595-205694:890", "205718:49-205718:75", "205718:78-205718:97", "205718:100-205718:103",
"205718:105-205718:176", "205718:178-205718:338", "205718:341-205718:361", "205718:363-205718:524", "205718:527-205718:531",
"205718:534-205718:589", "205718:591-205718:694", "205774:1-205774:80", "205777:1-205777:8", "205781:1-205781:89",
"205781:91-205781:197", "205781:200-205781:502", "205826:80-205826:232", "205826:235-205826:303", "205826:306-205826:468",
"205833:84-205833:86", "205833:89-205833:121", "205833:123-205833:155", "205833:157-205833:165", "205833:167-205833:173",
"205833:176-205833:219", "205833:221-205833:267", "205833:270-205833:312", "205833:315-205833:346", "205833:350-205833:355",
"205833:360-205833:366", "205834:1-205834:12", "205834:14-205834:195", "205908:68-205908:200", "205908:202-205908:209",
"205921:22-205921:73", "205921:76-205921:268", "205921:271-205921:394", "205921:397-205921:401", "205921:410-205921:428",
"205921:431-205921:498", "205921:500-205921:571", "205921:574-205921:779", "205921:782-205921:853", "206066:89-206066:146",
"206088:86-206088:159", "206088:161-206088:178", "206088:181-206088:199", "206088:202-206088:286", "206102:83-206102:116",
"206102:120-206102:130", "206102:133-206102:208", "206102:211-206102:235", "206102:238-206102:246", "206102:249-206102:278",
"206102:281-206102:349", "206187:107-206187:169", "206187:172-206187:242", "206187:245-206187:288", "206187:290-206187:340",
"206187:343-206187:427", "206187:429-206187:435", "206187:437-206187:486", "206187:489-206187:569", "206187:571-206187:647",
"206187:649-206187:662", "206187:664-206187:708", "206188:1-206188:40", "206188:42-206188:55", "206199:1-206199:75",
"206199:77-206199:82", "206199:85-206199:114", "206207:82-206207:130", "206207:132-206207:176", "206207:179-206207:194",
"206207:196-206207:388", "206207:390-206207:419", "206207:422-206207:447", "206207:450-206207:569", "206207:572-206207:690",
"206208:1-206208:470", "206208:472-206208:518", "206210:11-206210:25", "206210:28-206210:275", "206210:277-206210:298",
"206210:300-206210:383", "206210:386-206210:466", "206243:62-206243:169", "206243:172-206243:196", "206243:199-206243:354",
"206243:357-206243:433", "206243:435-206243:448", "206243:451-206243:533", "206243:536-206243:554", "206243:557-206243:723",
"206243:726-206243:905", "206245:1-206245:62", "206246:1-206246:14", "206246:16-206246:237", "206246:240-206246:285",
"206246:288-206246:407", "206246:412-206246:676", "206246:678-206246:704", "206246:706-206246:785", "206246:787-206246:962",
"206246:965-206246:997", "206246:1000-206246:1198", "206246:1201-206246:1290", "206257:1-206257:29", "206258:1-206258:36",
"206258:39-206258:223", "206258:226-206258:249", "206302:1-206302:8", "206302:11-206302:33", "206302:36-206302:44",
"206302:47-206302:82", "206302:84-206302:108", "206302:110-206302:149", "206302:151-206302:186", "206302:189-206302:229",
"206302:231-206302:232", "206302:234-206302:241", "206302:243-206302:276", "206303:1-206303:19", "206303:23-206303:286",
"206304:1-206304:4", "206304:6-206304:62", "206331:91-206331:222", "206331:225-206331:312", "206389:88-206389:185",
"206389:187-206389:249", "206389:252-206389:272", "206389:275-206389:392", "206391:1-206391:55", "206391:57-206391:91",
"206401:69-206401:90", "206401:92-206401:194", "206401:197-206401:210", "206401:212-206401:249", "206401:251-206401:265",
"206401:267-206401:409", "206446:92-206446:141", "206446:143-206446:159", "206446:162-206446:205", "206446:208-206446:301",
"206446:304-206446:442", "206446:445", "206446:448-206446:474", "206446:476-206446:616", "206446:619-206446:872",
"206446:874-206446:910", "206446:912-206446:948", "206446:950-206446:989", "206446:992-206446:1030", "206446:1033-206446:1075",
"206446:1109-206446:1149", "206448:1-206448:143", "206448:145-206448:559", "206448:561-206448:1170", "206448:1173-206448:1231",
"206448:1235-206448:1237", "206466:24-206466:137", "206466:140-206466:277", "206466:280-206466:296", "206466:299-206466:303",
"206466:306-206466:405", "206466:407-206466:419", "206466:422-206466:477", "206466:480-206466:511", "206466:514-206466:676",
"206476:73-206476:129", "206476:133-206476:137", "206476:140-206476:141", "206476:143-206476:219", "206477:1-206477:14",
"206477:16-206477:31", "206477:33-206477:41", "206477:44-206477:51", "206477:53-206477:70", "206477:73-206477:75",
"206477:77-206477:89", "206477:91-206477:94", "206477:97-206477:115", "206477:118-206477:184", "206478:1-206478:27",
"206478:29-206478:136", "206478:139-206478:144", "206484:73-206484:95", "206484:98-206484:133", "206484:136-206484:163",
"206484:166-206484:186", "206484:189-206484:384", "206484:387-206484:463", "206484:465-206484:551", "206484:554",
"206484:556-206484:669", "206512:91-206512:123", "206512:125-206512:133", "206512:136-206512:161", "206512:163-206512:190",
"206512:193-206512:201", "206512:203-206512:212", "206512:214-206512:332", "206512:334-206512:584", "206512:587-206512:604",
"206512:607-206512:1005", "206512:1008-206512:1123", "206512:1126-206512:1163", "206512:1165-206512:1211", "206513:3-206513:39",
"206513:42-206513:188", "206513:191-206513:234", "206513:237-206513:238", "206513:241-206513:323", "206542:1-206542:115",
"206542:117-206542:165", "206542:168-206542:511", "206542:514-206542:547", "206542:550-206542:603", "206542:606-206542:668",
"206542:671-206542:727", "206542:730-206542:739", "206542:741-206542:833", "206550:77-206550:132", "206550:135-206550:144",
"206572:37-206572:47", "206573:2-206573:14", "206574:1-206574:87", "206575:1-206575:7", "206575:10",
"206575:12-206575:69", "206594:72-206594:107", "206594:110-206594:246", "206594:249-206594:281", "206595:1-206595:34",
"206595:37-206595:42", "206595:45-206595:193", "206596:1-206596:13", "206596:15-206596:220", "206596:222-206596:228",
"206596:231-206596:236", "206596:239-206596:292", "206596:295-206596:695", "206596:697-206596:728", "206596:730-206596:810",
"206598:1-206598:81", "206598:83-206598:103", "206598:105-206598:588", "206598:591-206598:657", "206598:659-206598:719",
"206605:1-206605:36", "206605:39-206605:78", "206744:49-206744:157", "206744:160-206744:192", "206744:195-206744:395",
"206744:398-206744:452", "206745:1-206745:81", "206745:84-206745:199", "206745:202-206745:224", "206745:227-206745:237",
"206745:240-206745:304", "206745:306-206745:318", "206745:321-206745:720", "206745:723-206745:796", "206745:799-206745:894",
"206745:897-206745:944", "206745:946-206745:1106", "206745:1108-206745:1524", "206745:1527-206745:1862", "206745:1988-206745:1996",
"206859:79-206859:210", "206859:212-206859:258", "206859:260-206859:323", "206859:325-206859:356", "206859:359-206859:609",
"206859:612-206859:681", "206859:684-206859:732", "206859:734-206859:768", "206859:771-206859:808", "206859:811-206859:827",
"206859:830-206859:848", "206866:1-206866:30", "206866:33-206866:113", "206866:115-206866:274", "206868:1-206868:3",
"206868:10-206868:16", "206869:1-206869:251", "206869:253-206869:271", "206869:274-206869:502", "206869:507-206869:520",
"206869:522-206869:566", "206869:568-206869:752", "206897:1-206897:34", "206897:38-206897:61", "206897:63-206897:102",
"206897:109", "206897:111-206897:112", "206897:114-206897:131", "206897:133-206897:137", "206901:1-206901:98",
"206906:1-206906:31", "206906:38-206906:94", "206906:96-206906:136", "206906:138-206906:139", "206906:142-206906:149",
"206906:151-206906:175", "206906:177-206906:206", "206940:1-206940:151", "206940:153", "206940:155-206940:298",
"206940:301-206940:382", "206940:384-206940:712", "206940:715-206940:803", "206940:805-206940:960", "206940:963-206940:1027",
"207099:83-207099:134", "207099:137-207099:172", "207099:175-207099:213", "207099:216-207099:314", "207099:316-207099:320",
"207099:323-207099:330", "207099:333-207099:367", "207099:370-207099:481", "207099:484-207099:602", "207099:605-207099:755",
"207099:757-207099:1046", "207099:1048-207099:1171", "207100:1-207100:91", "207100:94", "207214:57-207214:112",
"207214:114-207214:177", "207214:179-207214:181", "207214:184-207214:196", "207214:199-207214:220", "207214:223-207214:262",
"207214:265-207214:405", "207214:408-207214:482", "207214:485-207214:640", "207214:643-207214:708", "207214:718-207214:757",
"207214:759-207214:808", "207214:811-207214:829", "207217:1-207217:32", "207219:1-207219:112", "207220:1-207220:160",
"207221:1-207221:102", "207222:1-207222:17", "207222:20-207222:289", "207231:70-207231:84", "207231:86-207231:121",
"207231:123-207231:184", "207231:187-207231:189", "207231:192-207231:303", "207231:306-207231:354", "207231:357-207231:481",
"207231:484-207231:504", "207231:508-207231:549", "207231:552-207231:626", "207231:628-207231:690", "207231:693-207231:875",
"207231:878-207231:1000", "207231:1003-207231:1170", "207231:1173-207231:1187", "207231:1189-207231:1227", "207231:1229-207231:1415",
"207231:1418-207231:1445", "207231:1447-207231:1505", "207233:1-207233:119", "207233:121-207233:148", "207269:80-207269:394",
"207269:397-207269:436", "207269:439-207269:463", "207269:466-207269:551", "207269:568-207269:577", "207273:3-207273:877",
"207279:68-207279:138", "207279:141-207279:149", "207279:151-207279:237", "207279:240-207279:266", "207279:269-207279:307",
"207279:309-207279:416", "207279:498-207279:551", "207279:554-207279:640", "207279:643-207279:961", "207279:963-207279:1095",
"207279:1098-207279:1160", "207320:1-207320:110", "207320:112-207320:350", "207371:72-207371:117", "207371:120-207371:124",
"207372:1-207372:27", "207372:30-207372:113", "207372:116-207372:154", "207372:156-207372:174", "207372:176-207372:478",
"207372:480-207372:496", "207397:32-207397:77", "207397:80-207397:140", "207397:143-207397:179", "207398:1-207398:14",
"207398:16-207398:33", "207454:79-207454:95", "207454:98-207454:123", "207454:126-207454:259", "207454:261-207454:363",
"207454:365-207454:458", "207454:461-207454:498", "207454:501-207454:609", "207454:612-207454:632", "207454:635-207454:781",
"207454:784-207454:866", "207454:869-207454:974", "207454:977-207454:1064", "207454:1067-207454:1079", "207454:1081-207454:1321",
"207454:1323-207454:1464", "207454:1467-207454:1569", "207454:1571-207454:1604", "207454:1607-207454:1712", "207454:1714-207454:1988",
"207469:1-207469:31", "207469:34-207469:45", "207477:76-207477:104", "207477:107-207477:111", "207477:114-207477:147",
"207477:150-207477:295", "207477:298-207477:483", "207477:486-207477:494", "207477:497-207477:527", "207477:530-207477:563",
"207477:565-207477:570", "207487:50-207487:98", "207487:101-207487:311", "207487:313-207487:359", "207487:363-207487:468",
"207487:471-207487:472", "207488:1-207488:63", "207488:66-207488:92", "207488:95-207488:113", "207488:116-207488:198",
"207488:200-207488:250", "207488:252-207488:288", "207488:291-207488:365", "207488:368-207488:377", "207488:379-207488:440",
"207490:1-207490:48", "207490:51-207490:111", "207491:1-207491:176", "207491:179-207491:458", "207492:1-207492:20",
"207492:23-207492:298", "207515:79-207515:109", "207515:112-207515:132", "207515:134-207515:208", "207515:211-207515:225",
"207515:228-207515:320", "207515:322-207515:381", "207515:383-207515:498", "207515:500-207515:730", "207515:733-207515:849",
"207515:851-207515:954", "207515:957-207515:994", "207515:997-207515:1052", "207515:1055-207515:1143", "207515:1145-207515:1211",
"207517:1-207517:12", "207517:15-207517:57", "207518:1-207518:59", "207518:61-207518:83", "207882:22-207882:45",
"207883:1", "207883:3-207883:4", "207883:7-207883:75", "207884:1-207884:106", "207884:108-207884:183",
"207885:1-207885:90", "207886:1-207886:30", "207886:32-207886:90", "207886:92-207886:156", "207886:158-207886:166",
"207886:168-207886:171", "207889:1-207889:43", "207889:47-207889:57", "207889:60-207889:303", "207889:306-207889:442",
"207889:445", "207889:447-207889:551", "207889:553-207889:731", "207889:733-207889:907", "207889:910-207889:945",
"207898:1-207898:33", "207898:36-207898:57", "207898:60-207898:235", "207898:239-207898:257", "207898:260-207898:277",
"207905:75-207905:196", "207905:198-207905:281", "207905:284-207905:329", "207905:331-207905:402", "207905:404-207905:565",
"207905:568-207905:672", "207905:675-207905:805", "207905:807-207905:850", "207905:852-207905:861", "207905:864-207905:884",
"207905:886-207905:1180", "207905:1183-207905:1283", "207905:1285-207905:1331", "207905:1333-207905:1515", "207905:1518-207905:1734",
"207905:1737-207905:1796", "207920:84-207920:146", "207920:149-207920:241", "207920:243-207920:261", "207920:264-207920:291",
"207920:294-207920:486", "207920:489-207920:518", "207920:520-207920:598", "207920:600-207920:708", "207920:710-207920:826",
"207921:1-207921:37", "207921:40-207921:58", "207922:1-207922:69", "207922:71-207922:100", "207922:103-207922:126",
"207922:129-207922:242", "207922:274-207922:291", "207924:1-207924:52", "207924:54-207924:171", "207924:173-207924:178",
"207924:181-207924:339", "208307:2-208307:42", "208307:45", "208307:47-208307:70", "208307:72-208307:147",
"208307:150-208307:252", "208307:256-208307:259", "208307:262-208307:275", "208307:278-208307:342", "208307:345-208307:450",
"208307:453-208307:527", "208307:530-208307:583", "208307:586-208307:605", "208307:608-208307:616", "208307:618-208307:667",
"208307:670-208307:761", "208307:763-208307:798", "208307:800-208307:889", "208307:891-208307:893", "208307:896-208307:1055",
"208307:1057-208307:1205", "208307:1208-208307:1294", "208307:1297-208307:1328", "208339:77-208339:89", "208339:91-208339:122",
"208339:125-208339:208", "208339:211-208339:346", "208339:349-208339:363", "208341:1-208341:84", "208341:87-208341:117",
"208341:120-208341:513", "208341:515-208341:685", "208341:688-208341:693", "208341:695-208341:775", "208341:777-208341:824",
"208351:83-208351:97", "208351:100-208351:356", "208351:359-208351:367", "208351:369", "208352:1-208352:15",
"208352:17", "208352:19", "208353:1-208353:76", "208353:78-208353:269", "208353:271-208353:348",
"208357:1-208357:70", "208357:73-208357:507", "208390:72-208390:128", "208390:130-208390:169", "208391:52-208391:82",
"208391:84-208391:162", "208391:164-208391:216", "208391:219-208391:493", "208391:495-208391:498", "208391:500-208391:523",
"208391:526-208391:533", "208391:535-208391:588", "208391:591-208391:660", "208391:663-208391:869", "208427:49-208427:89",
"208427:92-208427:161", "208427:164", "208427:166-208427:173", "208427:175-208427:268", "208427:271-208427:312",
"208427:315", "208427:317-208427:335", "208427:337-208427:361", "208427:364-208427:402", "208427:404-208427:422",
"208427:425-208427:577", "208427:580-208427:647", "208428:1-208428:58", "208428:61-208428:68", "208428:70-208428:156",
"208428:159-208428:227", "208429:1-208429:56", "208429:59-208429:139", "208429:141-208429:159", "208429:162-208429:237",
"208429:240-208429:440", "208429:442-208429:452", "208429:455-208429:589", "208429:592-208429:712", "208429:715-208429:922",
"208487:2-208487:26", "208487:29-208487:159", "208487:161-208487:307", "208487:309-208487:459", "208487:462-208487:476",
"208487:479-208487:621", "208509:71-208509:232", "208538:2-208538:43", "208540:1-208540:26", "208540:29-208540:98",
"208541:1-208541:57", "208541:59-208541:173", "208541:175-208541:376", "208541:378-208541:413", "208551:119-208551:193",
"208551:195-208551:212", "208551:215-208551:300", "208551:303-208551:354", "208551:356-208551:554", "208551:557-208551:580",
"208686:73-208686:79", "208686:82-208686:181", "208686:183-208686:224", "208686:227-208686:243", "208686:246-208686:311",
"208686:313-208686:459" ) ),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DoubleMu/StoreResults-Run2012A_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/PAT_CMG_V5_16_0/cmgTuple_61.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMu/StoreResults-Run2012A_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/PAT_CMG_V5_16_0/cmgTuple_62.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMu/StoreResults-Run2012A_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/PAT_CMG_V5_16_0/cmgTuple_63.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
6438a7be02ad5b060376ea2c06ab0e060842b15f | e00fe1e065b448f6f8c0472ed2b8a39991fa7b1b | /Fuzzy_clustering/version3/project_manager/Projects_Train_Manager.py | fe9014b867bf397065759370221daa970b2b14aa | [
"Apache-2.0"
] | permissive | joesider9/forecasting_library | 1a4ded5b09fc603f91fa1c075e79fc2ed06c08a8 | db07ff8f0f2693983058d49004f2fc6f8849d197 | refs/heads/master | 2023-03-29T12:18:22.261488 | 2021-04-01T08:57:08 | 2021-04-01T08:57:08 | 319,906,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,506 | py | import sys
import os
import pandas as pd
import numpy as np
import logging, shutil, glob, json
import copy, joblib
from Fuzzy_clustering.version3.project_manager.ModelManager.Models_train_manager import ModelTrainManager
from Fuzzy_clustering.version3.project_manager.ModelManager.Models_predict_manager import ModelPredictManager
from Fuzzy_clustering.version3.project_manager.correlation_link_projects import ProjectLinker
from Fuzzy_clustering.version3.project_manager.rabbitmq_client import rabbit_client_data, rabbit_client_nwp
import time
# for timing
from contextlib import contextmanager
from timeit import default_timer
@contextmanager
def elapsed_timer():
    """Context manager that yields a zero-argument callable returning elapsed seconds.

    While the ``with`` block is active the callable reports time since entry;
    after the block exits it is frozen at the total duration.  The extra
    ``lambda`` wrapper around ``elapser`` is deliberate: it gives late binding,
    so rebinding ``elapser`` after the ``yield`` takes effect for callers that
    kept the yielded callable.
    """
    start = default_timer()
    elapser = lambda: default_timer() - start
    yield lambda: elapser()  # wrapper -> late binding to `elapser`
    end = default_timer()
    elapser = lambda: end-start  # freeze the reading once the block has exited
class ProjectsTrainManager(object):
    def __init__(self,static_data):
        """Store the run configuration and derive model type and filesystem layout.

        Args:
            static_data: configuration dict; must contain at least
                'data_file_name', 'project_owner', 'projects_group',
                'area_group', 'version_group', 'version_model',
                'weather_in_data', 'NWP_model', 'NWP_resolution',
                'data_variables' and 'Docker'.

        Raises:
            IOError: if the data file name does not identify the model type.

        Side effects: creates the group and NWP directories on disk and
        attaches a file logger via ``create_logger``.
        """
        self.static_data = static_data
        self.file_data = static_data['data_file_name']
        self.project_owner = static_data['project_owner']
        self.projects_group = static_data['projects_group']
        self.area_group = static_data['area_group']
        self.version_group = static_data['version_group']
        self.version_model = static_data['version_model']
        self.weather_in_data = static_data['weather_in_data']
        self.nwp_model = static_data['NWP_model']
        self.nwp_resolution = static_data['NWP_resolution']
        self.data_variables = static_data['data_variables']
        # Model type is inferred purely from the data file name.
        data_file_name = os.path.basename(self.file_data)
        if 'load' in data_file_name:
            self.model_type = 'load'
        elif 'pv' in data_file_name:
            self.model_type = 'pv'
        elif 'wind' in data_file_name:
            self.model_type = 'wind'
        elif 'fa' in data_file_name:
            self.model_type = 'fa'
        else:
            raise IOError('Wrong data file name. Use one of load_ts.csv, wind_ts.csv, pv_ts.csv')
        # Resolve model and NWP-grib root folders for the four supported
        # environments: (Docker | bare metal) x (linux | other/Windows).
        # NOTE(review): several paths are machine-specific (e.g. /media/smartrue)
        # — confirm they match the deployment host.
        if self.static_data['Docker']:
            if sys.platform == 'linux':
                self.sys_folder = '/models/'
                if self.nwp_model == 'skiron' and self.nwp_resolution == 0.05:
                    self.path_nwp = '/nwp_grib/SKIRON'
                elif self.nwp_model == 'skiron' and self.nwp_resolution == 0.1:
                    self.path_nwp = '/nwp_grib/SKIRON_low'
                elif self.nwp_model == 'ecmwf':
                    self.path_nwp = '/nwp_grib/ECMWF'
                else:
                    self.path_nwp = None
            else:
                if self.nwp_model == 'ecmwf':
                    self.sys_folder = '/models/'
                    self.path_nwp = '/nwp_grib/ECMWF'
                else:
                    self.sys_folder = '/models/'
                    self.path_nwp = None
        else:
            if sys.platform == 'linux':
                self.sys_folder = '/media/smartrue/HHD1/George/models/'
                if self.nwp_model == 'skiron' and self.nwp_resolution==0.05:
                    self.path_nwp = '/media/smartrue/HHD2/SKIRON'
                elif self.nwp_model == 'skiron' and self.nwp_resolution==0.1:
                    self.path_nwp = '/media/smartrue/HHD2/SKIRON_low'
                elif self.nwp_model == 'ecmwf':
                    self.path_nwp = '/media/smartrue/HHD2/ECMWF'
                else:
                    self.path_nwp = None
            else:
                if self.nwp_model == 'ecmwf':
                    self.sys_folder = 'D:/models/'
                    self.path_nwp = 'D:/Dropbox/ECMWF'
                else:
                    self.sys_folder = 'D:/models/'
                    self.path_nwp = None
        # <sys_folder>/<owner>/<group>_ver<N>/<model_type> holds all project models.
        self.path_group = self.sys_folder + self.project_owner + '/' + self.projects_group + '_ver' + str(self.version_group)+ '/' + self.model_type
        if not os.path.exists(self.path_group):
            os.makedirs(self.path_group)
        # Sibling 'nwp' folder caches extracted weather data for the group.
        self.path_nwp_group = self.sys_folder + self.project_owner + '/' + self.projects_group + '_ver' + str(self.version_group) + '/nwp'
        if not os.path.exists(self.path_nwp_group):
            os.makedirs(self.path_nwp_group)
        self.create_logger()
def create_logger(self):
self.logger = logging.getLogger('ProjectsTrainManager_' + self.model_type)
self.logger.setLevel(logging.INFO)
handler = logging.FileHandler(os.path.join(self.path_group, 'log_' + self.projects_group + '.log'), 'a')
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add the handlers to the logger
self.logger.addHandler(handler)
def check_project_names(self):
flag = True
if self.model_type in {'wind', 'pv'}:
for name in self.projects:
if name not in self.coord.index.to_list() and name != self.projects_group + '_' + self.model_type and name != 'APE_net':
flag = False
self.logger.info('There is inconsistency to files data and coord for the project %s', name)
if flag==False:
raise ValueError('Inconcistency in project names between data and coord')
if self.use_rated:
for name in self.projects:
if name not in self.rated.index.to_list() and name != self.projects_group + '_' + self.model_type:
flag = False
self.logger.info('There is inconsistency to files data and rated for the project %s', name)
if flag==False:
raise ValueError('Inconcistency in project names between data and rated')
return flag
    def load_data(self):
        """Load the timeseries, coordinates and rated-power files.

        Populates ``self.data`` (and ``self.data_eval`` when an evaluation
        split date is configured), ``self.projects``, and optionally
        ``self.coord`` / ``self.rated``.

        Raises:
            IOError: when a required CSV cannot be read.
            ValueError: when the evaluation date cannot be parsed/applied.

        NOTE(review): the bare ``except:`` clauses swallow every exception type
        (including KeyboardInterrupt), and the ``IOError('%s', ...)`` calls pass
        the filename as a second argument instead of interpolating it — both
        worth tightening up.
        """
        try:
            self.data = pd.read_csv(self.file_data, header=0, index_col=0, parse_dates=True, dayfirst=True)
        except:
            self.logger.info('Cannot import timeseries from the file %s', self.file_data)
            raise IOError('Cannot import timeseries from the file %s', self.file_data)
        self.logger.info('Timeseries imported successfully from the file %s', self.file_data)
        # A 'total' column is renamed to the group-total pseudo-project name.
        if 'total' in self.data.columns:
            self.data = self.data.rename(columns={'total':self.projects_group + '_' + self.model_type})
        # Split off the evaluation period, if configured.  For 'fa' models the
        # evaluation set keeps 372 days of history before the cut date.
        if self.static_data['Evaluation_start'] != None:
            if self.model_type == 'fa':
                try:
                    eval_date = pd.to_datetime(self.static_data['Evaluation_start'], format='%d%m%Y %H:%M')
                    self.data_eval = self.data.iloc[np.where(self.data.index>eval_date-pd.DateOffset(days=372))]
                    self.data = self.data.iloc[np.where(self.data.index<=eval_date)]
                except:
                    raise ValueError('Wrong date format, use %d%m%Y %H:%M. Or the date does not exist in the dataset')
            else:
                try:
                    eval_date = pd.to_datetime(self.static_data['Evaluation_start'], format='%d%m%Y %H:%M')
                    self.data_eval = self.data.iloc[np.where(self.data.index > eval_date)]
                    self.data = self.data.iloc[np.where(self.data.index <= eval_date)]
                except:
                    raise ValueError(
                        'Wrong date format, use %d%m%Y %H:%M. Or the date does not exist in the dataset')
        # Project list: one per column for wind/pv; a single entry otherwise.
        self.projects = []
        if self.model_type == 'load':
            self.projects.append(self.data.columns[0])
        elif self.model_type == 'fa':
            self.projects.append('fa')
        else:
            for name in self.data.columns:
                if name=='total':
                    name = self.projects_group + '_' + self.model_type
                self.projects.append(name)
        if self.weather_in_data == False:
            try:
                self.coord = pd.read_csv(self.file_coord, header=None, index_col=0)
            except:
                self.logger.info('Cannot import coordinates from the file %s', self.file_coord)
                raise IOError('Cannot import coordinates from the file %s', self.file_coord)
            self.logger.info('Coordinates imported successfully from the file %s', self.file_coord)
        else:
            self.logger.info('Coordinates in the data')
        if self.use_rated:
            try:
                self.rated = pd.read_csv(self.file_rated, header=None, index_col=0)
            except:
                self.logger.info('Cannot import Rated Power from the file %s', self.file_rated)
                raise IOError('Cannot import Rated Power from the file %s', self.file_rated)
            self.logger.info('Rated Power imported successfully from the file %s', self.file_rated)
        self.logger.info('Data loaded successfully')
def create_area(self, coord, resolution):
if self.nwp_resolution == 0.05:
levels = 4
round_coord = 1
else:
levels = 2
round_coord = 0
if coord!=None:
if isinstance(coord, list):
if len(coord)==2:
lat = coord[0]
long = coord[1]
lat_range = np.arange(np.around(lat, round_coord) - 20, np.around(lat, round_coord) + 20, resolution)
lat1 = lat_range[np.abs(lat_range - lat).argmin()]-self.nwp_resolution/10
lat2 = lat_range[np.abs(lat_range - lat).argmin()]+self.nwp_resolution/10
long_range = np.arange(np.around(long, round_coord) - 20, np.around(long, round_coord) + 20, resolution)
long1 = long_range[np.abs(long_range - long).argmin()] - self.nwp_resolution / 10
long2 = long_range[np.abs(long_range - long).argmin()] + self.nwp_resolution / 10
area=[[lat1 - self.nwp_resolution*levels, long1 - self.nwp_resolution*levels],
[lat2 + self.nwp_resolution*levels, long2 + self.nwp_resolution*levels]]
elif len(coord)==4:
area = list(np.array(coord).reshape(2,2))
else:
raise ValueError('Wrong coordinates. Should be point (lat, long) or area [lat1, long1, lat2, long2]')
elif isinstance(coord, dict):
area = dict()
for key, value in coord.items():
if len(value)==2:
lat = value[0]
long = value[1]
lat_range = np.arange(np.around(lat, round_coord) - 20, np.around(lat, round_coord) + 20,
resolution)
lat1 = lat_range[np.abs(lat_range - lat).argmin()] - self.nwp_resolution / 10
lat2 = lat_range[np.abs(lat_range - lat).argmin()] + self.nwp_resolution / 10
long_range = np.arange(np.around(long, round_coord) - 20, np.around(long, round_coord) + 20,
resolution)
long1 = long_range[np.abs(long_range - long).argmin()] - self.nwp_resolution / 10
long2 = long_range[np.abs(long_range - long).argmin()] + self.nwp_resolution / 10
area[key] = [[lat1 - self.nwp_resolution*levels, long1 - self.nwp_resolution*levels],
[lat2 + self.nwp_resolution*levels, long2 + self.nwp_resolution*levels]]
else:
area[key] = np.array(value).reshape(2,2)
else:
raise ValueError('Wrong coordinates. Should be dict or list')
else:
area = dict()
self.logger.info('Areas created succesfully')
return area
    def initialize(self):
        """Create per-project folder trees and persist each project's static data.

        Locates the coord/rated CSVs next to the data file, loads all data,
        validates project names, then for every project builds the
        model/backup/DATA/fuzzy_models directories and dumps a merged
        ``static_data`` dict (global config overridden by per-project keys)
        as pickle and as a human-readable text file.  Finally persists the
        whole list as ``static_data_projects.pickle`` in the group folder.

        Raises:
            IOError: if the coordinates file is required but missing.
            ValueError: if rated powers are required but missing.
        """
        # NOTE(review): data_file_name is computed but never used here.
        data_file_name = os.path.basename(self.file_data)
        # Prefer an auto-generated coordinates file when present.
        if os.path.exists(os.path.join(os.path.dirname(self.file_data), 'coord_auto_' + self.model_type + '.csv')):
            self.file_coord = os.path.join(os.path.dirname(self.file_data), 'coord_auto_' + self.model_type + '.csv')
        else:
            self.file_coord = os.path.join(os.path.dirname(self.file_data), 'coord_' + self.model_type + '.csv')
        if not os.path.exists(self.file_coord) and self.weather_in_data==False:
            raise IOError('File with coordinates does not exist')
        self.file_rated = os.path.join(os.path.dirname(self.file_data), 'rated_' + self.model_type + '.csv')
        if not os.path.exists(self.file_rated):
            # Rated power is mandatory for wind/pv projects (except APE_net).
            if self.model_type in {'wind', 'pv'} and self.projects_group not in {'APE_net'}:
                raise ValueError('Provide rated_power for each project. The type of projects is %s', self.model_type)
            self.use_rated = False
        else:
            self.use_rated = True
        self.load_data()
        self.group_static_data = []
        if self.check_project_names():
            for project_name in self.projects:
                # Folder layout: <group>/<project>/model_ver<N>/{DATA,fuzzy_models}
                # plus a parallel backup tree under <group>/backup_models/.
                path_project = self.path_group + '/' + project_name
                if not os.path.exists(path_project):
                    os.makedirs(path_project)
                path_backup = self.path_group + '/backup_models/' + project_name
                if not os.path.exists(path_backup):
                    os.makedirs(path_backup)
                path_model = path_project + '/model_ver' + str(self.version_model)
                if not os.path.exists(path_model):
                    os.makedirs(path_model)
                path_data = path_model + '/DATA'
                if not os.path.exists(path_data):
                    os.makedirs(path_data)
                path_fuzzy_models = path_model + '/fuzzy_models'
                if not os.path.exists(path_fuzzy_models):
                    os.makedirs(path_fuzzy_models)
                if self.use_rated:
                    # The group-total pseudo-project gets the sum of all rated powers.
                    if project_name == self.projects_group + '_' + self.model_type and project_name not in self.rated.index.to_list():
                        rated = self.rated.sum().to_list()[0]
                    else:
                        rated = self.rated.loc[project_name].to_list()[0]
                else:
                    rated = None
                if hasattr(self, 'coord'):
                    # Group-wide models get the full coordinate map; single
                    # projects get just their own [lat, long].
                    if project_name=='APE_net' or self.model_type=='load' or project_name == self.projects_group + '_' + self.model_type:
                        coord = dict()
                        for name, latlong in self.coord.iterrows():
                            coord[name] = latlong.values.tolist()
                    else:
                        coord = self.coord.loc[project_name].to_list()
                else:
                    coord = None
                area = self.create_area(coord, self.nwp_resolution)
                temp = {'_id': project_name,
                        'owner': self.project_owner,
                        'project_group': self.projects_group,
                        'type': self.model_type,
                        'location': coord,
                        'areas': area,
                        'rated': rated,
                        'path_project': path_project,
                        'path_model': path_model,
                        'version_group': self.version_group,
                        'version_model': self.version_model,
                        'path_backup': path_backup,
                        'path_data': path_data,
                        'pathnwp': self.path_nwp_group,
                        'path_fuzzy_models': path_fuzzy_models,
                        'run_on_platform': False,
                        }
                # Merge: global config first, per-project keys override.
                static_data=dict()
                for key, value in self.static_data.items():
                    static_data[key] = value
                for key, value in temp.items():
                    static_data[key] = value
                self.group_static_data.append({'_id' : project_name, 'static_data' : static_data})
                joblib.dump(static_data, os.path.join(path_model, 'static_data.pickle'))
                # Also write a human-readable dump of the merged configuration.
                with open(os.path.join(path_model, 'static_data.txt'), 'w') as file:
                    for k, v in static_data.items():
                        if not isinstance(v, dict):
                            file.write(str(k) + ' >>> ' + str(v) + '\n\n')
                        else:
                            file.write(str(k) + ' >>> ' + '\n')
                            for kk, vv in v.items():
                                file.write('\t' + str(kk) + ' >>> ' + str(vv) + '\n')
        joblib.dump(self.group_static_data, os.path.join(self.path_group, 'static_data_projects.pickle'))
        self.logger.info('Static data of all projects created')
def nwp_extractor(self, data):
static_data = copy.deepcopy(self.static_data)
data.index = data.index.astype(str)
static_data['data'] = data.to_dict()
client = rabbit_client_nwp()
return client.call(static_data)
def create_datasets(self, data, test=False):
static_data = copy.deepcopy(self.static_data)
data.index = data.index.astype(str)
static_data['data'] = data.to_dict()
static_data['test'] = test
static_data['group_static_data'] = self.group_static_data
client = rabbit_client_data()
return client.call(static_data)
    def create_projects_relations(self):
        """Link correlated projects so small ones can transfer-learn from a main one.

        Loads (or builds) the group static data, then — when transfer learning
        is enabled and there is more than one project — uses ``ProjectLinker``
        to group projects and marks every non-main project with
        ``transfer_learning=True`` and a reference to its main project.

        Raises:
            RuntimeError: if some project ends up in no transfer-learning group.
        """
        if os.path.exists(os.path.join(self.path_group, 'static_data_projects.pickle')):
            self.group_static_data = joblib.load(os.path.join(self.path_group, 'static_data_projects.pickle'))
        else:
            self.initialize()
        if self.static_data['enable_transfer_learning'] and len(self.group_static_data)>1:
            self.logger.info('Create projects relations')
            transfer_learning_linker = ProjectLinker(self.group_static_data)
            self.training_project_groups = transfer_learning_linker.find_relations()
            # Index real projects by id, excluding the group-total pseudo-project.
            projects = dict()
            for project in self.group_static_data:
                if project['_id'] != project['static_data']['projects_group'] + '_' + project['static_data']['type']:
                    projects[project['_id']] = project
            count_projects = 0
            for project_name, project in projects.items():
                if project_name not in self.training_project_groups.keys():
                    # A non-main project: find its group and point it at the main project.
                    for main_project, group in self.training_project_groups.items():
                        if project_name in group:
                            project['static_data']['transfer_learning'] = True
                            project['static_data']['tl_project'] = projects[main_project]
                            count_projects+=1
                else:
                    # Main projects count as covered but are not flagged here.
                    count_projects += 1
            if len(projects) != count_projects:
                raise RuntimeError('Some projects does not include in a transfer learning project group')
            # Propagate the flags back onto the authoritative list.
            # NOTE(review): main projects never get 'transfer_learning' /
            # 'tl_project' keys set above — confirm this loop cannot KeyError
            # for them.
            for project in self.group_static_data:
                if project['_id'] != project['static_data']['projects_group'] + '_' + project['static_data']['type']:
                    project['static_data']['transfer_learning'] = projects[project['_id']]['static_data']['transfer_learning']
                    project['static_data']['tl_project'] = projects[project['_id']]['static_data']['tl_project']
    def fit(self):
        """Train every project model, then run a transfer-learning pass.

        First pass trains each non-transfer-learning project (per model type,
        checking its datasets exist).  When transfer learning is enabled, a
        second pass trains the remaining untrained projects from their linked
        main project via ``train_TL``.

        Raises:
            ValueError: when datasets are missing or the model type is unknown.
        """
        for project in self.group_static_data:
            # Skip the group-total pseudo-project.
            if project['_id'] != project['static_data']['projects_group'] + '_' + project['static_data']['type']:
                project_model = ModelTrainManager(project['static_data']['path_model'])
                # if project_model.istrained == False:
                project_model.init(project['static_data'], self.data_variables)
                if self.model_type in {'wind', 'pv'}:
                    # wind/pv require the CNN dataset alongside X and y.
                    if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X.csv')) \
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y.csv'))\
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_cnn.pickle')):
                        if project['static_data']['transfer_learning'] == False:
                            self.logger.info('Start train project %s', project['_id'])
                            project_model.train()
                        # else:
                        #     project_model.train_TL(project['static_data']['tl_project']['static_data']['path_model'])
                    else:
                        raise ValueError('Cannot find project ', project['_id'], ' datasets')
                elif self.model_type in {'load'}:
                    # load models require the LSTM dataset instead of the CNN one.
                    if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X.csv')) \
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y.csv')) \
                            and os.path.exists(
                        os.path.join(project['static_data']['path_data'], 'dataset_lstm.pickle')):
                        self.logger.info('Start train project %s', project['_id'])
                        project_model.train()
                elif self.model_type in {'fa'}:
                    if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X.csv')) \
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y.csv')) \
                            and os.path.exists(
                        os.path.join(project['static_data']['path_data'], 'dataset_lstm.pickle')):
                        if project['static_data']['transfer_learning'] == False:
                            self.logger.info('Start train project %s', project['_id'])
                            project_model.train()
                else:
                    raise ValueError('Cannot recognize model type')
        if self.static_data['enable_transfer_learning']:
            # Second pass: train still-untrained projects from their main project.
            for project in self.group_static_data:
                if project['_id'] != project['static_data']['projects_group'] + '_' + project['static_data']['type']:
                    project_model = ModelTrainManager(project['static_data']['path_model'])
                    if project_model.istrained == False:
                        project_model.init(project['static_data'], self.data_variables)
                        if self.model_type in {'wind', 'pv'}:
                            if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X.csv')) \
                                    and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y.csv'))\
                                    and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_cnn.pickle')):
                                if project['static_data']['transfer_learning'] == True:
                                    self.logger.info('Start train project %s', project['_id'])
                                    project_model.train_TL(project['static_data']['tl_project']['static_data']['path_model'])
                            else:
                                raise ValueError('Cannot find project ', project['_id'], ' datasets')
                        else:
                            raise ValueError('Cannot recognize model type')
    def evaluate(self):
        """Evaluate every trained project model on the held-out evaluation data.

        Re-initializes the project tree, builds the test datasets from
        ``self.data_eval``, then runs ``evaluate_all`` for each project whose
        test datasets exist.

        Raises:
            ValueError: when datasets are missing or the model type is unknown.
            NotImplementedError: for 'load' models (no predict manager yet).
        """
        self.initialize()
        self.create_datasets(self.data_eval, test=True)
        for project in self.group_static_data:
            # Skip the group-total pseudo-project.
            if project['_id'] != project['static_data']['projects_group'] + '_' + project['static_data']['type']:
                project_model = ModelPredictManager(project['static_data']['path_model'])
                # if project_model.istrained == False:
                project_model.init(project['static_data'], self.data_variables)
                if self.model_type in {'wind', 'pv'}:
                    # wind/pv need X/y test CSVs plus the CNN test pickle.
                    if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X_test.csv')) \
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y_test.csv'))\
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_cnn_test.pickle')):
                        self.logger.info('Evaluate project %s', project['_id'])
                        project_model.evaluate_all()
                    else:
                        raise ValueError('Cannot find project ', project['_id'], ' datasets')
                elif self.model_type in {'load'}:
                    raise NotImplementedError('load model manager not implemented yet')
                elif self.model_type in {'fa'}:
                    # fa needs the LSTM test pickle instead of the CNN one.
                    if os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_X_test.csv')) \
                            and os.path.exists(os.path.join(project['static_data']['path_data'], 'dataset_y_test.csv')) \
                            and os.path.exists(
                        os.path.join(project['static_data']['path_data'], 'dataset_lstm_test.pickle')):
                        self.logger.info('Evaluate project %s', project['_id'])
                        project_model.evaluate_all()
                else:
                    raise ValueError('Cannot recognize model type')
| [
"joesider9@gmail.com"
] | joesider9@gmail.com |
0147192e5e7915f56773a85531b0aa0143af88c2 | 8d6ae21b78b3b40382e21198c571a7957e055be5 | /Aug20/projectEuler/utilities/numeric.py | 8c0bf7fb94084c1085ac1ceaf2dfa7ca1596a545 | [] | no_license | vj-reddy/PythonBatch1 | 6c1a429e0ac57ea1db7b04af18187e84cd52f2d5 | b86a5a16b1004d1e4f855a57b019704c71425bbf | refs/heads/master | 2023-03-16T06:05:48.104363 | 2020-10-16T13:55:03 | 2020-10-16T13:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | def is_divisible_by(number, factor):
"""
This method will return true if the number is divisible by factor
:param number: number
:param factor: factor
:return: true if divisible false other wise
"""
return number % factor == 0
def is_prime(number):
    """
    Determine whether the given number is prime by trial division.

    Fixes the original loop, which started at 0 (raising ZeroDivisionError on
    ``number % 0``) and included 1 (which divides everything, so every number
    was reported non-prime).  Candidate factors only need to run from 2 up to
    number // 2, and values below 2 are never prime.

    :param number: number on which the check has to be performed
    :return: True if prime, False otherwise
    """
    if number < 2:  # 0, 1 and negatives are not prime
        return False
    for candidate in range(2, (number // 2) + 1):
        if number % candidate == 0:  # found a proper divisor
            return False
    return True
| [
"qtdevops@gmail.com"
] | qtdevops@gmail.com |
75f825c43426ee4c037bbc78f9ee08315259bbd0 | 2afb1095de2b03b05c8b96f98f38ddeca889fbff | /web_scrapping/try_beautifulSoup.py | c821555604b1369258a76b38f3042aaacfd6ecbb | [] | no_license | draganmoo/trypython | 187316f8823296b12e1df60ef92c54b7a04aa3e7 | 90cb0fc8626e333c6ea430e32aa21af7d189d975 | refs/heads/master | 2023-09-03T16:24:33.548172 | 2021-11-04T21:21:12 | 2021-11-04T21:21:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,110 | py | # import requests
# from bs4 import BeautifulSoup
#
# url = "https://www.bilibili.com/video/av28951415?from=search&seid=12506065350425881349"
#
# r = requests.get(url)
# r_text = r.text
#
# soup = BeautifulSoup(r_text, "html.parser")
# ##查看第一个a标签
# a = soup.a
# print (a)
##查询a标签在bs4中的对象种类(tag,navigable string, beautifulsoup, comment)
# a_tage = soup.a
# print (type (a_tage))
##查询a标签下的href属性信息
# a_href = soup.a.attrs["href"]
# print (a_href)
##查看a标签的字符内容
# a_string = soup.a.string
# print (a_string)
##获取class为title所有a标签的title
# for string_content in soup.find_all ("a", class_="title"):
# print (string_content.get("title"))
##获取class为title所有a标签的href属性
# for link in soup.find_all ("a", class_="title"):
# print (link.get("href"))
##获取class为title所有a标签文本
# for string_content in soup.find_all ("a", class_="title"):
# print (string_content.get_text())
# Announce which page the scraper would fetch next.
page = "50"
print(f'downloading page # {page}')
| [
"13701304462@163.com"
] | 13701304462@163.com |
ee554ca6e66e18bfa4501978070d9dc44a2deb22 | 28f088b5356e66780c4bad204564bff92f910f02 | /src/python/pants/backend/awslambda/python/target_types_test.py | 446131aa8fa84fc5d0d5626d1eb5fac0367b19c9 | [
"Apache-2.0"
] | permissive | wonlay/pants | 57dcd99f82cdb2e37fcb7c563ec2bccf797ee7b7 | 53c66503b6898e83c9c9596e56cde5ad9ed6a0d3 | refs/heads/master | 2023-03-06T03:23:08.602817 | 2022-05-05T23:41:32 | 2022-05-05T23:41:32 | 24,695,709 | 0 | 0 | Apache-2.0 | 2023-03-01T11:59:58 | 2014-10-01T21:15:29 | Python | UTF-8 | Python | false | false | 11,377 | py | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import re
from textwrap import dedent
from typing import List, Optional
import pytest
from pants.backend.awslambda.python.target_types import (
InjectPythonLambdaHandlerDependency,
PythonAWSLambda,
PythonAwsLambdaDependencies,
PythonAwsLambdaHandlerField,
PythonAwsLambdaRuntime,
ResolvedPythonAwsHandler,
ResolvePythonAwsHandlerRequest,
)
from pants.backend.awslambda.python.target_types import rules as target_type_rules
from pants.backend.python.target_types import (
PexCompletePlatformsField,
PythonRequirementTarget,
PythonSourcesGeneratorTarget,
)
from pants.backend.python.target_types_rules import rules as python_target_types_rules
from pants.build_graph.address import Address
from pants.core.target_types import FileTarget
from pants.engine.internals.scheduler import ExecutionError
from pants.engine.target import InjectedDependencies, InvalidFieldException
from pants.testutil.rule_runner import QueryRule, RuleRunner, engine_error
@pytest.fixture
def rule_runner() -> RuleRunner:
    """Pytest fixture: a RuleRunner wired with the AWS lambda target-type rules."""
    rules = [
        *target_type_rules(),
        *python_target_types_rules(),
        QueryRule(ResolvedPythonAwsHandler, [ResolvePythonAwsHandlerRequest]),
        QueryRule(InjectedDependencies, [InjectPythonLambdaHandlerDependency]),
    ]
    target_types = [
        FileTarget,
        PythonAWSLambda,
        PythonRequirementTarget,
        PythonSourcesGeneratorTarget,
    ]
    return RuleRunner(rules=rules, target_types=target_types)
@pytest.mark.parametrize(
    ["runtime", "expected_major", "expected_minor"],
    (
        # The available runtimes at the time of writing.
        # See https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html.
        ["python2.7", 2, 7],
        ["python3.6", 3, 6],
        ["python3.7", 3, 7],
        ["python3.8", 3, 8],
    ),
)
def test_to_interpreter_version(runtime: str, expected_major: int, expected_minor: int) -> None:
    """Each supported runtime string maps to its (major, minor) version tuple."""
    field = PythonAwsLambdaRuntime(runtime, Address("", target_name="t"))
    assert field.to_interpreter_version() == (expected_major, expected_minor)
@pytest.mark.parametrize("invalid_runtime", ("python88.99", "fooobar"))
def test_runtime_validation(invalid_runtime: str) -> None:
    """Malformed runtime strings are rejected at field construction."""
    address = Address("", target_name="t")
    with pytest.raises(InvalidFieldException):
        PythonAwsLambdaRuntime(invalid_runtime, address)
@pytest.mark.parametrize("invalid_handler", ("path.to.lambda", "lambda.py"))
def test_handler_validation(invalid_handler: str) -> None:
    """Handlers without a `:func` suffix are rejected at field construction."""
    address = Address("", target_name="t")
    with pytest.raises(InvalidFieldException):
        PythonAwsLambdaHandlerField(invalid_handler, address)
@pytest.mark.parametrize(
    ["handler", "expected"],
    (("path.to.module:func", []), ("lambda.py:func", ["project/dir/lambda.py"])),
)
def test_handler_filespec(handler: str, expected: List[str]) -> None:
    """File-style handlers contribute a filespec; module-style handlers do not."""
    filespec = PythonAwsLambdaHandlerField(handler, Address("project/dir")).filespec
    assert {"includes": expected} == filespec
def test_resolve_handler(rule_runner: RuleRunner) -> None:
    """Handlers resolve to module paths; file-style handlers must match one file."""

    def assert_resolved(handler: str, *, expected: str, is_file: bool) -> None:
        # Write two candidate source files, then resolve `handler` against them.
        addr = Address("src/python/project")
        rule_runner.write_files(
            {"src/python/project/lambda.py": "", "src/python/project/f2.py": ""}
        )
        field = PythonAwsLambdaHandlerField(handler, addr)
        result = rule_runner.request(
            ResolvedPythonAwsHandler, [ResolvePythonAwsHandlerRequest(field)]
        )
        assert result.val == expected
        assert result.file_name_used == is_file
    # Module-style handlers pass through unchanged; file-style ones are
    # rewritten to the module path of the matched file.
    assert_resolved("path.to.lambda:func", expected="path.to.lambda:func", is_file=False)
    assert_resolved("lambda.py:func", expected="project.lambda:func", is_file=True)
    with engine_error(contains="Unmatched glob"):
        assert_resolved("doesnt_exist.py:func", expected="doesnt matter", is_file=True)
    # Resolving >1 file is an error.
    with engine_error(InvalidFieldException):
        assert_resolved("*.py:func", expected="doesnt matter", is_file=True)
def test_inject_handler_dependency(rule_runner: RuleRunner, caplog) -> None:
    """The handler field injects a dependency on the owning source target.

    Covers first-party (module and file syntax), third-party (module mapped
    from a python_requirement), unrecognized modules, ambiguous ownership
    (warn + inject nothing), disambiguation via ignores, source-root scoping,
    and disabling inference entirely.
    """
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(
                    name='ansicolors',
                    requirements=['ansicolors'],
                    modules=['colors'],
                )
                """
            ),
            "project/app.py": "",
            "project/ambiguous.py": "",
            "project/ambiguous_in_another_root.py": "",
            "project/BUILD": dedent(
                """\
                python_sources(sources=['app.py'])
                python_awslambda(name='first_party', handler='project.app:func', runtime='python3.7')
                python_awslambda(name='first_party_shorthand', handler='app.py:func', runtime='python3.7')
                python_awslambda(name='third_party', handler='colors:func', runtime='python3.7')
                python_awslambda(name='unrecognized', handler='who_knows.module:func', runtime='python3.7')
                python_sources(name="dep1", sources=["ambiguous.py"])
                python_sources(name="dep2", sources=["ambiguous.py"])
                python_awslambda(
                    name="ambiguous",
                    handler='ambiguous.py:func',
                    runtime='python3.7',
                )
                python_awslambda(
                    name="disambiguated",
                    handler='ambiguous.py:func',
                    runtime='python3.7',
                    dependencies=["!./ambiguous.py:dep2"],
                )
                python_sources(
                    name="ambiguous_in_another_root", sources=["ambiguous_in_another_root.py"]
                )
                python_awslambda(
                    name="another_root__file_used",
                    handler="ambiguous_in_another_root.py:func",
                    runtime="python3.7",
                )
                python_awslambda(
                    name="another_root__module_used",
                    handler="project.ambiguous_in_another_root:func",
                    runtime="python3.7",
                )
                """
            ),
            "src/py/project/ambiguous_in_another_root.py": "",
            "src/py/project/BUILD.py": "python_sources()",
        }
    )

    def assert_injected(address: Address, *, expected: Optional[Address]) -> None:
        # Request the injected deps for the lambda at `address` and compare.
        tgt = rule_runner.get_target(address)
        injected = rule_runner.request(
            InjectedDependencies,
            [InjectPythonLambdaHandlerDependency(tgt[PythonAwsLambdaDependencies])],
        )
        assert injected == InjectedDependencies([expected] if expected else [])
    assert_injected(
        Address("project", target_name="first_party"),
        expected=Address("project", relative_file_path="app.py"),
    )
    assert_injected(
        Address("project", target_name="first_party_shorthand"),
        expected=Address("project", relative_file_path="app.py"),
    )
    assert_injected(
        Address("project", target_name="third_party"),
        expected=Address("", target_name="ansicolors"),
    )
    assert_injected(Address("project", target_name="unrecognized"), expected=None)
    # Warn if there's ambiguity, meaning we cannot infer.
    caplog.clear()
    assert_injected(Address("project", target_name="ambiguous"), expected=None)
    assert len(caplog.records) == 1
    assert (
        "project:ambiguous has the field `handler='ambiguous.py:func'`, which maps to the Python "
        "module `project.ambiguous`"
    ) in caplog.text
    assert "['project/ambiguous.py:dep1', 'project/ambiguous.py:dep2']" in caplog.text
    # Test that ignores can disambiguate an otherwise ambiguous handler. Ensure we don't log a
    # warning about ambiguity.
    caplog.clear()
    assert_injected(
        Address("project", target_name="disambiguated"),
        expected=Address("project", target_name="dep1", relative_file_path="ambiguous.py"),
    )
    assert not caplog.records
    # Test that using a file path results in ignoring all targets which are not an ancestor. We can
    # do this because we know the file name must be in the current directory or subdir of the
    # `python_awslambda`.
    assert_injected(
        Address("project", target_name="another_root__file_used"),
        expected=Address(
            "project",
            target_name="ambiguous_in_another_root",
            relative_file_path="ambiguous_in_another_root.py",
        ),
    )
    caplog.clear()
    assert_injected(Address("project", target_name="another_root__module_used"), expected=None)
    assert len(caplog.records) == 1
    assert (
        "['project/ambiguous_in_another_root.py:ambiguous_in_another_root', 'src/py/project/"
        "ambiguous_in_another_root.py']"
    ) in caplog.text
    # Test that we can turn off the injection.
    rule_runner.set_options(["--no-python-infer-entry-points"])
    assert_injected(Address("project", target_name="first_party"), expected=None)
def test_at_least_one_target_platform(rule_runner: RuleRunner) -> None:
    """A python_awslambda must set `runtime`, `complete_platforms`, or both."""
    rule_runner.write_files(
        {
            "project/app.py": "",
            "project/platform-py37.json": "",
            "project/BUILD": dedent(
                """\
                python_awslambda(
                    name='runtime',
                    handler='project.app:func',
                    runtime='python3.7',
                )
                file(name="python37", source="platform-py37.json")
                python_awslambda(
                    name='complete_platforms',
                    handler='project.app:func',
                    complete_platforms=[':python37'],
                )
                python_awslambda(
                    name='both',
                    handler='project.app:func',
                    runtime='python3.7',
                    complete_platforms=[':python37'],
                )
                python_awslambda(
                    name='neither',
                    handler='project.app:func',
                )
                """
            ),
        }
    )
    # runtime only: complete_platforms stays unset.
    runtime = rule_runner.get_target(Address("project", target_name="runtime"))
    assert "python3.7" == runtime[PythonAwsLambdaRuntime].value
    assert runtime[PexCompletePlatformsField].value is None
    # complete_platforms only: runtime stays unset.
    complete_platforms = rule_runner.get_target(
        Address("project", target_name="complete_platforms")
    )
    assert complete_platforms[PythonAwsLambdaRuntime].value is None
    assert (":python37",) == complete_platforms[PexCompletePlatformsField].value
    # Both fields set simultaneously is allowed.
    both = rule_runner.get_target(Address("project", target_name="both"))
    assert "python3.7" == both[PythonAwsLambdaRuntime].value
    assert (":python37",) == both[PexCompletePlatformsField].value
    # Neither field set must fail target construction with a clear error.
    with pytest.raises(
        ExecutionError,
        match=r".*{}.*".format(
            re.escape(
                "InvalidTargetException: The `python_awslambda` target project:neither must "
                "specify either a `runtime` or `complete_platforms` or both."
            )
        ),
    ):
        rule_runner.get_target(Address("project", target_name="neither"))
| [
"noreply@github.com"
] | wonlay.noreply@github.com |
6af7ce031ab72dbf0aff50709a14055d44efc7f8 | 6206ad73052b5ff1b6690c225f000f9c31aa4ff7 | /Code/Optimal Account Balancing.py | 1e1d342648e9e3f972af5beca08a82012254c012 | [] | no_license | mws19901118/Leetcode | 7f9e3694cb8f0937d82b6e1e12127ce5073f4df0 | 752ac00bea40be1e3794d80aa7b2be58c0a548f6 | refs/heads/master | 2023-09-01T10:35:52.389899 | 2023-09-01T03:37:22 | 2023-09-01T03:37:22 | 21,467,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,380 | py | class Solution:
def minTransfers(self, transactions: List[List[int]]) -> int:
balance = collections.defaultdict(int) #Initialize final balances for each person.
for x, y, amount in transactions: #Traverse transcations and update balance.
balance[x] += amount
balance[y] -= amount
unsettled = [amount for amount in balance.values() if amount] #Filter out unsettled balances.
@cache #Cache result.
def dfs(mask: int) -> int: #DFS to find the max number of subgroups whose balance can be settled in current group.
if not mask: #If mask is 0, means no balance in current group, return 0.
return 0
balanceSum, result = 0, 0 #Initialize balance sum of current group and result.
for i in range(len(unsettled)): #Traverse all unsettled balance.
currentMask = 1 << i #Calculate mask for current balance.
if mask & currentMask: #If mask & currentMask, current balance is in current group.
balanceSum += unsettled[i] #Add its balance to balanceSum.
result = max(result, dfs(mask ^ currentMask)) #Keep DFS to calculate the result of removing current balance from current group and update result.
return result + (balanceSum == 0) #If balanceSum is 0, increase 1 to result and return because current group is already settled and removing a non zero balance will break it so the removed balance must belongs to a settled subgroup.
return len(unsettled) - dfs((1 << len(unsettled)) - 1) #The reason is that a settled group x balances needs x - 1 transactions to settle, so overall minimum transactions needed is the length of unsettled balance substract DFS result of all balances in one group.
| [
"noreply@github.com"
] | mws19901118.noreply@github.com |
8a03520a85137791b333c28877f03c52c71b60c5 | 9b2e4652ae8c1f3e49bb0d3e678e92d4a0645f13 | /tests/adapters/test_redirects.py | 574b6dc531f7e24a4bdf3fe25cbb526a76d61202 | [
"BSD-3-Clause"
] | permissive | realsby/httpcore | b2d9b220995a2aa191639e01b637c7fe59c13e3d | 8a227c1f2bd9b409602f5ed4e8ccd4c20245ad6a | refs/heads/master | 2020-05-18T20:21:29.837515 | 2019-05-02T11:11:05 | 2019-05-02T11:11:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,832 | py | import json
from urllib.parse import parse_qs
import pytest
from httpcore import (
URL,
Adapter,
RedirectAdapter,
RedirectBodyUnavailable,
RedirectLoop,
Request,
Response,
TooManyRedirects,
codes,
)
class MockDispatch(Adapter):
    """Test double that fabricates redirect responses keyed on the request path."""

    def prepare_request(self, request: Request) -> None:
        """No preparation is needed for the mock dispatcher."""
        pass

    async def send(self, request: Request, **options) -> Response:
        """Return a canned response for the given request's path."""
        path = request.url.path

        if path == "/redirect_301":
            headers = {"location": "https://example.org/"}
            return Response(codes.moved_permanently, headers=headers, request=request)

        if path == "/redirect_302":
            headers = {"location": "https://example.org/"}
            return Response(codes.found, headers=headers, request=request)

        if path == "/redirect_303":
            headers = {"location": "https://example.org/"}
            return Response(codes.see_other, headers=headers, request=request)

        if path == "/relative_redirect":
            return Response(codes.see_other, headers={"location": "/"}, request=request)

        if path == "/no_scheme_redirect":
            headers = {"location": "//example.org/"}
            return Response(codes.see_other, headers=headers, request=request)

        if path == "/multiple_redirects":
            # Count down via the ?count= query parameter until it reaches zero.
            params = parse_qs(request.url.query)
            count = int(params.get("count", "0")[0])
            remaining = count - 1
            status = codes.see_other if count else codes.ok
            location = "/multiple_redirects"
            if remaining:
                location += "?count=" + str(remaining)
            headers = {"location": location} if count else {}
            return Response(status, headers=headers, request=request)

        if path == "/redirect_loop":
            headers = {"location": "/redirect_loop"}
            return Response(codes.see_other, headers=headers, request=request)

        if path == "/cross_domain":
            headers = {"location": "https://example.org/cross_domain_target"}
            return Response(codes.see_other, headers=headers, request=request)

        if path == "/cross_domain_target":
            # Echo the received request headers so tests can inspect them.
            headers = dict(request.headers.items())
            content = json.dumps({"headers": headers}).encode()
            return Response(codes.ok, content=content, request=request)

        if path == "/redirect_body":
            await request.read()
            headers = {"location": "/redirect_body_target"}
            return Response(codes.permanent_redirect, headers=headers, request=request)

        if path == "/redirect_body_target":
            # Echo the received request body so tests can inspect it.
            content = await request.read()
            body = json.dumps({"body": content.decode()}).encode()
            return Response(codes.ok, content=body, request=request)

        return Response(codes.ok, content=b"Hello, world!", request=request)
@pytest.mark.asyncio
async def test_redirect_301():
    """A 301 (moved permanently) response is followed to its Location target."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request("POST", "https://example.org/redirect_301")
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_redirect_302():
    """A 302 (found) response is followed to its Location target."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request("POST", "https://example.org/redirect_302")
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_redirect_303():
    """A 303 (see other) response is followed to its Location target."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request("GET", "https://example.org/redirect_303")
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_disallow_redirects():
    """With allow_redirects=False the redirect response is returned as-is,
    and calling .next() explicitly follows a single redirect step."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request(
        "POST", "https://example.org/redirect_303", allow_redirects=False
    )
    assert response.status_code == codes.see_other
    assert response.url == URL("https://example.org/redirect_303")
    assert len(response.history) == 0
    response = await response.next()
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_relative_redirect():
    """A Location of "/" is resolved relative to the request's origin."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request("GET", "https://example.org/relative_redirect")
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_no_scheme_redirect():
    """A scheme-relative Location ("//host/") inherits the request's scheme."""
    client = RedirectAdapter(MockDispatch())
    response = await client.request("GET", "https://example.org/no_scheme_redirect")
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_fragment_redirect():
    """The URL fragment of the original request is preserved across a redirect."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/relative_redirect#fragment"
    response = await client.request("GET", url)
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/#fragment")
    assert len(response.history) == 1
@pytest.mark.asyncio
async def test_multiple_redirects():
    """A chain of redirects up to the limit (20) is followed, recording history."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/multiple_redirects?count=20"
    response = await client.request("GET", url)
    assert response.status_code == codes.ok
    assert response.url == URL("https://example.org/multiple_redirects")
    assert len(response.history) == 20
@pytest.mark.asyncio
async def test_too_many_redirects():
    """Exceeding the redirect limit (21 > 20) raises TooManyRedirects."""
    client = RedirectAdapter(MockDispatch())
    with pytest.raises(TooManyRedirects):
        await client.request("GET", "https://example.org/multiple_redirects?count=21")
@pytest.mark.asyncio
async def test_too_many_redirects_calling_next():
    """Manually stepping with .next() also enforces the redirect limit."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/multiple_redirects?count=21"
    response = await client.request("GET", url, allow_redirects=False)
    with pytest.raises(TooManyRedirects):
        while response.is_redirect:
            response = await response.next()
@pytest.mark.asyncio
async def test_redirect_loop():
    """A URL that redirects to itself raises RedirectLoop."""
    client = RedirectAdapter(MockDispatch())
    with pytest.raises(RedirectLoop):
        await client.request("GET", "https://example.org/redirect_loop")
@pytest.mark.asyncio
async def test_redirect_loop_calling_next():
    """Manually stepping with .next() also detects redirect loops."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/redirect_loop"
    response = await client.request("GET", url, allow_redirects=False)
    with pytest.raises(RedirectLoop):
        while response.is_redirect:
            response = await response.next()
@pytest.mark.asyncio
async def test_cross_domain_redirect():
    """The Authorization header is stripped when redirected to another host."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.com/cross_domain"
    headers = {"Authorization": "abc"}
    response = await client.request("GET", url, headers=headers)
    data = json.loads(response.content.decode())
    assert response.url == URL("https://example.org/cross_domain_target")
    assert data == {"headers": {}}
@pytest.mark.asyncio
async def test_same_domain_redirect():
    """The Authorization header is preserved when redirected within one host."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/cross_domain"
    headers = {"Authorization": "abc"}
    response = await client.request("GET", url, headers=headers)
    data = json.loads(response.content.decode())
    assert response.url == URL("https://example.org/cross_domain_target")
    assert data == {"headers": {"authorization": "abc"}}
@pytest.mark.asyncio
async def test_body_redirect():
    """A 308 redirect replays an in-memory request body at the new location."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/redirect_body"
    content = b"Example request body"
    response = await client.request("POST", url, content=content)
    data = json.loads(response.content.decode())
    assert response.url == URL("https://example.org/redirect_body_target")
    assert data == {"body": "Example request body"}
@pytest.mark.asyncio
async def test_cannot_redirect_streaming_body():
    """Redirecting a one-shot streaming body raises RedirectBodyUnavailable,
    since the generator cannot be replayed for the second request."""
    client = RedirectAdapter(MockDispatch())
    url = "https://example.org/redirect_body"

    async def streaming_body():
        yield b"Example request body"

    with pytest.raises(RedirectBodyUnavailable):
        await client.request("POST", url, content=streaming_body())
| [
"tom@tomchristie.com"
] | tom@tomchristie.com |
7ab3beaed3f6386264eb6ef9b7bfcc566325c83f | eef64f44003dff45287b487bc7a8da589d85d9cc | /chatbot/twitter_qa.py | 1ba8c671b2a055f4723ba61405dd3e29f7cbe936 | [
"Apache-2.0"
] | permissive | k8tems/TwitterQA | 387462c8e47e4c3dadeb9861e4009837e8b22f6b | 938fc29a050ab736d88446e9d794e2047850b4df | refs/heads/master | 2020-06-14T10:40:19.616309 | 2016-11-30T08:09:13 | 2016-11-30T08:09:13 | 75,195,384 | 0 | 1 | null | 2016-11-30T14:36:54 | 2016-11-30T14:36:53 | null | UTF-8 | Python | false | false | 3,924 | py | import json
import os
import re
import itertools
from TwitterAPI import TwitterAPI
with open("chatbot/credentials.json") as f:
credentials = json.load(f)
api = TwitterAPI(**credentials)
def get_tweets(screen_name, max_tweets=None):
    """Download up to ``max_tweets`` recent tweets for ``screen_name``.

    Pages backwards through the user timeline 200 tweets at a time and
    de-duplicates results by tweet id. The Twitter API caps reachable
    history at 3200 tweets regardless of the requested amount.
    """
    show = api.request("users/show", {"screen_name": screen_name}).json()
    # Fall back to the account's total tweet count when no limit is given.
    max_tweets = max_tweets or show.get("statuses_count")
    max_tweets = min(max_tweets, 3200)
    print("Gathering {} tweets. Through API, 3200 is max possible".format(max_tweets))
    user_tweets = []
    query_params = {"screen_name": screen_name, "max_id": None, "count": 200}
    # Sentinel value that can never equal a tweet id on the first pass.
    last_seen = True
    print("Gathering tweets for", screen_name)
    while True:
        try:
            r = api.request("statuses/user_timeline", query_params)
            timeline_tweets = r.json()
            # Stop when the API keeps returning the same oldest tweet,
            # i.e. no further history is available.
            if timeline_tweets[-1]['id'] == last_seen:
                break
            last_seen = timeline_tweets[-1]['id']
            user_tweets.extend(timeline_tweets)
            # Page backwards from the oldest tweet seen so far.
            query_params['max_id'] = timeline_tweets[-1]['id']
            print("latest ID", query_params['max_id'],
                  "number of new tweets", len(timeline_tweets))
        except Exception as e:
            # Best-effort scraping: log the error and try again.
            print("ERROR", e)
        if len(user_tweets) >= max_tweets:
            break
    # Drop duplicates (pages overlap at the max_id boundary).
    seen = set()
    tweets = []
    for x in user_tweets:
        if x['id'] not in seen:
            tweets.append(x)
            seen.add(x['id'])
    return tweets
def find_questions_for_tweets(tweets):
    """Pair each reply in ``tweets`` with the tweet it replied to.

    Fetches the replied-to ("origin") tweets in batches of 100 (the
    statuses/lookup limit) and returns two parallel lists:
    (origin tweet texts, reply tweet texts).
    """
    # Map replied-to status id -> the user's reply tweet.
    origins = {tweet['in_reply_to_status_id']: tweet
               for tweet in tweets if tweet.get('in_reply_to_status_id')}
    origin_gen = (x for x in origins)
    questions = []
    answers = []
    print("Getting original tweets to which <user> replied")
    while True:
        # statuses/lookup accepts at most 100 ids per call.
        orig = list(itertools.islice(origin_gen, 100))
        if not orig:
            break
        id_query = ",".join([str(x) for x in orig])
        orig_tweets = api.request("statuses/lookup", {"id": id_query}).json()
        for ot in orig_tweets:
            if ot['id'] in origins:
                questions.append(ot['text'])
                answers.append(origins[ot['id']]['text'])
        print("collected question/answer pairs", len(questions), len(answers))
    return questions, answers
def normalize_tweet(x):
    """Normalize a raw tweet for use as chatbot training text.

    Collapses whitespace, lowercases, replaces URLs / hashtags / mention
    runs with the placeholder tokens LINK / TAG / MENTION, separates
    punctuation from the preceding word, expands the "n't" contraction
    suffix to " not", and drops any leading MENTION placeholders.
    """
    x = " ".join(x.split())
    x = x.lower()
    x = re.sub("http[^ ]+", "LINK", x)
    x = re.sub("#[^ ]+", "TAG", x)
    x = re.sub("(@[^ ]+ )*@[^ ]+", "MENTION", x)
    # Collapse punctuation runs and pad them with a leading space.
    for punc in [".", ",", "?", "!"]:
        x = re.sub("[{}]+".format(punc), " " + punc, x)
    # NOTE: this expands "don't" -> "do not" but also "can't" -> "ca not";
    # kept as-is for backward compatibility with previously generated data.
    x = x.replace("n't", " not")
    x = " ".join(x.split())
    # Drop leading MENTION placeholders. The previous x.lstrip("MENTION ")
    # treated its argument as a *character set* {M,E,N,T,I,O,space}, which
    # also mangled other leading tokens (e.g. "TAG ..." became "AG ...").
    x = re.sub(r"^(?:MENTION\s*)+", "", x)
    return x.strip()
def get_tweet_qa(twitter_username, max_tweets=None, normalize_tweets=True):
    """Collect (question, answer) tweet-text pairs for a user.

    Downloads the user's tweets, pairs each reply with the tweet it
    answered, and (optionally) normalizes both sides of every pair.
    """
    raw_tweets = get_tweets(twitter_username, max_tweets)
    questions, answers = find_questions_for_tweets(raw_tweets)
    if not normalize_tweets:
        return questions, answers
    return ([normalize_tweet(q) for q in questions],
            [normalize_tweet(a) for a in answers])
def get_rate_limits():
    """Print the remaining timeline/lookup API quota.

    Returns True when both the user_timeline and users/lookup endpoints
    still have remaining calls, False otherwise.
    """
    rates = api.request("application/rate_limit_status").json()
    timeline = rates['resources']['statuses']['/statuses/user_timeline']
    lookup = rates['resources']['users']['/users/lookup']
    print("lookup", lookup)
    print("timeline", timeline)
    return timeline['remaining'] != 0 and lookup['remaining'] != 0
def store_question_answers(username, max_number=None):
    """Collect Q/A tweet pairs for ``username`` and save them to text files.

    Writes one tweet per line to data/tweets/<username>-questions.txt and
    data/tweets/<username>-answers.txt, then returns the (questions,
    answers) lists.
    """
    questions, answers = get_tweet_qa(username, max_number)
    # Support being run from either the project root or a subdirectory.
    d = "data/tweets/" if os.path.isdir("data/tweets") else "../data/tweets/"
    d += "{}-{}.txt"
    with open(d.format(username, "questions"), "w") as f:
        f.write("\n".join(questions))
    print("Saved", d.format(username, "questions"))
    with open(d.format(username, "answers"), "w") as f:
        f.write("\n".join(answers))
    print("Saved", d.format(username, "answers"))
    return questions, answers
| [
"kootenpv@gmail.com"
] | kootenpv@gmail.com |
edeff8f4740317cae44fe79e7ea6e421a6b1a75a | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-cognitiveservices-language-luis/azure/cognitiveservices/language/luis/runtime/models/composite_entity_model_py3.py | 45ac985fffc41176922ca0c663519fdecff97333 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 1,607 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CompositeEntityModel(Model):
    """LUIS Composite Entity.

    All required parameters must be populated in order to send to Azure.

    :param parent_type: Required. Type/name of parent entity.
    :type parent_type: str
    :param value: Required. Value for composite entity extracted by LUIS.
    :type value: str
    :param children: Required. Child entities.
    :type children:
     list[~azure.cognitiveservices.language.luis.runtime.models.CompositeChildModel]
    """

    # msrest validation constraints: all three fields are mandatory.
    _validation = {
        'parent_type': {'required': True},
        'value': {'required': True},
        'children': {'required': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'parent_type': {'key': 'parentType', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'children': {'key': 'children', 'type': '[CompositeChildModel]'},
    }

    def __init__(self, *, parent_type: str, value: str, children, **kwargs) -> None:
        super(CompositeEntityModel, self).__init__(**kwargs)
        self.parent_type = parent_type
        self.value = value
        self.children = children
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
679b965482332ab123db5c021f5f98ac1c527663 | bbc972524d9f0ccf95fbac30de2f7a7c0a1badf2 | /doc/tutorials/pymunk_platformer/pymunk_demo_platformer_12.py | 2002801e290e61fed6eb1ae237dd36e203f5baa6 | [
"MIT"
] | permissive | JFincher42/arcade | e0049b84b0317a0fd9eee3702e4e6e843cc2ebac | f9eebfc4c6989e0e99d7b6dfe0409f248bfd5a44 | refs/heads/master | 2021-06-29T20:41:31.850895 | 2021-03-18T14:43:01 | 2021-03-18T14:43:01 | 226,012,595 | 0 | 0 | NOASSERTION | 2021-03-18T14:45:09 | 2019-12-05T04:00:26 | Python | UTF-8 | Python | false | false | 21,621 | py | """
Example of Pymunk Physics Engine Platformer
"""
import math
from typing import Optional
import arcade
SCREEN_TITLE = "PyMunk Platformer"
# How big are our image tiles?
SPRITE_IMAGE_SIZE = 128
# Scale sprites up or down
SPRITE_SCALING_PLAYER = 0.5
SPRITE_SCALING_TILES = 0.5
# Scaled sprite size for tiles
SPRITE_SIZE = int(SPRITE_IMAGE_SIZE * SPRITE_SCALING_PLAYER)
# Size of grid to show on screen, in number of tiles
SCREEN_GRID_WIDTH = 25
SCREEN_GRID_HEIGHT = 15
# Size of screen to show, in pixels
SCREEN_WIDTH = SPRITE_SIZE * SCREEN_GRID_WIDTH
SCREEN_HEIGHT = SPRITE_SIZE * SCREEN_GRID_HEIGHT
# --- Physics forces. Higher number, faster accelerating.
# Gravity
GRAVITY = 1500
# Damping - Amount of speed lost per second
DEFAULT_DAMPING = 1.0
PLAYER_DAMPING = 0.4
# Friction between objects
PLAYER_FRICTION = 1.0
WALL_FRICTION = 0.7
DYNAMIC_ITEM_FRICTION = 0.6
# Mass (defaults to 1)
PLAYER_MASS = 2.0
# Keep player from going too fast
PLAYER_MAX_HORIZONTAL_SPEED = 450
PLAYER_MAX_VERTICAL_SPEED = 1600
# Force applied while on the ground
PLAYER_MOVE_FORCE_ON_GROUND = 8000
# Force applied when moving left/right in the air
PLAYER_MOVE_FORCE_IN_AIR = 900
# Strength of a jump
PLAYER_JUMP_IMPULSE = 1800
# Close enough to not-moving to have the animation go to idle.
DEAD_ZONE = 0.1
# Constants used to track if the player is facing left or right
RIGHT_FACING = 0
LEFT_FACING = 1
# How many pixels to move before we change the texture in the walking animation
DISTANCE_TO_CHANGE_TEXTURE = 20
# How much force to put on the bullet
BULLET_MOVE_FORCE = 4500
# Mass of the bullet
BULLET_MASS = 0.1
# Make bullet less affected by gravity
BULLET_GRAVITY = 300
class PlayerSprite(arcade.Sprite):
    """ Player Sprite.

    Holds the idle/jump/fall/walk/climb textures and updates the shown
    texture from the movement reported by the pymunk physics engine.
    """
    def __init__(self,
                 ladder_list: arcade.SpriteList,
                 hit_box_algorithm):
        """ Init.

        :param ladder_list: sprites the player can climb; collision with
            any of them switches the physics into "on ladder" mode.
        :param hit_box_algorithm: passed through to texture loading for
            the idle texture, which defines the sprite's hit box.
        """
        # Let parent initialize
        super().__init__()

        # Set our scale
        self.scale = SPRITE_SCALING_PLAYER

        # Images from Kenney.nl's Character pack
        # main_path = ":resources:images/animated_characters/female_adventurer/femaleAdventurer"
        main_path = ":resources:images/animated_characters/female_person/femalePerson"
        # main_path = ":resources:images/animated_characters/male_person/malePerson"
        # main_path = ":resources:images/animated_characters/male_adventurer/maleAdventurer"
        # main_path = ":resources:images/animated_characters/zombie/zombie"
        # main_path = ":resources:images/animated_characters/robot/robot"

        # Load textures for idle standing
        self.idle_texture_pair = arcade.load_texture_pair(f"{main_path}_idle.png",
                                                          hit_box_algorithm=hit_box_algorithm)
        self.jump_texture_pair = arcade.load_texture_pair(f"{main_path}_jump.png")
        self.fall_texture_pair = arcade.load_texture_pair(f"{main_path}_fall.png")

        # Load textures for walking
        self.walk_textures = []
        for i in range(8):
            texture = arcade.load_texture_pair(f"{main_path}_walk{i}.png")
            self.walk_textures.append(texture)

        # Load textures for climbing
        self.climbing_textures = []
        texture = arcade.load_texture(f"{main_path}_climb0.png")
        self.climbing_textures.append(texture)
        texture = arcade.load_texture(f"{main_path}_climb1.png")
        self.climbing_textures.append(texture)

        # Set the initial texture
        self.texture = self.idle_texture_pair[0]

        # Hit box will be set based on the first image used.
        self.hit_box = self.texture.hit_box_points

        # Default to face-right
        self.character_face_direction = RIGHT_FACING

        # Index of our current texture
        self.cur_texture = 0

        # How far have we traveled horizontally since changing the texture
        self.x_odometer = 0
        # Vertical distance traveled since the last climb-texture change
        self.y_odometer = 0

        self.ladder_list = ladder_list
        self.is_on_ladder = False

    def pymunk_moved(self, physics_engine, dx, dy, d_angle):
        """ Handle being moved by the pymunk engine.

        Called once per physics step with the distance moved; selects the
        facing direction, toggles ladder physics, and advances the
        walk/climb/jump/fall/idle animation accordingly.
        """
        # Figure out if we need to face left or right
        if dx < -DEAD_ZONE and self.character_face_direction == RIGHT_FACING:
            self.character_face_direction = LEFT_FACING
        elif dx > DEAD_ZONE and self.character_face_direction == LEFT_FACING:
            self.character_face_direction = RIGHT_FACING

        # Are we on the ground?
        is_on_ground = physics_engine.is_on_ground(self)

        # Are we on a ladder?
        if len(arcade.check_for_collision_with_list(self, self.ladder_list)) > 0:
            if not self.is_on_ladder:
                self.is_on_ladder = True
                # Disable gravity and add heavy damping while climbing.
                self.pymunk.gravity = (0, 0)
                self.pymunk.damping = 0.0001
                # NOTE(review): the vertical clamp uses the *horizontal* max
                # speed here, presumably to slow ladder climbing — confirm.
                self.pymunk.max_vertical_velocity = PLAYER_MAX_HORIZONTAL_SPEED
        else:
            if self.is_on_ladder:
                # Restore normal platformer physics when leaving the ladder.
                self.pymunk.damping = 1.0
                self.pymunk.max_vertical_velocity = PLAYER_MAX_VERTICAL_SPEED
                self.is_on_ladder = False
                self.pymunk.gravity = None

        # Add to the odometer how far we've moved
        self.x_odometer += dx
        self.y_odometer += dy

        if self.is_on_ladder and not is_on_ground:
            # Have we moved far enough to change the texture?
            if abs(self.y_odometer) > DISTANCE_TO_CHANGE_TEXTURE:

                # Reset the odometer
                self.y_odometer = 0

                # Advance the walking animation
                self.cur_texture += 1

            if self.cur_texture > 1:
                self.cur_texture = 0
            self.texture = self.climbing_textures[self.cur_texture]
            return

        # Jumping animation
        if not is_on_ground:
            if dy > DEAD_ZONE:
                self.texture = self.jump_texture_pair[self.character_face_direction]
                return
            elif dy < -DEAD_ZONE:
                self.texture = self.fall_texture_pair[self.character_face_direction]
                return

        # Idle animation
        if abs(dx) <= DEAD_ZONE:
            self.texture = self.idle_texture_pair[self.character_face_direction]
            return

        # Have we moved far enough to change the texture?
        if abs(self.x_odometer) > DISTANCE_TO_CHANGE_TEXTURE:

            # Reset the odometer
            self.x_odometer = 0

            # Advance the walking animation
            self.cur_texture += 1
            if self.cur_texture > 7:
                self.cur_texture = 0
            self.texture = self.walk_textures[self.cur_texture][self.character_face_direction]
class BulletSprite(arcade.SpriteSolidColor):
    """ Bullet Sprite """

    def pymunk_moved(self, physics_engine, dx, dy, d_angle):
        """ Handle when the sprite is moved by the physics engine. """
        # Once the bullet falls well below the screen it can never return,
        # so drop it from every sprite list to free it.
        fell_off_screen = self.center_y < -100
        if fell_off_screen:
            self.remove_from_sprite_lists()
class GameWindow(arcade.Window):
    """ Main Window """

    def __init__(self, width, height, title):
        """ Create the variables """

        # Init the parent class
        super().__init__(width, height, title)

        # Player sprite
        self.player_sprite: Optional[PlayerSprite] = None

        # Sprite lists we need
        self.player_list: Optional[arcade.SpriteList] = None
        self.wall_list: Optional[arcade.SpriteList] = None
        self.bullet_list: Optional[arcade.SpriteList] = None
        self.item_list: Optional[arcade.SpriteList] = None
        self.moving_sprites_list: Optional[arcade.SpriteList] = None
        self.ladder_list: Optional[arcade.SpriteList] = None

        # Track the current state of what key is pressed
        self.left_pressed: bool = False
        self.right_pressed: bool = False
        self.up_pressed: bool = False
        self.down_pressed: bool = False

        # Physics engine (created in setup()).
        # Bug fix: this previously read `self.physics_engine = Optional[...]`,
        # which assigned the typing construct itself instead of annotating
        # the attribute and initializing it to None.
        self.physics_engine: Optional[arcade.PymunkPhysicsEngine] = None

        # Set background color
        arcade.set_background_color(arcade.color.AMAZON)

    def setup(self):
        """ Set up everything with the game """

        # Create the sprite lists
        self.player_list = arcade.SpriteList()
        self.bullet_list = arcade.SpriteList()

        # Read in the tiled map
        map_name = "pymunk_test_map.tmx"
        my_map = arcade.tilemap.read_tmx(map_name)

        # Read in the map layers
        self.wall_list = arcade.tilemap.process_layer(my_map,
                                                      'Platforms',
                                                      SPRITE_SCALING_TILES,
                                                      hit_box_algorithm="Detailed")
        self.item_list = arcade.tilemap.process_layer(my_map,
                                                      'Dynamic Items',
                                                      SPRITE_SCALING_TILES,
                                                      hit_box_algorithm="Detailed")
        self.ladder_list = arcade.tilemap.process_layer(my_map,
                                                        'Ladders',
                                                        SPRITE_SCALING_TILES,
                                                        use_spatial_hash=True,
                                                        hit_box_algorithm="Detailed")

        # Create player sprite
        self.player_sprite = PlayerSprite(self.ladder_list, hit_box_algorithm="Detailed")

        # Set player location
        grid_x = 1
        grid_y = 1
        self.player_sprite.center_x = SPRITE_SIZE * grid_x + SPRITE_SIZE / 2
        self.player_sprite.center_y = SPRITE_SIZE * grid_y + SPRITE_SIZE / 2
        # Add to player sprite list
        self.player_list.append(self.player_sprite)

        # Moving Sprite
        self.moving_sprites_list = arcade.tilemap.process_layer(my_map,
                                                                'Moving Platforms',
                                                                SPRITE_SCALING_TILES)

        # --- Pymunk Physics Engine Setup ---

        # The default damping for every object controls the percent of velocity
        # the object will keep each second. A value of 1.0 is no speed loss,
        # 0.9 is 10% per second, 0.1 is 90% per second.
        # For top-down games, this is basically the friction for moving objects.
        # For platformers with gravity, this should probably be set to 1.0.
        # Default value is 1.0 if not specified.
        damping = DEFAULT_DAMPING

        # Set the gravity. (0, 0) is good for outer space and top-down.
        gravity = (0, -GRAVITY)

        # Create the physics engine
        self.physics_engine = arcade.PymunkPhysicsEngine(damping=damping,
                                                         gravity=gravity)

        def wall_hit_handler(bullet_sprite, _wall_sprite, _arbiter, _space, _data):
            """ Called for bullet/wall collision """
            bullet_sprite.remove_from_sprite_lists()

        self.physics_engine.add_collision_handler("bullet", "wall", post_handler=wall_hit_handler)

        def item_hit_handler(bullet_sprite, item_sprite, _arbiter, _space, _data):
            """ Called for bullet/item collision """
            bullet_sprite.remove_from_sprite_lists()
            item_sprite.remove_from_sprite_lists()

        self.physics_engine.add_collision_handler("bullet", "item", post_handler=item_hit_handler)

        # Add the player.
        # For the player, we set the damping to a lower value, which increases
        # the damping rate. This prevents the character from traveling too far
        # after the player lets off the movement keys.
        # Setting the moment to PymunkPhysicsEngine.MOMENT_INF prevents it from
        # rotating.
        # Friction normally goes between 0 (no friction) and 1.0 (high friction)
        # Friction is between two objects in contact. It is important to remember
        # in top-down games that friction moving along the 'floor' is controlled
        # by damping.
        self.physics_engine.add_sprite(self.player_sprite,
                                       friction=PLAYER_FRICTION,
                                       mass=PLAYER_MASS,
                                       moment=arcade.PymunkPhysicsEngine.MOMENT_INF,
                                       collision_type="player",
                                       max_horizontal_velocity=PLAYER_MAX_HORIZONTAL_SPEED,
                                       max_vertical_velocity=PLAYER_MAX_VERTICAL_SPEED)

        # Create the walls.
        # By setting the body type to PymunkPhysicsEngine.STATIC the walls can't
        # move.
        # Movable objects that respond to forces are PymunkPhysicsEngine.DYNAMIC
        # PymunkPhysicsEngine.KINEMATIC objects will move, but are assumed to be
        # repositioned by code and don't respond to physics forces.
        # Dynamic is default.
        self.physics_engine.add_sprite_list(self.wall_list,
                                            friction=WALL_FRICTION,
                                            collision_type="wall",
                                            body_type=arcade.PymunkPhysicsEngine.STATIC)

        # Create the items
        self.physics_engine.add_sprite_list(self.item_list,
                                            friction=DYNAMIC_ITEM_FRICTION,
                                            collision_type="item")

        # Add kinematic sprites
        self.physics_engine.add_sprite_list(self.moving_sprites_list,
                                            body_type=arcade.PymunkPhysicsEngine.KINEMATIC)

    def on_key_press(self, key, modifiers):
        """Called whenever a key is pressed. """

        if key == arcade.key.LEFT:
            self.left_pressed = True
        elif key == arcade.key.RIGHT:
            self.right_pressed = True
        elif key == arcade.key.UP:
            self.up_pressed = True
            # find out if player is standing on ground, and not on a ladder
            if self.physics_engine.is_on_ground(self.player_sprite) \
                    and not self.player_sprite.is_on_ladder:
                # She is! Go ahead and jump
                impulse = (0, PLAYER_JUMP_IMPULSE)
                self.physics_engine.apply_impulse(self.player_sprite, impulse)
        elif key == arcade.key.DOWN:
            self.down_pressed = True

    def on_key_release(self, key, modifiers):
        """Called when the user releases a key. """

        if key == arcade.key.LEFT:
            self.left_pressed = False
        elif key == arcade.key.RIGHT:
            self.right_pressed = False
        elif key == arcade.key.UP:
            self.up_pressed = False
        elif key == arcade.key.DOWN:
            self.down_pressed = False

    def on_mouse_press(self, x, y, button, modifiers):
        """ Called whenever the mouse button is clicked.

        Spawns a bullet at the player, aimed at the clicked point.
        """

        bullet = BulletSprite(20, 5, arcade.color.DARK_YELLOW)
        self.bullet_list.append(bullet)

        # Position the bullet at the player's current location
        start_x = self.player_sprite.center_x
        start_y = self.player_sprite.center_y
        bullet.position = self.player_sprite.position

        # Get from the mouse the destination location for the bullet
        # IMPORTANT! If you have a scrolling screen, you will also need
        # to add in self.view_bottom and self.view_left.
        dest_x = x
        dest_y = y

        # Do math to calculate how to get the bullet to the destination.
        # Calculation the angle in radians between the start points
        # and end points. This is the angle the bullet will travel.
        x_diff = dest_x - start_x
        y_diff = dest_y - start_y
        angle = math.atan2(y_diff, x_diff)

        # What is the 1/2 size of this sprite, so we can figure out how far
        # away to spawn the bullet
        size = max(self.player_sprite.width, self.player_sprite.height) / 2

        # Use angle to to spawn bullet away from player in proper direction
        bullet.center_x += size * math.cos(angle)
        bullet.center_y += size * math.sin(angle)

        # Set angle of bullet
        bullet.angle = math.degrees(angle)

        # Gravity to use for the bullet
        # If we don't use custom gravity, bullet drops too fast, or we have
        # to make it go too fast.
        # Force is in relation to bullet's angle.
        bullet_gravity = (0, -BULLET_GRAVITY)

        # Add the sprite. This needs to be done AFTER setting the fields above.
        self.physics_engine.add_sprite(bullet,
                                       mass=BULLET_MASS,
                                       damping=1.0,
                                       friction=0.6,
                                       collision_type="bullet",
                                       gravity=bullet_gravity,
                                       elasticity=0.9)

        # Add force to bullet
        force = (BULLET_MOVE_FORCE, 0)
        self.physics_engine.apply_force(bullet, force)

    def on_update(self, delta_time):
        """ Movement and game logic """

        is_on_ground = self.physics_engine.is_on_ground(self.player_sprite)
        # Update player forces based on keys pressed
        if self.left_pressed and not self.right_pressed:
            # Create a force to the left. Apply it.
            if is_on_ground or self.player_sprite.is_on_ladder:
                force = (-PLAYER_MOVE_FORCE_ON_GROUND, 0)
            else:
                force = (-PLAYER_MOVE_FORCE_IN_AIR, 0)
            self.physics_engine.apply_force(self.player_sprite, force)
            # Set friction to zero for the player while moving
            self.physics_engine.set_friction(self.player_sprite, 0)
        elif self.right_pressed and not self.left_pressed:
            # Create a force to the right. Apply it.
            if is_on_ground or self.player_sprite.is_on_ladder:
                force = (PLAYER_MOVE_FORCE_ON_GROUND, 0)
            else:
                force = (PLAYER_MOVE_FORCE_IN_AIR, 0)
            self.physics_engine.apply_force(self.player_sprite, force)
            # Set friction to zero for the player while moving
            self.physics_engine.set_friction(self.player_sprite, 0)
        elif self.up_pressed and not self.down_pressed:
            # Create a force to climb up. Apply it (ladder only).
            if self.player_sprite.is_on_ladder:
                force = (0, PLAYER_MOVE_FORCE_ON_GROUND)
                self.physics_engine.apply_force(self.player_sprite, force)
                # Set friction to zero for the player while moving
                self.physics_engine.set_friction(self.player_sprite, 0)
        elif self.down_pressed and not self.up_pressed:
            # Create a force to climb down. Apply it (ladder only).
            if self.player_sprite.is_on_ladder:
                force = (0, -PLAYER_MOVE_FORCE_ON_GROUND)
                self.physics_engine.apply_force(self.player_sprite, force)
                # Set friction to zero for the player while moving
                self.physics_engine.set_friction(self.player_sprite, 0)
        else:
            # Player's feet are not moving. Therefore up the friction so we stop.
            self.physics_engine.set_friction(self.player_sprite, 1.0)

        # Move items in the physics engine
        self.physics_engine.step()

        # For each moving sprite, see if we've reached a boundary and need to
        # reverse course.
        for moving_sprite in self.moving_sprites_list:
            if moving_sprite.boundary_right and \
                    moving_sprite.change_x > 0 and \
                    moving_sprite.right > moving_sprite.boundary_right:
                moving_sprite.change_x *= -1
            elif moving_sprite.boundary_left and \
                    moving_sprite.change_x < 0 and \
                    moving_sprite.left < moving_sprite.boundary_left:
                # Bug fix: this comparison was `left > boundary_left`, which
                # reversed a left-moving platform immediately rather than
                # when it actually crossed its left boundary.
                moving_sprite.change_x *= -1
            if moving_sprite.boundary_top and \
                    moving_sprite.change_y > 0 and \
                    moving_sprite.top > moving_sprite.boundary_top:
                moving_sprite.change_y *= -1
            elif moving_sprite.boundary_bottom and \
                    moving_sprite.change_y < 0 and \
                    moving_sprite.bottom < moving_sprite.boundary_bottom:
                moving_sprite.change_y *= -1

            # Figure out and set our moving platform velocity.
            # Pymunk uses velocity is in pixels per second. If we instead have
            # pixels per frame, we need to convert.
            velocity = (moving_sprite.change_x * 1 / delta_time, moving_sprite.change_y * 1 / delta_time)
            self.physics_engine.set_velocity(moving_sprite, velocity)

    def on_draw(self):
        """ Draw everything """
        arcade.start_render()
        self.wall_list.draw()
        self.ladder_list.draw()
        self.moving_sprites_list.draw()
        self.bullet_list.draw()
        self.item_list.draw()
        self.player_list.draw()

        # for item in self.player_list:
        #     item.draw_hit_box(arcade.color.RED)
        # for item in self.item_list:
        #     item.draw_hit_box(arcade.color.RED)
def main():
    """Create the game window, run its setup, and start the arcade event loop."""
    game_window = GameWindow(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
    game_window.setup()
    arcade.run()
# Run the game only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| [
"jfincher42@gmail.com"
] | jfincher42@gmail.com |
3593906c7a346a04d30bc4f47ee8e891762d98c1 | 5e01b849530ceac9f62ef2fb85497792bbe4c15a | /Jan10/naver_top10.py | dd1cf2fe92f9dd24cc756974a264eb7c6063d04f | [] | no_license | cheesecat47/ML_DL_Jan2020 | 2206599c0eb20eebdd152d1e3b27e72ffa2c6900 | 15bffd8c9c19d9ff2871aa7afe95607f95e491fe | refs/heads/master | 2021-07-16T18:10:20.609018 | 2021-01-19T00:48:01 | 2021-01-19T00:48:01 | 232,076,415 | 0 | 0 | null | 2020-01-28T04:58:03 | 2020-01-06T10:23:19 | Python | UTF-8 | Python | false | false | 478 | py | if __name__ == "__main__":
import requests
from bs4 import BeautifulSoup
url = 'https://www.naver.com/'
keyword = 'span.ah_k'
source = requests.get(url).text
# print('source -> ', source)
soup = BeautifulSoup(source, 'html.parser')
hotkeys = soup.select(keyword)
# print('hotkeys -> ', hotkeys)
index = 0
for key in hotkeys:
index += 1
print(str(index) + ': ' + key.text)
if index >= 10:
break
| [
"cheesecat47@gmail.com"
] | cheesecat47@gmail.com |
89cfad90c8c829befcf8ed40a0e51857fee6b9fd | 794f225c248e84b29f03e5ae472bd995b8dd86a4 | /mossbauer/line_shape/02-15-Na4FeCN6-4.py | 5f3a05d48508b29c46b40f7c6fc791c0b557f4b9 | [] | no_license | yuyichao/jlab2s13 | 1113a537bf9f1d44ff96324f290a16abf265fb20 | 9b09c3af9f4d3311633996635ccf75f04a97c117 | refs/heads/master | 2023-02-21T18:08:12.739055 | 2013-09-14T03:52:57 | 2013-09-14T03:52:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31 | py | ../all_data/02-15-Na4FeCN6-4.py | [
"yyc1992@gmail.com"
] | yyc1992@gmail.com |
824b3247e9500207e27b7026be68e30c9b0945f5 | bbe7d6d59ef6d7364ff06377df9658367a19c425 | /minigame/ClerkPurchase.py | 11eb86228f3c0a79bcb7b11df4bc25c5c43d2f20 | [
"Apache-2.0"
] | permissive | DedMemez/ODS-August-2017 | 1b45c912ad52ba81419c1596644d8db2a879bd9b | 5d6214732e3245f63bfa250e3e9c881cc2dc28ad | refs/heads/master | 2021-01-22T18:37:51.626942 | 2017-08-19T02:04:51 | 2017-08-19T02:04:51 | 100,762,513 | 0 | 8 | null | null | null | null | UTF-8 | Python | false | false | 2,295 | py | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.minigame.ClerkPurchase
from PurchaseBase import *
from toontown.toonbase import ToontownTimer
COUNT_UP_RATE = 0.15
DELAY_BEFORE_COUNT_UP = 1.25
DELAY_AFTER_COUNT_UP = 1.75
COUNT_DOWN_RATE = 0.075
DELAY_AFTER_COUNT_DOWN = 0.0
DELAY_AFTER_CELEBRATE = 3.0
class ClerkPurchase(PurchaseBase):
    """Gag-shop purchase screen shown while talking to a shop clerk.

    Extends PurchaseBase with a countdown timer and a "back to playground"
    button; ``remain`` is the number of seconds the player may keep shopping.
    """
    # State-machine mode used by PurchaseBase while this screen is active.
    activateMode = 'storePurchase'

    def __init__(self, toon, remain, doneEvent):
        PurchaseBase.__init__(self, toon, doneEvent)
        # Seconds left before the screen closes automatically.
        self.remain = remain

    def load(self):
        """Build the GUI: exit button plus the on-screen countdown timer."""
        purchaseModels = loader.loadModel('phase_4/models/gui/gag_shop_purchase_gui')
        PurchaseBase.load(self, purchaseModels)
        self.backToPlayground = DirectButton(parent=self.frame, relief=None, scale=1.04, pos=(0.71, 0, -0.045), image=(purchaseModels.find('**/PurchScrn_BTN_UP'), purchaseModels.find('**/PurchScrn_BTN_DN'), purchaseModels.find('**/PurchScrn_BTN_RLVR')), text=TTLocalizer.GagShopDoneShopping, text_fg=(0, 0.1, 0.7, 1), text_scale=0.05, text_pos=(0, 0.015, 0), command=self.__handleBackToPlayground)
        self.timer = ToontownTimer.ToontownTimer()
        self.timer.reparentTo(self.frame)
        self.timer.posInTopRightCorner()
        purchaseModels.removeNode()
        return

    def unload(self):
        """Tear down the GUI resources created in load()."""
        PurchaseBase.unload(self)
        del self.backToPlayground
        self.timer.destroy()
        del self.timer

    def __handleBackToPlayground(self):
        # Hide the inventory before leaving; status code 0 = player chose to exit.
        self.toon.inventory.reparentTo(hidden)
        self.toon.inventory.hide()
        self.handleDone(0)

    def __timerExpired(self):
        # Status code 2 = shopping time ran out.
        self.handleDone(2)

    def enterPurchase(self):
        """Show the purchase UI inside the inventory frame and start the countdown."""
        PurchaseBase.enterPurchase(self)
        self.backToPlayground.reparentTo(self.toon.inventory.storePurchaseFrame)
        self.pointDisplay.reparentTo(self.toon.inventory.storePurchaseFrame)
        self.statusLabel.reparentTo(self.toon.inventory.storePurchaseFrame)
        self.timer.countdown(self.remain, self.__timerExpired)

    def exitPurchase(self):
        """Reparent the UI elements back to this screen's own frame on exit."""
        PurchaseBase.exitPurchase(self)
        self.backToPlayground.reparentTo(self.frame)
        self.pointDisplay.reparentTo(self.frame)
        self.statusLabel.reparentTo(self.frame)
        self.ignore('purchaseStateChange')
"noreply@github.com"
] | DedMemez.noreply@github.com |
bf71278902d24a993bdc103a887f085dbdb8912a | 2729fff7cb053d2577985d38c8962043ee9f853d | /bokeh/models/scales.py | 0a037eca1337cf7ff71966fd9f2667aba5e93bf4 | [
"BSD-3-Clause"
] | permissive | modster/bokeh | 2c78c5051fa9cac48c8c2ae7345eafc54b426fbd | 60fce9003aaa618751c9b8a3133c95688073ea0b | refs/heads/master | 2020-03-29T01:13:35.740491 | 2018-09-18T06:08:59 | 2018-09-18T06:08:59 | 149,377,781 | 1 | 0 | BSD-3-Clause | 2018-09-19T02:02:49 | 2018-09-19T02:02:49 | null | UTF-8 | Python | false | false | 923 | py | '''
'''
from __future__ import absolute_import
from ..core.has_props import abstract
from .transforms import Transform
@abstract
class Scale(Transform):
    ''' Base class for ``Scale`` models that represent an invertible
    computation to be carried out on the client-side.

    JavaScript implementations should implement the following methods:

    .. code-block: coffeescript

        compute: (x) ->
            # compute the transform of a single value

        v_compute: (xs) ->
            # compute the transform of an array of values

        invert: (xprime) ->
            # compute the inverse transform of a single value

        v_invert: (xprimes) ->
            # compute the inverse transform of an array of values

    '''
    pass
class LinearScale(Scale):
    ''' Represent a linear scale transformation between ranges,
    computed on the client side.

    '''
    pass
class LogScale(Scale):
    ''' Represent a logarithmic scale transformation between ranges,
    computed on the client side.

    '''
    pass
class CategoricalScale(LinearScale):
    ''' Scale for categorical ranges; behaves as a linear scale over the
    synthetic numeric coordinates that categorical factors map to.

    '''
    pass
| [
"noreply@github.com"
] | modster.noreply@github.com |
25c6321f32ecd55981098fa2638aa02c4977a194 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/ae37519b7fd0b4361acf63a040329b1ef9200f17-<get_symbol>-bug.py | ecd07ea2a9e15c3c4a911abc5f699d4508e5a9c2 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,935 | py | def get_symbol(num_classes=1000, **kwargs):
data = mx.symbol.Variable(name='data')
conv1a_3_3 = ConvFactory(data=data, num_filter=32, kernel=(3, 3), stride=(2, 2))
conv2a_3_3 = ConvFactory(conv1a_3_3, 32, (3, 3))
conv2b_3_3 = ConvFactory(conv2a_3_3, 64, (3, 3), pad=(1, 1))
maxpool3a_3_3 = mx.symbol.Pooling(data=conv2b_3_3, kernel=(3, 3), stride=(2, 2), pool_type='max')
conv3b_1_1 = ConvFactory(maxpool3a_3_3, 80, (1, 1))
conv4a_3_3 = ConvFactory(conv3b_1_1, 192, (3, 3))
maxpool5a_3_3 = mx.symbol.Pooling(data=conv4a_3_3, kernel=(3, 3), stride=(2, 2), pool_type='max')
tower_conv = ConvFactory(maxpool5a_3_3, 96, (1, 1))
tower_conv1_0 = ConvFactory(maxpool5a_3_3, 48, (1, 1))
tower_conv1_1 = ConvFactory(tower_conv1_0, 64, (5, 5), pad=(2, 2))
tower_conv2_0 = ConvFactory(maxpool5a_3_3, 64, (1, 1))
tower_conv2_1 = ConvFactory(tower_conv2_0, 96, (3, 3), pad=(1, 1))
tower_conv2_2 = ConvFactory(tower_conv2_1, 96, (3, 3), pad=(1, 1))
tower_pool3_0 = mx.symbol.Pooling(data=maxpool5a_3_3, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type='avg')
tower_conv3_1 = ConvFactory(tower_pool3_0, 64, (1, 1))
tower_5b_out = mx.symbol.Concat(*[tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1])
net = repeat(tower_5b_out, 10, block35, scale=0.17, input_num_channels=320)
tower_conv = ConvFactory(net, 384, (3, 3), stride=(2, 2))
tower_conv1_0 = ConvFactory(net, 256, (1, 1))
tower_conv1_1 = ConvFactory(tower_conv1_0, 256, (3, 3), pad=(1, 1))
tower_conv1_2 = ConvFactory(tower_conv1_1, 384, (3, 3), stride=(2, 2))
tower_pool = mx.symbol.Pooling(net, kernel=(3, 3), stride=(2, 2), pool_type='max')
net = mx.symbol.Concat(*[tower_conv, tower_conv1_2, tower_pool])
net = repeat(net, 20, block17, scale=0.1, input_num_channels=1088)
tower_conv = ConvFactory(net, 256, (1, 1))
tower_conv0_1 = ConvFactory(tower_conv, 384, (3, 3), stride=(2, 2))
tower_conv1 = ConvFactory(net, 256, (1, 1))
tower_conv1_1 = ConvFactory(tower_conv1, 288, (3, 3), stride=(2, 2))
tower_conv2 = ConvFactory(net, 256, (1, 1))
tower_conv2_1 = ConvFactory(tower_conv2, 288, (3, 3), pad=(1, 1))
tower_conv2_2 = ConvFactory(tower_conv2_1, 320, (3, 3), stride=(2, 2))
tower_pool = mx.symbol.Pooling(net, kernel=(3, 3), stride=(2, 2), pool_type='max')
net = mx.symbol.Concat(*[tower_conv0_1, tower_conv1_1, tower_conv2_2, tower_pool])
net = repeat(net, 9, block8, scale=0.2, input_num_channels=2080)
net = block8(net, with_act=False, input_num_channel=2080)
net = ConvFactory(net, 1536, (1, 1))
net = mx.symbol.Pooling(net, kernel=(1, 1), global_pool=True, stride=(2, 2), pool_type='avg')
net = mx.symbol.Flatten(net)
net = mx.symbol.Dropout(data=net, p=0.2)
net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes)
softmax = mx.symbol.SoftmaxOutput(data=net, name='softmax')
return softmax | [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
c966e69945ab0e2351d0e63be5dcf607c2581dbe | 3b93fc92d4d95dab66438ebf221f6cb4745aac5f | /src/content/serializers.py | 5d6310607bff4b88a555a6a08f16a50ae05b552f | [
"MIT"
] | permissive | vollov/django-restful | f20fb1236e2c4d16c62f7f7a6318e842dac0b6ce | ee796ded68470fd1609a9313fbf21e89481bccce | refs/heads/master | 2021-01-10T01:32:42.643182 | 2016-02-23T15:18:44 | 2016-02-23T15:18:44 | 46,464,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | from django.contrib.auth.models import User, Group
from rest_framework import serializers
class PageSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer exposing a Page's url, title and creation timestamp."""
    # NOTE(review): ``Page`` is not imported in this module (only User/Group
    # are) -- presumably ``from .models import Page`` is missing; confirm.
    class Meta:
        model = Page
        fields = ('url', 'title', 'created_at')
| [
"dike.zhang@gmail.com"
] | dike.zhang@gmail.com |
e29f67bd34420ecb4d41e6b4df9f51e438faa20f | ca17bd80ac1d02c711423ac4093330172002a513 | /goodyhandy/SubsetII.py | a0dcac809e84135cf1f348571b84dae783ec73ff | [] | no_license | Omega094/lc_practice | 64046dea8bbdaee99d767b70002a2b5b56313112 | e61776bcfd5d93c663b247d71e00f1b298683714 | refs/heads/master | 2020-03-12T13:45:13.988645 | 2018-04-23T06:28:32 | 2018-04-23T06:28:32 | 130,649,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | class Solution(object):
def subsetHelper(self, currentLst, remainLst, k,solution):
if len(currentLst) == k:
solution.append(currentLst)
return
prev = None
for i in xrange(0, len(remainLst)):
if remainLst[i] == prev: continue
prev = remainLst[i]
self.subsetHelper(currentLst + [remainLst[i]], remainLst[i+1:], k , solution)
return
def subsetsWithDup(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
nums.sort()
solution = [[]]
k = len(nums)
for i in xrange(1,k + 1):
self.subsetHelper([], nums, i, solution)
return solution
| [
"zhao_j1@denison.edu"
] | zhao_j1@denison.edu |
10460c5c619b4240ff0b99f145ead047cb018d65 | 4c601eaa346e660c296e270cc2d79aea9a3721fe | /homeassistant/components/nexia/entity.py | 33962bb11c09d49123998860af8903052ae16907 | [
"Apache-2.0"
] | permissive | basnijholt/home-assistant | f55110af9ff602274c0a929c7298ef97a0ef282f | ba55b4b8338a2dc0ba3f1d750efea49d86571291 | refs/heads/dev | 2023-01-21T11:53:52.621353 | 2020-08-08T15:03:06 | 2020-08-08T15:03:06 | 220,313,680 | 5 | 1 | Apache-2.0 | 2023-01-13T06:04:49 | 2019-11-07T19:29:54 | Python | UTF-8 | Python | false | false | 3,620 | py | """The nexia integration base entity."""
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import (
ATTRIBUTION,
DOMAIN,
MANUFACTURER,
SIGNAL_THERMOSTAT_UPDATE,
SIGNAL_ZONE_UPDATE,
)
class NexiaEntity(Entity):
    """Common base for all Nexia entities backed by an update coordinator."""

    def __init__(self, coordinator, name, unique_id):
        """Remember the coordinator plus this entity's name and unique id."""
        super().__init__()
        self._coordinator = coordinator
        self._name = name
        self._unique_id = unique_id

    @property
    def should_poll(self):
        """Polling is unnecessary; the coordinator pushes updates."""
        return False

    @property
    def available(self):
        """Available while the coordinator keeps refreshing successfully."""
        return self._coordinator.last_update_success

    @property
    def unique_id(self):
        """Unique id of this entity."""
        return self._unique_id

    @property
    def name(self):
        """Display name of this entity."""
        return self._name

    @property
    def device_state_attributes(self):
        """Expose the data-source attribution."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}

    async def async_added_to_hass(self):
        """Register for coordinator updates; deregister on entity removal."""
        remove_listener = self._coordinator.async_add_listener(
            self.async_write_ha_state
        )
        self.async_on_remove(remove_listener)
class NexiaThermostatEntity(NexiaEntity):
    """Base class for nexia devices attached to a thermostat."""

    def __init__(self, coordinator, thermostat, name, unique_id):
        """Initialize the entity and remember its backing thermostat object."""
        super().__init__(coordinator, name, unique_id)
        self._thermostat = thermostat

    @property
    def device_info(self):
        """Return the device_info of the device."""
        # One Home Assistant device per physical thermostat.
        return {
            "identifiers": {(DOMAIN, self._thermostat.thermostat_id)},
            "name": self._thermostat.get_name(),
            "model": self._thermostat.get_model(),
            "sw_version": self._thermostat.get_firmware(),
            "manufacturer": MANUFACTURER,
        }

    async def async_added_to_hass(self):
        """Listen for signals for services."""
        await super().async_added_to_hass()
        # Refresh state whenever this specific thermostat signals an update;
        # the listener is removed automatically when the entity is removed.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_THERMOSTAT_UPDATE}-{self._thermostat.thermostat_id}",
                self.async_write_ha_state,
            )
        )
class NexiaThermostatZoneEntity(NexiaThermostatEntity):
    """Base class for nexia devices attached to a thermostat zone."""

    def __init__(self, coordinator, zone, name, unique_id):
        """Initialize the entity for a single zone of a thermostat."""
        super().__init__(coordinator, zone.thermostat, name, unique_id)
        self._zone = zone

    @property
    def device_info(self):
        """Return the device_info of the device."""
        # Start from the thermostat's device info, then override with the
        # zone's identity and link back to the parent thermostat device.
        data = super().device_info
        data.update(
            {
                "identifiers": {(DOMAIN, self._zone.zone_id)},
                "name": self._zone.get_name(),
                "via_device": (DOMAIN, self._zone.thermostat.thermostat_id),
            }
        )
        return data

    async def async_added_to_hass(self):
        """Listen for signals for services."""
        await super().async_added_to_hass()
        # Also refresh when this zone (not just the thermostat) updates.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_ZONE_UPDATE}-{self._zone.zone_id}",
                self.async_write_ha_state,
            )
        )
| [
"noreply@github.com"
] | basnijholt.noreply@github.com |
da425728c015d049bacf282add75708ba0065c04 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02552/s287816980.py | bb5d69ee25ca2f4ab1965855322a65093f5f88bf | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39 | py | x = int(input())
print(0 if x else 1)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
5ce9615a4c88d5518f60ae94f94389d009de2e7d | ad6f20ca36dc65e34b43c69db66f383554718fed | /matrix/MaximumSizeSquareSubMatrixWithAll1s.py | e71c6b396ccd19a920cd3ae3144e2eadfef0eb5e | [] | no_license | atulanandnitt/questionsBank | 3df734c7389959801ab6447c0959c85f1013dfb8 | 477accc02366b5c4507e14d2d54850a56947c91b | refs/heads/master | 2021-06-11T21:39:24.682159 | 2021-05-06T17:54:18 | 2021-05-06T17:54:18 | 175,861,522 | 0 | 1 | null | 2020-05-02T09:26:25 | 2019-03-15T17:10:06 | Python | UTF-8 | Python | false | false | 983 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 26 19:29:19 2018
@author: Atul Anand
"""
def MaximumSizeSquareSubMatrixWithAll1s(mat):
    """Dynamic-programming table for the maximal all-ones square problem.

    solMat[i][j] is the side length of the largest square of 1s whose
    bottom-right corner is at (i, j); the global maximum of solMat is the
    answer.  Prints and returns solMat (the previous version returned None).
    """
    rows, cols = len(mat), len(mat[0])
    solMat = [[0] * cols for _ in range(rows)]
    for i in range(rows):
        for j in range(cols):
            if i == 0 or j == 0 or mat[i][j] == 0:
                # First row/column: a square can be at most the cell itself.
                solMat[i][j] = mat[i][j]
            else:
                # BUG FIX: the recurrence must take the MIN of the three
                # neighbouring *solution* cells, not the MAX of the raw
                # input cells; the old code overcounted square sizes.
                solMat[i][j] = min(solMat[i - 1][j - 1],
                                   solMat[i - 1][j],
                                   solMat[i][j - 1]) + 1
    print(solMat)
    return solMat
# Ad-hoc demo input: a 6x5 binary matrix.
mat=[ [0 ,1, 1,0,1],
[1,1,0,1,0],
[0,1,1,1,0],
[1,1,1,1,0],
[1,1,1,0,1],
[0,0,0,0,0]]
# NOTE(review): this solMat is never used -- the function builds its own --
# and its dimensions are transposed (len(mat[0]) rows by len(mat) columns).
solMat=[[0 for j in range(len(mat))] for i in range(len(mat[0]))]
print(solMat)
MaximumSizeSquareSubMatrixWithAll1s(mat)
"atul.anand.nitt@gmail.com"
] | atul.anand.nitt@gmail.com |
a7cd0fc7c724c7705a289d6cdd963479b3160bdf | 358519772669c73092f625f630722c38e1d33783 | /DatabaseTopology/Force/AbstractAngle.py | c9c4fb2ea68e3a0faa2f0df8796e1238f9370815 | [] | no_license | minghao2016/mmtools | e7e61aca084498408ceae965dd6c9450ad89eafa | 3ade988afb51cd54ee5a4067d8deaad88afbb0fe | refs/heads/master | 2021-09-21T01:02:22.522187 | 2014-09-19T03:40:03 | 2014-09-19T03:40:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | class AbstractAngle(object):
def __init__(self, atom1, atom2, atom3):
self.atom1 = atom1
self.atom2 = atom2
self.atom3 = atom3
def __eq__(self, object):
if ((self.atom1 == object.atom1
and self.atom2 == object.atom2
and self.atom3 == object.atom3)
or
(self.atom1 == object.atom3
and self.atom2 == object.atom2
and self.atom3 == object.atom1)):
return True
else:
return False
def __hash__(self):
return hash(tuple([self.atom1, self.atom2, self.atom3]))
| [
"choderaj@mskcc.org"
] | choderaj@mskcc.org |
314741efab3731b5875ac8b4f6eb9a3c6dfd630c | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5765824346324992_0/Python/xldrx/b_small.py | 602f0976eae08d0b2671638f6d4a89b2f1cbb49a | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py | #! /usr/bin/env python -u
# coding=utf-8
import sys
__author__ = 'xl'
def find_n(m_time):
    """Count how many haircuts all barbers in the global M complete by m_time.

    NOTE(review): written for Python 2 -- relies on integer ``/`` being
    floor division; under Python 3 this would yield floats.
    """
    global M
    ans = 0
    for m in M:
        ans += m_time / m
    return ans
def find_next_time(m_time):
    """Return the earliest time >= m_time at which some barber in M is free.

    (m - m_time % m) % m is the wait until barber ``m`` next finishes;
    1e6 is just a large sentinel for the running minimum (``i`` is unused).
    """
    global M
    ans = 1e6
    for i, m in enumerate(M):
        ans = min((m - m_time % m) % m, ans)
    return ans + m_time
if __name__ == "__main__":
    # Google Code Jam driver: reads cases from B.in, writes answers to B.out.
    # NOTE(review): Python 2 code (print statements, integer division,
    # list-returning map()); it will not run unmodified on Python 3.
    fp = open("B.in")
    sys.stdout = open("B.out", "w")
    # fp = sys.stdin
    T = int(fp.readline())
    for t in range(T):
        global M  # NOTE(review): ``global`` at module level is a no-op.
        B, N = map(int, fp.readline().split())
        M = map(int, fp.readline().split())
        # Binary-search the time by which N-1 customers have been served.
        min_time = 0
        max_time = max(M) * N
        p_time = -1
        time = -2
        while p_time != time:
            p_time = time
            time = (max_time + min_time) / 2
            n = find_n(time)
            if n == N - 1:
                break
            elif n > N - 1:
                max_time = time
            else:
                min_time = time
        # Walk forward through barber-finish times until the customer's turn;
        # ans becomes the index of the barber that serves customer N.
        ans = -1
        index = N - n - 1
        while ans < 0:
            next_t = find_next_time(time)
            for i, m in enumerate(M):
                if next_t % m == 0:
                    if index == 0:
                        ans = i
                        break
                    else:
                        index -= 1
            time = next_t + 1
        print "Case #%s: %s" % (t + 1, ans + 1)
"eewestman@gmail.com"
] | eewestman@gmail.com |
2f2653ba38a9974a5cb6175428c4f23170ab8e86 | c2ee9d6d84e2270ba4c9d6062460a2be0ff5f19c | /205. Isomorphic Strings.py | 771d7e09dcbe8419877d10b7e415c0b9bf96c449 | [] | no_license | Peiyu-Rang/LeetCode | 0dd915638e8c41c560952d86b4047c85b599d630 | f79886ed3022664c3291e4e78129bd8d855cf929 | refs/heads/master | 2021-11-27T23:48:39.946840 | 2021-11-09T12:47:48 | 2021-11-09T12:47:48 | 157,296,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Aug 16 17:43:45 2020
@author: Caven
"""
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        """Return True when the characters of s map one-to-one onto t.

        A single pass maintains both the forward (s -> t) and reverse
        (t -> s) mappings; any conflict in either direction means the
        strings are not isomorphic.
        """
        forward, backward = {}, {}
        for cs, ct in zip(s, t):
            if forward.setdefault(cs, ct) != ct:
                return False
            if backward.setdefault(ct, cs) != cs:
                return False
        return True
"prang3@gatech.edu"
] | prang3@gatech.edu |
7f429adacf2c1c7bbf71a839e4fb77302477a243 | aa265e03e73f718d4008cfe30ada7ee32c852eec | /ABC_C/ABC195_C.py | 2c195025d72f69ce4a0f715bd0d10c3639be8ffd | [
"MIT"
] | permissive | ryosuke0825/atcoder_python | 4fb9de9733cd9ef41c2ad9ad38b3f190f49d3ad5 | 52d037d0bc9ef2c721bf2958c1c2ead558cb0cf5 | refs/heads/master | 2023-03-11T22:47:56.963089 | 2023-03-05T01:21:06 | 2023-03-05T01:21:06 | 181,768,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | N = int(input())
ans = 0
for i in range(3, 16, 3):
A = 10**i
if N >= A:
ans += N-(A-1)
print(ans)
| [
"ayakobon@gmail.com"
] | ayakobon@gmail.com |
32d2f686c52afd000a755700d21c23ebcfedfdd7 | d8cf93900e6d86240ceb7643fd78bd2841b38152 | /test/str_mainpulation_test/test_Unicode_bytes_bytearray.py | 874ebae0cd1e269f91aee5e9a376b059d0e359db | [] | no_license | Onebigbera/Daily_Practice | 165cee0ee7883b90bcf126b23ff993fed0ceffef | 8f1018a9c1e17c958bce91cbecae88b0bb3c946b | refs/heads/master | 2020-04-09T01:20:48.857114 | 2019-01-03T03:24:59 | 2019-01-03T03:24:59 | 159,900,636 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,129 | py | # -*-coding:utf-8 -*-
# File :test_Unicode_bytes_bytearray.py
# Author:George
# Date : 2019/1/3
# motto: Someone always give up while someone always try!
# Python字符串使用Unicode编码来表示文本,大致而言,每个Unicode字符都用一个码点(code point)表示,而
# 码点是Unicode标准给每个字符指定的数字。这让你能够以任何现
# 代软件都能识别的方式表示129个文字系统中的12万个以上的字
# 符。当然,鉴于计算机键盘不可能包含几十万个键,因此有一种指
# 定Unicode字符的通用机制:使用16或32位的十六进制字面量(分
# 别加上前缀\u或\U)或者使用字符的Unicode名称(\N{name})。
# 详情参考 http://unicode-table.com
"""
教材上演示代码
"""
import math
print("\u00C6")
# print("\U0001F60A")
cat = "This is a cat:\N{cat}"
print(cat)
print("\U0001F60A")
"""
使用 ASCLL 、UTF-8、UTF-32编码将字符串转换为bytes
"""
"""
为了实现多文字符号的实现和内存的浪费,Python中使用可变长度编码来编码字符即对于不同的字符,使用不同数量的字节进行编码。这种
编码方式主要出自计算机先锋Kenneth Thompson之手。通过使用这
种编码,可节省占用的空间,就像摩尔斯码使用较少的点和短线表
示常见的字母,从而减少工作量一样 。具体地说,进行单字节编
码时,依然使用ASCII编码,以便与较旧的系统兼容;但对于不在
这个范围内的字符,使用多个字节(最多为6个)进行编码。下面
来使用ASCII、UTF-8和UTF-32编码将字符串转换为bytes。
"""
str = "Hello, world!"
print(str.encode("ASCII"))
print(str.encode("UTF-8"))
print(str.encode("UTF-32"))
# 比较相同字符串经过编码方式编码后的长度对比
str = "How long is this?"
print(len(str.encode("UTF-8"))) # 17
print(len(str.encode("UTF-32"))) # 72
"""
可不使用方法encode和decode,而直接创建bytes和str(即字符
串)对象,如下所示:
"""
# string = bytes(("Hællå, wørld!", encoding='utf-8')
# string = str(b'H\xc3\xa6ll\xc3\xa5, w\xc3\xb8rld!', encoding="utf-8")
"""
Python还提供了bytearray,它是bytes的可变版。从某种
意义上说,它就像是可修改的字符串——常规字符串是不能修改
的。然而,bytearray其实是为在幕后使用而设计的,因此作为类
字符串使用时对用户并不友好。例如,要替换其中的字符,必须将
其指定为0~255的值。因此,要插入字符,必须使用ord获取其序
数值(ordinal value)
"""
x = bytearray(b"Hello!")
x[1] = ord(b"u")
print(x)
print(abs(-43))
print(float(454.34))
"""
四舍五入
int() 获取数字的整数部分
math.floor() 获取数字的整数部分(不大于该数的整数)
math.ceil() 获取不小于该数的整数
round() 四舍五入 当小数位5时 取偶数
"""
print(int(3.1))
print(int(3.9))
print(math.floor(3.1))
print(math.floor(3.9))
print(math.ceil(3.1))
print(math.ceil(3.9))
print(round(3.1))
print(round(3.5))
print(round(3.9)) | [
"2578288992@qq.com"
] | 2578288992@qq.com |
f897cb71f4618b8576015ce323dfbf1a9d1943b8 | 14252ea933a08056363230c6df89223b996a0da2 | /app/users/models.py | 4f39bdd9a656c75d57734bd0ce1fd9a66d26467e | [
"MIT"
] | permissive | S3Infosoft/mvr-insights | eeb02aa2e6767e6a23818d4e09f7be7ce29f80cb | ac73feff03c1592d5efd8e0b82f72dd4dbd3e921 | refs/heads/master | 2020-05-29T14:08:11.070784 | 2020-04-23T19:46:57 | 2020-04-23T19:46:57 | 189,184,619 | 0 | 1 | MIT | 2020-04-23T19:46:58 | 2019-05-29T08:35:56 | CSS | UTF-8 | Python | false | false | 6,123 | py | from django.db import models
from django.urls import reverse
from django.contrib.auth import base_user, models as auth_models
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from django.core.files.base import ContentFile
from django.db.models.signals import post_save
from PIL import Image
class CustomUserManager(base_user.BaseUserManager):
    """Manager for CustomUser: accounts are identified by email + password."""

    def create_user(self, email, password, **extra_fields):
        """Create and persist a user with the given email and password.

        Raises ValueError when no email is supplied.
        """
        if not email:
            raise ValueError(_("Email must entered to create a user"))
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, password, **extra_fields):
        """Create a superuser; is_active/is_staff/is_superuser default to True.

        Raises ValueError if the caller explicitly passes is_staff=False or
        is_superuser=False.
        """
        extra_fields.setdefault("is_active", True)
        extra_fields.setdefault("is_staff", True)
        extra_fields.setdefault("is_superuser", True)
        if not extra_fields.get("is_staff"):
            raise ValueError(_("Superuser must have is_staff=True"))
        if not extra_fields.get("is_superuser"):
            raise ValueError(_("Superuser must have is_superuser=True"))
        return self.create_user(email, password, **extra_fields)
def save_image(instance, filename):
    """Upload path for a user's profile image: profile/<user id>/<date>.<ext>."""
    extension = filename.rsplit(".", 1)[-1]
    dated_name = f"{now().date()}.{extension}"
    return f"profile/{instance.id}/{dated_name}"
def save_thumb(instance, filename):
    """Upload path for a profile thumbnail: profile/<user id>/thumb/<date>.<ext>."""
    extension = filename.rsplit(".", 1)[-1]
    dated_name = f"{now().date()}.{extension}"
    return f"profile/{instance.id}/thumb/{dated_name}"
class CustomUser(auth_models.AbstractUser):
    """
    CustomUser model authenticated by email and password (no username).
    """

    # Email replaces the username as the login identifier.
    username = None
    email = models.EmailField(_("email address"), unique=True)
    # Full-size profile image plus a 140x140 thumbnail derived from it.
    image = models.ImageField(upload_to=save_image, blank=True, null=True)
    image_thumb = models.ImageField(upload_to=save_thumb,
                                    blank=True,
                                    null=True)

    objects = CustomUserManager()

    USERNAME_FIELD = "email"
    REQUIRED_FIELDS = []

    def __str__(self):
        return self.email

    def __init__(self, *args, **kwargs):
        super(CustomUser, self).__init__(*args, **kwargs)
        # Store the current image to check for a change while updating image
        self._curr_image = self.image

    @staticmethod
    def get_absolute_url():
        # All users share the same profile route.
        return reverse("profile")

    def save(self, *args, **kwargs):
        """Persist the user; when the image changed on update, delete the old
        files and regenerate a 140x140 thumbnail from the new upload."""
        created = self._state.adding  # created or updated
        image_updated = False
        if not created:
            # Store the new image
            image = self.image
            if image and image.name != self._curr_image.name:
                image_updated = True
                # Deleting the previous image and its thumnail
                self._curr_image.delete(False)
                self.image_thumb.delete(False)
                # Assigning the image field with the new image
                self.image = image
                image_name = image.name.rsplit("/", 1)[-1]
                # Create a new image for thumbnail
                thumb_image = ContentFile(image.read())
                # Save the thumbnail but do not commit to the database
                self.image_thumb.save(image_name, thumb_image, False)
        # Save the model
        super(CustomUser, self).save(*args, **kwargs)
        if image_updated:
            # Get the thumbnail image from its path to resize it
            thumb_image = Image.open(self.image.path)
            # NOTE(review): both sides of this check test ``height`` --
            # presumably one of them should be ``width``; confirm intent.
            if thumb_image.height > 140 or thumb_image.height > 140:
                output_size = (140, 140)
                thumb_image.thumbnail(output_size)
                # Save the resized image to its path
                thumb_image.save(self.image_thumb.path)

    def delete(self, *args, **kwargs):
        # Delete the user image or anything after object is deleted
        if self.image:
            self.image.delete(False)
            self.image_thumb.delete(False)
        super(CustomUser, self).delete(*args, **kwargs)
class GlobalInfo(models.Model):
    """Singleton model storing site-wide information accessible by everyone."""

    logo = models.ImageField(upload_to="logo/", blank=True, null=True)
    address = models.CharField(max_length=350, blank=True, null=True)

    def __init__(self, *args, **kwargs):
        super(GlobalInfo, self).__init__(*args, **kwargs)
        # Remember the logo at load time so save() can detect a replacement.
        self._current_logo = self.logo

    def save(self, *args, **kwargs):
        """
        - Overriding save to enforce only single instance of the model
        - Delete the previous image files on update
        """
        if self.__class__.objects.count():
            # Reuse the existing row's pk so only one instance ever exists.
            self.pk = self.__class__.objects.first().pk
        created = self._state.adding  # Whether object created or updated
        logo_updated = False
        if not created:
            logo = self.logo
            if logo and self._current_logo.name != logo.name:
                # Remove the old logo file before saving the replacement.
                self._current_logo.delete(False)
                self.logo = logo
                logo_updated = True
        super(GlobalInfo, self).save(*args, **kwargs)
        if logo_updated:
            # Downscale oversized logos in place to fit within 300x300.
            logo = Image.open(self.logo.path)
            if logo.width > 300 or logo.height > 300:
                output_size = (300, 300)
                logo.thumbnail(output_size)
                logo.save(self.logo.path)

    @staticmethod
    def get_absolute_url():
        return reverse("global_settings")
def create_global_info(sender, instance, created, *args, **kwargs):
    """post_save hook: ensure the GlobalInfo singleton row exists once any
    user has been created (get_or_create is a no-op afterwards)."""
    if created:
        GlobalInfo.objects.get_or_create()


# Create the GlobalInfo singleton the first time a CustomUser is saved.
post_save.connect(create_global_info, sender=CustomUser)
| [
"abhie.lp@gmail.com"
] | abhie.lp@gmail.com |
67bf7155188ac9ad6196f709cc8f1055cfa2148c | 4cbc8b81d197bc392d1b57856254300331b9738f | /python/tutorial_env/bin/virtualenv | 73d447dc889101d6aedde9c0a52c9b20a8ff657c | [
"MIT"
] | permissive | vcatafesta/chili | 87b9606f17cda645ba44cbf2bb4cc4637e18d211 | 5c734ac88454db76eb2f4e92c13364a5bbc7a93a | refs/heads/main | 2023-09-01T01:39:09.457448 | 2023-08-29T21:23:28 | 2023-08-29T21:23:28 | 171,972,556 | 2 | 2 | null | 2019-02-22T01:38:49 | 2019-02-22T01:26:46 | null | UTF-8 | Python | false | false | 267 | #!/github/chili/python/tutorial_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from virtualenv.__main__ import run_with_catch
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run_with_catch())
| [
"vcatafesta@gmail.com"
] | vcatafesta@gmail.com | |
b35fca13f4a26f547668416fc0650b4da2010002 | 07e6fc323f657d1fbfc24f861a278ab57338b80a | /python/pySimE/space/exp/OrbitalTransferOpt/OrbOpt_Cos/OrbitalOptCos_profile.py | 9e3b7306b0af6b7d12671d76c2f1f9c87fc2bfa2 | [
"MIT"
] | permissive | ProkopHapala/SimpleSimulationEngine | 99cf2532501698ee8a03b2e40d1e4bedd9a12609 | 47543f24f106419697e82771289172d7773c7810 | refs/heads/master | 2022-09-05T01:02:42.820199 | 2022-08-28T10:22:41 | 2022-08-28T10:22:41 | 40,007,027 | 35 | 4 | null | null | null | null | UTF-8 | Python | false | false | 6,048 | py | #!/usr/bin/env python
from pylab import *
from basiset import *
from Simplex_optimization import Simplex
from Random_optimization import MCBias_Run,MCBias2_Run
nnodes = 8
nsamp = 64
Gen = [0.0]*2*nnodes
def R2omega(R):
    """Angular velocity of a circular Kepler orbit of radius R.

    Kepler's third law in normalized units: omega = sqrt(1 / R^3).
    """
    radius_cubed = R ** 3
    return sqrt(1.0 / radius_cubed)
T = 2.0; R0 = 1.0; R1 = 0.2;
v0=R2omega(sqrt(1.0/R0**3)); v1=sqrt(1.0/R1**3);
ph = 0.4*T*sqrt(v0**2+v1**2)
P0=array( [ R0 , 0 ] ); V0=array( [ 1.0, v0 ] );
P1=array( [ R1 , ph ] ); V1=array( [ 0, v1 ] );
Bs = evalDiCosBasisset( nnodes, nsamp, T)
scalesR = 1.0/array(range(1,nnodes+1))**2
scalesO = 2.0/array(range(1,nnodes+1))**1.5
#scalesR = 1.0/array(range(1,nnodes+1))
#scalesO = 2.0/array(range(1,nnodes+1))
ts = arange(0,1.000001,1.0/nsamp )
timescales = b = matrix([1.0, 1.0/T, 1.0/T**2] ).transpose();
Rt0 = array(multiply( timescales, evalPoly4( ts, poly4coefs_x0x1v0v1(P0[0], P1[0], V0[0]*T, V1[0]*T) ) ) )
Ot0 = array(multiply( timescales, evalPoly4( ts, poly4coefs_x0x1v0v1(P0[1], P1[1], V0[1]*T, V1[1]*T) ) ) )
ts *= T
nEvaluations = 0
maxThrust = 2.0
def fitnesFunc( Fs ):
    """Fitness of a trajectory: negative RMS of the thrust channel.

    Fs[4] holds squared-thrust samples on the module-level time grid ``ts``;
    the integral is taken with the trapezoid rule, so lower average thrust
    gives a higher (less negative) fitness.  Also bumps the global
    evaluation counter ``nEvaluations``.
    """
    global nEvaluations;
    nEvaluations +=1
    fsum = 0
    tsum = 0
    #print "len(Fs) ", len(Fs[4])," len(ts) ", len(ts)
    for i in range(len(Fs[4])-1):
        # Trapezoidal integration of Fs[4] over the time grid.
        dt=(ts[i+1]-ts[i])
        df=0.5*(Fs[4][i+1]+Fs[4][i])
        fsum+=df*dt
        tsum+=dt
        # NOTE(review): the disabled lines below penalised thrust above
        # maxThrust; while commented out, the engine limit is unused.
        #df_over = df-maxThrust
        #if(df_over>0):
        #    fsum+= (df_over**2) * dt # penalty for overloading engine
    return -sqrt(fsum/tsum)
    #return -T* sqrt((Fs[4]**2).sum()) /len(ts)
def evalFitness( Gen ):
    """Evaluate a genome: split it into angular/radial basis coefficients,
    build the trajectory and return the NEGATED fitness (for minimizers).

    Side effect: stores the sampled trajectory in module globals Os, Rs, Fs,
    which the plotting helpers read.
    """
    global Os,Rs,Fs
    # Second half of the genome scales the radial basis, first half the angular.
    cR = Gen[nnodes:] * scalesR
    cO = Gen[:nnodes] * scalesO
    Os,Rs,Fs = evalTrajectoryPolar( Rt0, Ot0, Bs, cR, cO )
    #print " evalFitness shape Os,Rs,Fs", shape(Rs),shape(Rs), shape(Fs)
    fitness = fitnesFunc(Fs)
    # Sign flip: the simplex minimizes, while fitnesFunc is "higher is better".
    return -fitness
def plotTrj( Os,Rs,Fs, i, clr="k" ):
    """Plot a trajectory: polar path, position/velocity vs time, accelerations.

    Os/Rs hold [value, 1st derivative, 2nd derivative] samples of the
    angular and radial coordinate; ``i`` selects which bottom-row subplot
    receives the acceleration panel; ``clr`` is the matplotlib color code.
    """
    print shape(ts), shape(Rs), shape(Rs), shape(Fs)
    subplot(2,5,1, polar=True); plot( Os[0], Rs[0], '-'+clr); title(" Trajectory ");
    subplot(2,5,2); plot( ts, Rs[0],'-'+clr ); plot( ts, Os[0], '--'+clr ); grid(); title(" Position ");
    subplot(2,5,3); plot( ts, Rs[1],'-'+clr ); plot( ts, Os[1], '--'+clr ); grid(); title(" Velocity ");
    subplot(2,5,5+i);
    plot( ts, Rs[2],'r--' ); plot( ts, Os[2], 'b--' );
    plot( ts, Fs[1],'r-' ); plot( ts, Fs[0], 'b-' );
    plot( ts, Fs[2],'g-'); # G -- presumably the gravity term; confirm
    plot( ts, Fs[3],'m-'); # FTR -- presumably radial thrust component; confirm
    plot( ts, sqrt(Fs[4]),'k.-' ); # FT -- thrust magnitude (Fs[4] is squared)
    title(" acclereations ");
    grid()
def map2D( X, U1, U2, f1, f2, n1, n2 ):
    """Sample the fitness landscape on a 2-D grid around point `X`.

    The grid spans `X + i*f1/n1 * U1 + j*f2/n2 * U2` for i in [-n1, n1] and
    j in [-n2, n2], where U1/U2 are direction vectors in genome space and
    f1/f2 the half-widths of the scan. Returns a (2*n1+1) x (2*n2+1) matrix
    of evalFitness values.
    """
    # BUGFIX: the matrix was allocated as (2*n1+1, 2*n1+1), i.e. the column
    # count used n1 instead of n2, which breaks (or silently truncates) any
    # call with n1 != n2. Existing callers pass n1 == n2, so behavior for
    # them is unchanged.
    M = zeros((2*n1+1, 2*n2+1))
    base = array(X)  # hoisted: invariant across the scan
    for i in range(-n1, n1+1):
        d1 = array(U1)*(i*f1/n1)
        for j in range(-n2, n2+1):
            d2 = array(U2)*(j*f2/n2)
            M[i+n1, j+n2] = evalFitness( base + d1 + d2 )
    return M
def plotMaps(irow,nrow, Gen):
    # For every node index i, scan the fitness landscape around genome `Gen`
    # along the pair of unit directions (theta-gene i, R-gene i) and draw the
    # resulting 2-D map with contours into row `irow` of an nrow x nnodes grid.
    for i in range(nnodes):
        # Unit vectors selecting the i-th theta coefficient and i-th R coefficient.
        U1 = zeros(2*nnodes); U1[i ]=1.0
        U2 = zeros(2*nnodes); U2[i+nnodes]=1.0
        print " maping node",i," U1: ",U1," U2: ", U2
        subplot(nrow, nnodes, nnodes*irow+i+1 )
        # 7x7 grid, half-width 0.1 in each gene direction.
        mapa = map2D( Gen, U1, U2, 0.1, 0.1, 3, 3 )
        imshow(mapa, interpolation='bicubic', cmap='jet'); colorbar( )
        CS = contour(mapa, colors="g"); clabel(CS, inline=0.5, fontsize=8)
def TryNew( GenBest, fitnessBest, stepSize ):
    # One step of a simple random-walk search: perturb the best genome by a
    # uniform random step and keep the perturbation if it improves fitness.
    # Returns (possibly updated genome, its fitness, whether it improved).
    # NOTE(review): this calls evalGen(ti, GenNew), but neither `evalGen` nor
    # `ti` is defined anywhere in this file -- this function looks stale
    # (superseded by evalFitness); confirm before calling it.
    hit = False
    GenNew = GenBest[:] + (rand(nnodes*2)[:]-0.5)*stepSize
    ts,Os,Rs,Fs = evalGen ( ti, GenNew )
    fitnessNew = fitnesFunc(Fs)
    #fitnessNew = evalFitness( GenNew )
    if(fitnessNew > fitnessBest ):
        hit = True
        GenBest = GenNew
        fitnessBest = fitnessNew
        #print " Better is ",GenBest," fitness = ",fitnessBest,
        #print " fitness: ",fitnessBest, " stepSize: ", stepSize
        # Trace the thrust profile of every accepted candidate.
        subplot(2,5,5); plot( ts, Fs[4], '-', lw=0.25 ); grid()
    return GenBest, fitnessBest,hit
def Simplex_Run(Gen,steps, GenHistory):
    # Refine genome `Gen` with a downhill-simplex (Nelder-Mead style) search.
    # `steps` are the initial simplex edge lengths per gene; every improvement
    # is appended to `GenHistory` and its thrust profile traced into subplot 5.
    # Returns the best genome found.
    print
    print " =========  Simplex Optimization  ================= "
    Simp = Simplex(evalFitness, Gen, steps )
    #values, err, niter = SimplexOpt.minimize()
    old_low = 10000000000
    lastImprovement = 0
    for i in range(0, 10000):
        # Tolerance 0.0001 on the spread between best and worst vertex.
        converged, err,low,hi = Simp.simplexStep( 0.0001 )
        if converged:
            print " converged in ",i," steps "
            break;
        if(low < old_low):
            lastImprovement = i
            old_low = low
            subplot(2,5,5); plot( ts, Fs[4], '-', lw=0.25 ); grid()
            GenHistory.append(list(Simp.simplex[Simp.lowest]))
            print " new_low : ", low, " iter: ", i, " err ", err
        # Bail out after 16*nnodes iterations without improvement.
        if(i-lastImprovement)>(nnodes*16):
            print " Not able to improve => Exiting .... "
            break;
    print Simp.simplex[Simp.lowest]
    return Simp.simplex[Simp.lowest]
# ================ MAIN PROGRAM BODY =========================
# Evaluate the initial genome, then optionally run a two-stage optimization:
# biased Monte-Carlo global search followed by simplex local refinement.
figure(num=None, figsize=(20, 10))
GenHistory = []  # genomes accepted during optimization (for convergence plots)
print " Initial Gen : ", Gen
evalFitness( Gen )  # side effect: fills globals Os,Rs,Fs for the initial genome
Gen0 = array(Gen).copy()  # remember the starting genome for comparison
Opt = True  # master switch: optimize, or just evaluate/plot the initial genome
if Opt:
    # Stage 1: stochastic biased Monte-Carlo search.
    nEvaluations=0
    Gen = MCBias2_Run( evalFitness, Gen, 0.5, 0.01, 4*4*nnodes, 2*nnodes, 2000, GenHistory, wStep=0.5, fBias = 3.0, kBias0=0.8, kBiasf = 0.97 ) # good
    GenRnd = array(Gen).copy()
    print "===== nEvaluations : ", nEvaluations
    # Stage 2: downhill-simplex refinement starting from the MC result.
    steps = ones(nnodes*2)*0.05
    nEvaluations=0
    Gen = Simplex_Run(Gen,steps, GenHistory)
    GenSimp = array(Gen).copy()
    print "===== nEvaluations : ", nEvaluations
# Disabled result plotting (kept as a dead string block by the original author).
'''
if len(GenHistory)>2:
    GenHistory = transpose(array(GenHistory ))
    subplot(2,5,10);
    for i in range(nnodes):
        plot( GenHistory[i        ]-Gen0[i       ],  'r-' );
        plot( GenHistory[i+nnodes]-Gen0[i+nnodes],  'b-' );
    #legend( bbox_to_anchor=(0.5, 1.00, 1., 0.000)  )
if Opt:
    print " ===== Random Fittness ", evalFitness( GenRnd )
    plotTrj( Os,Rs,Fs, 2, "g" )
    subplot(2,5,5);  plot( ts, Fs[4], 'g-', lw=2 ); grid()
    print " ===== Simplex Fittness ", evalFitness( GenSimp )
    plotTrj( Os,Rs,Fs, 3, "k" )
    subplot(2,5,5);  plot( ts, Fs[4], 'k-', lw=2 ); grid()
print " ===== Initial Fittness ", evalFitness( Gen0 )
plotTrj( Os,Rs,Fs, 1, "r" )
#subplot(2,5,5); autoscale(False);  plot( ts, Fs[4], 'r-', lw=2 ); grid(), title("propelant \n consumption");
'''
#savefig("plost.png", bbox_inches='tight')
# Disabled fitness-landscape mapping around the initial and optimized genomes.
'''
figure(num=None, figsize=(20, 5))
plotMaps(0,2, Gen0)
plotMaps(1,2, Gen )
savefig("valley.png", bbox_inches='tight')
'''
#show()
| [
"ProkopHapala@gmail.com"
] | ProkopHapala@gmail.com |
f85334f39cfa415b73ca89a1cfa22453ea4916e7 | 2ad9a73cb3e2da46fb15ae56a6dee11407fe8845 | /ports/kodi/addons/plugin.video.transistortv/scrapers/local_scraper.py | 06c7ae7a3925f2c85a4975534ba43cc2f04195b2 | [] | no_license | hpduong/retropie_configs | cde596b35897a3faeedefabd742fc15820d58255 | ed4e39146e5bebc0212dcef91108541a128d9325 | refs/heads/master | 2021-07-12T15:46:17.589357 | 2018-11-11T19:10:54 | 2018-11-11T19:10:54 | 157,111,040 | 1 | 2 | null | 2020-07-24T03:43:29 | 2018-11-11T18:59:52 | Python | UTF-8 | Python | false | false | 7,649 | py | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import xbmc
import kodi
import log_utils # @UnusedImport
from transistortv_lib import scraper_utils
from transistortv_lib.constants import FORCE_NO_MATCH
from transistortv_lib.constants import SORT_KEYS
from transistortv_lib.constants import VIDEO_TYPES
import scraper
# Module-level logger shared by the scraper below.
logger = log_utils.Logger.get_logger()
# This scraper reads the local Kodi library, so no remote base URL applies.
BASE_URL = ''
class Scraper(scraper.Scraper):
    """Scraper backed by the local XBMC/Kodi video library.

    All lookups go through Kodi's JSON-RPC interface (VideoLibrary.*); no
    network access is performed.
    """

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):  # @UnusedVariable
        # Per-scraper settings are stored under "<name>-<key>" keys.
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
        # Index into the quality list, used when stream details carry no width.
        self.def_quality = int(kodi.get_setting('%s-def-quality' % (self.get_name())))

    @classmethod
    def provides(cls):
        """Video types this scraper can resolve."""
        return frozenset([VIDEO_TYPES.TVSHOW, VIDEO_TYPES.EPISODE, VIDEO_TYPES.MOVIE])

    @classmethod
    def get_name(cls):
        return 'Local'

    def get_sources(self, video):
        """Return hoster dicts for `video`, resolved from the local library.

        Looks the item up by library id (from get_url), reads its file path,
        play count and stream details, and derives the quality from the video
        width when available, falling back to the configured default.
        """
        hosters = []
        source_url = self.get_url(video)
        if not source_url or source_url == FORCE_NO_MATCH: return hosters
        params = scraper_utils.parse_query(source_url)
        if video.video_type == VIDEO_TYPES.MOVIE:
            cmd = '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovieDetails", "params": {"movieid": %s, "properties" : ["file", "playcount", "streamdetails"]}, "id": "libMovies"}'
            result_key = 'moviedetails'
        else:
            cmd = '{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodeDetails", "params": {"episodeid": %s, "properties" : ["file", "playcount", "streamdetails"]}, "id": "libTvShows"}'
            result_key = 'episodedetails'
        run = cmd % (params['id'])
        meta = xbmc.executeJSONRPC(run)
        meta = scraper_utils.parse_json(meta)
        logger.log('Source Meta: %s' % (meta), log_utils.LOGDEBUG)
        if result_key in meta.get('result', []):
            details = meta['result'][result_key]
            # Map the configured quality index onto the quality names sorted
            # by their SORT_KEYS rank.
            def_quality = [item[0] for item in sorted(SORT_KEYS['quality'].items(), key=lambda x:x[1])][self.def_quality]
            host = {'multi-part': False, 'class': self, 'url': details['file'], 'host': 'XBMC Library', 'quality': def_quality, 'views': details['playcount'], 'rating': None, 'direct': True}
            stream_details = details['streamdetails']
            if len(stream_details['video']) > 0 and 'width' in stream_details['video'][0]:
                host['quality'] = scraper_utils.width_get_quality(stream_details['video'][0]['width'])
            hosters.append(host)
        return hosters

    def _get_episode_url(self, show_url, video):
        """Resolve an episode to a 'video_type=...&id=...' query string.

        Matches by season/episode number first, then falls back to the
        episode title when forced or enabled in settings. .strm placeholder
        files are skipped. Returns None implicitly when nothing matches.
        """
        params = scraper_utils.parse_query(show_url)
        cmd = '{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"tvshowid": %s, "season": %s, "filter": {"field": "%s", "operator": "is", "value": "%s"}, \
        "limits": { "start" : 0, "end": 25 }, "properties" : ["title", "season", "episode", "file", "streamdetails"], "sort": { "order": "ascending", "method": "label", "ignorearticle": true }}, "id": "libTvShows"}'
        base_url = 'video_type=%s&id=%s'
        episodes = []
        force_title = scraper_utils.force_title(video)
        if not force_title:
            run = cmd % (params['id'], video.season, 'episode', video.episode)
            meta = xbmc.executeJSONRPC(run)
            meta = scraper_utils.parse_json(meta)
            logger.log('Episode Meta: %s' % (meta), log_utils.LOGDEBUG)
            if 'result' in meta and 'episodes' in meta['result']:
                episodes = meta['result']['episodes']
        else:
            logger.log('Skipping S&E matching as title search is forced on: %s' % (video.trakt_id), log_utils.LOGDEBUG)

        if (force_title or kodi.get_setting('title-fallback') == 'true') and video.ep_title and not episodes:
            run = cmd % (params['id'], video.season, 'title', video.ep_title)
            meta = xbmc.executeJSONRPC(run)
            meta = scraper_utils.parse_json(meta)
            logger.log('Episode Title Meta: %s' % (meta), log_utils.LOGDEBUG)
            if 'result' in meta and 'episodes' in meta['result']:
                episodes = meta['result']['episodes']

        for episode in episodes:
            if episode['file'].endswith('.strm'):
                continue

            return base_url % (video.video_type, episode['episodeid'])

    @classmethod
    def get_settings(cls):
        """Add the default-quality picker to the inherited settings XML."""
        settings = super(cls, cls).get_settings()
        name = cls.get_name()
        settings.append('         <setting id="%s-def-quality" type="enum" label="    Default Quality" values="None|Low|Medium|High|HD720|HD1080" default="0" visible="eq(-3,true)"/>' % (name))
        return settings

    def search(self, video_type, title, year, season=''):  # @UnusedVariable
        """Search the library by title (and optionally year).

        Retries with a punctuation-normalized title when the exact title
        yields no results.
        """
        filter_str = '{{"field": "title", "operator": "contains", "value": "{search_title}"}}'
        if year: filter_str = '{{"and": [%s, {{"field": "year", "operator": "is", "value": "%s"}}]}}' % (filter_str, year)
        if video_type == VIDEO_TYPES.MOVIE:
            cmd = '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": { "filter": %s, "limits": { "start" : 0, "end": 25 }, "properties" : ["title", "year", "file", "streamdetails"], \
            "sort": { "order": "ascending", "method": "label", "ignorearticle": true } }, "id": "libMovies"}'
            result_key = 'movies'
            id_key = 'movieid'
        else:
            cmd = '{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": { "filter": %s, "limits": { "start" : 0, "end": 25 }, "properties" : ["title", "year"], \
            "sort": { "order": "ascending", "method": "label", "ignorearticle": true } }, "id": "libTvShows"}'
            result_key = 'tvshows'
            id_key = 'tvshowid'

        command = cmd % (filter_str.format(search_title=title))
        results = self.__get_results(command, result_key, video_type, id_key)

        norm_title = self.__normalize_title(title)
        if not results and norm_title and norm_title != title:
            command = cmd % (filter_str.format(search_title=norm_title))
            results = self.__get_results(command, result_key, video_type, id_key)

        return results

    def __normalize_title(self, title):
        """Replace punctuation with spaces and collapse whitespace runs."""
        # FIX: use raw strings -- '\s' in a plain string literal is an invalid
        # escape sequence (a SyntaxWarning/error on modern Python).
        norm_title = re.sub(r'[^A-Za-z0-9 ]', ' ', title)
        return re.sub(r'\s+', ' ', norm_title)

    def __get_results(self, cmd, result_key, video_type, id_key):
        """Run a JSON-RPC search command and shape the hits for the UI.

        Movie entries whose file is a .strm placeholder are skipped.
        """
        results = []
        logger.log('Search Command: %s' % (cmd), log_utils.LOGDEBUG)
        meta = xbmc.executeJSONRPC(cmd)
        meta = scraper_utils.parse_json(meta)
        logger.log('Search Meta: %s' % (meta), log_utils.LOGDEBUG)
        for item in meta.get('result', {}).get(result_key, {}):
            if video_type == VIDEO_TYPES.MOVIE and item['file'].endswith('.strm'):
                continue

            result = {'title': item['title'], 'year': item['year'], 'url': 'video_type=%s&id=%s' % (video_type, item[id_key])}
            results.append(result)
        return results
| [
"henryduong@gmail.com"
] | henryduong@gmail.com |
14f5e4b6406aa2e955eeec030b5a49af05d538a6 | 380a47268c5975473a2e7c38c747bc3bdbd981b1 | /benchmark/third_party/DeepSpeed/deepspeed/ops/transformer/inference/diffusers_attention.py | 45a206a0a94ea5620ca300dafd47b0e16fad537b | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | FMInference/FlexGen | 07aa9b1918c19b02077e13ad07e76840843810dd | d34f7b4b43ed87a374f394b0535ed685af66197b | refs/heads/main | 2023-07-24T02:29:51.179817 | 2023-07-21T22:38:31 | 2023-07-21T22:38:31 | 602,270,517 | 6,821 | 411 | Apache-2.0 | 2023-07-07T22:59:24 | 2023-02-15T21:18:53 | Python | UTF-8 | Python | false | false | 11,039 | py | '''
Copyright 2022 The Microsoft DeepSpeed Team
'''
import math
import torch
from torch.autograd import Function
from ... import op_builder
import torch.nn as nn
from packaging import version as pkg_version
from deepspeed.utils.logging import log_dist
# Cuda modules will be imported if needed
inference_cuda_module = None
minus_inf = -10000.0
triton_flash_attn = None
def load_triton_flash_attn():
    """Lazily bind the triton flash-attention kernel to the module global.

    Raises ImportError when triton is missing or older than 2.0; on success
    rebinds the module-level `triton_flash_attn` to the real kernel class.
    """
    global triton_flash_attn
    install_hint = "Please install triton 2.0+ or `pip install deepspeed[sd]`"
    try:
        import triton
    except ImportError:
        raise ImportError(install_hint)

    # The kernel relies on triton 2.x APIs; reject older installs up front.
    version_ok = pkg_version.parse(triton.__version__) >= pkg_version.parse("2.0")
    if not version_ok:
        raise ImportError(install_hint)

    from .triton_ops import triton_flash_attn
class DeepSpeedDiffusersAttentionFunction(Function):
    """Inference-only autograd Function implementing diffusers attention.

    Forward runs self- or cross-attention through DeepSpeed's fused CUDA
    kernels (or a triton flash-attention kernel when eligible); backward is
    deliberately unsupported.
    """

    @staticmethod
    def forward(ctx,
                input,
                context,
                input_mask,
                config,
                attn_qkvw,
                attn_qw,
                attn_kw,
                attn_vw,
                attn_qkvb,
                num_attention_heads_per_partition,
                norm_factor,
                hidden_size_per_partition,
                attn_ow,
                attn_ob,
                do_out_bias,
                score_context_func,
                linear_func,
                triton_flash_attn_kernel):
        """Compute attention output for `input` (self-attn) or against
        `context` (cross-attn). `norm_factor` is (hidden/heads)**0.25, so
        scale below becomes 1/sqrt(head_size)."""

        def _transpose_for_context(x):
            # (b, heads, seq, head_size) -> (b, seq, hidden) for the output GEMM.
            x = x.permute(0, 2, 1, 3)
            new_x_layer_shape = x.size()[:-2] + \
                                      (hidden_size_per_partition,)
            return x.reshape(*new_x_layer_shape)

        def _transpose_for_scores(x):
            # (b, seq, hidden) -> (b, heads, seq, head_size).
            # NOTE(review): currently unused in this forward path.
            attention_head_size = x.shape[-1] // num_attention_heads_per_partition
            new_x_shape = x.size()[:-1] + (num_attention_heads_per_partition,
                                           attention_head_size)
            x = x.reshape(*new_x_shape)
            x = x.permute(0, 2, 1, 3)
            return x.contiguous()

        def selfAttention_fp(input, context, input_mask):
            # Kernels below are fp16-only; promote fp32 inputs when configured.
            if config.fp16 and input.dtype == torch.float32:
                input = input.half()
            head_size = input.shape[-1] // config.heads
            # Triton flash attention only supports head_size <= 128 and
            # self-attention (no cross-attention context).
            do_flash_attn = (head_size <= 128)
            scale = (1 / norm_factor) * (1 / norm_factor)
            if do_flash_attn and context == None:
                # Fused QKV projection; the flash path returns q/k/v split out.
                qkv_out = linear_func(input,
                                      attn_qkvw,
                                      attn_qkvb if attn_qkvb is not None else attn_qkvw,
                                      attn_qkvb is not None,
                                      do_flash_attn,
                                      config.heads)

                context_layer = triton_flash_attn_kernel(qkv_out[0],
                                                         qkv_out[1],
                                                         qkv_out[2],
                                                         scale,
                                                         input.shape[-2] % 128 == 0)
                # Kernel may pad head_size; trim back before reshaping.
                context_layer = _transpose_for_context(context_layer[:,:,:,:head_size])
            else:
                do_flash_attn = False
                if context is not None:
                    # Cross-attention: Q from input, K/V from context.
                    query = torch.matmul(input, attn_qw)
                    key = torch.matmul(context, attn_kw)
                    value = torch.matmul(context, attn_vw)
                else:
                    # Self-attention: one fused QKV GEMM, then split.
                    qkv = torch.matmul(input, attn_qkvw)
                    query, key, value = qkv.chunk(3, dim=-1)
                    query = query.contiguous()
                    key = key.contiguous()
                    value = value.contiguous()
                # CUDA helper: pad and reshape q/k/v into per-head layout.
                query, key, value = inference_cuda_module.pad_transform_fp16(query, key, value, config.heads, do_flash_attn)
                attention_scores = (torch.matmul(query,
                                                 key.transpose(-1,
                                                               -2)) *
                                    scale).softmax(dim=-1)
                context_layer = _transpose_for_context(
                    torch.matmul(attention_scores,
                                 value))

            # Output projection (bias optional via do_out_bias).
            output = linear_func(context_layer,
                                 attn_ow,
                                 attn_ob,
                                 do_out_bias,
                                 False,
                                 config.heads)
            return output

        output = selfAttention_fp(input, context, input_mask)

        return output

    @staticmethod
    def backward(ctx, grad_output, grad_output1, grad_output2, grad_output3):
        # Inference-only op: gradients are intentionally unsupported.
        raise RuntimeError('You are running with DeepSpeed Inference mode. \
                            Please switch to Training mode for running backward!')
class DeepSpeedDiffusersAttention(nn.Module):
    """Initialize the DeepSpeed Transformer Layer.

        Arguments:
            layer_id: The layer index starting from 0, e.g. if model has 24 transformer layers,
                layer_id will be 0,1,2...23 when each layer object is instantiated

            config: An object of DeepSpeedInferenceConfig
    """
    # Class-level counter: each constructed layer gets the next id.
    layer_id = 0

    def __init__(
        self,
        config,
    ):
        super(DeepSpeedDiffusersAttention, self).__init__()

        self.config = config
        self.config.layer_id = DeepSpeedDiffusersAttention.layer_id
        DeepSpeedDiffusersAttention.layer_id += 1
        # Parameters live on CPU unless bigscience_bloom requests device init.
        device = torch.cuda.current_device() if config.bigscience_bloom else 'cpu'
        # Fused QKV weight is 3x the per-partition hidden size.
        qkv_size_per_partition = (self.config.hidden_size // self.config.mp_size) * 3

        data_type = torch.int8 if config.q_int8 else torch.half if config.fp16 else torch.float
        data_type_fp = torch.half if config.fp16 else torch.float
        global inference_cuda_module
        if inference_cuda_module is None:
            # JIT-build/load the CUDA inference kernels once per process.
            builder = op_builder.InferenceBuilder()
            inference_cuda_module = builder.load()

        if DeepSpeedDiffusersAttention.layer_id == 1:
            log_dist(f"DeepSpeed-Attention config: {self.config.__dict__}", [0])

        # Empty parameter shells; real weights are copied in by the loader.
        self.attn_qkvw = nn.Parameter(torch.empty(self.config.hidden_size,
                                                  qkv_size_per_partition,
                                                  dtype=data_type,
                                                  device=device),
                                      requires_grad=False)
        self.attn_kw = nn.Parameter(torch.empty(self.config.hidden_size,
                                                self.config.hidden_size,
                                                dtype=data_type,
                                                device=device),
                                    requires_grad=False)
        self.attn_vw = nn.Parameter(torch.empty(self.config.hidden_size,
                                                self.config.hidden_size,
                                                dtype=data_type,
                                                device=device),
                                    requires_grad=False)
        self.attn_qw = nn.Parameter(torch.empty(self.config.hidden_size,
                                                self.config.hidden_size,
                                                dtype=data_type,
                                                device=device),
                                    requires_grad=False)
        self.attn_qkvb = nn.Parameter(torch.empty(qkv_size_per_partition,
                                                  dtype=data_type_fp,
                                                  device=device),
                                      requires_grad=False)
        out_size_per_partition = self.config.hidden_size // self.config.mp_size
        self.attn_ow = nn.Parameter(torch.empty(out_size_per_partition,
                                                self.config.hidden_size,
                                                dtype=data_type,
                                                device=device),
                                    requires_grad=False)

        self.attn_ob = nn.Parameter(torch.empty(self.config.hidden_size,
                                                dtype=data_type_fp,
                                                device=device),
                                    requires_grad=False)

        self.do_out_bias = True

        if triton_flash_attn is None:
            load_triton_flash_attn()
        self.triton_flash_attn_kernel = triton_flash_attn()
        self.num_attention_heads_per_partition = self.config.heads // self.config.mp_size
        self.hidden_size_per_partition = self.config.hidden_size // self.config.mp_size
        self.hidden_size_per_attention_head = self.config.hidden_size // self.config.heads

        # 4th root of head size: forward squares 1/norm_factor to get 1/sqrt(d).
        self.norm_factor = math.sqrt(
            math.sqrt(self.config.hidden_size // self.config.heads))

        if self.config.scale_attn_by_inverse_layer_idx is True:
            self.norm_factor *= math.sqrt(self.config.layer_id + 1)
            # https://github.com/huggingface/transformers/blob/v4.24.0/src/transformers/models/gpt2/modeling_gpt2.py#L191

        # Pick fp16/fp32 kernel entry points once, up front.
        self.score_context_func = inference_cuda_module.softmax_context_fp32 if (not config.fp16) else \
                                    inference_cuda_module.softmax_context_fp16
        self.linear_func = inference_cuda_module.linear_layer_fp16 if config.fp16 else \
                                    inference_cuda_module.linear_layer_fp32
        self.allocate_workspace = inference_cuda_module.allocate_workspace_fp32 if not (config.fp16) else \
                                    inference_cuda_module.allocate_workspace_fp16

    def forward(self, input, context=None, input_mask=None):
        # Layer 0 allocates the shared inference workspace sized to this batch.
        if self.config.layer_id == 0:
            self.allocate_workspace(self.config.hidden_size,
                                    self.config.heads,
                                    input.size()[1],
                                    input.size()[0],
                                    DeepSpeedDiffusersAttention.layer_id,
                                    self.config.mp_size,
                                    False,
                                    0,
                                    self.config.max_out_tokens)
        output = DeepSpeedDiffusersAttentionFunction.apply(
            input,
            context,
            input_mask,
            self.config,
            self.attn_qkvw,
            self.attn_qw,
            self.attn_kw,
            self.attn_vw,
            self.attn_qkvb,
            self.num_attention_heads_per_partition,
            self.norm_factor,
            self.hidden_size_per_partition,
            self.attn_ow,
            self.attn_ob,
            self.do_out_bias,
            self.score_context_func,
            self.linear_func,
            self.triton_flash_attn_kernel)

        return output
| [
"sqy1415@gmail.com"
] | sqy1415@gmail.com |
e3ef3305915ba7db4c208240f59f5157b65fa40e | e629c850bd473f0a8f17a569bbd51584f67a33e6 | /blog/views.py | c8f7bbe14984c2abde28bbd92a20072811eec802 | [] | no_license | buxizhizhoum/django_web | d2047c2375d508dd5a6fe47d5191cac4862911f3 | f94174ac63c8af84627891c9d6b1836ff5f4e850 | refs/heads/master | 2021-01-12T04:04:55.999433 | 2017-02-18T07:03:25 | 2017-02-18T07:03:25 | 77,494,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,702 | py | # coding: utf-8
from django.shortcuts import render
from django.shortcuts import HttpResponse
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
import datetime
import Queue
import time
from blog import forms
import json
from blog.models import Notes, NotesType, User, UserType, Reply, ChatContent
from django.views.decorators.cache import cache_page # 缓存
# Create your views here.
class CJsonEncoder(json.JSONEncoder):
    """JSON encoder that renders datetime/date objects as formatted strings.

    Used so views can json.dumps() payloads containing timestamps.
    """

    def default(self, obj):
        # datetime must be tested before date: datetime is a subclass of date,
        # and it needs the longer time-of-day format.
        if isinstance(obj, datetime.datetime):
            fmt = '%Y-%m-%d %H:%M:%S'
        elif isinstance(obj, datetime.date):
            fmt = '%Y-%m-%d'
        else:
            # Anything else falls back to the base class (raises TypeError).
            return json.JSONEncoder.default(self, obj)
        return obj.strftime(fmt)
# Per-user message queues for the chat feature: username -> Queue of pending messages.
GLOBAL_GROUP_CHAT_DICT = {}
# Upload bookkeeping intended as filename -> received byte count (see upload_file).
GLOBAL_SIZE = {}
def login_0(request):
    """Legacy form-based login view with hard-coded demo credentials.

    GET renders the form; a valid POST is checked against the stand-in
    credentials ('jerry'/'sb'). An invalid POSTed form falls through and
    returns None, matching the original behavior.
    """
    if request.method != 'POST':
        return render_to_response('login_v1.html', {'form': forms.UserInfo()})

    form = forms.UserInfo(request.POST)
    if form.is_valid():
        cleaned = form.cleaned_data
        # Stand-in for a real database lookup of username/password.
        expected_user = 'jerry'
        expected_pass = 'sb'
        if cleaned['username'] == expected_user and cleaned['password'] == expected_pass:
            return render_to_response('index.html')
        return redirect('/blog/login/')
    # Invalid form: implicit None return (as in the original).
def login(request):
    # Session-based login: check the posted credentials against the User table
    # and stash them in the session on success.
    # NOTE(review): passwords appear to be stored/compared in plain text here
    # -- confirm, and consider hashing them.
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        remember_me = request.POST.get('remember_me')
        if remember_me:
            # something could be done here according to the value of remember_me.
            print 'remember asked!'
        if User.objects.filter(username=username, password=password).count() == 1:
            # Count User rows where both username and password match.
            request.session["login_info"] = {"username": username, "password": password}
            return render_to_response('index.html')
        else:
            return redirect('/blog/login/')
    else:
        return render_to_response('login.html', {'error': 'Wrong!'})
def logout(request):
    """Drop the stored login credentials and confirm the logout."""
    # pop() without a default raises KeyError when the key is absent, exactly
    # like the original `del request.session["login_info"]`.
    request.session.pop("login_info")
    return HttpResponse("Logged out.")
def signin(request):
    # Register a new user (always as "Guest"), optionally saving an uploaded
    # portrait image, then log the user in and redirect to the success page.
    data = {"statues": 0, "message": ""}
    # print request.FILES.get("Portrait", None)
    if request.method == "POST":
        try:
            username = request.POST.get("username", None)
            password = request.POST.get("password", None)
            email = request.POST.get("email", None)
            portrait = request.FILES.get("Portrait", None)  # fetch the uploaded portrait object
            # Save the portrait into the static portrait directory.
            if portrait:
                portrait_name = "blog/static/portrait/%s" % portrait.name  # name taken from the upload
                print "portrait name:", portrait_name
                with open(portrait_name, 'wb') as f:
                    for chunk in portrait.chunks():
                        f.write(chunk)
            else:
                portrait_name = None
            print portrait_name
            # user_type = request.POST.get("user_type")
            # here chages the sign in logic, so all are sign in as guest
            user_type = "Guest"
            user_type_obj = UserType.objects.get(user_type=user_type)
            # Create the new User row (create() persists immediately).
            user_obj = User.objects.create(username=username,password=password,
                                           email=email,portrait=portrait_name,
                                           user_type=user_type_obj)
            data["statues"] = 1  # set for completeness, but not used beyond this point
            request.session["login_info"] = {"username": username, "password": password}
            return redirect("/blog/signinsuccess/")
        except Exception, e:
            print e.message
            data["message"] = e.message
            return redirect("/blog/signin/")
    else:
        return render_to_response("signin.html")
@cache_page(60*5)  # cache the rendered homepage for 5 minutes
def index(request):
    # Render the site homepage.
    print request.user
    # If Django's built-in auth framework were used, this would show the
    # logged-in user; with the custom session-based login it is always the
    # anonymous user, logged in or not.
    return render_to_response("index.html")
def notes(request):
    # get the data form database and send them to template
    # For each note type (Python/Django/HTML/Javascript/jQuery), fetch its
    # notes plus a {username_id: portrait file name} map for the template.
    # add notes whose type is Python to section Python.
    NotesType.objects.filter(notes_type="Python")
    notes_Python = Notes.objects.filter(notes_type = 1 )
    python_portrait = {}
    # Written this way for now; optimize later.
    # Within one note type, entries sharing a username_id simply overwrite
    # each other in the dict (same key, same portrait).
    # Find each user's portrait; consider replacing these repeated loops with
    # a single nested loop later.
    for note_Python in notes_Python:
        print note_Python
        username_id = note_Python.username_id
        python_portrait[username_id] = User.objects.get(id=username_id).portrait.name
        # the template needs the file object's name, not the object itself
    notes_Django = Notes.objects.filter(notes_type = 2)
    django_portrait = {}
    # Same overwrite-by-key behavior as above.
    for note_Django in notes_Django:
        print note_Django
        username_id = note_Django.username_id
        django_portrait[username_id] = User.objects.get(id=username_id).portrait.name
    notes_HTML = Notes.objects.filter(notes_type = 3)
    html_portrait = {}
    # Same overwrite-by-key behavior as above.
    for note_HTML in notes_HTML:
        print note_HTML
        username_id = note_HTML.username_id
        html_portrait[username_id] = User.objects.get(id=username_id).portrait.name
    notes_Javascript = Notes.objects.filter(notes_type = 4)
    javascript_portrait = {}
    # Same overwrite-by-key behavior as above.
    for note_Javascript in notes_Javascript:
        print note_Javascript
        username_id = note_Javascript.username_id
        javascript_portrait[username_id] = User.objects.get(id=username_id).portrait.name
    notes_jQuery = Notes.objects.filter(notes_type = 5)
    jquery_portrait = {}
    # Same overwrite-by-key behavior as above.
    for note_jQuery in notes_jQuery:
        print note_jQuery
        username_id = note_jQuery.username_id
        jquery_portrait[username_id] = User.objects.get(id=username_id).portrait.name
    return render_to_response("notes.html",{"notes_Python": notes_Python,
                                            "notes_Django": notes_Django,
                                            "notes_HTML": notes_HTML,
                                            "notes_Javascript": notes_Javascript,
                                            "notes_jQuery": notes_jQuery,
                                            "python_portrait": python_portrait,
                                            "django_portrait": django_portrait,
                                            "html_portrait": html_portrait,
                                            "jquery_portrait": jquery_portrait,
                                            "javascript_portrait": javascript_portrait,
                                            })
# For image objects, pass the file name to the template, not the raw object
# fetched from the database.
def addfavor(request):
    # AJAX endpoint: increment a note's favor count (login required).
    # Returns JSON {"statues", "favor_count", "message"}.
    login_statues = request.session.get("login_info", None)
    data = {"statues": 0, "favor_count": "", "message": ""}
    if login_statues:
        print login_statues
        #print login_statues['username']
        if request.method == "POST":
            try:
                # Look up the Note by id and bump its favor count by one.
                id = request.POST.get("id")
                temp_obj = Notes.objects.get(id = id)
                temp_favor = temp_obj.favor_count + 1  # add favor count
                temp_obj.favor_count = temp_favor  # update database
                temp_obj.save()
                data["favor_count"] = temp_favor  # update data will be sent to ajax
                data["statues"] = 1  # update statues to 1
            except Exception, e:
                data["message"] = e.message
        else:
            data["message"] = "error request method"
        return HttpResponse(json.dumps(data))
    else:
        return redirect("/blog/login/")
def getreply(request):
    # AJAX endpoint: fetch all replies for a note (login required).
    # Returns JSON {"statues", "reply_count", "replies", "message"}.
    login_statues = request.session.get("login_info", None)
    data = {"statues":0, "reply_count":'', "replies": '', "message": ''}
    if login_statues:
        if request.method == "POST":
            try:
                # Look up replies using the note id sent by the AJAX call.
                id = request.POST.get("id")
                # A note can have many replies, so filter() (not get) is used;
                # newest first, projected to content + author username.
                temp_reply_obj = Reply.objects.filter(notes_id = id).order_by("-id").values("content","user__username")
                temp_notes_obj = Notes.objects.get(id = id)
                replies = list(temp_reply_obj)
                reply_count = temp_notes_obj.reply_count
                data["replies"] = replies
                data["reply_count"] = reply_count
                data["statues"] = 1
            except Exception, e:
                data["message"] = e.message
        return HttpResponse(json.dumps(data))
    else:
        return redirect("/blog/login/")
def submitreply(request):
    # AJAX endpoint: store a new reply for a note and bump its reply count
    # (login required). Returns JSON {"statues", "message", "replies", "username"}.
    login_statues = request.session.get("login_info", None)
    if login_statues:
        data = {"statues": 0, "message":"", "replies": "", "username": ""}
        if request.method == "POST":
            try:
                # data from javascript
                replies = request.POST.get("replies", None)  # POST not post
                id = request.POST.get("id", None)
                # store data
                notes_obj = Notes.objects.get(id = id)
                # about the User, need to add cookies and session
                user_obj = User.objects.get(id = 1)  # NOTE(review): hard-coded user id; should use the current session user
                Reply.objects.create(content = replies, notes = notes_obj, user = user_obj)
                notes_obj.reply_count += 1
                # Could reply_count be derived from the database directly? That
                # would keep the counter from drifting from the actual rows on errors.
                notes_obj.save()
                # change the value of data dict
                data["statues"] = 1
                data["replies"] = replies
                data["username"] = user_obj.username
            except Exception , e:
                data["message"] = e.message
        return HttpResponse(json.dumps(data))
    else:
        return redirect("/blog/login/")
def addnote(request):
    # Create a new note from the posted form (login required); redirects to
    # the success page, or back to the form on error.
    login_statues = request.session.get("login_info", None)
    data = {"statues":0, "message":""}
    if login_statues:
        if request.method == "POST":
            # deal with the data and redirect to success pages
            try:
                title = request.POST.get("InputTitle", None)
                content = request.POST.get("InputNoteContent", None)
                username = request.POST.get("InputUsername", None)
                # NOTE(review): user is hard-coded here instead of using the
                # posted/ session username.
                user_obj = User.objects.get(username = "BinshanMa")
                notes_type = request.POST.get("NoteType", None)
                notes_type_obj = NotesType.objects.get(notes_type = notes_type)
                print title, content, username, notes_type
                notes_obj = Notes.objects.create(title = title, content=content,
                                                 username = user_obj, notes_type = notes_type_obj)
                notes_obj.save()  # create() already persists the row, so this save() is redundant
                '''
                另一种方法是note_obj = Notes(title = title, content=content,username = user_obj, notes_type = notes_type_obj)
                note_obj.save()
                '''
                data["statues"] = 1
                # if there is no mistake, redirect to a new page
                return redirect("/blog/addnotesuccess/")
            except Exception, e:
                data["message"] = e.message
                print e.message
                # if there is error, redirect to addnote ,
                # here might need changes to keep the content that user input
                return redirect("/blog/addnote/")
        else:
            return render_to_response("addnote.html")
    else:
        return redirect("/blog/login/")
def addnotesuccess(request):
    """Confirmation page shown after a note was added.

    TODO (carried over from the original): auto-redirect back to the notes
    page after a short delay.
    """
    template_name = "addnotesuccess.html"
    return render_to_response(template_name)
def signinsuccess(request):
    """Confirmation page shown after a successful sign-up."""
    template_name = "signinsuccess.html"
    return render_to_response(template_name)
# @login_required()
def chat(request):
    """Render the chat page for the currently logged-in user.

    NOTE (carried over): there is no guard for anonymous visitors -- when no
    session entry exists, the subscript below raises instead of redirecting
    to the login page.
    """
    credentials = request.session.get("login_info", None)
    current_user = User.objects.get(username=credentials['username'])
    return render_to_response("webchat.html", {'user_obj': current_user})
# @login_required
def sendchat(request):
    # AJAX endpoint: persist a chat message, enqueue it for the recipient's
    # long-poll queue, and echo it back to the sender as a JSON list.
    data = {"statues":0, "content": "", "user": "", "message": "","time":'', "username_to": ''}
    login_statues = request.session.get("login_info", None)
    username_from = login_statues['username']
    username_to = request.POST.get('username_to')
    print 'the request in send chat js:', request.POST
    print 'the username_to in send chat js:', username_to
    # Lazily create a message queue for both participants.
    if username_from not in GLOBAL_GROUP_CHAT_DICT:
        # if there is no queue for user, create a queue.
        GLOBAL_GROUP_CHAT_DICT[username_from] = Queue.Queue()# create a queue for every user
    if username_to not in GLOBAL_GROUP_CHAT_DICT:
        GLOBAL_GROUP_CHAT_DICT[username_to] = Queue.Queue()  # create a queue for every user
    if request.method == "POST":
        try:
            content = request.POST.get('content')
            print content
            user_obj = User.objects.get(username = username_from)
            # Persist the message; the created object itself is not used further.
            chat_obj = ChatContent.objects.create(content=content, user = user_obj)
            data["statues"] = 1
            data["content"] = content
            data['time'] = datetime.datetime.now()
            data["user"] = login_statues['username']
            data['username_to'] = username_to
            GLOBAL_GROUP_CHAT_DICT[username_to].put(data)  # put chatting data to queue
        except Exception, e:
            data["statues"] = 0
            data["message"] = e.message
            return HttpResponse(e.message)
        else:
            chat_list = []
            chat_list.append(data)
            # CJsonEncoder handles the datetime in data['time'].
            return HttpResponse(json.dumps(chat_list, cls=CJsonEncoder))
    else:
        return HttpResponse("fail")
# add judgement, get the msg of active user. something is need to distinguish where the msg comes from
def get_new_messages(request):
if request.POST.get("username_from", None):
# test whether there is usename in request, test one is enough
username_from = request.POST.get("username_from")
username_to = request.POST.get("username_to")
print "username:", username_from, username_to
# if username_from not in GLOBAL_GROUP_CHAT_DICT:
# # if there is no queue for user, create a queue.
# GLOBAL_GROUP_CHAT_DICT[username_from][username_to] = Queue.Queue() # create a queue for every user
# if username_to not in GLOBAL_GROUP_CHAT_DICT:
# GLOBAL_GROUP_CHAT_DICT[username_to][username_from] = Queue.Queue() # create a queue for every user
chat_list = []
# have a judge if this is the first time and username_from is not in the GLOBAL_dict
if username_from in GLOBAL_GROUP_CHAT_DICT: # from or to?
messages_count = GLOBAL_GROUP_CHAT_DICT[username_from].qsize() # get message from queue
print "username_from in dict", GLOBAL_GROUP_CHAT_DICT[username_from]
# something need here, username_from will get all the messages that send to username_from
# regard less of who send it.
else:
messages_count = 0
# there is need to judge the value of messages_count, because if is 0, the program
# will not go into the loop, and the get method of Queue will not block, the program
# will out of control, since there is javascript function recursion(递归).
if messages_count:
for i in range(messages_count):
print "message count not = 0"
try:
chat_list.append(GLOBAL_GROUP_CHAT_DICT[username_from].get(timeout=60))
except Exception, e:
print e.message
else:
print "message count = 0"
try:
chat_list.append(GLOBAL_GROUP_CHAT_DICT[username_from].get(timeout=60))
except Exception, e:
print "error"
print e.message
print "chat list in get new message.", chat_list
return HttpResponse(json.dumps(chat_list, cls=CJsonEncoder))
else:
print "username_dict is missing..."
return HttpResponse("username_dict is missing...")
def upload_file(request):
# print request.POST
# print request.FILES
# where to get filename
file_obj = request.FILES.get("file", None)
if file_obj:
# 获取文件,在前端formdata用file做名字,此时同名获取。
file_name = "blog/static/files/%s" % file_obj.name
receive_size = 0
with open(file_name,"wb") as file:
# 分块写文件
for chunk in file_obj.chunks():
file.write(chunk)
receive_size += len(chunk)
GLOBAL_SIZE = {file_obj.name, receive_size }
print "File: %s ---uploaded successfully!" % file_obj.name
return HttpResponse("/".join(file_name.split("/")[1:]))
else:
return HttpResponse("file in request is None, may be it is because it is TemporaryFile.")
# def upload_progress(request):
# # get filename from request, look for dict and get the size of file, then send to backend
# filename = request.GET.get("filename")
# progress = GLOBAL_SIZE[filename]
# print "uploading progress %s" % progress
# return HttpResponse(progress)
| [
"mapeaks@126.com"
] | mapeaks@126.com |
dce5fda1a159a2df6e4b0f055c71dcd1dcc4353e | 3801829d505d9fe2ba56d59eeeb601d2a6a918b2 | /tencentcloud/gse/v20191112/gse_client.py | d7bb3fa9d01c6445b0ef2e96f9c0628b7c045e49 | [
"Apache-2.0"
] | permissive | leolu8/tencentcloud-sdk-python | 5f02dc2fb93d5f32e3a37dbe121e33f1ec6ac95c | 7f14e67a6ce94050916b7cdb5bd284d6aaf7f2ac | refs/heads/master | 2022-12-22T09:10:07.158810 | 2020-09-29T00:31:05 | 2020-09-29T00:31:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63,020 | py | # -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.gse.v20191112 import models
class GseClient(AbstractClient):
"""Client for the Tencent Cloud GSE (Game Server Engine) API."""
# API version and service endpoint this client is pinned to.
_apiVersion = '2019-11-12'
_endpoint = 'gse.tencentcloudapi.com'
def AttachCcnInstances(self, request):
    """Associate Cloud Connect Network (CCN) instances (AttachCcnInstances).

    :param request: Request instance for AttachCcnInstances.
    :type request: :class:`tencentcloud.gse.v20191112.models.AttachCcnInstancesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.AttachCcnInstancesResponse`
    """
    try:
        # Serialize, invoke the API, and unwrap the JSON envelope.
        payload = json.loads(self.call("AttachCcnInstances", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.AttachCcnInstancesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        # SDK exceptions propagate unchanged; anything else is wrapped.
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def CreateAlias(self, request):
    """Create an alias (CreateAlias).

    :param request: Request instance for CreateAlias.
    :type request: :class:`tencentcloud.gse.v20191112.models.CreateAliasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.CreateAliasResponse`
    """
    try:
        payload = json.loads(self.call("CreateAlias", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.CreateAliasResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def CreateAsset(self, request):
    """Create an asset, i.e. a game server package (CreateAsset).

    The package zip must first be uploaded to COS, either via a presigned
    URL (GetUploadCredentials) or via a temporary key
    (GetUploadFederationToken); the resulting bucket key is then passed to
    this call to register the asset.

    :param request: Request instance for CreateAsset.
    :type request: :class:`tencentcloud.gse.v20191112.models.CreateAssetRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.CreateAssetResponse`
    """
    try:
        payload = json.loads(self.call("CreateAsset", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.CreateAssetResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def CreateGameServerSession(self, request):
    """Create a game server session (CreateGameServerSession).

    :param request: Request instance for CreateGameServerSession.
    :type request: :class:`tencentcloud.gse.v20191112.models.CreateGameServerSessionRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.CreateGameServerSessionResponse`
    """
    try:
        payload = json.loads(self.call("CreateGameServerSession", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.CreateGameServerSessionResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DeleteAlias(self, request):
    """Delete an alias (DeleteAlias).

    :param request: Request instance for DeleteAlias.
    :type request: :class:`tencentcloud.gse.v20191112.models.DeleteAliasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DeleteAliasResponse`
    """
    try:
        payload = json.loads(self.call("DeleteAlias", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DeleteAliasResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DeleteAsset(self, request):
    """Delete an asset (DeleteAsset).

    :param request: Request instance for DeleteAsset.
    :type request: :class:`tencentcloud.gse.v20191112.models.DeleteAssetRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DeleteAssetResponse`
    """
    try:
        payload = json.loads(self.call("DeleteAsset", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DeleteAssetResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DeleteFleet(self, request):
    """Delete a server fleet (DeleteFleet).

    :param request: Request instance for DeleteFleet.
    :type request: :class:`tencentcloud.gse.v20191112.models.DeleteFleetRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DeleteFleetResponse`
    """
    try:
        payload = json.loads(self.call("DeleteFleet", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DeleteFleetResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DeleteScalingPolicy(self, request):
    """Delete a scaling policy (DeleteScalingPolicy).

    :param request: Request instance for DeleteScalingPolicy.
    :type request: :class:`tencentcloud.gse.v20191112.models.DeleteScalingPolicyRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DeleteScalingPolicyResponse`
    """
    try:
        payload = json.loads(self.call("DeleteScalingPolicy", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DeleteScalingPolicyResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeAlias(self, request):
    """Retrieve the details of an alias (DescribeAlias).

    :param request: Request instance for DescribeAlias.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeAliasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeAliasResponse`
    """
    try:
        payload = json.loads(self.call("DescribeAlias", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeAliasResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeAsset(self, request):
    """Retrieve information about an asset (DescribeAsset).

    :param request: Request instance for DescribeAsset.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeAssetRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeAssetResponse`
    """
    try:
        payload = json.loads(self.call("DescribeAsset", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeAssetResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeAssets(self, request):
    """List assets (DescribeAssets).

    :param request: Request instance for DescribeAssets.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeAssetsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeAssetsResponse`
    """
    try:
        payload = json.loads(self.call("DescribeAssets", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeAssetsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeCcnInstances(self, request):
    """Query associated CCN instances (DescribeCcnInstances).

    :param request: Request instance for DescribeCcnInstances.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeCcnInstancesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeCcnInstancesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeCcnInstances", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeCcnInstancesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeFleetAttributes(self, request):
    """Query server fleet attributes (DescribeFleetAttributes).

    :param request: Request instance for DescribeFleetAttributes.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeFleetAttributesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeFleetAttributesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeFleetAttributes", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeFleetAttributesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeFleetEvents(self, request):
    """Query the event list of a server fleet deployment (DescribeFleetEvents).

    :param request: Request instance for DescribeFleetEvents.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeFleetEventsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeFleetEventsResponse`
    """
    try:
        payload = json.loads(self.call("DescribeFleetEvents", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeFleetEventsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeFleetPortSettings(self, request):
    """Get the security-group/port settings of a fleet (DescribeFleetPortSettings).

    :param request: Request instance for DescribeFleetPortSettings.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeFleetPortSettingsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeFleetPortSettingsResponse`
    """
    try:
        payload = json.loads(self.call("DescribeFleetPortSettings", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeFleetPortSettingsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeFleetUtilization(self, request):
    """Query utilization information of a server fleet (DescribeFleetUtilization).

    :param request: Request instance for DescribeFleetUtilization.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeFleetUtilizationRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeFleetUtilizationResponse`
    """
    try:
        payload = json.loads(self.call("DescribeFleetUtilization", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeFleetUtilizationResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeGameServerSessionDetails(self, request):
    """Query the game server session detail list (DescribeGameServerSessionDetails).

    :param request: Request instance for DescribeGameServerSessionDetails.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionDetailsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionDetailsResponse`
    """
    try:
        payload = json.loads(self.call("DescribeGameServerSessionDetails", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeGameServerSessionDetailsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeGameServerSessionPlacement(self, request):
    """Query the placement of a game server session (DescribeGameServerSessionPlacement).

    :param request: Request instance for DescribeGameServerSessionPlacement.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionPlacementRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionPlacementResponse`
    """
    try:
        payload = json.loads(self.call("DescribeGameServerSessionPlacement", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeGameServerSessionPlacementResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeGameServerSessionQueues(self, request):
    """Query game server session queues (DescribeGameServerSessionQueues).

    :param request: Request instance for DescribeGameServerSessionQueues.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionQueuesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionQueuesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeGameServerSessionQueues", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeGameServerSessionQueuesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeGameServerSessions(self, request):
    """List game server sessions (DescribeGameServerSessions).

    :param request: Request instance for DescribeGameServerSessions.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeGameServerSessionsResponse`
    """
    try:
        payload = json.loads(self.call("DescribeGameServerSessions", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeGameServerSessionsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeInstanceTypes(self, request):
    """List available server instance types (DescribeInstanceTypes).

    :param request: Request instance for DescribeInstanceTypes.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeInstanceTypesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeInstanceTypesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeInstanceTypes", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeInstanceTypesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeInstances(self, request):
    """List server instances (DescribeInstances).

    :param request: Request instance for DescribeInstances.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeInstancesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeInstancesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeInstances", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeInstancesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribePlayerSessions(self, request):
    """List player sessions (DescribePlayerSessions).

    :param request: Request instance for DescribePlayerSessions.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribePlayerSessionsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribePlayerSessionsResponse`
    """
    try:
        payload = json.loads(self.call("DescribePlayerSessions", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribePlayerSessionsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeRuntimeConfiguration(self, request):
    """Get the runtime configuration of a server fleet (DescribeRuntimeConfiguration).

    :param request: Request instance for DescribeRuntimeConfiguration.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeRuntimeConfigurationRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeRuntimeConfigurationResponse`
    """
    try:
        payload = json.loads(self.call("DescribeRuntimeConfiguration", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeRuntimeConfigurationResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeScalingPolicies(self, request):
    """Query dynamic scaling policies of a deployment (DescribeScalingPolicies).

    :param request: Request instance for DescribeScalingPolicies.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeScalingPoliciesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeScalingPoliciesResponse`
    """
    try:
        payload = json.loads(self.call("DescribeScalingPolicies", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeScalingPoliciesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeUserQuota(self, request):
    """Get the user's quota for a single module (DescribeUserQuota).

    :param request: Request instance for DescribeUserQuota.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeUserQuotaRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeUserQuotaResponse`
    """
    try:
        payload = json.loads(self.call("DescribeUserQuota", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeUserQuotaResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DescribeUserQuotas(self, request):
    """Get the user's quotas (DescribeUserQuotas).

    :param request: Request instance for DescribeUserQuotas.
    :type request: :class:`tencentcloud.gse.v20191112.models.DescribeUserQuotasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DescribeUserQuotasResponse`
    """
    try:
        payload = json.loads(self.call("DescribeUserQuotas", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DescribeUserQuotasResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def DetachCcnInstances(self, request):
    """Disassociate CCN instances (DetachCcnInstances).

    :param request: Request instance for DetachCcnInstances.
    :type request: :class:`tencentcloud.gse.v20191112.models.DetachCcnInstancesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.DetachCcnInstancesResponse`
    """
    try:
        payload = json.loads(self.call("DetachCcnInstances", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.DetachCcnInstancesResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def GetGameServerSessionLogUrl(self, request):
    """Get the log URL of a game server session (GetGameServerSessionLogUrl).

    :param request: Request instance for GetGameServerSessionLogUrl.
    :type request: :class:`tencentcloud.gse.v20191112.models.GetGameServerSessionLogUrlRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.GetGameServerSessionLogUrlResponse`
    """
    try:
        payload = json.loads(self.call("GetGameServerSessionLogUrl", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.GetGameServerSessionLogUrlResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def GetInstanceAccess(self, request):
    """Get the credentials needed to log in to an instance (GetInstanceAccess).

    :param request: Request instance for GetInstanceAccess.
    :type request: :class:`tencentcloud.gse.v20191112.models.GetInstanceAccessRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.GetInstanceAccessResponse`
    """
    try:
        payload = json.loads(self.call("GetInstanceAccess", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.GetInstanceAccessResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def GetUploadCredentials(self, request):
    """Get temporary authorization for uploading a package (GetUploadCredentials).

    After obtaining the credentials, upload the data via the COS API and
    pass the resulting BucketKey to CreateAsset to register the package.

    :param request: Request instance for GetUploadCredentials.
    :type request: :class:`tencentcloud.gse.v20191112.models.GetUploadCredentialsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.GetUploadCredentialsResponse`
    """
    try:
        payload = json.loads(self.call("GetUploadCredentials", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.GetUploadCredentialsResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def GetUploadFederationToken(self, request):
    """Get the temporary key required for uploading a package (GetUploadFederationToken).

    :param request: Request instance for GetUploadFederationToken.
    :type request: :class:`tencentcloud.gse.v20191112.models.GetUploadFederationTokenRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.GetUploadFederationTokenResponse`
    """
    try:
        payload = json.loads(self.call("GetUploadFederationToken", request._serialize()))["Response"]
        if "Error" in payload:
            err = payload["Error"]
            raise TencentCloudSDKException(err["Code"], err["Message"], payload["RequestId"])
        result = models.GetUploadFederationTokenResponse()
        result._deserialize(payload)
        return result
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
def JoinGameServerSession(self, request):
    """Join a game server session.

    :param request: Request instance for JoinGameServerSession.
    :type request: :class:`tencentcloud.gse.v20191112.models.JoinGameServerSessionRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.JoinGameServerSessionResponse`
    """
    try:
        params = request._serialize()
        body = self.call("JoinGameServerSession", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.JoinGameServerSessionResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ListAliases(self, request):
    """Retrieve all aliases under the account.

    :param request: Request instance for ListAliases.
    :type request: :class:`tencentcloud.gse.v20191112.models.ListAliasesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.ListAliasesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ListAliases", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListAliasesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ListFleets(self, request):
    """Get the list of server fleets.

    :param request: Request instance for ListFleets.
    :type request: :class:`tencentcloud.gse.v20191112.models.ListFleetsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.ListFleetsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ListFleets", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListFleetsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def PutScalingPolicy(self, request):
    """Configure a dynamic scaling policy for a fleet.

    :param request: Request instance for PutScalingPolicy.
    :type request: :class:`tencentcloud.gse.v20191112.models.PutScalingPolicyRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.PutScalingPolicyResponse`
    """
    try:
        params = request._serialize()
        body = self.call("PutScalingPolicy", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.PutScalingPolicyResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ResolveAlias(self, request):
    """Get the fleetId an alias currently points to.

    :param request: Request instance for ResolveAlias.
    :type request: :class:`tencentcloud.gse.v20191112.models.ResolveAliasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.ResolveAliasResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ResolveAlias", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ResolveAliasResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def SearchGameServerSessions(self, request):
    """Search the list of game server sessions.

    :param request: Request instance for SearchGameServerSessions.
    :type request: :class:`tencentcloud.gse.v20191112.models.SearchGameServerSessionsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.SearchGameServerSessionsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("SearchGameServerSessions", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.SearchGameServerSessionsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def SetServerWeight(self, request):
    """Set the weight of a server.

    :param request: Request instance for SetServerWeight.
    :type request: :class:`tencentcloud.gse.v20191112.models.SetServerWeightRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.SetServerWeightResponse`
    """
    try:
        params = request._serialize()
        body = self.call("SetServerWeight", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.SetServerWeightResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def StartFleetActions(self, request):
    """Enable auto-scaling for a server fleet.

    :param request: Request instance for StartFleetActions.
    :type request: :class:`tencentcloud.gse.v20191112.models.StartFleetActionsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.StartFleetActionsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("StartFleetActions", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.StartFleetActionsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def StartGameServerSessionPlacement(self, request):
    """Start placing a game server session.

    :param request: Request instance for StartGameServerSessionPlacement.
    :type request: :class:`tencentcloud.gse.v20191112.models.StartGameServerSessionPlacementRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.StartGameServerSessionPlacementResponse`
    """
    try:
        params = request._serialize()
        body = self.call("StartGameServerSessionPlacement", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.StartGameServerSessionPlacementResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def StartMatchPlacement(self, request):
    """Start a match placement of a game server session.

    :param request: Request instance for StartMatchPlacement.
    :type request: :class:`tencentcloud.gse.v20191112.models.StartMatchPlacementRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.StartMatchPlacementResponse`
    """
    try:
        params = request._serialize()
        body = self.call("StartMatchPlacement", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.StartMatchPlacementResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def StopFleetActions(self, request):
    """Stop fleet auto-scaling, switching the fleet to manual scaling.

    :param request: Request instance for StopFleetActions.
    :type request: :class:`tencentcloud.gse.v20191112.models.StopFleetActionsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.StopFleetActionsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("StopFleetActions", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.StopFleetActionsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def StopGameServerSessionPlacement(self, request):
    """Stop placing a game server session.

    :param request: Request instance for StopGameServerSessionPlacement.
    :type request: :class:`tencentcloud.gse.v20191112.models.StopGameServerSessionPlacementRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.StopGameServerSessionPlacementResponse`
    """
    try:
        params = request._serialize()
        body = self.call("StopGameServerSessionPlacement", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.StopGameServerSessionPlacementResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateAlias(self, request):
    """Update the attributes of an alias.

    :param request: Request instance for UpdateAlias.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateAliasRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateAliasResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateAlias", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateAliasResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateAsset(self, request):
    """Modify asset package information.

    :param request: Request instance for UpdateAsset.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateAssetRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateAssetResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateAsset", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateAssetResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateFleetAttributes(self, request):
    """Update the attributes of a server fleet.

    :param request: Request instance for UpdateFleetAttributes.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateFleetAttributesRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateFleetAttributesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateFleetAttributes", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateFleetAttributesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateFleetPortSettings(self, request):
    """Update the security group (port settings) of a server fleet.

    :param request: Request instance for UpdateFleetPortSettings.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateFleetPortSettingsRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateFleetPortSettingsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateFleetPortSettings", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateFleetPortSettingsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateGameServerSession(self, request):
    """Update a game server session.

    :param request: Request instance for UpdateGameServerSession.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateGameServerSessionRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateGameServerSessionResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateGameServerSession", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateGameServerSessionResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def UpdateRuntimeConfiguration(self, request):
    """Update the runtime configuration of a server fleet.

    :param request: Request instance for UpdateRuntimeConfiguration.
    :type request: :class:`tencentcloud.gse.v20191112.models.UpdateRuntimeConfigurationRequest`
    :rtype: :class:`tencentcloud.gse.v20191112.models.UpdateRuntimeConfigurationResponse`
    """
    try:
        params = request._serialize()
        body = self.call("UpdateRuntimeConfiguration", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.UpdateRuntimeConfigurationResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Plain exceptions have no ``message`` attribute on Python 3;
            # report the real error instead of raising an AttributeError.
            raise TencentCloudSDKException(type(e).__name__, str(e))
"tencentcloudapi@tenent.com"
] | tencentcloudapi@tenent.com |
71c6c990cb067a053461d52af67c6a7f6bfa3c21 | 075c07c4e6efebbcdec670c07712281ed7ba659e | /traceback_format.py | fd82ab2d76aa08c4033e50b691553f48e7a1258d | [] | no_license | crystalDf/Automate-the-Boring-Stuff-with-Python-Chapter-10-Debugging | 86380e0e4a71656bd7638255252d470fe27b35e1 | 75dfc3bb0272fc799f6c618a7ccdef6820702bb0 | refs/heads/master | 2021-01-20T19:39:16.394050 | 2016-07-27T15:30:22 | 2016-07-27T15:30:22 | 63,609,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import traceback
# Demonstrate persisting a traceback to a file so it can be inspected
# after the program exits.
try:
    raise Exception('This is the error message.')
except Exception:
    # Context manager guarantees the file is closed even if writing fails;
    # the previous version printed write()'s return value (a character
    # count), which was debug noise.
    with open('errorInfo.txt', 'w') as error_file:
        error_file.write(traceback.format_exc())
    print('The traceback info was written to errorInfo.txt.')
| [
"chendong333@gmail.com"
] | chendong333@gmail.com |
b76a62675bc28f59873d2f2c5a1d84bcb2675553 | 58e09fac582a76428819e167e42e60765d11bb11 | /space/lib/python3.7/io.py | a9f5be7852a0d74823a1951ad571ddbbe08fb411 | [] | no_license | shanthimadugundi/DB_Project | 25eb2a0e7504f81484ad11c0fa9e902b038c85b4 | b5ba55af1bcddde164cecc60d331d615dd477165 | refs/heads/master | 2020-04-27T05:14:56.107466 | 2019-03-06T05:31:23 | 2019-03-06T05:31:23 | 174,075,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | /Users/shanthimadugundi/anaconda3/lib/python3.7/io.py | [
"shanthimadugundi@Shanthis-MacBook-Pro.local"
] | shanthimadugundi@Shanthis-MacBook-Pro.local |
f562169114f72ae7fd9906281c117a34e96029bb | f7a20374403b55189cc5db6e8fa34d0ba290387c | /modules/smm_marketing/__openerp__.py | 407d9650426c5d52c098a18e603a7044ae1b3ba3 | [] | no_license | dark-ice/upink_modules | 1a7b5a165cc5e05396c62cf33c261b907c23e33c | c497bf87a39796f1df3877542359b1927bec3a76 | refs/heads/master | 2021-05-01T04:40:16.436666 | 2014-04-12T15:09:31 | 2014-04-12T15:09:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,683 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP module manifest for the SMM - Marketing addon.
{
'name': 'SMM - Marketing',
'version': '1.0',
'category': 'SMM tools',
'description': """
Accounts SMM
""",
'author': 'Upsale dep IS',
'website': 'http://www.upsale.ru',
# Other addons that must be installed first.
'depends': ['hr',],
# View/security definitions loaded on install or update.
# NOTE(review): 'update_xml' is the legacy key name; newer OpenERP/Odoo
# versions use 'data' — confirm against the target server version.
'update_xml': [
'security/smm_marketing_security.xml',
'smm_socialnet_view.xml',
'smm_fotohost_view.xml',
'smm_videohost_view.xml',
'smm_email_view.xml',
'smm_blogs_view.xml',
'smm_stpres_view.xml',
'smm_forum_view.xml',
'smm_mobphone_view.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | [
"karbanovich.andrey@gmail.com"
] | karbanovich.andrey@gmail.com |
fa237d48bc666c059cf88d567f64c6ae48cb8b0d | 271dbb5f0c23ae40f19a8df7dd3f15a44fbe5ae1 | /EdmureBlog/EdmureBlog/settings.py | 65db68c5b7535414c71de5b01c5cb3823e4aa1fa | [] | no_license | obligate/python3-king | a4d1c5c145c3b1c42efe059cf2bbd797d0b3c528 | 2b31400468c7a2621f29f24f82e682eb07c0e17d | refs/heads/master | 2020-05-02T11:45:16.218771 | 2019-03-27T08:05:39 | 2019-03-27T08:05:39 | 177,938,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,756 | py | """
Django settings for EdmureBlog project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and load
# from the environment before deploying.
SECRET_KEY = '%q_ke6ok7im7x_-=0mdz+9*!rxvraey(xje=92f$(an4s)-7ls'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['127.0.0.1', ]

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'repository',
    'backend',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # NOTE(review): CSRF protection is disabled; every POST form is exposed
    # to cross-site request forgery — confirm this is intentional.
    # 'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'EdmureBlog.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'EdmureBlog.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)

# LOGGING = {
#     'version': 1,
#     'disable_existing_loggers': False,
#     'handlers': {
#         'file': {
#             'level': 'DEBUG',
#             'class': 'logging.FileHandler',
#             'filename': os.path.join(BASE_DIR,'debug.log'),
#         },
#     },
#     'loggers': {
#         'django': {
#             'handlers': ['file'],
#             'level': 'DEBUG',
#             'propagate': True,
#         },
#     },
# }

# Sessions expire after one day and are refreshed on every request.
SESSION_COOKIE_AGE = 60 * 60 * 24
SESSION_SAVE_EVERY_REQUEST = True
| [
"peter@tidebuy.net"
] | peter@tidebuy.net |
eeb786c8a7d518628fe96db228bbf2089d27276b | 0a48086ea4dd24cf696aab16fc3969c5980f1442 | /gcpdjango/apps/users/utils.py | c34df67721671cfd06d7fcd7d74ca1fa7676693e | [
"MIT"
] | permissive | stanford-rc/gcp-django-stanford | 67b7b0b532b3c4b7236ec80ad66892e979b52dda | a8d72130e03f96c7d9636b951b780e478594309d | refs/heads/master | 2022-11-21T03:25:03.980764 | 2020-07-31T18:40:56 | 2020-07-31T18:40:56 | 282,962,179 | 2 | 0 | MIT | 2020-07-31T18:40:58 | 2020-07-27T17:03:03 | HTML | UTF-8 | Python | false | false | 3,245 | py | import string
import random
from gcpdjango.settings import SENDGRID_API_KEY, SENDGRID_SENDER_EMAIL
from django.contrib import messages
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import (
Mail,
Email,
To,
Content,
Attachment,
FileContent,
FileName,
FileType,
Disposition,
)
import base64
import os
def send_email(
    email_to,
    message,
    subject,
    attachment=None,
    filetype="application/pdf",
    request=None,
    filename=None,
):
    """Send ``message`` (optionally with an attachment) to ``email_to`` via SendGrid.

    Requires SENDGRID_API_KEY and SENDGRID_SENDER_EMAIL to be configured in
    settings; otherwise a warning is attached to ``request`` (when provided)
    and False is returned.

    Parameters
    ==========
    email_to: the email to send the message to
    message: the plain-text content for the body
    subject: the email subject
    attachment: path of a file on the server to attach
    filetype: MIME content type of the attachment
    request: optional Django request used to surface warnings to the user
    filename: optional name to give the attachment

    Returns True when SendGrid accepted the message, False otherwise.
    """
    if not SENDGRID_API_KEY or not SENDGRID_SENDER_EMAIL:
        if request is not None:
            messages.warning(
                request,
                "SendGrid secrets were not found in the environment. Please see https://vsoch.github.io/gcpdjango/docs/getting-started/#sendgrid-secrets",
            )
        return False

    mail = Mail(
        Email(SENDGRID_SENDER_EMAIL),
        To(email_to),
        subject,
        Content("text/plain", message),
    )

    # If the user has provided an attachment, add it to the Mail object.
    # (Previously this was assigned onto the ``message`` string parameter by
    # mistake, so attachments were never actually sent.)
    if attachment:
        mail.attachment = generate_attachment(
            filepath=attachment, filetype=filetype, filename=filename
        )

    try:
        sg = SendGridAPIClient(api_key=SENDGRID_API_KEY)
        response = sg.client.mail.send.post(request_body=mail.get())
        print(response.status_code)
        print(response.headers)
        return True
    except Exception as e:
        # Plain exceptions have no ``message`` attribute on Python 3;
        # str(e) reports the real error.
        print(str(e))
        return False
def generate_attachment(filepath, filetype="application/pdf", filename=None):
    """Build a SendGrid Attachment object from a file on disk.

    The file is read as bytes and base64 encoded, as required by the
    SendGrid API. Returns None when ``filepath`` does not exist.

    Parameters
    ==========
    filepath: the file path to attach on the server.
    filetype: MIME content type (defaults to application/pdf)
    filename: a filename for the attachment (defaults to the file's basename)
    """
    if not os.path.exists(filepath):
        return

    # Read and base64 encode the file contents.
    with open(filepath, "rb") as handle:
        raw = handle.read()

    attachment = Attachment()
    attachment.file_content = FileContent(base64.b64encode(raw).decode())
    attachment.file_type = FileType(filetype)
    attachment.file_name = FileName(filename or os.path.basename(filepath))
    attachment.disposition = Disposition("attachment")
    return attachment
def generate_random_password(length=10):
    """Generate a random password of ``length`` letters and digits.

    Uses ``random.SystemRandom`` (an OS-backed, cryptographically secure
    source) rather than the default Mersenne Twister, since passwords are
    security sensitive. Note: despite the previous docstring, no special
    characters are included — only ASCII letters and digits.
    """
    password_characters = string.ascii_letters + string.digits
    rng = random.SystemRandom()
    return "".join(rng.choice(password_characters) for _ in range(length))
| [
"vsochat@stanford.edu"
] | vsochat@stanford.edu |
9c10f9833c7b2a77775984733c373b5641329b23 | 23556b966ee6a3abbe42ee5b66e13af3dce0cf71 | /u24_lymphocyte/third_party/treeano/nodes/tests/composite_test.py | eccc06bc2a77e4301650a679bfcae56d78ee2209 | [
"BSD-3-Clause"
] | permissive | SBU-BMI/quip_classification | 74dbf6d41e579755a952ba475f8a89bd82ac8530 | be61e5f047093243404f6f2dc8e837e27e8e1eb3 | refs/heads/master | 2022-07-30T20:29:48.459298 | 2022-07-17T16:59:15 | 2022-07-17T16:59:15 | 162,736,219 | 5 | 14 | BSD-3-Clause | 2022-02-11T02:53:02 | 2018-12-21T16:36:39 | Python | UTF-8 | Python | false | false | 5,315 | py | import nose.tools as nt
import numpy as np
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
fX = theano.config.floatX
def test_dense_node_serialization():
    # DenseNode must round-trip through serialization both with and
    # without the num_units hyperparameter set.
    tn.check_serialization(tn.DenseNode("a"))
    tn.check_serialization(tn.DenseNode("a", num_units=100))
def test_dense_combine_node_serialization():
    # DenseCombineNode (with an empty child list) must round-trip through
    # serialization both with and without num_units set.
    tn.check_serialization(tn.DenseCombineNode("a", []))
    tn.check_serialization(tn.DenseCombineNode("a", [], num_units=100))
def test_dense_node():
    # Stack three DenseNodes and check that the output keeps the leading
    # (batch) dimension and takes the unit count of the final layer.
    network = tn.SequentialNode(
        "seq",
        [tn.InputNode("in", shape=(3, 4, 5)),
         tn.DenseNode("fc1", num_units=6),
         tn.DenseNode("fc2", num_units=7),
         tn.DenseNode("fc3", num_units=8)]
    ).network()
    x = np.random.randn(3, 4, 5).astype(fX)
    fn = network.function(["in"], ["fc3"])
    res = fn(x)[0]
    nt.assert_equal(res.shape, (3, 8))
def test_dense_combine_node():
    # Same shape check as test_dense_node, but using DenseCombineNode with
    # a single identity child at each layer.
    network = tn.SequentialNode(
        "seq",
        [tn.InputNode("in", shape=(3, 4, 5)),
         tn.DenseCombineNode("fc1", [tn.IdentityNode("i1")], num_units=6),
         tn.DenseCombineNode("fc2", [tn.IdentityNode("i2")], num_units=7),
         tn.DenseCombineNode("fc3", [tn.IdentityNode("i3")], num_units=8)]
    ).network()
    x = np.random.randn(3, 4, 5).astype(fX)
    fn = network.function(["in"], ["fc3"])
    res = fn(x)[0]
    nt.assert_equal(res.shape, (3, 8))
def test_dense_node_and_dense_combine_node1():
    # testing that dense node and dense combine node with identity child
    # return the same thing
    # Both networks use ConstantInit(1) so their weights are identical and
    # the outputs can be compared numerically.
    network1 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.DenseNode("fc1", num_units=6),
             tn.DenseNode("fc2", num_units=7),
             tn.DenseNode("fc3", num_units=8)]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    network2 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.DenseCombineNode("fc1", [tn.IdentityNode("i1")], num_units=6),
             tn.DenseCombineNode("fc2", [tn.IdentityNode("i2")], num_units=7),
             tn.DenseCombineNode("fc3", [tn.IdentityNode("i3")], num_units=8)]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    x = np.random.randn(3, 4, 5).astype(fX)
    fn1 = network1.function(["in"], ["fc3"])
    fn2 = network2.function(["in"], ["fc3"])
    np.testing.assert_allclose(fn1(x), fn2(x))
def test_dense_node_and_dense_combine_node2():
    # testing that summing the output of 2 dense nodes is the same as
    # applying a dense combine node with 2 identities (+ bias)
    # and the same as multiplying the output of 1 dense node by 2
    # (all weights are ConstantInit(1), so the three formulations agree)
    network0 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.DenseNode("dense1", num_units=6),
             tn.MultiplyConstantNode("mul", value=2)]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    network1 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.ElementwiseSumNode(
                 "sum",
                 [tn.DenseNode("dense1", num_units=6),
                  tn.DenseNode("dense2", num_units=6)])]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    network2 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.DenseCombineNode(
                 "fc",
                 [tn.IdentityNode("i1"),
                  tn.IdentityNode("i2")],
                 num_units=6),
             tn.AddBiasNode("bias")]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    x = np.random.randn(3, 4, 5).astype(fX)
    fn0 = network0.function(["in"], ["hp"])
    fn1 = network1.function(["in"], ["hp"])
    fn2 = network2.function(["in"], ["hp"])
    np.testing.assert_allclose(fn0(x), fn1(x))
    np.testing.assert_allclose(fn0(x), fn2(x))
def test_dense_combine_node_uses_children():
    # Multiplying the input before the DenseCombineNode must be equivalent
    # to multiplying inside each of its children — i.e. the children are
    # actually applied to the input.
    network1 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.MultiplyConstantNode("mul", value=2),
             tn.DenseCombineNode(
                 "fc",
                 [tn.IdentityNode("i1"),
                  tn.IdentityNode("i2")],
                 num_units=6)]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    network2 = tn.HyperparameterNode(
        "hp",
        tn.SequentialNode(
            "seq",
            [tn.InputNode("in", shape=(3, 4, 5)),
             tn.DenseCombineNode(
                 "fc",
                 [tn.MultiplyConstantNode("mul1", value=2),
                  tn.MultiplyConstantNode("mul2", value=2)],
                 num_units=6)]
        ),
        inits=[treeano.inits.ConstantInit(1)]
    ).network()
    x = np.random.randn(3, 4, 5).astype(fX)
    fn1 = network1.function(["in"], ["hp"])
    fn2 = network2.function(["in"], ["hp"])
    np.testing.assert_allclose(fn1(x), fn2(x))
| [
"sabousamra@cs.stonybrook.edu"
] | sabousamra@cs.stonybrook.edu |
55ff4fe9356b729e0c86ab973ecb0755cbe6c0bf | 6418b2bca8c9d95a69109e2fe4b0a827f9a87ddd | /cloudmesh/pi/board/led.py | b6abe06d0b12d0aae28eb57e5922230ca9ef7ccd | [
"Apache-2.0",
"Python-2.0"
] | permissive | nitesh-jaswal/cloudmesh-pi-cluster | 49d31baecd08eef3e8fc355fbf72fbcd655ebd35 | 804a7f0f93fb06161bccb4c9ff0fcecc93854747 | refs/heads/master | 2022-11-26T12:52:20.313904 | 2020-08-04T21:52:11 | 2020-08-04T21:52:11 | 273,601,432 | 0 | 0 | NOASSERTION | 2020-08-04T21:52:12 | 2020-06-19T23:02:02 | Python | UTF-8 | Python | false | false | 7,945 | py | import os
import time
from pprint import pprint
from cloudmesh.common.Host import Host
from cloudmesh.common.Tabulate import Printer
from cloudmesh.common.parameter import Parameter
"""
From: https://www.raspberrypi.org/forums/viewtopic.php?t=12530
If you want the LED to go back to its default function:
echo mmc0 >/sys/class/leds/led0/trigger
As an aside, there are a couple of kernel modules you can load up (ledtrig_timer
and ledtrig_heartbeat) which will flash the LED for you.
modprobe ledtrig_heartbeat
echo heartbeat >/sys/class/leds/led0/trigger
Once you have turned off the mmc0 trigger, you can use GPIO16 to control the
LED. It's active-low, so you need to set the pin low to turn the LED on, and
high to turn it off.
"""
class LED:
    """Controls the on-board Raspberry Pi LEDs via /sys/class/leds.

    led0 is the green LED and led1 the red LED (see the red/green mapping
    in execute()).  Local operations write to the sysfs brightness files
    directly; remote operations run the same shell commands over ssh with
    cloudmesh's Host.ssh.

    Implements:

        pi led (red|green) VALUE
        pi led (red|green) VALUE NAMES [--user=USER]
        pi led list NAMES [--user=USER]
        pi led blink (red|green) NAMES [--user=USER] [--rate=SECONDS]
    """

    # TODO: implement: cms pi led reset # to original setting
    # TODO: implement: cms pi led list --trigger list, list the triggers

    def __init__(self):
        pass

    def Print(self, arguments, results):
        """Print ssh command results as a table or as raw python data."""
        if arguments.output == 'table':
            print(Printer.write(results,
                                order=['host', 'success', 'stdout']))
        else:
            pprint(results)

    def Print_leds(self, arguments, results):
        """Print per-host LED states as a table or as raw python data."""
        if arguments.output == 'table':
            print(Printer.write(results,
                                order=['host', 'green', 'red']))
        else:
            pprint(results)

    def execute(self, arguments):
        """Dispatch the parsed command line arguments to the LED operations."""
        # Map the color flag to the sysfs LED number: led0=green, led1=red.
        if arguments.red:
            arguments.number = 1
        elif arguments.green:
            arguments.number = 0
        if arguments.sequence:
            # NOTE(review): sequence_remote returns None, so Print_leds
            # receives no results here -- confirm whether per-host results
            # should be collected and returned instead.
            results = LED.sequence_remote(
                led=arguments.number,
                hosts=arguments.NAMES,
                username=arguments.user,
                rate=arguments.RATE,
                processors=3)
            self.Print_leds(arguments, results)
        elif arguments.blink:
            # NOTE(review): blink_remote also returns None, see above.
            results = LED.blink_remote(
                led=arguments.number,
                hosts=arguments.NAMES,
                username=arguments.user,
                rate=arguments.RATE,
                processors=3)
            self.Print_leds(arguments, results)
        elif arguments.list:
            results = LED.list_remote(
                hosts=arguments.NAMES,
                username=arguments.user,
                processors=3)
            self.Print_leds(arguments, results)
        elif arguments.reset and not arguments.NAMES and arguments.led:
            LED.reset()
        elif arguments.reset and arguments.NAMES and arguments.led:
            results = LED.reset_remote(
                hosts=arguments.NAMES,
                username=arguments.user,
                processors=3)
            self.Print(arguments, results)
        # elif not arguments.NAMES and arguments.led:
        #     LED.set(led=arguments.number, value=arguments.VALUE)
        elif arguments.NAMES and arguments.led:
            results = LED.set_remote(
                led=arguments.number,
                value=arguments.VALUE,
                hosts=arguments.NAMES,
                username=arguments.user,
                processors=3)
            self.Print(arguments, results)

    @staticmethod
    def get_state(value):
        """Normalize a user supplied LED value to an integer brightness.

        Strings "1", "on", "true", "+" (case-insensitive) map to 1;
        "0", "off", "false", "-" map to 0; any other string is converted
        with int().  Non-string values are returned unchanged.
        """
        state = value
        if type(value) == str:
            if value.lower() in ["1", "on", "true", "+"]:
                state = 1
            elif value.lower() in ["0", "off", "false", "-"]:
                state = 0
            else:
                state = int(value)
        return state

    @staticmethod
    def reset():
        """Restore led0 to its default mmc0 (SD card activity) trigger locally."""
        # Plain string: there are no placeholders (the stray f prefix was removed).
        command = "echo mmc0 >/sys/class/leds/led0/trigger"
        os.system(command)

    @staticmethod
    def reset_remote(
            hosts=None,
            username=None,
            processors=3):
        """Restore led0 to its default mmc0 trigger on the given hosts.

        Returns:
            The Host.ssh results.  BUGFIX: the result used to be dropped,
            so execute() printed None instead of the per-host results.
        """
        command = "echo mmc0 >/sys/class/leds/led0/trigger"
        result = Host.ssh(hosts=hosts,
                          command=command,
                          username=username,
                          key="~/.ssh/id_rsa.pub",
                          processors=processors,
                          executor=os.system)
        return result

    @staticmethod
    def set(led=None, value=1):
        """Set the brightness of a local LED.

        Args:
            led: 0 for the green LED, 1 for the red LED.
            value: brightness; normalized with get_state().

        Raises:
            ValueError: if led is not 0 or 1.
        """
        if led not in [1, 0]:
            raise ValueError("Led number is wrong")
        state = LED.get_state(value)
        if led == 0:
            # Switch it off first; technically the trigger should be disabled
            # first, then the LED would not have to be switched off here.
            # BUGFIX: the continuation strings were missing the f prefix, so
            # the literal text "{led}" ended up in the shell command.
            command = f"echo 0 | " \
                      f"sudo tee /sys/class/leds/led{led}/brightness " \
                      f">> /dev/null"
            os.system(command)
        # BUGFIX: same missing f prefix on the continuation string as above.
        command = f"echo {state} | " \
                  f"sudo tee /sys/class/leds/led{led}/brightness >> /dev/null"
        os.system(command)

    @staticmethod
    def set_remote(
            led=None,
            value=1,
            hosts=None,
            username=None,
            processors=3):
        """Set the brightness of an LED on the given hosts via ssh.

        Returns:
            The Host.ssh results.

        Raises:
            ValueError: if led is not 0 or 1.
        """
        if led not in [1, 0]:
            raise ValueError("Led number is wrong")
        state = LED.get_state(value)
        command = f"echo {state} |" \
                  f" sudo tee /sys/class/leds/led{led}/brightness" \
                  f" >> /dev/null"
        print("command", command)
        result = Host.ssh(hosts=hosts,
                          command=command,
                          username=username,
                          key="~/.ssh/id_rsa.pub",
                          processors=processors,
                          executor=os.system)
        return result

    @staticmethod
    def blink_remote(
            led=None,
            hosts=None,
            username=None,
            rate=None,
            processors=3):
        """Blink an LED three times on all given hosts simultaneously.

        Args:
            rate: seconds between off and on; defaults to 0.5.

        Raises:
            ValueError: if led is not 0 or 1.
        """
        if led not in [1, 0]:
            raise ValueError("Led number is wrong")
        rate = float(rate or 0.5)
        for i in range(0, 3):
            LED.set_remote(
                led=led,
                value="0",
                hosts=hosts,
                username=username,
                processors=processors)
            time.sleep(rate)
            LED.set_remote(
                led=led,
                value="1",
                hosts=hosts,
                username=username,
                processors=processors)
            time.sleep(rate)
        return None

    @staticmethod
    def sequence_remote(
            led=None,
            hosts=None,
            username=None,
            rate=None,
            processors=3):
        """Blink an LED once on each host in turn (a 'running light').

        Args:
            rate: seconds between off and on; defaults to 0.5.

        Raises:
            ValueError: if led is not 0 or 1.
        """
        if led not in [1, 0]:
            raise ValueError("Led number is wrong")
        rate = float(rate or 0.5)
        hosts = Parameter.expand(hosts)
        for host in hosts:
            LED.set_remote(
                led=led,
                value="0",
                hosts=host,
                username=username,
                processors=processors)
            time.sleep(rate)
            LED.set_remote(
                led=led,
                value="1",
                hosts=host,
                username=username,
                processors=processors)
            time.sleep(rate)
        return None

    @staticmethod
    def list_remote(
            hosts=None,
            username=None,
            processors=3):
        """Read the brightness of both LEDs on the given hosts.

        Returns:
            The Host.ssh results with 'green' (led0) and 'red' (led1)
            fields split out of each host's stdout.
        """
        command = f"cat" \
                  " /sys/class/leds/led0/brightness" \
                  " /sys/class/leds/led1/brightness"
        results = Host.ssh(hosts=hosts,
                           command=command,
                           username=username,
                           key="~/.ssh/id_rsa.pub",
                           processors=processors,
                           executor=os.system)
        for result in results:
            # cat prints led0 (green) first, then led1 (red), one per line.
            result["green"], result["red"] = result["stdout"].split("\n", 1)
        return results
| [
"laszewski@gmail.com"
] | laszewski@gmail.com |
178ebe48b20de522d522befa1f047f91edf82428 | 4289fcc440e0cf3d2ecaca03bd2cb1a40933f8fc | /dtformats/recycler.py | 7c2f3c368d1e9f32eebd78e298c581e9be311590 | [
"Apache-2.0"
] | permissive | ydkhatri/dtformats | 4251563ad8a42dbfb8f293890ed844e29b2c856a | 692d53616f7c89e5ff4d6950778c46d3b94a0130 | refs/heads/master | 2020-04-20T23:41:11.816017 | 2019-02-03T12:40:55 | 2019-02-03T14:32:54 | 169,174,860 | 2 | 0 | Apache-2.0 | 2019-02-05T01:16:42 | 2019-02-05T01:16:41 | null | UTF-8 | Python | false | false | 4,752 | py | # -*- coding: utf-8 -*-
"""Windows Recycler INFO2 files."""
from __future__ import unicode_literals
from dtformats import data_format
from dtformats import errors
class RecyclerInfo2File(data_format.BinaryDataFile):
  """Windows Recycler INFO2 file."""
  # dtFabric definition file describing the on-disk structures.
  _DEFINITION_FILE = 'recycler.yaml'
  # (attribute, description, formatter callback) triples used when printing
  # debug information about a file entry.
  _DEBUG_INFO_FILE_ENTRY = [
      ('original_filename', 'Original filename (ANSI)', '_FormatANSIString'),
      ('index', 'Index', '_FormatIntegerAsDecimal'),
      ('drive_number', 'Drive number', '_FormatIntegerAsDecimal'),
      ('deletion_time', 'Deletion time', '_FormatIntegerAsFiletime'),
      ('original_file_size', 'Original file size', '_FormatIntegerAsDecimal')]
  # (attribute, description, formatter callback) triples used when printing
  # debug information about the file header.
  _DEBUG_INFO_FILE_HEADER = [
      ('unknown1', 'Unknown1', '_FormatIntegerAsHexadecimal8'),
      ('number_of_file_entries', 'Number of file entries',
       '_FormatIntegerAsDecimal'),
      ('unknown2', 'Unknown2', '_FormatIntegerAsHexadecimal8'),
      ('file_entry_size', 'File entry size', '_FormatIntegerAsDecimal'),
      ('unknown3', 'Unknown3', '_FormatIntegerAsHexadecimal8')]
  def __init__(self, debug=False, output_writer=None):
    """Initializes a Windows Recycler INFO2 file.
    Args:
      debug (Optional[bool]): True if debug information should be written.
      output_writer (Optional[OutputWriter]): output writer.
    """
    super(RecyclerInfo2File, self).__init__(
        debug=debug, output_writer=output_writer)
    # Codepage used to decode the ANSI original filename strings.
    self._codepage = 'cp1252'
    # Size of a single file entry; set from the file header (280 or 800).
    self._file_entry_data_size = 0
  def _FormatANSIString(self, string):
    """Formats an ANSI string.
    Args:
      string (str): string.
    Returns:
      str: formatted ANSI string.
    Raises:
      ParseError: if the string could not be decoded.
    """
    # The string can contain remnant data after the end-of-string character,
    # so only keep the bytes up to the first NUL byte.
    string = string.split(b'\x00')[0]
    try:
      return string.decode(self._codepage)
    except UnicodeDecodeError as exception:
      raise errors.ParseError(
          'Unable to decode ANSI string with error: {0!s}.'.format(exception))
  def _ReadFileEntry(self, file_object):
    """Reads the file entry.
    Args:
      file_object (file): file-like object.
    Raises:
      ParseError: if the file entry cannot be read.
    """
    file_offset = file_object.tell()
    # Read the whole fixed-size entry record in one go.
    file_entry_data = self._ReadData(
        file_object, file_offset, self._file_entry_data_size, 'file entry')
    data_type_map = self._GetDataTypeMap('recycler_info2_file_entry')
    try:
      file_entry = self._ReadStructureFromByteStream(
          file_entry_data, file_offset, data_type_map, 'file entry')
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map file entry data at offset: 0x{0:08x} with error: '
          '{1!s}').format(file_offset, exception))
    if self._debug:
      self._DebugPrintStructureObject(file_entry, self._DEBUG_INFO_FILE_ENTRY)
    # Entries larger than 280 bytes additionally store the original filename
    # as an UTF-16 little-endian string after the fixed 280-byte (ANSI) part.
    if self._file_entry_data_size > 280:
      file_offset += 280
      data_type_map = self._GetDataTypeMap(
          'recycler_info2_file_entry_utf16le_string')
      try:
        original_filename = self._ReadStructureFromByteStream(
            file_entry_data[280:], file_offset, data_type_map, 'file entry')
      except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError((
            'Unable to map file entry data at offset: 0x{0:08x} with error: '
            '{1!s}').format(file_offset, exception))
      if self._debug:
        self._DebugPrintValue('Original filename (Unicode)', original_filename)
    if self._debug:
      self._DebugPrintText('\n')
  def _ReadFileHeader(self, file_object):
    """Reads the file header.
    Args:
      file_object (file): file-like object.
    Raises:
      ParseError: if the file header cannot be read.
    """
    data_type_map = self._GetDataTypeMap('recycler_info2_file_header')
    file_header, _ = self._ReadStructureFromFileObject(
        file_object, 0, data_type_map, 'file header')
    if self._debug:
      self._DebugPrintStructureObject(file_header, self._DEBUG_INFO_FILE_HEADER)
    # Only the known entry sizes are supported: 280 (ANSI-only filename) and
    # 800 (ANSI filename plus trailing UTF-16 filename).
    if file_header.file_entry_size not in (280, 800):
      raise errors.ParseError('Unsupported file entry size: {0:d}'.format(
          file_header.file_entry_size))
    self._file_entry_data_size = file_header.file_entry_size
  def ReadFileObject(self, file_object):
    """Reads a Windows Recycler INFO2 file-like object.
    Args:
      file_object (file): file-like object.
    Raises:
      ParseError: if the file cannot be read.
    """
    self._ReadFileHeader(file_object)
    # File entries are fixed-size records that directly follow the header.
    file_offset = file_object.tell()
    while file_offset < self._file_size:
      self._ReadFileEntry(file_object)
      file_offset += self._file_entry_data_size
| [
"joachim.metz@gmail.com"
] | joachim.metz@gmail.com |
8d35fadf93c1e4e0d7f0a894831ab7b3ad385070 | 2194b6c17f3153c5976d6ac4a9ab78211027adab | /otoroshi_admin_api_client/models/otoroshimodels_infra_provider_match_type.py | 94a545a8ff9ec8adb0fa47d5f3f07861625e89b3 | [] | no_license | krezreb/otoroshi-admin-api-client | 7fab5e873c9c5950d77fffce6bcf80d3fdf4c319 | 9b3156c11eac227024cfe4a26c0129618deb2c4d | refs/heads/master | 2023-05-08T08:32:00.982987 | 2021-05-27T09:55:00 | 2021-05-27T09:55:00 | 371,324,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | from enum import Enum
class OtoroshimodelsInfraProviderMatchType(str, Enum):
    """Infra provider match types used by the Otoroshi admin API.

    Mixes in ``str`` so that members compare equal to, and serialize as,
    their raw API string values.
    """

    ALWAYSMATCH = "AlwaysMatch"
    NETWORKLOCATIONMATCH = "NetworkLocationMatch"
    GEOLOCATIONMATCH = "GeolocationMatch"

    def __str__(self) -> str:
        # The member value is already a plain string; return it as-is.
        return self.value
| [
"josephbeeson@gmail.com"
] | josephbeeson@gmail.com |
1a3371799d39e8df80c7bf8ff391b29ea564c666 | 482ca5a05c3e34eb0c5b9eb239b86288fa3ea5be | /lilac2/const.py | 5d623e05a3db8e66eda7abb6976aecbbc8e377b5 | [] | no_license | renyuneyun/lilac | de8462deb6275f8ea8e540ad71de10313d976250 | c224a65ac810a8aaba05c410c5b07683a5055868 | refs/heads/master | 2020-03-14T04:23:42.518149 | 2019-10-28T16:20:10 | 2019-10-28T16:21:17 | 131,441,132 | 1 | 0 | null | 2018-04-28T20:15:02 | 2018-04-28T20:15:02 | null | UTF-8 | Python | false | false | 335 | py | from pathlib import Path
import types
mydir = Path('~/.lilac').expanduser()
AUR_REPO_DIR = mydir / 'aur'
AUR_REPO_DIR.mkdir(parents=True, exist_ok=True)
SPECIAL_FILES = ('package.list', 'lilac.py', 'lilac.yaml', '.gitignore')
_G = types.SimpleNamespace()
# repo: Repo
# mod: LilacMod
# pkgver: Optional[str]
# pkgrel: Optional[str]
| [
"lilydjwg@gmail.com"
] | lilydjwg@gmail.com |
6ed4c3efbfd2ae0af22e5a4be77058890cced371 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-facebody/aliyunsdkfacebody/request/v20191230/MonitorExaminationRequest.py | 91ad61a32de73924cfe85364b9c031859e147480 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,621 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkfacebody.endpoint import endpoint_data
class MonitorExaminationRequest(RpcRequest):
    """Request object for the facebody ``MonitorExamination`` API (2019-12-30).

    The request is sent as an HTTP POST; ``Type`` and ``ImageURL`` are
    carried as body parameters.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'facebody', '2019-12-30', 'MonitorExamination','facebody')
        self.set_method('POST')
        # Attach the product endpoint tables when this SDK build provides them.
        if hasattr(self, "endpoint_map"):
            self.endpoint_map = endpoint_data.getEndpointMap()
        if hasattr(self, "endpoint_regional"):
            self.endpoint_regional = endpoint_data.getEndpointRegional()

    def get_Type(self):  # Long
        """Return the ``Type`` body parameter, or None when unset."""
        body_params = self.get_body_params()
        return body_params.get('Type')

    def set_Type(self, Type):  # Long
        """Set the ``Type`` body parameter."""
        self.add_body_params('Type', Type)

    def get_ImageURL(self):  # String
        """Return the ``ImageURL`` body parameter, or None when unset."""
        body_params = self.get_body_params()
        return body_params.get('ImageURL')

    def set_ImageURL(self, ImageURL):  # String
        """Set the ``ImageURL`` body parameter."""
        self.add_body_params('ImageURL', ImageURL)
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
c3ee35fe9436a493ef738377774073c953dba7cc | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /n4JA3je7FEFfZKaWp_11.py | 7b07527a21b2f44574d4b5255563e5c16f1eaaeb | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,465 | py | """
You landed your dream job. They pay in geometric progression (see resources).
In your first month of work, you will get your first paycheck `first_month`.
For every month after, your paycheck will be `multiplier` times bigger than
the previous paycheck.
Create a function that takes the `first_month`'s paycheck and the `multiplier`
and returns the number of months it took for you to save up more than one
million. The problem assumes you save 100% of every paycheck.
### Examples
million_in_month(10, 2) ➞ 17
million_in_month(100, 1.01) ➞ 464
million_in_month(50, 100) ➞ 4
# Month 1 = 50 (paycheck 50)
# Month 2 = 5050 (paycheck 5,000 + 50 already saved)
# Month 3 = 505050 (paycheck 500,000 + 5,050 already saved)
# Month 4 = 50505050 (paycheck 50,000,000 + 505,050 already saved)
### Notes
* Don't forget to return the result in the number of months.
* Return when your savings are greater than 1,000,000.
"""
def million_in_month(first_month, multiplier):
    """Return the number of months until total savings exceed 1,000,000.

    Paychecks follow a geometric progression: ``first_month`` in month 1,
    then each month's paycheck is ``multiplier`` times the previous one.
    Every paycheck is saved in full; the returned count includes the month
    in which the savings first pass one million.

    BUGFIX: the previous version skipped the second month's paycheck and
    instead added ``paycheck * multiplier`` each month, understating the
    savings by ``first_month * multiplier`` (e.g. a 2,000,000 first
    paycheck incorrectly took 3 months instead of 1).
    """
    goal = 1000000
    savings = 0
    paycheck = first_month
    months = 0
    # Collect one paycheck per month until the accumulated savings
    # strictly exceed the goal.
    while savings <= goal:
        savings += paycheck
        paycheck *= multiplier
        months += 1
    return months
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
33f6368a00941a84a7f75fc5b8debc37bf78b59e | 3d626f6034eddda6122feb81a5c98c8a3dab9d20 | /08-def-type-hints/messages/no_hints/messages.py | df037e7fa4ee7666a450ae417e60e5916bf32bae | [
"MIT"
] | permissive | eliaskousk/example-code-2e | b8d4f9ce86a55c2e7b905d2d1f94a5c867485fa2 | 28d6d033156831a77b700064997c05a40a83805f | refs/heads/master | 2022-07-13T19:12:57.425453 | 2022-04-24T20:41:30 | 2022-04-24T20:41:30 | 216,843,265 | 2 | 0 | MIT | 2019-10-22T15:09:18 | 2019-10-22T15:09:17 | null | UTF-8 | Python | false | false | 371 | py | """
# tag::SHOW_COUNT_DOCTEST[]
>>> show_count(99, 'bird')
'99 birds'
>>> show_count(1, 'bird')
'1 bird'
>>> show_count(0, 'bird')
'no birds'
# end::SHOW_COUNT_DOCTEST[]
"""
# tag::SHOW_COUNT[]
def show_count(count, word):
    """Return *count* followed by *word*, naively pluralized with 's'.

    A count of 1 keeps the singular form; a count of 0 is rendered as
    the word 'no'.
    """
    if count == 1:
        return f'1 {word}'
    prefix = 'no' if not count else str(count)
    return f'{prefix} {word}s'
| [
"luciano@ramalho.org"
] | luciano@ramalho.org |
526677bfe45d954fc32b6679c20729126f9eda0a | 14ddda0c376f984d2a3f7dcd0ca7aebb7c49648d | /bnn_mcmc_examples/examples/mlp/penguins/hmc/pilot_run.py | 97dd6cec9d34ff10a9f74c188fb2a7a85f034672 | [
"MIT"
] | permissive | papamarkou/bnn_mcmc_examples | 62dcd9cc0cf57cda39aa46c2f2f237bbcd2d35bb | 7bb4ecfb33db4c30a8e61e31f528bda0efb24e3d | refs/heads/main | 2023-07-12T20:51:28.302981 | 2021-08-22T13:06:17 | 2021-08-22T13:06:17 | 316,554,634 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 755 | py | # %% Import packages
from datetime import timedelta
from timeit import default_timer as timer
from bnn_mcmc_examples.examples.mlp.penguins.constants import num_burnin_epochs, num_epochs, verbose, verbose_step
from bnn_mcmc_examples.examples.mlp.penguins.hmc.constants import sampler_output_pilot_path
from bnn_mcmc_examples.examples.mlp.penguins.hmc.sampler import sampler
# %% Run HMC sampler
# Time the full sampling run; epoch counts and verbosity come from the
# example's shared constants module.
start_time = timer()
sampler.run(num_epochs=num_epochs, num_burnin_epochs=num_burnin_epochs, verbose=verbose, verbose_step=verbose_step)
end_time = timer()
print("Time taken: {}".format(timedelta(seconds=end_time-start_time)))
# %% Save chain array
# Persist the sampled parameters and per-sample acceptance flags to the pilot
# run output path.  NOTE(review): mode='w' presumably overwrites any previous
# pilot output -- confirm against to_chainfile's semantics.
sampler.get_chain().to_chainfile(keys=['sample', 'accepted'], path=sampler_output_pilot_path, mode='w')
| [
"theodore.papamarkou@gmail.com"
] | theodore.papamarkou@gmail.com |
adabad459eeb610321cf8aca55eaa46f883670a0 | 209aae9f40657d48461bed5e081c4f235f86090a | /2019/day23-1.py | 84c2010cfdb302f687417c5081575342cd025e26 | [] | no_license | scheidguy/Advent_of_Code | 6e791132157179928e1415f49467ad221ef1e258 | fbc09e4d26502b9a77e0c8d2840b11ec85a3c478 | refs/heads/main | 2023-03-05T12:34:15.343642 | 2021-02-20T00:27:58 | 2021-02-20T00:27:58 | 329,106,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,618 | py |
from copy import deepcopy
# import numpy as np
# Read the Intcode program: a single comma-separated line of integers.
f = open('day23-1_input.txt')
# f = open('day23-1_debug.txt')
text = f.readlines()
f.close()
program = [int(i) for i in text[0].strip().split(',')]
# Extend memory with zeros; the program may address beyond its own text.
program.extend([0 for _ in range(10**4)])
# 50 networked computers (NICs), each with its own copy of the program.
NIC = [deepcopy(program)for computer in range(50)]
# Each NIC's input queue is seeded with its own network address (0..49).
inputs = [[network_address] for network_address in range(50)]
foundit = False
# Per-NIC saved instruction pointer and relative base, so each machine can be
# resumed where it left off on the next scheduling round.
poss = [0 for network_address in range(50)]
rels = [0 for network_address in range(50)]
numpackets = -1
while True:
    numpackets += 1
    # Round-robin scheduler: give every computer a slice of execution.
    for comp in range(50):
        program = NIC[comp]
        outputs = []
        pos = poss[comp]
        rel = rels[comp]
        while True:
            # Decode the instruction: zero-pad to 5 digits, the last two are
            # the opcode and the leading three are the parameter modes.
            command = str(program[pos])
            command = '0'*(5-len(command)) + command
            opcode = command[-2:]
            modes = command[0:-2]
            if opcode == '99':
                # print(out)
                break
            # First parameter: mode 0 = position, 1 = immediate, 2 = relative.
            if modes[2] == '0': first = program[program[pos+1]]
            if modes[2] == '1': first = program[pos+1]
            if modes[2] == '2': first = program[rel + program[pos+1]]
            if opcode in ['01', '02', '05', '06', '07', '08']:
                # Second parameter, same mode handling.
                if modes[1] == '0': second = program[program[pos+2]]
                if modes[1] == '1': second = program[pos+2]
                if modes[1] == '2': second = program[rel + program[pos+2]]
            if opcode in ['01', '02', '07', '08']:
                # Third parameter is a write target, so it resolves to an address.
                if modes[0] == '0': third = program[pos+3]
                if modes[0] == '1': third = pos+3
                if modes[0] == '2': third = rel + program[pos+3]
            if opcode == '01':
                # add
                program[third] = first + second
                pos += 4
            elif opcode == '02':
                # multiply
                program[third] = first * second
                pos += 4
            elif opcode == '03':
                # input: pop from this NIC's queue, or -1 when the queue is empty
                if len(inputs[comp]) == 0: inp = -1
                else: inp = inputs[comp].pop(0)
                if modes[2] == '0': program[program[pos+1]] = inp
                if modes[2] == '1': program[pos+1] = inp
                if modes[2] == '2': program[rel + program[pos+1]] = inp
                pos += 2
                # After consuming a complete X/Y pair, save state and yield
                # to the next computer.
                if len(inputs[comp]) % 2 == 0:
                    poss[comp] = pos
                    rels[comp] = rel
                    break
            elif opcode == '04':
                # output: collect values; a packet is (destination, X, Y)
                out = first
                outputs.append(out)
                pos += 2
                if len(outputs) == 3:
                    # A packet to address 255 ends the puzzle: print its Y value.
                    if outputs[0] == 255: print(outputs[2]);foundit=True;break
                    if outputs[0] < 0 or outputs[0] > 49: print(f'address: {outputs[0]}')
                    # Deliver X then Y to the destination NIC's input queue.
                    inputs[outputs[0]].append(outputs[1])
                    inputs[outputs[0]].append(outputs[2])
                    outputs = []
                    # Save state and yield after sending one packet.
                    poss[comp] = pos
                    rels[comp] = rel
                    break
            elif opcode == '05':
                # jump-if-true
                if first == 0: pos += 3
                elif first != 0: pos = second
            elif opcode == '06':
                # jump-if-false
                if first != 0: pos += 3
                elif first == 0: pos = second
            elif opcode == '07':
                # less-than
                if first < second: program[third] = 1
                else: program[third] = 0
                pos += 4
            elif opcode == '08':
                # equals
                if first == second: program[third] = 1
                else: program[third] = 0
                pos += 4
            elif opcode == '09':
                # adjust the relative base
                rel += first
                pos += 2
            else: print('ERROR');break
        # Propagate the terminating condition up through all three loops.
        if foundit: break
    if foundit: break
"scheidguy@gmail.com"
] | scheidguy@gmail.com |
2f7bc0bae1d87b15295111cba5e7d06c0d38d474 | 80b7f2a10506f70477d8720e229d7530da2eff5d | /uhd_restpy/testplatform/sessions/ixnetwork/quicktest/learnframes_eb6bce41bb67df974840a112bf5ea44c.py | 3999e3bab765dea389e502027ec0ca16e8789c74 | [
"MIT"
] | permissive | OpenIxia/ixnetwork_restpy | 00fdc305901aa7e4b26e4000b133655e2d0e346a | c8ecc779421bffbc27c906c1ea51af3756d83398 | refs/heads/master | 2023-08-10T02:21:38.207252 | 2023-07-19T14:14:57 | 2023-07-19T14:14:57 | 174,170,555 | 26 | 16 | MIT | 2023-02-02T07:02:43 | 2019-03-06T15:27:20 | Python | UTF-8 | Python | false | false | 24,837 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from uhd_restpy.base import Base
from uhd_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class LearnFrames(Base):
"""The learning frames that IxNetwork sends during the test.
The LearnFrames class encapsulates a required learnFrames resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'learnFrames'
_SDM_ATT_MAP = {
'LearnFrameSize': 'learnFrameSize',
'LearnFrequency': 'learnFrequency',
'LearnNumFrames': 'learnNumFrames',
'LearnRate': 'learnRate',
'LearnWaitTime': 'learnWaitTime',
'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit',
'LearningCountRandomFrameSize': 'learningCountRandomFrameSize',
'LearningFrameSizeMode': 'learningFrameSizeMode',
'LearningFramesizeFixedValue': 'learningFramesizeFixedValue',
'LearningFramesizeList': 'learningFramesizeList',
'LearningMaxIncrementFrameSize': 'learningMaxIncrementFrameSize',
'LearningMaxRandomFrameSize': 'learningMaxRandomFrameSize',
'LearningMinIncrementFrameSize': 'learningMinIncrementFrameSize',
'LearningMinRandomFrameSize': 'learningMinRandomFrameSize',
'LearningStepIncrementFrameSize': 'learningStepIncrementFrameSize',
}
_SDM_ENUM_MAP = {
'learnFrequency': ['onBinaryIteration'],
'learningFrameSizeMode': ['custom', 'fixed', 'increment', 'random'],
}
def __init__(self, parent, list_op=False):
super(LearnFrames, self).__init__(parent, list_op)
@property
def LearnFrameSize(self):
# type: () -> int
"""
Returns
-------
- number: Specifies the size of the learning frames.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize'])
@LearnFrameSize.setter
def LearnFrameSize(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value)
@property
def LearnFrequency(self):
# type: () -> str
"""
Returns
-------
- str(onBinaryIteration): Allows to choose how frequently IxNetwork sends learning frames during the test.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency'])
@LearnFrequency.setter
def LearnFrequency(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value)
@property
def LearnNumFrames(self):
# type: () -> int
"""
Returns
-------
- number: Specifies the number of learning frames that IxNetwork sends for each address.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames'])
@LearnNumFrames.setter
def LearnNumFrames(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value)
@property
def LearnRate(self):
# type: () -> int
"""
Returns
-------
- number: Specifies the rate at which IxNetwork sends learn frames to the DUT.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnRate'])
@LearnRate.setter
def LearnRate(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value)
@property
def LearnWaitTime(self):
# type: () -> int
"""
Returns
-------
- number: Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime'])
@LearnWaitTime.setter
def LearnWaitTime(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value)
@property
def LearnWaitTimeBeforeTransmit(self):
# type: () -> int
"""
Returns
-------
- number: The time in ms that IxNetwork waits before sending all the learning frames from all the ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'])
@LearnWaitTimeBeforeTransmit.setter
def LearnWaitTimeBeforeTransmit(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value)
@property
def LearningCountRandomFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningCountRandomFrameSize'])
@LearningCountRandomFrameSize.setter
def LearningCountRandomFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningCountRandomFrameSize'], value)
@property
def LearningFrameSizeMode(self):
# type: () -> str
"""
Returns
-------
- str(custom | fixed | increment | random): NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningFrameSizeMode'])
@LearningFrameSizeMode.setter
def LearningFrameSizeMode(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningFrameSizeMode'], value)
@property
def LearningFramesizeFixedValue(self):
# type: () -> int
"""
Returns
-------
- number: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningFramesizeFixedValue'])
@LearningFramesizeFixedValue.setter
def LearningFramesizeFixedValue(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningFramesizeFixedValue'], value)
@property
def LearningFramesizeList(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningFramesizeList'])
@LearningFramesizeList.setter
def LearningFramesizeList(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningFramesizeList'], value)
@property
def LearningMaxIncrementFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningMaxIncrementFrameSize'])
@LearningMaxIncrementFrameSize.setter
def LearningMaxIncrementFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningMaxIncrementFrameSize'], value)
@property
def LearningMaxRandomFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningMaxRandomFrameSize'])
@LearningMaxRandomFrameSize.setter
def LearningMaxRandomFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningMaxRandomFrameSize'], value)
@property
def LearningMinIncrementFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningMinIncrementFrameSize'])
@LearningMinIncrementFrameSize.setter
def LearningMinIncrementFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningMinIncrementFrameSize'], value)
@property
def LearningMinRandomFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningMinRandomFrameSize'])
@LearningMinRandomFrameSize.setter
def LearningMinRandomFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningMinRandomFrameSize'], value)
@property
def LearningStepIncrementFrameSize(self):
# type: () -> str
"""
Returns
-------
- str: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['LearningStepIncrementFrameSize'])
@LearningStepIncrementFrameSize.setter
def LearningStepIncrementFrameSize(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LearningStepIncrementFrameSize'], value)
def update(self, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None, LearningCountRandomFrameSize=None, LearningFrameSizeMode=None, LearningFramesizeFixedValue=None, LearningFramesizeList=None, LearningMaxIncrementFrameSize=None, LearningMaxRandomFrameSize=None, LearningMinIncrementFrameSize=None, LearningMinRandomFrameSize=None, LearningStepIncrementFrameSize=None):
# type: (int, str, int, int, int, int, str, str, int, str, str, str, str, str, str) -> LearnFrames
"""Updates learnFrames resource on the server.
Args
----
- LearnFrameSize (number): Specifies the size of the learning frames.
- LearnFrequency (str(onBinaryIteration)): Allows to choose how frequently IxNetwork sends learning frames during the test.
- LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each address.
- LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the DUT.
- LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports.
- LearnWaitTimeBeforeTransmit (number): The time in ms that IxNetwork waits before sending all the learning frames from all the ports.
- LearningCountRandomFrameSize (str): NOT DEFINED
- LearningFrameSizeMode (str(custom | fixed | increment | random)): NOT DEFINED
- LearningFramesizeFixedValue (number): NOT DEFINED
- LearningFramesizeList (str): NOT DEFINED
- LearningMaxIncrementFrameSize (str): NOT DEFINED
- LearningMaxRandomFrameSize (str): NOT DEFINED
- LearningMinIncrementFrameSize (str): NOT DEFINED
- LearningMinRandomFrameSize (str): NOT DEFINED
- LearningStepIncrementFrameSize (str): NOT DEFINED
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None, LearningCountRandomFrameSize=None, LearningFrameSizeMode=None, LearningFramesizeFixedValue=None, LearningFramesizeList=None, LearningMaxIncrementFrameSize=None, LearningMaxRandomFrameSize=None, LearningMinIncrementFrameSize=None, LearningMinRandomFrameSize=None, LearningStepIncrementFrameSize=None):
    # type: (int, str, int, int, int, int, str, str, int, str, str, str, str, str, str) -> LearnFrames
    """Finds and retrieves learnFrames resources from the server.

    All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve learnFrames resources from the server.
    To retrieve an exact match ensure the parameter value starts with ^ and ends with $
    By default the find method takes no parameters and will retrieve all learnFrames resources from the server.

    Args
    ----
    - LearnFrameSize (number): Specifies the size of the learning frames.
    - LearnFrequency (str(onBinaryIteration)): Allows to choose how frequently IxNetwork sends learning frames during the test.
    - LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each address.
    - LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the DUT.
    - LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports.
    - LearnWaitTimeBeforeTransmit (number): The time in ms that IxNetwork waits before sending all the learning frames from all the ports.
    - LearningCountRandomFrameSize (str): NOT DEFINED
    - LearningFrameSizeMode (str(custom | fixed | increment | random)): NOT DEFINED
    - LearningFramesizeFixedValue (number): NOT DEFINED
    - LearningFramesizeList (str): NOT DEFINED
    - LearningMaxIncrementFrameSize (str): NOT DEFINED
    - LearningMaxRandomFrameSize (str): NOT DEFINED
    - LearningMinIncrementFrameSize (str): NOT DEFINED
    - LearningMinRandomFrameSize (str): NOT DEFINED
    - LearningStepIncrementFrameSize (str): NOT DEFINED

    Returns
    -------
    - self: This instance with matching learnFrames resources retrieved from the server available through an iterator or index

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() hands every named argument above to _map_locals; keep this as
    # the first statement of the method body.
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
    """Retrieves a single instance of learnFrames data from the server.

    Args
    ----
    - href (str): An href to the instance to be retrieved

    Returns
    -------
    - self: This instance with the learnFrames resources from the server
      available through an iterator or index

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    result = self._read(href)
    return result
def Apply(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the apply operation on the server.

    Applies the specified Quick Test.

    apply(async_operation=bool)
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the applyAsync operation on the server.

    applyAsync(async_operation=bool)
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[bool, None]
    """Executes the applyAsyncResult operation on the server.

    applyAsyncResult(async_operation=bool)bool
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.
    - Returns bool:

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the applyITWizardConfiguration operation on the server.

    Applies the specified Quick Test.

    applyITWizardConfiguration(async_operation=bool)
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[str, None]
    """Executes the generateReport operation on the server.

    Generate a PDF report for the last succesfull test run.

    generateReport(async_operation=bool)string
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.
    - Returns str: This method is asynchronous and has no return value.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the run operation on the server.

    Starts the specified Quick Test and waits for its execution to finish.

    The IxNetwork model allows for multiple method Signatures with the same
    name while python does not.

    run(async_operation=bool)list
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): This method is synchronous and returns the result of the test.

    run(InputParameters=string, async_operation=bool)list
    - InputParameters (str): The input arguments of the test.
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): This method is synchronous and returns the result of the test.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the start operation on the server.

    Starts the specified Quick Test.

    The IxNetwork model allows for multiple method Signatures with the same
    name while python does not.

    start(async_operation=bool)
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    start(InputParameters=string, async_operation=bool)
    - InputParameters (str): The input arguments of the test.
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the stop operation on the server.

    Stops the currently running Quick Test.

    stop(async_operation=bool)
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the waitForTest operation on the server.

    Waits for the execution of the specified Quick Test to be completed.

    waitForTest(async_operation=bool)list
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): This method is synchronous and returns the result of the test.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {"Arg1": self.href}
    # Positional arguments become Arg2, Arg3, ... in the request body.
    for index, value in enumerate(args, start=2):
        payload['Arg%s' % index] = value
    payload.update(kwargs)
    return self._execute('waitForTest', payload=payload, response_object=None)
| [
"andy.balogh@keysight.com"
] | andy.balogh@keysight.com |
aa1107a2f5107739a1951e5cb4948883dd283ca3 | 1fe0b680ce53bb3bb9078356ea2b25e572d9cfdc | /venv/lib/python2.7/site-packages/ansible/module_utils/network/checkpoint/checkpoint.py | 7545ebfe34554f4f7f9eeede21eb5bc3b8718fe6 | [
"MIT"
] | permissive | otus-devops-2019-02/devopscourses_infra | 1929c4a9eace3fdb0eb118bf216f3385fc0cdb1c | e42e5deafce395af869084ede245fc6cff6d0b2c | refs/heads/master | 2020-04-29T02:41:49.985889 | 2019-05-21T06:35:19 | 2019-05-21T06:35:19 | 175,780,457 | 0 | 1 | MIT | 2019-05-21T06:35:20 | 2019-03-15T08:35:54 | HCL | UTF-8 | Python | false | false | 2,516 | py | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2018 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Common Ansible argument-spec fragment shared by Check Point modules:
# session auto-publish, policy package selection, auto-install and targets.
checkpoint_argument_spec = dict(auto_publish_session=dict(type='bool', default=True),
                                policy_package=dict(type='str', default='standard'),
                                auto_install_policy=dict(type='bool', default=True),
                                targets=dict(type='list')
                                )
def publish(connection, uid=None):
    """Publish pending changes through the Check Point Web API.

    :param connection: object exposing ``send_request(path, payload)``
    :param uid: optional session uid; when omitted the request carries no body
    """
    payload = {'uid': uid} if uid else None
    connection.send_request('/web_api/publish', payload)
def discard(connection, uid=None):
    """Discard pending changes through the Check Point Web API.

    :param connection: object exposing ``send_request(path, payload)``
    :param uid: optional session uid; when omitted the request carries no body
    """
    payload = {'uid': uid} if uid else None
    connection.send_request('/web_api/discard', payload)
def install_policy(connection, policy_package, targets):
    """Install ``policy_package`` on ``targets`` through the Check Point Web API."""
    connection.send_request(
        '/web_api/install-policy',
        {'policy-package': policy_package, 'targets': targets},
    )
| [
"skydevapp@gmail.com"
] | skydevapp@gmail.com |
c21fe724fc485be315124ec386b0530e099af7b8 | 5c58587ebfbf56192b3dc6ed6f43bc002c8e2cff | /core/migrations/0032_market.py | cf05224f92656563bdf5d67622b60b565cf3d04c | [] | no_license | hossamelneily/nexchange | fb9a812cfc72ac00b90cf64d6669a8129c2d2d4b | 6d69274cd3808989abe2f5276feb772d1f0fa8b4 | refs/heads/release | 2022-12-13T09:20:47.297943 | 2019-02-12T08:20:34 | 2019-02-12T08:20:34 | 210,064,740 | 1 | 2 | null | 2022-12-09T00:54:01 | 2019-09-21T23:19:34 | Python | UTF-8 | Python | false | false | 963 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-09-27 13:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration creating the ``Market`` model."""

    dependencies = [
        ('core', '0031_merge_20170921_1946'),
    ]

    operations = [
        migrations.CreateModel(
            name='Market',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # created_on / modified_on give each row automatic audit timestamps.
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('modified_on', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=50, unique=True)),
                ('code', models.CharField(max_length=10, unique=True)),
                # NOTE(review): max_length on a BooleanField looks like a copy-paste
                # leftover; Django ignores it here — confirm before cleaning up
                # (migrations should normally not be edited after being applied).
                ('is_main_market', models.BooleanField(default=False, max_length=10)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| [
"noreply@github.com"
] | hossamelneily.noreply@github.com |
9344fa509479d8e7ecc91673c3b50f17ddea5d7d | df4fd380b3e1720a970573c4692eb0a32faf8f47 | /string_matching/rabin_karp.py | a335b90d14aa9c110503ed37eb35d99914e008e6 | [] | no_license | Taeheon-Lee/Algorithm | 99dd21e1e0ddba31190a16d6c9646a9f393f4c4b | 64ebacf24dfdf25194b5bce39f4ce43c4bc87141 | refs/heads/master | 2023-07-10T20:26:10.121214 | 2021-08-07T17:26:26 | 2021-08-07T17:26:26 | 383,803,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,793 | py | "Rabin-Karp 라빈-카프 알고리즘"
# 패턴의 해시값과 본문 안에 있는 하위 문자열의 해시값만을 비교하여 탐색
# 즉 문자열 패턴을 수치로 바꾸어 문자열의 비교를 수치 비교로 전환해 매칭하는 방법
# 해싱은 인덱스만 계산하면 바로 값을 참조할 수 있기 떄문에 연산 속도가 O(1)로 매우 빠름
# 해싱 값은 문자열이 달라도 같을 수 있기 때문에 해싱 값이 같을 경우, 단순 비교를 시작
# 따라서 최악의 경우, 해싱이 모두 같고 매칭이 다른 경우가 발생할 수 있어 시간 복잡도가 O(mn)
# 평균적인 시간 복잡도는 선형에 가까운 O(m+n)으로 매우 빠름
def rabin_karp(T, P):
    """Return the index of the first occurrence of pattern P in text T, or -1.

    Rabin-Karp string matching: compare a rolling hash of each length-m
    window of T against the hash of P, and fall back to a direct character
    comparison only when the hashes collide.  The original version rehashed
    every window from scratch (O(n*m)); the rolling-hash recurrence below
    updates the window hash in O(1), giving O(n + m) on average.

    :param T: text to search in
    :param P: pattern to search for (empty pattern matches at index 0)
    :return: index of the first match, or -1 if P does not occur in T
    """
    n = len(T)
    m = len(P)
    if m == 0:
        return 0              # empty pattern trivially matches at the start
    if m > n:
        return -1             # pattern longer than text: no match possible

    base = 256                # treat characters as digits of a base-256 number
    mod = (1 << 61) - 1       # large Mersenne prime keeps collisions rare

    # Hash of the pattern and of the first window of the text.
    hash_p = 0
    hash_t = 0
    for i in range(m):
        hash_p = (hash_p * base + ord(P[i])) % mod
        hash_t = (hash_t * base + ord(T[i])) % mod

    # base^(m-1) mod mod, used to drop the leading character of the window.
    high = pow(base, m - 1, mod)

    for i in range(n - m + 1):
        # Hash match: verify with a direct comparison to rule out collisions.
        if hash_p == hash_t and T[i:i + m] == P:
            return i
        if i < n - m:
            # Slide the window: remove T[i], append T[i + m].
            hash_t = ((hash_t - ord(T[i]) * high) * base + ord(T[i + m])) % mod
    return -1                 # pattern not found
# Command-line driver: read a pattern and a text, then report the index of
# the first match (or "Matching fail" when the pattern is absent).
P = input("Input pattern: ") # pattern to search for
T = input("Input String: ") # text to search in
ans = rabin_karp(T, P)
print("Matching fail" if ans == -1 else ans)
"taeheon714@gmail.com"
] | taeheon714@gmail.com |
5fd492e636197ff8865032922e0ce3ac8d9b7f52 | 8d213a21ac532d6713f1239449ffc08497a77476 | /drf_api/wsgi.py | d73b8908123d55b0bf0f1c22f994774fe1a86fe5 | [] | no_license | anykate/drf_api | 3453c2a27a3d7ab8e1560848d84dd6f7b985d5ec | 7fe9c7c2950ae0a84f6a9f9d33a9f2dccd723560 | refs/heads/master | 2020-09-04T03:32:44.440439 | 2019-11-05T03:22:08 | 2019-11-05T03:22:08 | 219,648,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
WSGI config for drf_api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'drf_api.settings')
application = get_wsgi_application()
| [
"aryamane.aniket@gmail.com"
] | aryamane.aniket@gmail.com |
6fb44519ef839fdb96140c65a58683b9d72f9322 | 9b7291d81a416bde2ec181229601eb2e33c7b8b2 | /monophoton/configs/TagAndProbe2016/trigger.py | b9028479a55860739488b7b49c1131b654df69a0 | [] | no_license | MiT-HEP/MonoX | ab1528e72dad2590a0ae64f1a1d47195139e1749 | 224ee01107a94cedf8563c497edb2f326b99d9b1 | refs/heads/master | 2021-01-24T06:04:16.645559 | 2019-11-15T09:18:40 | 2019-11-15T09:18:40 | 41,823,403 | 1 | 9 | null | 2018-07-19T17:05:30 | 2015-09-02T19:33:33 | Python | UTF-8 | Python | false | false | 7,143 | py | import ROOT
# Load the panda-tree dictionaries so the panda:: types are known to ROOT.
ROOT.gSystem.Load('libPandaTreeObjects.so')
# NOTE(review): `e` appears unused below; presumably touching panda.Event
# forces the class dictionary to load — confirm before removing.
e = ROOT.panda.Event
# Interpreter-side integer slot that getEnum() writes into and reads back.
ROOT.gROOT.ProcessLine('int val;')
def getEnum(cls, name):
    """Evaluate a panda ``TriggerObject`` enumerator on the ROOT side and return its value."""
    line = 'val = panda::%s::TriggerObject::%s;' % (cls, name)
    ROOT.gROOT.ProcessLine(line)
    return ROOT.val
# (object, measurement name) ->
#   (sample name or list of sample patterns, skim/selection region,
#    TTree selection expression, probe collection name)
# NOTE(review): tuple-element meanings inferred from usage below — confirm
# against the consumer of this dict.
measurements = {
    ('photon', 'sel'): ('sel-16b-m', 'tpegLowPt', 'probes.medium && !probes.pixelVeto && tp.mass > 60. && tp.mass < 120. && TMath::Abs(TVector2::Phi_mpi_pi(probes.phi_ - tags.phi_)) > 0.6', 'probes'),
    ('photon', 'selBCD'): (['sel-16b-m', 'sel-16c-m', 'sel-16d-m'], 'tpegLowPt', 'probes.medium && !probes.pixelVeto && tp.mass > 60. && tp.mass < 120. && TMath::Abs(TVector2::Phi_mpi_pi(probes.phi_ - tags.phi_)) > 0.6 && runNumber < 276525', 'probes'), # for photon75
    ('photon', 'dy'): (['dy-50@', 'dy-50-*'], 'tpegLowPt', 'probes.medium && !probes.pixelVeto && tp.mass > 60. && tp.mass < 120. && TMath::Abs(TVector2::Phi_mpi_pi(probes.phi_ - tags.phi_)) > 0.6', 'probes'),
    ('photon', 'elmu'): (['smu-16*-m'], 'elmu', 'photons.mediumX[][2]', 'photons'),
    ('photon', 'elmuBCD'): (['smu-16b-m', 'smu-16c-m', 'smu-16d-m'], 'elmu', 'photons.mediumX[][2]', 'photons'),
    ('photon', 'ph75'): (['sph-16b-m', 'sph-16c-m', 'sph-16d-m'], 'ph75', 'photons.medium && HLT_Photon50 && runNumber < 276525', 'photons'),
    ('photon', 'ph75h'): (['sph-16b-m', 'sph-16c-m', 'sph-16d-m'], 'ph75', 'photons.medium && HLT_Photon75 && runNumber < 276525', 'photons'),
    ('photon', 'mcph75'): (['gj04-*'], 'ph75', 'photons.medium && HLT_Photon50', 'photons'),
    ('electron', 'sel'): ('sel-16*-m', 'tp2e', 'probes.tight && tp.mass > 60. && tp.mass < 120.', 'probes'),
    ('muon', 'smu'): ('smu-16*', 'tp2m', 'probes.tight && tp.mass > 60. && tp.mass < 120.', 'probes'),
    ('vbf', 'selBCD'): (['sel-16b-m', 'sel-16c-m', 'sel-16d-m'], 'vbfe', 'electrons.triggerMatch[][%d] && dijet.size > 0 && runNumber < 276525' % getEnum('Electron', 'fEl75EBR9IsoPh'), ''),
    ('vbf', 'ph75h'): (['sph-16b-m', 'sph-16c-m', 'sph-16d-m'], 'ph75', 'photons.triggerMatch[][%d] && dijet.size > 0 && runNumber < 276525' % getEnum('Photon', 'fPh75EBR9Iso'), ''),
    ('vbf', 'dy'): (['dy-50@*', 'dy-50-*'], 'vbfe', 'electrons.triggerMatch[][%d] && dijet.size > 0' % getEnum('Electron', 'fEl75EBR9IsoPh'), ''),
    ('vbf', 'mcph75h'): (['gj04-*'], 'ph75', 'photons.triggerMatch[][%d] && dijet.size > 0' % getEnum('Photon', 'fPh75EBR9Iso'), ''),
    ('vbf', 'wlnu'): (['wlnu-*'], 'vbfe', 'electrons.triggerMatch[][%d] && dijet.size > 0' % getEnum('Electron', 'fEl75EBR9IsoPh'), '')
}
# Trigger-efficiency plot configuration, keyed by object then by
# configuration name:
#   conf name -> (trigger-match expression (with {col} placeholder),
#                 extra selection, legend/title label,
#                 {variable name: (x title, x expression, cut, binning)})
# Binning is either (nbins, xmin, xmax) or an explicit bin-edge list.
confs = {
    'photon': {
        'l1eg40': ('{col}.triggerMatch[][%d]' % getEnum('Photon', 'fSEG34IorSEG40'), '', 'L1 seed', {
            'pt': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', (50, 0., 100.)),
            'ptwide': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', [30. + 5. * x for x in range(14)] + [100. + 10. * x for x in range(10)] + [200. + 20. * x for x in range(5)] + [300., 350., 400.]),
            'hOverE': ('H/E', '{col}.hOverE', '{col}.pt_ > 175.', (25, 0., 0.05)),
            'hcalE': ('E^{HCAL} (GeV)', '{col}.pt_ * TMath::CosH({col}.eta_) * {col}.hOverE', '{col}.pt_ > 175.', (25, 0., 5))
        }),
        'l1all': ('{col}.triggerMatch[][%d] || {col}.triggerMatch[][%d] || {col}.triggerMatch[][%d]' % (getEnum('Photon', 'fSEG34IorSEG40'), getEnum('Photon', 'fSEG40IorSJet200'), getEnum('Photon', 'fSEG34IorSEG40IorSJet200')), '', 'L1 seed', {
            'pt': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', (50, 0., 100.)),
            'ptwide': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', [30. + 5. * x for x in range(14)] + [100. + 10. * x for x in range(10)] + [200. + 20. * x for x in range(5)] + [300., 350., 400.]),
            'hOverE': ('H/E', '{col}.hOverE', '{col}.pt_ > 175.', (25, 0., 0.05)),
            'hcalE': ('E^{HCAL} (GeV)', '{col}.pt_ * TMath::CosH({col}.eta_) * {col}.hOverE', '{col}.pt_ > 175.', (25, 0., 5))
        }),
        'sph165abs': ('{col}.triggerMatch[][%d]' % getEnum('Photon', 'fPh165HE10'), '', 'L1&HLT', {
            'pt': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', [30. + 5. * x for x in range(14)] + [100. + 10. * x for x in range(10)] + [200. + 20. * x for x in range(5)] + [300. + 50. * x for x in range(10)]),
            'ptzoom': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '', [30. + 5. * x for x in range(34)] + [200. + 15. * x for x in range(11)]),
            'hOverE': ('H/E', '{col}.hOverE', '{col}.pt_ > 175.', (25, 0., 0.05)),
            'hcalE': ('E^{HCAL} (GeV)', '{col}.pt_ * TMath::CosH({col}.eta_) * {col}.hOverE', '{col}.pt_ > 175.', (25, 0., 5)),
            'run': ('Run', 'runNumber', '{col}.pt_ > 175.', (26, 271050., 284050.))
        }),
        'ph75r9iso': ('{col}.triggerMatch[][%d]' % getEnum('Photon', 'fPh75EBR9Iso'), '{col}.isEB', 'Photon75Iso40R9', {
            'pt': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '{col}.r9 > 0.9', (50, 0., 100.)),
            'ptwide': ('p_{T}^{#gamma} (GeV)', '{col}.pt_', '{col}.r9 > 0.9', [30. + 10. * x for x in range(7)] + [100., 120., 140., 160., 200., 300., 400., 600.]),
            'r9': ('R^{9}', '{col}.r9', '{col}.pt_ > 80.', (30, 0.7, 1.))
        })
    },
    'electron': {
        'el27': ('{col}.triggerMatch[][%d]' % getEnum('Electron', 'fEl27Tight'), '', 'HLT', {
            'ptzoom': ('p_{T}^{e} (GeV)', '{col}.pt_', '', (50, 0., 50.)),
            'ptwide': ('p_{T}^{e} (GeV)', '{col}.pt_', '', [30. + 2. * x for x in range(85)] + [200. + 10. * x for x in range(10)]),
            'hOverE': ('H/E', '{col}.hOverE', '{col}.pt_ > 200.', (25, 0., 0.05)),
            'hcalE': ('E^{HCAL} (GeV)', '{col}.pt_ * TMath::CosH({col}.eta_) * {col}.hOverE', '{col}.pt_ > 200.', (25, 0., 5)),
            'run': ('Run', 'runNumber', '{col}.pt_ > 200.', (350, 271000., 274500.)),
            'pt': ('p_{T}^{e} (GeV)', '{col}.pt_', '', [0. + 5. * x for x in range(10)] + [50. + 10. * x for x in range(6)]),
            'eta': ('#eta^{e}', '{col}.eta_', '{col}.pt_ > 50.', (25, -2.5, 2.5))
        })
    },
    'muon': {
        'mu24ortrk24': ('{col}.triggerMatch[][%d] || {col}.triggerMatch[][%d]' % (getEnum('Muon', 'fIsoMu24'), getEnum('Muon', 'fIsoTkMu24')), '', 'HLT', {
            'ptzoom': ('p_{T}^{#mu} (GeV)', '{col}.pt_', '', (50, 0., 50.)),
            'ptwide': ('p_{T}^{#mu} (GeV)', '{col}.pt_', '', [30. + 2. * x for x in range(85)] + [200. + 10. * x for x in range(10)]),
            'run': ('Run', 'runNumber', '{col}.pt_ > 200.', (350, 271000., 274500.)),
            'pt': ('p_{T}^{#mu} (GeV)', '{col}.pt_', '', [0. + 5. * x for x in range(10)] + [50. + 10. * x for x in range(6)])
        })
    },
    'vbf': {
        'vbf': ('HLT_Photon75_R9Id90_HE10_Iso40_EBOnly_VBF', '', 'VBF filter', {
            'dEtajj': ('|#Delta#eta_{jj}|', 'Max$(TMath::Abs(dijet.dEtajj * (dijet.mjj > 800.)))', 'Sum$(dijet.mjj > 500) != 0', (50, 0., 5.)),
            'mjj': ('m_{jj} (GeV)', 'Max$(TMath::Abs(dijet.mjj * (TMath::Abs(dijet.dEtajj) > 3.2)))', 'Sum$(TMath::Abs(dijet.dEtajj) > 3.2) != 0', (50, 0., 1000.))
        })
    }
}
# TTree output for fitting
# Per-object list of (variable name, conf name) pairs (keys into confs above)
# for which a fit tree is written; empty list means no fit output.
fitconfs = {}
fitconfs['photon'] = []
fitconfs['electron'] = [
    ('ptzoom', 'el27')
]
fitconfs['muon'] = []
fitconfs['vbf'] = []
| [
"yiiyama@mit.edu"
] | yiiyama@mit.edu |
6627c39817bf5dff8482e5da4e684c51071c774c | 80e36c723d26fef80892a684a5987295e1dbd48c | /library/forms.py | 6e5c194fa078d5e8c738dff33f835dd95c7f4495 | [
"LicenseRef-scancode-public-domain"
] | permissive | xritzx/WordFruit | 9f4ef8b6fe4774d3ca4e9ea2ceff83ece99c6f3b | 04dd9539c5c7fb57a40ceb5d02b76f8f95c52ae6 | refs/heads/master | 2022-12-13T16:03:51.388925 | 2019-06-06T06:15:22 | 2019-06-06T06:15:22 | 168,710,999 | 4 | 3 | null | 2022-12-08T01:34:48 | 2019-02-01T14:39:55 | CSS | UTF-8 | Python | false | false | 189 | py | from django.forms import ModelForm
from .models import Book
class BookAddForm(ModelForm):
    """ModelForm for creating a ``Book``.

    The excluded fields are presumably populated outside this form
    (e.g. by the view or by model defaults) — confirm against the callers.
    """
    class Meta:
        model = Book
        exclude = ['contributor', 'date', 'read', 'likes']
| [
"ritankarpaul47@gmail.com"
] | ritankarpaul47@gmail.com |
52d0ac2b85718c3c146720e8f651230dfff0cc2c | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /NhPYFqfQcFXWvdH8t_6.py | d9f026bff80b989b0ba365c2911524b73da7508c | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | """
A positive integer multiplied times its inverse is always equal to 1:
`17*(1/17)==1`. Modular arithmetic has a similar inverse function, although,
for modulus `m`, we are confined to integers from 0 to m-1. The modular
multiplicative inverse of 3 modulus 5 is equal to 2 because `(3*2)%5==1`.
Another example: the modular inverse of 17 modulus 1000007 is equal to 58824
because `(17*58824)%1000007==1`. The modular inverse, if it exists, must
always be in the range 0 to m-1.
Create a function that has arguments integer `n` and modulus `m`. The function
will return the modular inverse of `n` mod `m`. If the modular inverse does
not exist, return `False`.
### Examples
mod_inv(2, 3) ➞ 2
mod_inv(12, 47) ➞ 4
mod_inv(11, 33) ➞ False
mod_inv(55, 678) ➞ 37
mod_inv(81, 3455) ➞ 2346
### Notes
* Some of the test cases have rather large integers, so if you attempt to do a brute force search of the entire modular field, you may not be successful due to the 12 second time limit imposed by the server. See **Resources** for a more efficient approach.
* The modular inverse of a number `n` modulus `m` exists only if `n` and `m` are coprime (i.e. they have no common factors other than 1).
* One practical use of modular inverse is in public-key cryptography like RSA where it can be used to determine the value of the private key.
"""
def gcd_ex(a, b):
    """Extended Euclidean algorithm.

    Returns (g, x, y) where g = gcd(a, b) and x*b + y*a == g.
    """
    old_r, r = b, a
    old_x, x = 1, 0
    old_y, y = 0, 1
    while r:
        quot, rem = divmod(old_r, r)
        old_r, r = r, rem
        old_x, x = x, old_x - quot * x
        old_y, y = y, old_y - quot * y
    return (old_r, old_x, old_y)

def mod_inv(n, m):
    """Return the modular multiplicative inverse of n modulo m.

    The inverse exists only when n and m are coprime; returns False otherwise.
    The result is always in the range 0..m-1.
    """
    g, _, y = gcd_ex(n, m)
    if g != 1:
        return False
    return y if y >= 0 else y + m
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
b77eadfb20bc641f8047bfe61ca0392ebcd0f42d | f439d2e77582a747957df6ff6e102df91b8a16d3 | /examples/nlp/lstm_generator_textfile.py | 0c0399ed1f4628d5d8524f0a780eba7e0bb936c8 | [
"MIT"
] | permissive | NLPDev/tflearn | 6bba8a0e811d465c008511ef2946d183c996d0bf | 77436978c62124bd91ef739dc77c9ea58277c779 | refs/heads/master | 2020-04-09T13:20:36.113533 | 2018-12-07T15:46:33 | 2018-12-07T15:46:33 | 160,370,136 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,976 | py | from __future__ import absolute_import, division, print_function
import os, sys, argparse
import urllib
import tflearn
from tflearn.data_utils import *
# Command-line setup ---------------------------------------------------------
parser = argparse.ArgumentParser(description=
    'Pass a text file to generate LSTM output')
parser.add_argument('filename')
parser.add_argument('-t', '--temp', help=
    'Defaults to displaying multiple temperature outputs which is suggested.' +
    ' If temp is specified, a value of 0.0 to 2.0 is recommended.' +
    ' Temperature is the novelty or' +
    ' riskiness of the generated output. A value closer to 0 will result' +
    ' in output closer to the input, so higher is riskier.',
    required=False, nargs=1, type=float)
parser.add_argument('-l', '--length', help=
    'Optional length of text sequences to analyze. Defaults to 25.',
    required=False, default=25, nargs=1, type=int)
args = vars(parser.parse_args())

path = args['filename']

if args['temp'] and args['temp'][0] is not None:
    temp = args['temp'][0]
    print("Temperature set to", temp)
    if temp > 2 or temp < 0:
        print("Temperature out of suggested range. Suggested temp range is 0.0-2.0")
else:
    print("Will display multiple temperature outputs")

# With nargs=1 a user-supplied value arrives as a one-element list while the
# default stays a bare int.  Compare with != rather than "is not": identity
# comparison against an int literal relies on CPython small-int caching and
# raises a SyntaxWarning on modern Pythons.
if args['length'] != 25:
    maxlen = args['length'][0]
    print("Sequence max length set to ", maxlen)
else:
    maxlen = args['length']

model_name = path.split('.')[0]  # create model name from textfile input

if not os.path.isfile(path):
    print("Couldn't find the text file. Are you sure the file you passed is correct?")
    sys.exit(1)  # exit cleanly instead of crashing in the loader below

X, Y, char_idx = \
    textfile_to_semi_redundant_sequences(path, seq_maxlen=maxlen, redun_step=3)

# Model: three stacked 512-unit LSTM layers with dropout, closed by a softmax
# over the character vocabulary.
g = tflearn.input_data([None, maxlen, len(char_idx)])
g = tflearn.lstm(g, 512, return_seq=True)
g = tflearn.dropout(g, 0.5)
g = tflearn.lstm(g, 512, return_seq=True)
g = tflearn.dropout(g, 0.5)
g = tflearn.lstm(g, 512)
g = tflearn.dropout(g, 0.5)
g = tflearn.fully_connected(g, len(char_idx), activation='softmax')
g = tflearn.regression(g, optimizer='adam', loss='categorical_crossentropy',
                       learning_rate=0.001)

m = tflearn.SequenceGenerator(g, dictionary=char_idx,
                              seq_maxlen=maxlen,
                              clip_gradients=5.0,
                              checkpoint_path='model_' + model_name)

# Train one epoch at a time, sampling generated text after each epoch.
for i in range(50):
    seed = random_sequence_from_textfile(path, maxlen)
    m.fit(X, Y, validation_set=0.1, batch_size=128,
          n_epoch=1, run_id=model_name)
    print("-- TESTING...")
    if args['temp'] is not None:
        temp = args['temp'][0]
        print("-- Test with temperature of %s --" % temp)
        print(m.generate(600, temperature=temp, seq_seed=seed))
    else:
        print("-- Test with temperature of 1.0 --")
        print(m.generate(600, temperature=1.0, seq_seed=seed))
        print("-- Test with temperature of 0.5 --")
        print(m.generate(600, temperature=0.5, seq_seed=seed))
| [
"vasile123andronic@gmail.com"
] | vasile123andronic@gmail.com |
44e793c1248ef2e36ba33257f2adbf511b590309 | 37d8802ecca37cc003053c2175f945a501822c82 | /09-动态规划/0062-不同路径-2.py | 7c1a0f8ea366b820befe2445e2814772c7343155 | [
"Apache-2.0"
] | permissive | Sytx74/LeetCode-Solution-Python | cc0f51e31a58d605fe65b88583eedfcfd7461658 | b484ae4c4e9f9186232e31f2de11720aebb42968 | refs/heads/master | 2020-07-04T18:17:24.781640 | 2019-07-30T03:34:19 | 2019-07-30T03:34:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | class Solution:
def uniquePaths(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
dp = [1] * n
for i in range(1, m):
for j in range(1, n): # 从索引 2 开始走就行了
dp[j] = dp[j] + dp[j - 1]
return dp[-1]
if __name__ == '__main__':
    # Smoke test: a 5x4 grid has C(7, 3) == 35 unique paths.
    solver = Solution()
    print(solver.uniquePaths(5, 4))
| [
"121088825@qq.com"
] | 121088825@qq.com |
431bc242a4cad6bd666fd73d507d2df2e74fb34f | dd3bbd4e7aaee7a8a5f26b927ce28ac472c855a5 | /eggs/z3c.form-2.4.3-py2.7.egg/z3c/form/adding.py | 5cee0932d4335930114a159e520d1c93762c90c9 | [] | no_license | nacho22martin/tesis | ea0a822f8bdbdef6f13f41276ecd4d6e85427ca5 | e137eb6225cc5e724bee74a892567796166134ac | refs/heads/master | 2020-12-24T13:20:58.334839 | 2013-11-09T12:42:41 | 2013-11-09T12:42:41 | 14,261,570 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | ##############################################################################
#
# Copyright (c) 2007 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Implementation of an addform for IAdding
$Id: adding.py 76841 2007-06-20 13:09:18Z srichter $
"""
__docformat__ = "reStructuredText"
from z3c.form import form
class AddForm(form.AddForm):
    """An add form whose context is a Zope ``IAdding`` view.

    Both object creation and post-add navigation are delegated to the
    IAdding context rather than handled by the form itself.
    """

    def add(self, object):
        """Hand *object* to the adding view and mark the add as finished."""
        added = self.context.add(object)
        self._finishedAdd = True
        return added

    def nextURL(self):
        """Redirect target after a successful add, as chosen by the context."""
        return self.context.nextURL()
| [
"ignacio@plone.(none)"
] | ignacio@plone.(none) |
8c9bec14ad6067364063355bda91fc6dd73ef21e | 82d6b24fb786143b07e897d4bbe70f2c1d5fc481 | /hsvTrackbarColorDetection.py | a44002da3b1ae0702e5c89290e7119015a5ecbf1 | [] | no_license | Pritam055/image_opencv | cded3eff39d4ef2006bef7b4a05a38a86700da2b | 63c19bb4b30e6db76b3e1d90697409acebb3ec3e | refs/heads/master | 2021-03-19T00:12:05.157661 | 2020-07-01T16:21:32 | 2020-07-01T16:21:32 | 247,113,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,544 | py | import cv2
import numpy as np
# Requested capture geometry for the webcam preview.
frameWidth = 640
frameHeight = 480
cap = cv2.VideoCapture(0)  # device 0: default camera
# Property ids 3 and 4 are CAP_PROP_FRAME_WIDTH / CAP_PROP_FRAME_HEIGHT.
cap.set(3, frameWidth)
cap.set(4, frameHeight)
frameCounter = 0
def empty(a):
    """No-op trackbar callback; cv2.createTrackbar requires one, but the
    main loop polls positions with getTrackbarPos instead."""
    return None
cv2.namedWindow("HSV")
cv2.resizeWindow("HSV", 640, 240)
cv2.createTrackbar("HUE Min", "HSV", 0, 179, empty)
cv2.createTrackbar("HUE Max", "HSV", 179, 179, empty)
cv2.createTrackbar("SAT Min", "HSV", 0, 255, empty)
cv2.createTrackbar("SAT Max", "HSV", 255, 255, empty)
cv2.createTrackbar("VALUE Min", "HSV", 0, 255, empty)
cv2.createTrackbar("VALUE Max", "HSV", 255, 255, empty)
# Live HSV color-threshold preview: shows original | mask | masked result
# side by side, updating the threshold from the trackbars each frame.
# (Removed dead, commented-out video-rewind code: this script reads a live
# camera, so frame-count based rewinding never applied.)
while True:
    _, img = cap.read()
    imgHsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    # Current HSV threshold bounds, as set interactively on the trackbars.
    h_min = cv2.getTrackbarPos("HUE Min", "HSV")
    h_max = cv2.getTrackbarPos("HUE Max", "HSV")
    s_min = cv2.getTrackbarPos("SAT Min", "HSV")
    s_max = cv2.getTrackbarPos("SAT Max", "HSV")
    v_min = cv2.getTrackbarPos("VALUE Min", "HSV")
    v_max = cv2.getTrackbarPos("VALUE Max", "HSV")
    print(h_min)
    # Keep pixels whose HSV values fall inside [lower, upper]; everything
    # else becomes black in the mask.
    lower = np.array([h_min, s_min, v_min])
    upper = np.array([h_max, s_max, v_max])
    mask = cv2.inRange(imgHsv, lower, upper)
    result = cv2.bitwise_and(img, img, mask=mask)
    mask = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)  # 3-channel so it stacks
    hStack = np.hstack([img, mask, result])
    cv2.imshow('Horizontal Stacking', hStack)
    # BUG FIX: the original tested `cv2.waitKey(1) and 0xFF == ord('q')`,
    # which compares 0xFF (255) to ord('q') (113) and is therefore always
    # False, so pressing 'q' could never exit the loop. The standard idiom
    # masks the returned key code instead.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows() | [
"admin@gmail.com"
] | admin@gmail.com |
688026b78ebdbea1dfe19763a0be7da433cd1384 | 9d0195aa83cc594a8c61f334b90375961e62d4fe | /JTTest/SL7/CMSSW_10_2_15/src/dataRunA/nano404.py | 51f5c83a2ddb0a57f1128950bb4aba9a8dd8f246 | [] | no_license | rsk146/CMS | 4e49592fc64f6438051544c5de18598db36ed985 | 5f8dab8c59ae556598b9747b52b88205fffc4dbe | refs/heads/master | 2022-12-01T03:57:12.126113 | 2020-08-04T03:29:27 | 2020-08-04T03:29:27 | 284,863,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,292 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: nanoAOD_jetToolbox_cff -s NANO --data --eventcontent NANOAOD --datatier NANOAOD --no_exec --conditions 102X_dataRun2_Sep2018Rereco_v1 --era Run2_2018,run2_nanoAOD_102Xv1 --customise_commands=process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False))) --customise JMEAnalysis/JetToolbox/nanoAOD_jetToolbox_cff.nanoJTB_customizeMC --filein /users/h2/rsk146/JTTest/SL7/CMSSW_10_6_12/src/ttbarCutTest/dataReprocessing/0004A5E9-9F18-6B42-B31D-4206406CE423.root --fileout file:jetToolbox_nano_datatest.root
# cmsDriver-generated NANOAOD production config (see the header comment for
# the generating command line); edit with care — this file is regenerated.
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('NANO',eras.Run2_2018,eras.run2_nanoAOD_102Xv1)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('PhysicsTools.NanoAOD.nano_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
# input = -1: process every event in the input files (cmsRun convention).
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:root://cms-xrd-global.cern.ch//store/data/Run2018A/EGamma/MINIAOD/17Sep2018-v2/270000/39D49137-05D8-9449-B8CC-7A28C5115144.root'),
    secondaryFileNames = cms.untracked.vstring()
)
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
    annotation = cms.untracked.string('nanoAOD_jetToolbox_cff nevts:1'),
    name = cms.untracked.string('Applications'),
    version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.NANOAODoutput = cms.OutputModule("NanoAODOutputModule",
    compressionAlgorithm = cms.untracked.string('LZMA'),
    compressionLevel = cms.untracked.int32(9),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string('NANOAOD'),
        filterName = cms.untracked.string('')
    ),
    fileName = cms.untracked.string('file:jetToolbox_nano_datatest404.root'),
    outputCommands = process.NANOAODEventContent.outputCommands
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_dataRun2_Sep2018Rereco_v1', '')
# Path and EndPath definitions
process.nanoAOD_step = cms.Path(process.nanoSequence)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.NANOAODoutput_step = cms.EndPath(process.NANOAODoutput)
# Schedule definition
process.schedule = cms.Schedule(process.nanoAOD_step,process.endjob_step,process.NANOAODoutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# customisation of the process.
# Automatic addition of the customisation function from PhysicsTools.NanoAOD.nano_cff
from PhysicsTools.NanoAOD.nano_cff import nanoAOD_customizeData
#call to customisation function nanoAOD_customizeData imported from PhysicsTools.NanoAOD.nano_cff
process = nanoAOD_customizeData(process)
# Automatic addition of the customisation function from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff import nanoJTB_customizeMC
#call to customisation function nanoJTB_customizeMC imported from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
process = nanoJTB_customizeMC(process)
# End of customisation functions
# Customisation from command line
process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False)))
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion | [
"rsk146@scarletmail.rutgers.edu"
] | rsk146@scarletmail.rutgers.edu |
7b181c3b3d9e7fab86725dd12b5b8595e930051d | ecebefec65cc55b305419a689660eb8e2ea04fef | /release/virtual_player/simulation.py | 41fe28c584133cc4bcb995c6df09b94e2150affb | [] | no_license | generlist/ABRTuner | 4ab1d6d5e5201a7953d4565ca4574307a35513c3 | baea8fab155a71c185e74121a8f014e6ad889308 | refs/heads/master | 2020-05-26T01:09:29.712879 | 2018-08-03T23:40:01 | 2018-08-03T23:40:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,782 | py | # SIMULATION 1.0
import math
import sys
import os
import collections
from config import *
from helpers import *
from chunkMap import *
from performance_vector import *
from algorithms import *
from simulation_performance_vector import *
import numpy as np
from collections import deque
import time
from vplayer_state import State
import config
import warnings
warnings.filterwarnings('error')
def usage():
	"""Print a usage message to stderr and abort with exit status 1."""
	sys.stderr.write("Incorrect usage\nUsage: python " + sys.argv[0] + " <path to trace file>" + "\n")
	sys.exit(1)
# Require exactly one argument: the path of the bandwidth trace file.
if len(sys.argv) < 2:
	usage()
trace_file = sys.argv[1]
if not os.path.isfile(trace_file):
	print >> sys.stderr, "No such file: " + trace_file
	sys.exit(1)
# BOLA control parameters from the project helpers (algorithms/config).
gp = getBolaGP()
bola_vp = getBolaVP(gp)
# Candidate parameter values to sweep, chosen by which ABR algorithm flag
# is enabled (MPC_ABR / BOLA_ABR / HYB_ABR — presumably set in config.py,
# pulled in via the star imports above; confirm).
configs = []
if MPC_ABR:
	configs = np.arange(0, 150, 10)
elif BOLA_ABR:
	configs = np.arange(gp - 1.5, gp, 0.1)
elif HYB_ABR:
	configs = np.arange(0.0, 1.0 ,0.03)
# Sweep every candidate ABR parameter over the same trace; each iteration
# re-simulates the whole playback session and prints one summary line.
for param in configs:
	s = State(config, trace_file)
	# this while loop advances SIMULATION_STEP msec in each iteration,
	# till the CLOCK exceeds the session_time_ms.
	# SIMULATION_STEP is defined in config.py
	while s.CLOCK < s.session_time_ms:
		# Per-step accumulators: seconds stalled, chunks fetched, seconds of
		# buffer added (interval values are in ms; /1000.0 converts to sec).
		play_stalled_this_interval = 0
		chunk_downloaded_this_interval = 0
		blen_added_this_interval = 0
		if DEBUG and not s.session_fully_downloaded:
			s.PrintStats()
		# Clamp the final step so the clock lands exactly on session_time_ms.
		if s.CLOCK + s.interval > s.session_time_ms:
			s.interval = s.session_time_ms - s.CLOCK
		s.chunk_sched_time_delay = max(0, s.chunk_sched_time_delay - s.interval)
		s.CLOCK += s.interval
		if s.BLEN > s.min_playable_buff:
			s.buffering = False
		# While rebuffering, stall until the in-flight chunk finishes
		# (capped at the length of this step).
		if s.buffering and not s.session_fully_downloaded:
			play_stalled_this_interval = min(timeToDownloadSingleChunk(CHUNKSIZE, s.BR, s.BW, s.chunk_residue, s.CHUNKS_DOWNLOADED), s.interval / 1000.0)
			if play_stalled_this_interval < s.interval / 1000.0: # chunk download so resume
				s.buffering = False
		if not s.session_fully_downloaded and s.chunk_sched_time_delay < s.interval:
			# Ask the ABR algorithm how much was fetched during this step;
			# fractional chunks carry over to the next step via chunk_residue.
			s, param = chunksDownloaded(s, param, s.CLOCK - s.interval)
			chunk_downloaded_this_interval = s.chunk_residue + s.numChunks
			if play_stalled_this_interval == s.interval / 1000.0 and chunk_downloaded_this_interval >= 1.0:
				s.buffering = False
			s.chunk_residue = chunk_downloaded_this_interval - int(chunk_downloaded_this_interval)
			if s.BLEN + chunk_downloaded_this_interval * s.CHUNKSIZE >= MAX_BUFFLEN: # can't download more than the MAX_BUFFLEN
				# NOTE(review): under Python 2 this is integer division when both
				# operands are ints — presumably intended (whole chunks only); confirm.
				chunk_downloaded_this_interval = int(MAX_BUFFLEN - s.BLEN) / CHUNKSIZE
				s.chunk_residue = 0
			# Never download past the end of the session's playable chunks.
			if s.CHUNKS_DOWNLOADED + int(chunk_downloaded_this_interval) >= math.ceil((s.play_time_ms) / (CHUNKSIZE * 1000.0)):
				chunk_downloaded_this_interval = math.ceil((s.play_time_ms) / (CHUNKSIZE * 1000.0)) - s.CHUNKS_DOWNLOADED
			s.clock_inc = s.CLOCK - s.last_clock_val
			s.last_clock_val = s.CLOCK
			if s.numChunks > 0:
				s.realBR = getRealBitrate(s.BR, s.CHUNKS_DOWNLOADED, CHUNKSIZE) / (CHUNKSIZE * 1000.0)
			# Track the bandwidth the player itself can observe, but only when
			# more than one whole chunk completed and the per-step chunk count
			# increased relative to the previous step.
			if chunk_downloaded_this_interval != 0 and s.numChunks > 0 and int(chunk_downloaded_this_interval) != 1 and s.last_chd_interval != 0 and s.last_chd_interval < chunk_downloaded_this_interval:
				s.q.append((s.realBR * CHUNKSIZE * s.numChunks) / (s.clock_inc / 1000.0))
			if s.CLOCK % 100 == 0:
				s.player_visible_bw.append(np.mean(s.q))
			s.last_chd_interval = chunk_downloaded_this_interval
			s.CHUNKS_DOWNLOADED += int(chunk_downloaded_this_interval)
			s.ATTEMPT_ID += int(chunk_downloaded_this_interval)
			blen_added_this_interval = int(chunk_downloaded_this_interval) * CHUNKSIZE
		# Enter rebuffering if the buffered media (plus what just arrived)
		# cannot cover this playback step.
		if not s.buffering and s.BLEN - s.min_playable_buff >= 0 and s.BLEN - s.min_playable_buff + blen_added_this_interval < s.interval / 1000.0 and not s.session_fully_downloaded:
			play_stalled_this_interval += (s.interval / 1000.0 - (float(s.BLEN) - s.min_playable_buff + float(blen_added_this_interval)) )
			s.buffering = True
		# Startup delay (before the first chunk arrives) is not charged as
		# rebuffering or playback time.
		if not s.first_chunk:
			s.BUFFTIME += float(play_stalled_this_interval)
			s.PLAYTIME += s.interval / 1000.0 - play_stalled_this_interval
		if s.first_chunk and s.CHUNKS_DOWNLOADED >= 1:
			s.first_chunk = False
		if s.buffering:
			s.BLEN = s.min_playable_buff
		elif not s.buffering and s.first_chunk and s.CHUNKS_DOWNLOADED == 0:
			s.BLEN = max(0, float(s.BLEN) - s.interval / 1000.0)
		else:
			s.BLEN = max(0, float(s.CHUNKS_DOWNLOADED) * float(s.CHUNKSIZE) - float(s.PLAYTIME)) # else update the bufferlen to take into account the current time step
		if s.CHUNKS_DOWNLOADED >= TOTAL_CHUNKS or s.CHUNKS_DOWNLOADED >= math.ceil((s.play_time_ms) / (s.CHUNKSIZE * 1000.0)):
			s.session_fully_downloaded = True
			break
		# Count a quality switch once playback has started.
		if not s.first_chunk and not s.session_fully_downloaded and s.oldBR != s.BR:
			s.numSwitches += 1
		s.BW = max(interpolateBWInterval(s.CLOCK, s.used_bw_array, s.bw_array), 0.01) # interpolate bandwidth for the next heartbeat interval
		s.used_bw_array.append(s.BW) # save the bandwidth used in the session
	# account for the accumulated buffer
	if s.BLEN > 0:
		s.PLAYTIME += s.BLEN
	# obtain stats to print
	s.AVG_SESSION_BITRATE, s.REBUF_RATIO, s.rebuf_groundtruth = generateStats(s.AVG_SESSION_BITRATE, s.BUFFTIME, s.PLAYTIME, s.bufftimems, s.play_time_ms)
	print s.trace_file + " param: "+str(param)+" minCell: "+str(s.minCellSize)+" QoE: " + str(s.maxQoE) + " avg. bitrate: " + str(s.AVG_SESSION_BITRATE) + " buf. ratio: " + str(s.REBUF_RATIO) +" playtime: " + str(s.PLAYTIME) +" buftime: " + str(s.BUFFTIME)
| [
"zahaib.akhtar@gmail.com"
] | zahaib.akhtar@gmail.com |
e3d448b0d378e6af706c92e929117378513523f0 | 70cfe888ffd5c71262769d568224d7da45ad6f25 | /tensorflow/contrib/cudnn_rnn/python/kernel_tests/cudnn_rnn_test.py | 6fb56b0858786662546ecab425b1a2564fbd9a64 | [
"Apache-2.0"
] | permissive | limitime/tensorflow | 57c13eaa5bc6a27754688da900a090392f623d25 | a2607aaba0b8bd364204a58425c464746c5697a8 | refs/heads/master | 2020-03-11T11:58:57.063430 | 2018-04-18T00:54:57 | 2018-04-18T00:54:57 | 129,984,353 | 1 | 0 | Apache-2.0 | 2018-04-18T01:14:49 | 2018-04-18T01:14:49 | null | UTF-8 | Python | false | false | 56,782 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Cudnn RNN models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import collections
import functools
import itertools
import os
import sys
import unittest
import numpy as np
from tensorflow.contrib.cudnn_rnn.python.layers import cudnn_rnn
from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
from tensorflow.contrib.rnn.python.ops import rnn as contrib_rnn_lib
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import gradients_impl as gradients
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import rnn as rnn_lib
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import adagrad
from tensorflow.python.training import adam
from tensorflow.python.training import checkpointable_utils
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import momentum
from tensorflow.python.training import rmsprop
from tensorflow.python.training import saver as saver_lib
CUDNN_LSTM = cudnn_rnn_ops.CUDNN_LSTM
CUDNN_GRU = cudnn_rnn_ops.CUDNN_GRU
CUDNN_RNN_RELU = cudnn_rnn_ops.CUDNN_RNN_RELU
CUDNN_RNN_TANH = cudnn_rnn_ops.CUDNN_RNN_TANH
CUDNN_RNN_UNIDIRECTION = cudnn_rnn_ops.CUDNN_RNN_UNIDIRECTION
CUDNN_RNN_BIDIRECTION = cudnn_rnn_ops.CUDNN_RNN_BIDIRECTION
CUDNN_LSTM_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_LSTM_PARAMS_PER_LAYER
CUDNN_GRU_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_GRU_PARAMS_PER_LAYER
CUDNN_RNN_TANH_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_RNN_TANH_PARAMS_PER_LAYER
CUDNN_RNN_RELU_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_RNN_RELU_PARAMS_PER_LAYER
class CudnnTestModel(object):
  """Model with convenient APIs for easier building and running test graph.
  The graph built is used by all tests below to avoid repeatedly building
  similar test graphs.
  """
  def __init__(self,
               rnn_mode,
               num_layers,
               num_units,
               input_size,
               direction=CUDNN_RNN_UNIDIRECTION,
               dropout=0.,
               dtype=dtypes.float32,
               training=False,
               seed=None,
               kernel_initializer=None,
               bias_initializer=None):
    if dtype not in (dtypes.float16, dtypes.float32, dtypes.float64):
      raise ValueError("Invalid dtype: %s" % dtype)
    self._dtype = dtype
    # Time-major placeholders: inputs are [seq_len, batch, input_size] and
    # states are [num_layers * num_dirs, batch, num_units].
    self._inputs = array_ops.placeholder(
        dtype=dtype, shape=[None, None, input_size], name="inputs")
    h = array_ops.placeholder(
        dtype=dtype, shape=[None, None, num_units], name="h")
    c = array_ops.placeholder(
        dtype=dtype, shape=[None, None, num_units], name="c")
    # Pick the layer class and the state-tuple arity for the requested mode
    # (only LSTM carries a cell state `c`).
    if rnn_mode == CUDNN_LSTM:
      model_fn = cudnn_rnn.CudnnLSTM
      self._initial_state = (h, c)
    elif rnn_mode == CUDNN_GRU:
      model_fn = cudnn_rnn.CudnnGRU
      self._initial_state = (h,)
    elif rnn_mode == CUDNN_RNN_TANH:
      model_fn = cudnn_rnn.CudnnRNNTanh
      self._initial_state = (h,)
    elif rnn_mode == CUDNN_RNN_RELU:
      model_fn = cudnn_rnn.CudnnRNNRelu
      self._initial_state = (h,)
    else:
      raise ValueError("Invalid rnn_mode: %s" % rnn_mode)
    self._rnn = model_fn(
        num_layers,
        num_units,
        direction=direction,
        dropout=dropout,
        dtype=dtype,
        seed=seed,
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer)
    self._rnn.build([None, None, input_size])
    self._outputs, self._output_state = self._rnn(
        self._inputs, initial_state=self._initial_state, training=training)
  def _AddUp(self, outputs, output_state):
    """Returns a scalar tensor: sum of `outputs` plus the sum of each state."""
    total = math_ops.reduce_sum(outputs)
    for s in output_state:
      total += math_ops.reduce_sum(s)
    return total
  @property
  def inputs(self):
    return self._inputs
  @property
  def initial_state(self):
    return self._initial_state
  @property
  def outputs(self):
    return self._outputs
  @property
  def output_state(self):
    return self._output_state
  @property
  def rnn(self):
    return self._rnn
  @property
  def total_sum(self):
    # Scalar summary of the whole forward pass (outputs + final states).
    return self._AddUp(self.outputs, self.output_state)
  def SynthesizeInput(self, seq_length, batch_size, seed=1234):
    """Synthesizes input and initial state values for testing."""
    np.random.seed(seed)
    num_layers = self._rnn.num_layers
    dir_count = self._rnn.num_dirs
    num_units = self._rnn.num_units
    input_size = self._rnn.input_size
    np_dtype = np.float32 if self._dtype == dtypes.float32 else np.float64
    inputs = np.random.randn(seq_length, batch_size,
                             input_size).astype(np_dtype)
    input_h = np.random.randn(num_layers * dir_count, batch_size,
                              num_units).astype(np_dtype)
    if self._rnn.rnn_mode == CUDNN_LSTM:
      input_c = np.random.randn(num_layers * dir_count, batch_size,
                                num_units).astype(np_dtype)
      initial_state = (input_h, input_c)
    else:
      initial_state = (input_h,)
    return inputs, initial_state
  def ZeroState(self, batch_size):
    """Returns all-zero initial state arrays matching the model's geometry."""
    num_layers = self._rnn.num_layers
    dir_count = self._rnn.num_dirs
    num_units = self._rnn.num_units
    np_dtype = np.float32 if self._dtype == dtypes.float32 else np.float64
    input_h = np.zeros((num_layers * dir_count, batch_size,
                        num_units)).astype(np_dtype)
    if self._rnn.rnn_mode == CUDNN_LSTM:
      input_c = np.zeros((num_layers * dir_count, batch_size,
                          num_units)).astype(np_dtype)
      initial_state = (input_h, input_c)
    else:
      initial_state = (input_h,)
    return initial_state
  def FProp(self, inputs_t, initial_state_t, training):
    """Builds additional subgraph with given inputs and state.
    Args:
      inputs_t: a tensor.
      initial_state_t: a tensor.
      training: boolean, true if training mode.
    Returns:
      A tensor of the forward pass output of the model.
    """
    outputs, output_state = self._rnn(
        inputs_t, initial_state=initial_state_t, training=training)
    return self._AddUp(outputs, output_state)
  def Feed(self, sess, inputs, initial_state=None, return_sum=True):
    """Runs graph with given inputs and initial state."""
    batch_size = inputs.shape[1]
    if initial_state is None:
      initial_state = self.ZeroState(batch_size)
    if return_sum:
      return sess.run(
          self.total_sum,
          feed_dict={self.inputs: inputs,
                     self.initial_state: initial_state})
    else:
      return sess.run(
          [self.outputs, self.output_state],
          feed_dict={self.inputs: inputs,
                     self.initial_state: initial_state})
def _CreateCudnnCompatibleCanonicalRNN(rnn, inputs, is_bidi=False, scope=None):
  """Builds a canonical (non-cuDNN) RNN mirroring `rnn`'s configuration.

  Args:
    rnn: a Cudnn RNN layer; its rnn_mode, num_units and num_layers are
      mirrored by the canonical cells.
    inputs: time-major input tensor fed to dynamic_rnn.
    is_bidi: if True, builds a stacked bidirectional RNN instead of a
      unidirectional MultiRNNCell.
    scope: variable scope passed through to the underlying rnn call.

  Returns:
    The (outputs, state) pair from dynamic_rnn, or
    (outputs, (output_state_fw, output_state_bw)) in the bidirectional case.
  """
  mode = rnn.rnn_mode
  num_units = rnn.num_units
  num_layers = rnn.num_layers
  # To reuse cuDNN-trained models, must use cudnn compatible rnn cells.
  if mode == CUDNN_LSTM:
    single_cell = lambda: cudnn_rnn_ops.CudnnCompatibleLSTMCell(num_units)
  elif mode == CUDNN_GRU:
    single_cell = lambda: cudnn_rnn_ops.CudnnCompatibleGRUCell(num_units)
  elif mode == CUDNN_RNN_TANH:
    single_cell = (lambda: rnn_cell_impl.BasicRNNCell(num_units, math_ops.tanh))
  elif mode == CUDNN_RNN_RELU:
    single_cell = (
        lambda: rnn_cell_impl.BasicRNNCell(num_units, gen_nn_ops.relu))
  else:
    raise ValueError("%s is not supported!" % mode)
  if not is_bidi:
    cell = rnn_cell_impl.MultiRNNCell(
        [single_cell() for _ in range(num_layers)])
    return rnn_lib.dynamic_rnn(
        cell, inputs, dtype=dtypes.float32, time_major=True, scope=scope)
  else:
    cells_fw = [single_cell() for _ in range(num_layers)]
    cells_bw = [single_cell() for _ in range(num_layers)]
    (outputs, output_state_fw,
     output_state_bw) = contrib_rnn_lib.stack_bidirectional_dynamic_rnn(
         cells_fw,
         cells_bw,
         inputs,
         dtype=dtypes.float32,
         time_major=True,
         scope=scope)
    return outputs, (output_state_fw, output_state_bw)
class CudnnRNNTestBasic(test_util.TensorFlowTestCase):
  """Basic construction / variable-reuse / optimizer tests for Cudnn RNN
  layers; all tests are skipped on builds without CUDA."""
  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testLayerBasic(self):
    num_layers = 4
    num_units = 2
    batch_size = 8
    direction = CUDNN_RNN_UNIDIRECTION
    dir_count = 1
    with vs.variable_scope("main"):
      kernel_initializer = init_ops.constant_initializer(0.)
      bias_initializer = init_ops.constant_initializer(0.)
      inputs = random_ops.random_uniform([
          num_layers * dir_count, batch_size, num_units], dtype=dtypes.float32)
      lstm = cudnn_rnn.CudnnLSTM(num_layers, num_units,
                                 direction=direction,
                                 kernel_initializer=kernel_initializer,
                                 bias_initializer=bias_initializer,
                                 name="awesome_lstm")
      # Build the layer
      outputs1, _ = lstm(inputs)
      # Reuse the layer
      outputs2, _ = lstm(inputs)
      total_sum1 = math_ops.reduce_sum(outputs1)
      total_sum2 = math_ops.reduce_sum(outputs2)
    with vs.variable_scope("main", reuse=True):
      lstm = cudnn_rnn.CudnnLSTM(num_layers, num_units,
                                 direction=direction,
                                 kernel_initializer=kernel_initializer,
                                 bias_initializer=bias_initializer,
                                 name="awesome_lstm")
      # Reuse the layer
      outputs3, _ = lstm(inputs)
      total_sum3 = math_ops.reduce_sum(outputs3)
    # All three invocations must share the single opaque-kernel variable.
    self.assertEqual(1, len(variables.trainable_variables()))
    self.assertEqual(1, len(ops.get_collection(ops.GraphKeys.SAVEABLE_OBJECTS)))
    self.assertEqual("main/awesome_lstm/opaque_kernel",
                     variables.trainable_variables()[0].op.name)
    with self.test_session(use_gpu=True) as sess:
      sess.run(variables.global_variables_initializer())
      (total_sum1_v, total_sum2_v, total_sum3_v) = sess.run(
          [total_sum1, total_sum2, total_sum3])
      # Zero-initialized weights and biases make every output sum zero.
      self.assertEqual(0, total_sum1_v)
      self.assertEqual(0, total_sum2_v)
      self.assertEqual(0, total_sum3_v)
  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testOptimizersSupport(self):
    for opt in ("adagrad", "adam", "rmsprop", "momentum", "sgd"):
      self._TestOptimizerSupportHelper(opt)
  def _GetOptimizer(self, opt):
    # Maps an optimizer name used by testOptimizersSupport to an instance.
    if opt == "adagrad":
      return adagrad.AdagradOptimizer(learning_rate=1e-2)
    elif opt == "adam":
      return adam.AdamOptimizer(learning_rate=1e-2)
    elif opt == "rmsprop":
      return rmsprop.RMSPropOptimizer(learning_rate=1e-2)
    elif opt == "momentum":
      return momentum.MomentumOptimizer(learning_rate=1e-2, momentum=0.9)
    elif opt == "sgd":
      return gradient_descent.GradientDescentOptimizer(learning_rate=1e-2)
    else:
      raise ValueError("Unsupported optimizer: %s" % opt)
  def _TestOptimizerSupportHelper(self, opt):
    # Builds a tiny LSTM graph and runs one minimize step with `opt`.
    num_layers = 4
    num_units = 2
    batch_size = 8
    direction = CUDNN_RNN_UNIDIRECTION
    dir_count = 1
    with ops.Graph().as_default() as g:
      kernel_initializer = init_ops.constant_initializer(0.)
      bias_initializer = init_ops.constant_initializer(0.)
      inputs = random_ops.random_uniform([
          num_layers * dir_count, batch_size, num_units], dtype=dtypes.float32)
      lstm = cudnn_rnn.CudnnLSTM(num_layers, num_units,
                                 direction=direction,
                                 kernel_initializer=kernel_initializer,
                                 bias_initializer=bias_initializer,
                                 name="awesome_lstm")
      outputs, _ = lstm(inputs)
      loss = math_ops.reduce_sum(outputs)
      optimizer = self._GetOptimizer(opt)
      train_op = optimizer.minimize(loss)
    with self.test_session(use_gpu=True, graph=g) as sess:
      sess.run(variables.global_variables_initializer())
      sess.run(train_op)
  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveableGraphDeviceAssignment(self):
    num_layers = 4
    num_units = 2
    batch_size = 8
    direction = CUDNN_RNN_UNIDIRECTION
    dir_count = 1
    def DeviceFn(op):
      # Pin variables to CPU while all other ops run on GPU.
      if op.type in ("Variable", "VariableV2"):
        return "/cpu:0"
      else:
        return "/gpu:0"
    with ops.Graph().as_default() as g:
      with ops.device(DeviceFn):
        with vs.variable_scope("main"):
          kernel_initializer = init_ops.constant_initializer(3.14)
          bias_initializer = init_ops.constant_initializer(1.59)
          inputs = random_ops.random_uniform(
              [num_layers * dir_count, batch_size, num_units],
              dtype=dtypes.float32)
          lstm = cudnn_rnn.CudnnLSTM(num_layers, num_units,
                                     direction=direction,
                                     kernel_initializer=kernel_initializer,
                                     bias_initializer=bias_initializer,
                                     name="awesome_lstm")
          outputs = lstm(inputs)
        # saver is created in the scope of DeviceFn.
        saver = saver_lib.Saver()
    with self.test_session(use_gpu=True, graph=g) as sess:
      save_path = os.path.join(self.get_temp_dir(),
                               "test-saveable-device-assignment")
      sess.run(variables.global_variables_initializer())
      saver.save(sess, save_path)
      saver.restore(sess, save_path)
      sess.run(outputs)
  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testDifferentShapesEager(self):
    # Checks that kernel caching does not cause sharing of temporary storage
    # across different input shapes when executing eagerly.
    with context.eager_mode():
      with ops.device("gpu:0"):
        first_output, _ = cudnn_rnn.CudnnGRU(1, 100)(
            array_ops.zeros([28, 100, 28]))
        second_output, _ = cudnn_rnn.CudnnGRU(1, 100)(
            array_ops.zeros([28, 100, 100]))
        self.assertAllEqual([28, 100, 100], first_output.shape)
        self.assertAllEqual([28, 100, 100], second_output.shape)
      def _LossFunc():
        first_output, _ = cudnn_rnn.CudnnGRU(1, 100)(
            array_ops.zeros([28, 100, 28]))
        second_output, _ = cudnn_rnn.CudnnGRU(1, 100)(
            array_ops.zeros([28, 100, 100]))
        return (math_ops.reduce_sum(first_output) +
                math_ops.reduce_sum(second_output))
      backprop.implicit_grad(_LossFunc)()
  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testDifferentShapesGraph(self):
    # Tests that a single kernel instance presented with multiple input shapes
    # does not crash with graph execution.
    with ops.device("gpu:0"):
      layer = cudnn_rnn.CudnnGRU(1, 100)
      layer(array_ops.zeros([28, 100, 100]))
      def _Cond(index, accumulation):
        del accumulation # unused
        return math_ops.less(index, 4)
      def _Body(index, accumulation):
        # Alternates between two input widths across loop iterations.
        layer_input = accumulation[:, :, 10 * (1 + index % 2):]
        output, _ = layer(layer_input)
        return index + 1, accumulation + output
      original_input = array_ops.zeros([28, 100, 100])
      _, accumulation = control_flow_ops.while_loop(_Cond, _Body,
                                                    [0, original_input])
      grad, = gradients.gradients(
          math_ops.reduce_sum(accumulation), (original_input,))
    init_op = variables.global_variables_initializer()
    with self.test_session() as sess:
      sess.run(init_op)
      accumulation_eval, grad_eval = sess.run((accumulation, grad))
      self.assertAllEqual([28, 100, 100], accumulation_eval.shape)
      self.assertAllEqual([28, 100, 100], grad_eval.shape)
# TODO(jamesqin): Transform to parameterized test after it is included in the
# TF open source codebase.
class CudnnRNNTestSaveRestore(test_util.TensorFlowTestCase):
  """Tests tf.train.Saver save/restore of Cudnn RNN opaque parameters.

  Opaque parameter blobs are converted to canonical (weights, biases) form
  via the layer's saveable so values can be compared before and after a
  save/restore round trip.
  """

  def _CompareWeights(self, lhs, rhs):
    # Canonical weight lists must match one-to-one, element-wise.
    self.assertEqual(len(lhs), len(rhs))
    for lw, rw in zip(lhs, rhs):
      self.assertAllEqual(lw, rw)

  def _CompareBiases(self, lhs, rhs, rnn_mode, num_layers, direction):
    """Compares canonical biases per layer (and per direction when bidi)."""
    self.assertEqual(len(lhs), len(rhs))
    # The number of canonical params per layer depends on the cell type.
    if rnn_mode == CUDNN_LSTM:
      num_params_per_layer = CUDNN_LSTM_PARAMS_PER_LAYER
    elif rnn_mode == CUDNN_GRU:
      num_params_per_layer = CUDNN_GRU_PARAMS_PER_LAYER
    elif rnn_mode == CUDNN_RNN_TANH:
      num_params_per_layer = CUDNN_RNN_TANH_PARAMS_PER_LAYER
    else:
      num_params_per_layer = CUDNN_RNN_RELU_PARAMS_PER_LAYER
    num_dirs = 1 if direction == CUDNN_RNN_UNIDIRECTION else 2
    num_params_per_layer *= num_dirs
    self.assertEqual(num_params_per_layer * num_layers, len(lhs))
    for i in range(num_layers):
      layer_lhs = lhs[i * num_params_per_layer: (i+1) * num_params_per_layer]
      layer_rhs = rhs[i * num_params_per_layer: (i+1) * num_params_per_layer]
      if direction == CUDNN_RNN_UNIDIRECTION:
        self._CompareSingleLayerBiases(layer_lhs, layer_rhs)
      else:
        # Bidirectional layer: first half of the params is one direction,
        # second half the other; compare each half separately.
        size = len(layer_lhs)
        fw_lhs, bw_lhs = layer_lhs[:size//2], layer_lhs[size//2:]
        fw_rhs, bw_rhs = layer_rhs[:size//2], layer_rhs[size//2:]
        self._CompareSingleLayerBiases(fw_lhs, fw_rhs)
        self._CompareSingleLayerBiases(bw_lhs, bw_rhs)

  def _CompareSingleLayerBiases(self, lhs, rhs):
    """Compares one layer's biases via pairwise sums of its two halves.

    Only the sums of the paired bias vectors are asserted equal
    (presumably because Cudnn keeps two bias vectors per gate and only
    their sum affects the cell output — confirm against Cudnn docs).
    """
    self.assertEqual(len(lhs), len(rhs))
    lf_lhs, rt_lhs = lhs[:len(lhs)//2], lhs[len(lhs)//2:]
    lf_rhs, rt_rhs = rhs[:len(rhs)//2], rhs[len(rhs)//2:]
    self.assertEqual(len(lf_lhs), len(rt_lhs))
    self.assertEqual(len(lf_rhs), len(rt_rhs))
    sum_lhs, sum_rhs = [], []
    for lf, rt in zip(lf_lhs, rt_lhs):
      sum_lhs.append(lf + rt)
    for lf, rt in zip(lf_rhs, rt_rhs):
      sum_rhs.append(lf + rt)
    self.assertEqual(len(sum_lhs), len(sum_rhs))
    for lf, rt in zip(sum_lhs, sum_rhs):
      self.assertAllEqual(lf, rt)

  def _TestSaveRestoreVariable(self, rnn_mode, direction, dtype):
    """Saves a model, zeroes its opaque params, restores, and checks the
    canonical weights/biases come back unchanged."""
    input_size = 3
    num_layers = 2
    num_units = 7
    with ops.Graph().as_default() as g:
      random_seed.set_random_seed(1234)
      model = CudnnTestModel(
          rnn_mode,
          num_layers,
          num_units,
          input_size,
          direction=direction,
          dtype=dtype)
      rnn = model.rnn
      save_path = os.path.join(self.get_temp_dir(),
                               "save-restore-variable-test")
      saver = saver_lib.Saver()
      weights, biases = model.rnn.saveable._OpaqueParamsToCanonical()
      opaque_params = rnn.trainable_variables[0]
      # CudnnTestModel() creates CudnnOpaqueParamsSaveable that helps saver save
      # Cudnn vars in canonical format.
      reset_op = state_ops.assign(
          opaque_params,
          array_ops.zeros(array_ops.shape(opaque_params), dtype=dtype))
      # Passing graph explicitly, otherwise an old sess would be reused.
      with self.test_session(use_gpu=True, graph=g) as sess:
        sess.run(variables.global_variables_initializer())
        val = saver.save(sess, save_path)
        self.assertEqual(save_path, val)
        weights_v, biases_v = sess.run([weights, biases])
        # Reset opaque param
        sess.run(reset_op)
        saver.restore(sess, save_path)
        weights_v_restored, biases_v_restored = sess.run([weights, biases])
        self._CompareWeights(weights_v, weights_v_restored)
        self._CompareBiases(biases_v, biases_v_restored, rnn_mode, num_layers,
                            direction)

  def _TestSaveRestoreTwoVariables(self, rnn_mode, direction, dtype):
    """Same round trip as above, but with two independent Cudnn models in
    distinct variable scopes sharing one checkpoint."""
    input_size = 3
    num_layers = 2
    num_units = 7
    with ops.Graph().as_default() as g:
      random_seed.set_random_seed(1234)
      with vs.variable_scope("m1"):
        model1 = CudnnTestModel(
            rnn_mode,
            num_layers,
            num_units,
            input_size,
            direction=direction,
            dtype=dtype)
      with vs.variable_scope("m2"):
        model2 = CudnnTestModel(
            rnn_mode,
            num_layers,
            num_units,
            input_size,
            direction=direction,
            dtype=dtype)
      opaque_params = (model1.rnn.trainable_variables[0],
                       model2.rnn.trainable_variables[0])
      weights1, biases1 = model1.rnn.saveable._OpaqueParamsToCanonical()
      weights2, biases2 = model2.rnn.saveable._OpaqueParamsToCanonical()
      reset_params = [
          state_ops.assign(params,
                           array_ops.zeros_like(params, dtype=dtype))
          for params in opaque_params
      ]
      reset_op = control_flow_ops.group(*reset_params)
      save_path = os.path.join(self.get_temp_dir(),
                               "save-restore-variable-test2")
      saver = saver_lib.Saver()
      # Passing graph explicitly, otherwise an old sess would be reused.
      with self.test_session(use_gpu=True, graph=g) as sess:
        sess.run(variables.global_variables_initializer())
        val = saver.save(sess, save_path)
        self.assertEqual(save_path, val)
        weights1_v, biases1_v = sess.run([weights1, biases1])
        weights2_v, biases2_v = sess.run([weights2, biases2])
        sess.run(reset_op)
        saver.restore(sess, save_path)
        weights1_v_restored, biases1_v_restored = sess.run([weights1, biases1])
        weights2_v_restored, biases2_v_restored = sess.run([weights2, biases2])
        self._CompareWeights(weights1_v, weights1_v_restored)
        self._CompareWeights(weights2_v, weights2_v_restored)
        self._CompareBiases(biases1_v, biases1_v_restored, rnn_mode, num_layers,
                            direction)
        self._CompareBiases(biases2_v, biases2_v_restored, rnn_mode, num_layers,
                            direction)

  def _TestSaveRestoreOutput(self, rnn_mode, direction, dtype):
    """Checks the model's forward output is identical after a save,
    param-zeroing, and restore."""
    with ops.Graph().as_default() as g:
      num_layers = 2
      num_units = 7
      input_size = 7
      seq_length = 8
      batch_size = 4
      model = CudnnTestModel(
          rnn_mode,
          num_layers,
          num_units,
          input_size,
          direction=direction,
          dtype=dtype,
          training=False)
      rnn = model.rnn
      save_path = os.path.join(self.get_temp_dir(), "save-restore-output-test")
      saver = saver_lib.Saver()
      # Only one opaque var in a cudnn layer.
      assert len(rnn.trainable_variables) == 1
      reset_params = state_ops.assign(
          rnn.trainable_variables[0],
          array_ops.zeros(
              array_ops.shape(rnn.trainable_variables[0]), dtype=dtype))
      # Passing graph explicitly, otherwise an old sess would be reused.
      with self.test_session(use_gpu=True, graph=g) as sess:
        sess.run(variables.global_variables_initializer())
        inputs, initial_state = model.SynthesizeInput(seq_length, batch_size)
        total_sum_v = model.Feed(sess, inputs, initial_state)
        val = saver.save(sess, save_path)
        self.assertEqual(save_path, val)
        sess.run(reset_params)
        saver.restore(sess, save_path)
        total_sum_v_restored = model.Feed(sess, inputs, initial_state)
        self.assertAllClose(total_sum_v, total_sum_v_restored, atol=1e-5)

  def _TestSaveRestoreHelper(self, rnn_mode):
    # Sweep every direction x dtype combination for the given cell type.
    directions = [CUDNN_RNN_UNIDIRECTION, CUDNN_RNN_BIDIRECTION]
    dtype_list = [dtypes.float16, dtypes.float32, dtypes.float64]
    for direction, dtype in itertools.product(directions, dtype_list):
      self._TestSaveRestoreVariable(rnn_mode, direction, dtype)
      self._TestSaveRestoreTwoVariables(rnn_mode, direction, dtype)
      self._TestSaveRestoreOutput(rnn_mode, direction, dtype)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveRestoreRepeatedlyCreateCustomSaveable(self):
    """Creating the custom saveable twice must raise."""
    input_size = 3
    num_layers = 2
    num_units = 7
    with ops.Graph().as_default():
      random_seed.set_random_seed(1234)
      model = CudnnTestModel(
          CUDNN_LSTM,
          num_layers,
          num_units,
          input_size,
          direction=CUDNN_RNN_UNIDIRECTION,
          dtype=dtypes.float32)
      with self.assertRaisesRegexp(RuntimeError,
                                   "Cudnn saveable already created"):
        model.rnn._create_saveable()

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveRestoreLSTM(self):
    self._TestSaveRestoreHelper(CUDNN_LSTM)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveRestoreGRU(self):
    self._TestSaveRestoreHelper(CUDNN_GRU)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveRestoreRNNTanh(self):
    self._TestSaveRestoreHelper(CUDNN_RNN_TANH)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSaveRestoreRNNRelu(self):
    self._TestSaveRestoreHelper(CUDNN_RNN_RELU)
class CudnnRNNTestSaveRestoreCheckpointable(test_util.TensorFlowTestCase):
  """Tests object-based (checkpointable) save/restore of Cudnn RNN layers."""

  def _VerifyCheckpoint(
      self, checkpoint_path, compatible_cell_fn, cudnn_cell_fn,
      num_layers, input_size, expected_variable_values, num_applications=3):
    """Round-trips a checkpoint through a Cudnn layer and a compatible cell.

    Restores `checkpoint_path` into a freshly built Cudnn layer, re-saves,
    restores that second checkpoint into a platform-independent compatible
    cell, and asserts both variable values and the final-step outputs match.
    """
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    with ops.device("gpu:0"):
      cudnn_layer = cudnn_cell_fn()
      cudnn_checkpoint = checkpointable_utils.Checkpoint(cell=cudnn_layer)
      status = cudnn_checkpoint.restore(checkpoint_path)
      inputs = 3. * array_ops.ones([num_applications, num_layers, input_size],
                                   dtype=dtypes.float32)
      cudnn_output, _ = cudnn_layer(inputs)
      status.assert_consumed().run_restore_ops()
      second_save_path = cudnn_checkpoint.save(checkpoint_prefix)
      restore_layer = compatible_cell_fn()
      restore_layer_checkpoint = checkpointable_utils.Checkpoint(
          cell=restore_layer)
      status = restore_layer_checkpoint.restore(second_save_path)
      current_state = restore_layer.zero_state(1, dtypes.float32)
      # Step the compatible cell the same number of times the Cudnn layer
      # consumed time steps, feeding the same constant input.
      for _ in range(num_applications):
        restore_layer_output, current_state = restore_layer(
            inputs=3. * array_ops.ones([1, input_size]),
            state=current_state)
      status.assert_consumed().run_restore_ops()
      self.assertTrue(restore_layer.variables)
      for variable, expected_value in zip(
          restore_layer.variables, expected_variable_values):
        self.assertAllClose(expected_value, self.evaluate(variable))
      # Compare the compatible cell's last output against the Cudnn layer's
      # last time step output.
      self.assertAllClose(self.evaluate(restore_layer_output),
                          self.evaluate(cudnn_output)[-1, -1:, ...])

  def _CheckpointableSingleCellUnidirectionalTestTemplate(
      self, single_cell_fn, cudnn_cell_fn):
    # Single-layer cuDNN cells with object-based checkpointing should be
    # checkpoint compatible with either single CudnnCompatible cells or
    # MultiRnnCells with one cell.
    input_size = 3
    save_cell_layer = single_cell_fn()
    save_cell_layer(
        inputs=array_ops.ones([1, input_size]),
        state=save_cell_layer.zero_state(1, dtypes.float32))
    self.assertTrue(save_cell_layer.variables)
    # Assign deterministic pseudo-random values so the restore can be
    # checked against known numbers.
    expected_values = []
    np.random.seed(10)
    for variable in save_cell_layer.variables:
      value = np.random.normal(size=variable.shape)
      expected_values.append(value)
      self.evaluate(variable.assign(value))
    save_checkpoint = checkpointable_utils.Checkpoint(cell=save_cell_layer)
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    first_save_path = save_checkpoint.save(checkpoint_prefix)
    self._VerifyCheckpoint(
        checkpoint_path=first_save_path,
        compatible_cell_fn=
        lambda: rnn_cell_impl.MultiRNNCell([single_cell_fn()]),
        cudnn_cell_fn=cudnn_cell_fn,
        num_layers=1,
        expected_variable_values=expected_values,
        input_size=input_size)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  @test_util.run_in_graph_and_eager_modes()
  def testLSTMCheckpointableSingleLayer(self):
    num_units = 2
    direction = CUDNN_RNN_UNIDIRECTION
    self._CheckpointableSingleCellUnidirectionalTestTemplate(
        single_cell_fn=functools.partial(
            cudnn_rnn_ops.CudnnCompatibleLSTMCell, num_units=num_units),
        cudnn_cell_fn=functools.partial(
            cudnn_rnn.CudnnLSTM, num_layers=1, num_units=num_units,
            direction=direction, name="awesome_lstm"))

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  @test_util.run_in_graph_and_eager_modes()
  def testGRUCheckpointableSingleLayer(self):
    num_units = 2
    direction = CUDNN_RNN_UNIDIRECTION
    with self.assertRaises(NotImplementedError):
      # TODO(allenl): Implement object-based saving for GRUs and other cells.
      self._CheckpointableSingleCellUnidirectionalTestTemplate(
          single_cell_fn=functools.partial(
              cudnn_rnn_ops.CudnnCompatibleGRUCell, num_units=num_units),
          cudnn_cell_fn=functools.partial(
              cudnn_rnn.CudnnGRU, num_layers=1, num_units=num_units,
              direction=direction, name="awesome_gru"))

  def _CheckpointableMultiLayerTestTemplate(
      self, single_cell_fn, cudnn_cell_fn, num_layers):
    """Multi-layer variant: a MultiRNNCell of `num_layers` compatible cells
    must be checkpoint-compatible with the multi-layer Cudnn layer."""

    def _MultiCellFn():
      return rnn_cell_impl.MultiRNNCell(
          [single_cell_fn() for _ in range(num_layers)])
    input_size = 3
    save_graph = ops.Graph()
    with save_graph.as_default(), self.test_session(graph=save_graph):
      save_layer = _MultiCellFn()
      save_layer(inputs=array_ops.ones([1, input_size]),
                 state=save_layer.zero_state(1, dtypes.float32))
      self.assertTrue(save_layer.variables)
      expected_values = []
      np.random.seed(10)
      for variable in save_layer.variables:
        value = np.random.normal(size=variable.shape)
        expected_values.append(value)
        self.evaluate(variable.assign(value))
      save_checkpoint = checkpointable_utils.Checkpoint(cell=save_layer)
      checkpoint_directory = self.get_temp_dir()
      checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
      first_save_path = save_checkpoint.save(checkpoint_prefix)
    self._VerifyCheckpoint(
        checkpoint_path=first_save_path,
        compatible_cell_fn=_MultiCellFn, cudnn_cell_fn=cudnn_cell_fn,
        num_layers=num_layers,
        expected_variable_values=expected_values,
        input_size=input_size)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  @test_util.run_in_graph_and_eager_modes()
  def testCudnnCompatibleLSTMCheckpointablMultiLayer(self):
    num_units = 2
    num_layers = 3
    direction = CUDNN_RNN_UNIDIRECTION
    self._CheckpointableMultiLayerTestTemplate(
        single_cell_fn=functools.partial(
            cudnn_rnn_ops.CudnnCompatibleLSTMCell, num_units=num_units),
        cudnn_cell_fn=functools.partial(
            cudnn_rnn.CudnnLSTM, num_layers=num_layers, num_units=num_units,
            direction=direction, name="awesome_lstm"),
        num_layers=num_layers)
# TODO(jamesqin): Transform to parameterized test after it is included in the
# TF open source codebase.
class CudnnRNNTestCompatibleRNNCells(test_util.TensorFlowTestCase):
  """Checks Cudnn RNN checkpoints are loadable by CudnnCompatible* cells.

  A Cudnn model is trained and saved; the same checkpoint is restored into
  a graph built from platform-independent compatible cells, and inference
  results from both graphs must match.
  """

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testCudnnCompatibleLSTM(self):
    self._TestCudnnCompatibleRnnCellsHelper(CUDNN_LSTM)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testCudnnCompatibleGRU(self):
    self._TestCudnnCompatibleRnnCellsHelper(CUDNN_GRU)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testCudnnCompatibleRNNTanh(self):
    self._TestCudnnCompatibleRnnCellsHelper(CUDNN_RNN_TANH)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testCudnnCompatibleRNNRelu(self):
    self._TestCudnnCompatibleRnnCellsHelper(CUDNN_RNN_RELU)

  def _TestCudnnCompatibleRnnCellsHelper(self, rnn_mode):
    configs = [
        {
            "num_layers": 1,
            "seq_length": 3,
            "num_units": 4,
            "input_size": 5,
            "batch_size": 6,
        },
        {
            "num_layers": 2,
            "seq_length": 8,
            "num_units": 4,
            "input_size": 8,
            "batch_size": 16,
        },
        {
            "num_layers": 2,
            "seq_length": 3,
            "num_units": 4,
            "input_size": 5,
            "batch_size": 6,
        },
        {
            "num_layers": 1,
            "seq_length": 2,
            "num_units": 2,
            "input_size": 4,
            "batch_size": 1,
        },
    ]
    directions = [CUDNN_RNN_UNIDIRECTION, CUDNN_RNN_BIDIRECTION]
    # NOTE(review): zip truncates to the shorter sequence, so with 4 configs
    # and 2 directions only the first two configs are exercised — confirm
    # whether itertools.product was intended here.
    for cfg, direction in zip(configs, directions):
      self._TestCudnnCompatibleRnnCells(cfg["num_layers"], cfg["seq_length"],
                                        cfg["num_units"], cfg["input_size"],
                                        cfg["batch_size"], rnn_mode, direction)

  def _TestCudnnCompatibleRnnCells(self, num_layers, seq_length, num_units,
                                   input_size, batch_size, rnn_mode, direction):
    """Trains a Cudnn model, then compares Cudnn inference against
    CudnnCompatible-cell inference restored from the same checkpoint."""
    dtype = dtypes.float32
    # Train graph
    with ops.Graph().as_default() as g:
      model = CudnnTestModel(
          rnn_mode,
          num_layers,
          num_units,
          input_size,
          direction=direction,
          dtype=dtype,
          training=True)
      target_output = array_ops.placeholder(dtype=dtype)
      loss_op = losses.log_loss(
          labels=target_output, predictions=model.total_sum)
      optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1e-2)
      train_op = optimizer.minimize(loss_op)
      saver = saver_lib.Saver()
      # Train Cudnn model
      seed = 0
      with self.test_session(use_gpu=True, graph=g) as sess:
        sess.run(variables.global_variables_initializer())
        # Train 128 steps
        num_steps = 128
        for _ in range(num_steps):
          inputs, _ = model.SynthesizeInput(seq_length, batch_size, seed)
          targets = np.random.rand()
          sess.run(
              train_op,
              feed_dict={
                  model.inputs: inputs,
                  model.initial_state: model.ZeroState(batch_size),
                  target_output: targets
              })
          seed += 1
        save_path = os.path.join(self.get_temp_dir(),
                                 ("cudnn-rnn-%s-test" % rnn_mode))
        save_v = saver.save(sess, save_path)
        self.assertEqual(save_path, save_v)
    # Cudnn inference graph
    with ops.Graph().as_default() as g:
      model = CudnnTestModel(
          rnn_mode,
          num_layers,
          num_units,
          input_size,
          direction=direction,
          dtype=dtype,
          training=False)
      rnn = model.rnn
      saver = saver_lib.Saver()
      inference_input = np.random.rand(seq_length, batch_size,
                                       input_size).astype(np.float32)
      with self.test_session(use_gpu=True, graph=g) as sess:
        sess.run(variables.global_variables_initializer())
        saver.restore(sess, save_path)
        # Cudnn inference
        cudnn_outputs_v, cudnn_output_states_v = model.Feed(
            sess, inference_input, return_sum=False)
    # Canonical RNN inference graph
    with ops.Graph().as_default() as g:
      cell_inputs = array_ops.placeholder(
          dtype, shape=[seq_length, batch_size, input_size])
      if direction == CUDNN_RNN_UNIDIRECTION:
        # outputs is one tensor, states are num_layer tuples, each 2 tensors
        (outputs, states) = _CreateCudnnCompatibleCanonicalRNN(rnn, cell_inputs)
        if rnn_mode == CUDNN_LSTM:
          output_h = array_ops.stack([s.h for s in states])
          output_c = array_ops.stack([s.c for s in states])
        else:
          output_state = array_ops.stack([s for s in states])
      else:
        # outputs is one tensor.
        # states is a tuple of 2 tuples:
        # each sub tuple is num_layer tuples, each with 2 tensors.
        (outputs, states) = _CreateCudnnCompatibleCanonicalRNN(
            rnn, cell_inputs, is_bidi=True)
        output_state_fw, output_state_bw = states
        if rnn_mode == CUDNN_LSTM:
          output_h, output_c = [], []
          for s_fw, s_bw in zip(output_state_fw, output_state_bw):
            output_h.append(array_ops.stack([s_fw.h, s_bw.h]))
            output_c.append(array_ops.stack([s_fw.c, s_bw.c]))
          output_h = array_ops.concat(output_h, axis=0)
          output_c = array_ops.concat(output_c, axis=0)
        else:
          output_state = []
          for s_fw, s_bw in zip(output_state_fw, output_state_bw):
            output_state.append(array_ops.stack([s_fw, s_bw]))
          output_state = array_ops.concat(output_state, axis=0)
      saver = saver_lib.Saver()
      with self.test_session(use_gpu=True, graph=g) as sess:
        saver.restore(sess, save_path)
        # BlockCell inference
        if rnn_mode == CUDNN_LSTM:
          outputs_v, output_h_v, output_c_v = sess.run(
              [outputs, output_h, output_c],
              feed_dict={cell_inputs: inference_input})
          self.assertAllClose(cudnn_outputs_v, outputs_v)
          cudnn_output_h_v, cudnn_output_c_v = cudnn_output_states_v
          self.assertAllClose(cudnn_output_h_v, output_h_v)
          self.assertAllClose(cudnn_output_c_v, output_c_v)
        else:
          outputs_v, output_state_v = sess.run(
              [outputs, output_state],
              feed_dict={cell_inputs: inference_input})
          self.assertAllClose(cudnn_outputs_v, outputs_v, atol=2e-5, rtol=2e-5)
          (cudnn_output_h_v,) = cudnn_output_states_v
          self.assertAllClose(cudnn_output_h_v, output_state_v, atol=2e-5,
                              rtol=2e-5)
class CudnnRNNTestParamsSize(test_util.TensorFlowTestCase):
  """Checks the opaque Cudnn param blob is at least as large as the total
  size of its canonical weights and biases."""

  def _TestOpaqueParamsSize(self, rnn_mode, num_layers, num_units, input_size,
                            dtype, direction):
    """Builds one model and compares the opaque param blob size against the
    canonical lower-bound estimate.

    Args:
      rnn_mode: one of the CUDNN_* cell-type constants.
      num_layers: number of stacked layers.
      num_units: hidden size per layer.
      input_size: input feature size.
      dtype: parameter dtype.
      direction: CUDNN_RNN_UNIDIRECTION or CUDNN_RNN_BIDIRECTION.
    """
    logging.info("Testing one lstm param size with config: %s", locals())
    model = CudnnTestModel(
        rnn_mode,
        num_layers,
        num_units,
        input_size,
        dtype=dtype,
        direction=direction)
    rnn = model.rnn

    # Min param size estimate = sum(weights.size) + sum(biases.size)
    # BUG FIX: the original used np.sum(map(np.prod, ...)); on Python 3 a
    # `map` object is wrapped by numpy as a 0-d object array, so the sum is
    # never computed. Materialize a list so np.sum sees real numbers.
    min_params_size = (
        np.sum([np.prod(shape) for shape in rnn.canonical_weight_shapes]) +
        np.sum([sp[0] for sp in rnn.canonical_bias_shapes]))

    opaque_params = rnn.trainable_variables[0]
    with self.test_session(use_gpu=True, graph=ops.get_default_graph()):
      variables.global_variables_initializer().run()
      opaque_params_size_v = opaque_params.eval().size
      self.assertLessEqual(min_params_size, opaque_params_size_v)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testOpaqueParamsSize(self):
    """Sweeps cell types x shapes x dtypes x directions."""
    test_configs = [
        [4, 200, 200],
        [4, 200, 300],
        [4, 200, 100],
        [1, 100, 200],
        [2, 200, 100],
        [3, 200, 400],
    ]
    directions = [CUDNN_RNN_UNIDIRECTION, CUDNN_RNN_BIDIRECTION]
    dtype_list = [dtypes.float16, dtypes.float32, dtypes.float64]
    rnns = [CUDNN_LSTM, CUDNN_GRU, CUDNN_RNN_RELU, CUDNN_RNN_TANH]
    for (rnn, config, dtype, direction) in itertools.product(
        rnns, test_configs, dtype_list, directions):
      num_layers, num_units, input_size = config
      with ops.Graph().as_default():
        self._TestOpaqueParamsSize(rnn, num_layers, num_units, input_size,
                                   dtype, direction)
class CudnnRNNTestTraining(test_util.TensorFlowTestCase):
  """Gradient checks for Cudnn RNN training (numeric vs symbolic grads)."""

  def _ComputeNumericGrad(self, sess, y, x, delta=1e-4, step=1):
    """Compute the numeric gradient of y wrt to x.

    Args:
      sess: The TF session constructed with a graph containing x and y.
      y: A scalar TF Tensor in the graph constructed in sess.
      x: A TF Tensor in the graph constructed in sess.
      delta: Gradient checker's small perturbation of x[i].
      step: Only compute numerical gradients for a subset of x values.
        I.e. dy/dx[i] is computed if i % step == 0.

    Returns:
      A Tensor of the same shape and dtype as x. If x[i] is not chosen
      to compute the numerical gradient dy/x[i], the corresponding
      value is set to 0.
    """
    x_data = sess.run(x)
    x_size = x_data.size
    x_shape = x_data.shape

    numeric_grad = np.zeros(x_size, dtype=x_data.dtype)
    for i in range(0, x_size, step):
      # Central difference: (f(x+d) - f(x-d)) / (2d) on the i-th element.
      x_pos = x_data.copy()
      if x_size == 1:
        x_pos += delta
      else:
        x_pos.flat[i] += delta
      y_pos_feed_dict = dict([(x.name, x_pos)])
      y_pos = sess.run(y, feed_dict=y_pos_feed_dict)

      x_neg = x_data.copy()
      if x_size == 1:
        x_neg -= delta
      else:
        x_neg.flat[i] -= delta
      y_neg_feed_dict = dict([(x.name, x_neg)])
      y_neg = sess.run(y, feed_dict=y_neg_feed_dict)
      numeric_grad[i] = (y_pos - y_neg) / (2 * delta)
    return numeric_grad.reshape(x_shape)

  def _GetShape(self, sess, inputs):
    """Returns the evaluated shape(s) of a tensor or iterable of tensors."""
    # FIX: the `collections.Iterable` alias is deprecated since Python 3.3
    # and removed in Python 3.10; use collections.abc.Iterable.
    if not isinstance(inputs, collections.abc.Iterable):
      return sess.run(array_ops.shape(inputs))
    else:
      return sess.run([array_ops.shape(x) for x in inputs])

  def _GradientCheckFp16(self, sess, y, xs, num_samples,
                         tolerance=1e-6, delta=1e-4):
    """Gradient check for Fp16.

    Fp16 numerical gradients end up being zeros. Use a new way to check
    gradients:

    Given multi-variant function:
      y = f(x1, x2, ... xn)
      delta_y = f(x1 + delta_x1, x2+delta_x2, ..., xn+delta_xn) -
                f(x1, x2, ..., xn)
              = f'(x1) * delta_x1 + f'(x2) * delta_x2 + .. + f'(xn) * delta_xn
    where:
      delta_xi are very small disturbance.
      f'(xi) is the gradient of y w.r.t xi.

    The gradient check verifies the expected delta_y calculated by the above
    equation is close to the actual delta_y.

    Args:
      sess: tf.Session object.
      y: output tensor.
      xs: a tensor or a list of input tensors.
      num_samples: number of test samples to run.
      tolerance: error tolerance.
      delta: the order of magnititued of input disturbance to apply to
        calculate the output change w.r.t inputs.
    """
    sym_grads = self._ComputeSymGrads(sess, y, xs)
    xs_shapes = self._GetShape(sess, xs)

    x_vals = [sess.run(x) for x in xs]
    for _ in range(num_samples):
      delta_xs = [delta * np.random.rand(*shape.tolist())
                  for shape in xs_shapes]

      feed_dict = {}
      for x, x_val, delta_x in zip(xs, x_vals, delta_xs):
        feed_dict[x] = x_val + delta_x
      actual_delta_y = (float(sess.run(y, feed_dict=feed_dict)) -
                        float(sess.run(y)))

      # First-order Taylor estimate of the output change.
      expected_delta_y = 0.
      for sym_grad, delta_x in zip(sym_grads, delta_xs):
        expected_delta_y += np.dot(
            sym_grad.astype(np.float32).flatten(),
            delta_x.astype(np.float32).flatten())
      self.assertAllClose(expected_delta_y, actual_delta_y,
                          atol=tolerance, rtol=tolerance)

  def _GradientCheck(self, sess, y, xs, tolerance=1e-6, delta=1e-4):
    """Compares symbolic and numeric gradients of y w.r.t. each x in xs."""
    sym_grads = self._ComputeSymGrads(sess, y, xs)

    num_grads = [self._ComputeNumericGrad(sess, y, x, delta) for x in xs]
    self.assertEqual(len(sym_grads), len(num_grads))
    for sym, num in zip(sym_grads, num_grads):
      self.assertFalse(np.any(np.isnan(sym)))
      self.assertFalse(np.any(np.isnan(num)))
      self.assertAllClose(sym, num, atol=tolerance, rtol=tolerance)

  def _ComputeSymGrads(self, sess, y, xs):
    # Symbolic (autodiff) gradients, evaluated to numpy arrays.
    sym_grads_t = gradients.gradients(y, xs)
    return sess.run(sym_grads_t)

  def _TestOneSimpleTraining(self, rnn_mode, num_layers, num_units, input_size,
                             batch_size, seq_length, dir_count, dropout, dtype,
                             delta, tolerance):
    """Runs one gradient-check configuration end to end."""
    # Gradient checking runs two forward ops with almost the same input. Need to
    # make sure the drop patterns across the two runs are the same.
    logging.info("Training test with config: %s", locals())
    old_env_state = os.environ.get("TF_CUDNN_RESET_RND_GEN_STATE", str(False))
    os.environ["TF_CUDNN_RESET_RND_GEN_STATE"] = str(True)

    np.random.seed(1234)
    random_seed.set_random_seed(5678)
    has_input_c = (rnn_mode == CUDNN_LSTM)
    direction = (CUDNN_RNN_UNIDIRECTION
                 if dir_count == 1 else CUDNN_RNN_BIDIRECTION)
    model = CudnnTestModel(
        rnn_mode,
        num_layers,
        num_units,
        input_size,
        direction=direction,
        dropout=dropout,
        dtype=dtype,
        training=True,
        bias_initializer=init_ops.random_normal_initializer(
            mean=1., dtype=dtype))
    rnn = model.rnn
    params = rnn.trainable_variables[0]

    inputs = variables.Variable(
        random_ops.random_uniform(
            [seq_length, batch_size, input_size], dtype=dtype),
        dtype=dtype)
    input_h = variables.Variable(
        random_ops.random_uniform(
            [num_layers * dir_count, batch_size, num_units], dtype=dtype),
        dtype=dtype)
    if has_input_c:
      # LSTM also carries a cell state alongside the hidden state.
      input_c = variables.Variable(
          random_ops.random_uniform(
              [num_layers * dir_count, batch_size, num_units], dtype=dtype),
          dtype=dtype)
      initial_state = (input_h, input_c)
    else:
      initial_state = (input_h,)
    total_sum = model.FProp(inputs, initial_state, training=True)

    with self.test_session(use_gpu=True, graph=ops.get_default_graph()) as sess:
      sess.run(variables.global_variables_initializer())
      all_inputs = [inputs, params]
      for s in initial_state:
        all_inputs.append(s)
      if dtype == dtypes.float16:
        self._GradientCheckFp16(
            sess, total_sum, all_inputs,
            num_samples=FLAGS.grad_check_num_samples,
            tolerance=tolerance, delta=delta)
      else:
        for _ in range(FLAGS.grad_check_num_samples):
          # Each time choose a different set of inputs.
          sess.run(variables.global_variables_initializer())
          self._GradientCheck(
              sess, total_sum, all_inputs,
              tolerance=tolerance, delta=delta)
    # Restore the env var so later tests see the original setting.
    os.environ["TF_CUDNN_RESET_RND_GEN_STATE"] = old_env_state

  def _TestSimpleTrainingHelper(self, rnn_mode, test_configs):
    """Runs every (config, dropout) combination for the given cell type."""
    dropouts = [0, 0.5, 1.]
    for config, dropout in itertools.product(test_configs, dropouts):
      dtype = config.get("dtype", dtypes.float32)
      delta = config.get("delta", 1e-4)
      tolerance = config.get("tolerance", 1e-6)
      dir_count = config.get("dir_count", 1)
      shape = config["shape"]
      with ops.Graph().as_default():
        self._TestOneSimpleTraining(rnn_mode, shape["num_layers"],
                                    shape["num_units"], shape["input_size"],
                                    shape["batch_size"], shape["seq_length"],
                                    dir_count, dropout, dtype, delta,
                                    tolerance)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingLSTMFp64(self):
    test_configs = [
        {
            "dtype": dtypes.float64,
            "tolerance": 5e-6,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_LSTM, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingLSTMFp32(self):
    test_configs = [
        {
            "dtype": dtypes.float32,
            "delta": 1e-4,
            "tolerance": 9e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_LSTM, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingLSTMFp16(self):
    test_configs = [
        {
            "dtype": dtypes.float16,
            "delta": 1e-3,
            "tolerance": 9e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
        {
            "dtype": dtypes.float16,
            "delta": 1e-2,
            "tolerance": 9e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 6,
                "input_size": 8,
                "batch_size": 6,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_LSTM, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingGRUFp64(self):
    test_configs = [
        {
            "dtype": dtypes.float64,
            "tolerance": 5e-6,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            }
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_GRU, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingGRUFp32(self):
    test_configs = [
        {
            "dtype": dtypes.float32,
            "delta": 1e-3,
            "tolerance": 4e-3,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_GRU, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingGRUFp16(self):
    test_configs = [
        {
            "dtype": dtypes.float16,
            "delta": 2e-3,
            "tolerance": 6e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_GRU, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNTanhFp64(self):
    test_configs = [
        {
            "dtype": dtypes.float64,
            "tolerance": 5e-6,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_TANH, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNTanhFp32(self):
    test_configs = [
        {
            "dtype": dtypes.float32,
            "delta": 1e-3,
            "tolerance": 5e-3,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_TANH, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNTanhFp16(self):
    test_configs = [
        {
            "dtype": dtypes.float16,
            "delta": 1e-3,
            "tolerance": 5e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_TANH, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNReluFp64(self):
    test_configs = [
        {
            "dtype": dtypes.float64,
            "tolerance": 5e-6,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_RELU, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNReluFp32(self):
    test_configs = [
        {
            "dtype": dtypes.float32,
            "delta": 1e-4,
            "tolerance": 3e-1,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_RELU, test_configs)

  @unittest.skipUnless(test.is_built_with_cuda(),
                       "Test only applicable when running on GPUs")
  def testSimpleTrainingRNNReluFp16(self):
    test_configs = [
        {
            "dtype": dtypes.float16,
            "delta": 1e-3,
            "tolerance": 7e-2,
            "shape": {
                "num_layers": 2,
                "num_units": 3,
                "input_size": 4,
                "batch_size": 3,
                "seq_length": 4,
            },
        },
    ]
    self._TestSimpleTrainingHelper(CUDNN_RNN_RELU, test_configs)
if __name__ == "__main__":
  # Peel off our custom flag before handing sys.argv to the test runner,
  # which does its own flag parsing.
  program_name = sys.argv[0]
  flag_parser = argparse.ArgumentParser()
  flag_parser.add_argument(
      "--grad_check_num_samples",
      type=int,
      default=5,
      help="Number of samples to run for gradient check.")
  FLAGS, leftover_args = flag_parser.parse_known_args()
  sys.argv = [program_name] + leftover_args
  googletest.main()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
d82b95343bfad8ecb00fb1488a773f0a45b56461 | c9f67529e10eb85195126cfa9ada2e80a834d373 | /lib/python3.5/site-packages/torch/legacy/nn/MSECriterion.py | a93d045d898a3dc9560df362a1571c9a4ec7de11 | [
"Apache-2.0"
] | permissive | chilung/dllab-5-1-ngraph | 10d6df73ea421bfaf998e73e514972d0cbe5be13 | 2af28db42d9dc2586396b6f38d02977cac0902a6 | refs/heads/master | 2022-12-17T19:14:46.848661 | 2019-01-14T12:27:07 | 2019-01-14T12:27:07 | 165,513,937 | 0 | 1 | Apache-2.0 | 2022-12-08T04:59:31 | 2019-01-13T14:19:16 | Python | UTF-8 | Python | false | false | 1,082 | py | import torch
from .Criterion import Criterion
class MSECriterion(Criterion):
    """Mean-squared-error criterion (legacy torch.nn port).

    Forwards to the C backend; the loss is averaged over elements when
    ``sizeAverage`` is True (the default), otherwise summed.
    """

    def __init__(self, sizeAverage=True):
        super(MSECriterion, self).__init__()
        # Whether the backend divides the summed loss by the element count.
        self.sizeAverage = sizeAverage
        # One-element buffer for the scalar loss; allocated lazily so it
        # matches the input's tensor type.
        self.output_tensor = None

    def updateOutput(self, input, target):
        """Computes the forward loss and returns it as a Python number."""
        if self.output_tensor is None:
            self.output_tensor = input.new(1)
        self._backend.MSECriterion_updateOutput(
            self._backend.library_state,
            input,
            target,
            self.output_tensor,
            self.sizeAverage,
            True,  # reduce
        )
        self.output = self.output_tensor[0].item()
        return self.output

    def updateGradInput(self, input, target):
        """Computes d(loss)/d(input) into ``self.gradInput`` and returns it."""
        # The backend wants an explicit upstream gradient; a scalar one
        # (matching input's type) yields the plain loss gradient.
        upstream_grad = torch.Tensor([1]).type(input.type())
        self._backend.MSECriterion_updateGradInput(
            self._backend.library_state,
            input,
            target,
            upstream_grad,
            self.gradInput,
            self.sizeAverage,
            True,  # reduce
        )
        return self.gradInput
| [
"chilung.cs06g@nctu.edu.tw"
] | chilung.cs06g@nctu.edu.tw |
dfcf03607d0d721031a3b8bef63a7511114a9ca0 | 836f2095d5ac8a200fc4b19c2644c8f693612d23 | /src/preprocess.py | 25f880dacaaeded768f7cc037a5e26423e60fa4b | [] | no_license | peternara/VL-T5-embeding-image-to-text-mulit-modal | bf62e910dcc8d89606099a8b15065def917e9349 | 1902413ade01fb6f032c1cdbec65aaa41313277a | refs/heads/main | 2023-05-01T21:45:10.682026 | 2021-05-25T15:49:57 | 2021-05-25T15:49:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,560 | py | import torch
import numpy as np
import random
from copy import deepcopy
def corrupt_spans(text, mask_ratio=0.15, prefix=None):
    """T5-style span corruption for masked language modeling.

    Randomly selects ~``mask_ratio`` of the whitespace tokens, merges runs
    of adjacent selections into spans, and collapses each span into a single
    ``<extra_id_i>`` sentinel.  The target lists each sentinel followed by
    the tokens it hides.

    Args:
        text: whitespace-tokenised input string (must contain >= 1 token).
        mask_ratio: fraction of tokens to corrupt (at least one is masked).
        prefix: optional string prepended to the masked source text.

    Returns:
        (source_text, target_text) pair, e.g.
        source: "<extra_id_0> this tutorial, we'll explore ..."
        target: "<extra_id_0> In"
    """
    tokens = text.split()
    num_to_mask = int(max(mask_ratio * len(tokens), 1))
    # Single randperm draw sorted ascending (same RNG consumption as before).
    picked = torch.randperm(len(tokens))[:num_to_mask].sort().values.tolist()
    assert len(picked) > 0, text

    # Merge consecutive indices into half-open [start, end) spans.
    spans = []
    start = prev = picked[0]
    for idx in picked[1:]:
        if idx == prev + 1:
            prev = idx
        else:
            spans.append((start, prev + 1))
            start = prev = idx
    spans.append((start, prev + 1))

    # Build source/target in one pass: copy untouched tokens, replace each
    # span with its sentinel, and record the hidden tokens in the target.
    source_tokens = []
    target_tokens = []
    cursor = 0
    for i, (s, e) in enumerate(spans):
        source_tokens.extend(tokens[cursor:s])
        source_tokens.append(f'<extra_id_{i}>')
        target_tokens.append(f'<extra_id_{i}>')
        target_tokens.extend(tokens[s:e])
        cursor = e
    source_tokens.extend(tokens[cursor:])

    masked_text = " ".join(source_tokens)
    source_text = masked_text if prefix is None else f"{prefix} {masked_text}"
    target_text = " ".join(target_tokens)
    return source_text, target_text
def corrupt_bart(input_text, mask_ratio=0.30, prefix="denoise text:"):
    """BART-style text infilling.

    Like :func:`corrupt_spans`, but each corrupted span is replaced with a
    plain ``<mask>`` token and the target is the *original* text (the model
    must reconstruct the full input).

    Args:
        input_text: whitespace-tokenised input string (>= 1 token).
        mask_ratio: fraction of tokens to corrupt (at least one is masked).
        prefix: task prefix for the source; ``None`` disables it.

    Returns:
        (source_text, target_text) where target_text == input_text.
    """
    tokens = input_text.split()
    num_to_mask = int(max(mask_ratio * len(tokens), 1))
    # Single randperm draw sorted ascending (same RNG consumption as before).
    picked = torch.randperm(len(tokens))[:num_to_mask].sort().values.tolist()
    assert len(picked) > 0, input_text

    # Merge consecutive indices into half-open [start, end) spans.
    spans = []
    start = prev = picked[0]
    for idx in picked[1:]:
        if idx == prev + 1:
            prev = idx
        else:
            spans.append((start, prev + 1))
            start = prev = idx
    spans.append((start, prev + 1))

    # Collapse every span into a single <mask> token.
    source_tokens = []
    cursor = 0
    for s, e in spans:
        source_tokens.extend(tokens[cursor:s])
        source_tokens.append('<mask>')
        cursor = e
    source_tokens.extend(tokens[cursor:])

    masked_text = " ".join(source_tokens)
    source_text = masked_text if prefix is None else f"{prefix} {masked_text}"
    target_text = input_text
    return source_text, target_text
def ground_caption(captions, n_ground=1, prefix="describe visual inputs:", sort=True):
    """Build a visual-grounding pretraining pair (for Visual Genome data).

    Samples ``n_ground`` region indices.  The source lists the selected
    ``<vis_extra_id_k>`` tokens; the target contains each region's caption,
    separated by ``<extra_id_j>`` sentinels when more than one region is
    grounded.

    Args:
        captions: list of region captions, one per box.
        n_ground: number of regions to ground.
        prefix: task prefix for the source text.
        sort: whether the sampled indices are sorted ascending.

    Returns:
        (source_text, target_text) pair.
    """
    perm = torch.randperm(len(captions))[:n_ground]
    if sort:
        perm = perm.sort().values
    chosen = perm.tolist()

    source_parts = [prefix]
    target_parts = []
    if n_ground == 1:
        # Single region: the target is just that region's caption.
        only = chosen[0]
        source_parts.append(f'<vis_extra_id_{only}>')
        target_parts.append(f'{captions[only]}')
    else:
        for j, idx in enumerate(chosen):
            source_parts.append(f'<vis_extra_id_{idx}>')
            target_parts.append(f'<extra_id_{j}>')
            target_parts.append(f'{captions[idx]}')

    return " ".join(source_parts), " ".join(target_parts)
def refer_expression(captions, n_ground=1, prefix="refer expressions:", sort=True):
    """Build a referring-expression pretraining pair (inverse of grounding).

    Samples ``n_ground`` region indices.  The source lists the captions
    (separated by ``<extra_id_j>`` sentinels when more than one region is
    used); the target is the matching ``<vis_extra_id_k>`` tokens.

    Args:
        captions: list of region captions, one per box.
        n_ground: number of regions to refer to.
        prefix: task prefix for the source text.
        sort: whether the sampled indices are sorted ascending.

    Returns:
        (source_text, target_text) pair.
    """
    perm = torch.randperm(len(captions))[:n_ground]
    if sort:
        perm = perm.sort().values
    chosen = perm.tolist()

    source_parts = [prefix]
    target_parts = []
    if n_ground == 1:
        # Single region: source is just that caption, target its vis token.
        only = chosen[0]
        source_parts.append(f'{captions[only]}')
        target_parts.append(f'<vis_extra_id_{only}>')
    else:
        for j, idx in enumerate(chosen):
            source_parts.append(f'<extra_id_{j}>')
            source_parts.append(f'{captions[idx]}')
            target_parts.append(f'<vis_extra_id_{idx}>')

    return " ".join(source_parts), " ".join(target_parts)
| [
"heythisischo@gmail.com"
] | heythisischo@gmail.com |
bdc874a8f0c2d822cf43731e1ed0437f025b91f7 | 16385e10f6ad05b8147517daf2f40dbdda02617c | /site-packages/cs.metrics-15.4.0.2-py2.7.egg/cs/metrics/updates/v15_2_0_15/__init__.py | 3fe53ff954cbc858516882f09d4b8a7e3f041ebb | [] | no_license | prachipainuly-rbei/devops-poc | 308d6cab02c14ffd23a0998ff88d9ed0420f513a | 6bc932c67bc8d93b873838ae6d9fb8d33c72234d | refs/heads/master | 2020-04-18T01:26:10.152844 | 2019-02-01T12:25:19 | 2019-02-01T12:25:19 | 167,118,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,759 | py | #!/usr/bin/env powerscript
# -*- mode: python; coding: iso-8859-1 -*-
#
# Copyright (C) 1990 - 2014 CONTACT Software GmbH
# All rights reserved.
# http://www.contact.de/
#
from __future__ import unicode_literals
__revision__ = "$Id: __init__.py 154332 2017-02-21 14:43:33Z khi $"
class AddServiceUsers(object):
    """
    Add the qcaggregationengine and qccomputationengine service users with
    their standard assignments.  We have to use an update script because
    role assignments are usually not updated automatically.
    """

    def run(self):
        from cdb.comparch import modules
        from cdb.comparch import content
        from cdb import sqlapi
        # Shipped default credentials; values are pbkdf2-sha256 hashes, not
        # plain-text passwords.
        for login, password in [('qcaggregationengine', '$pbkdf2-sha256$29000$0hqDkNJaS4nxvrd2LsVYyw$ydktQewvlXlJbIKuVfGSxbqqVBfrgKkTMMODNYeWHbc'),
                                ('qccomputationengine', '$pbkdf2-sha256$29000$LiUk5HyPUWrtPcfYO.d8rw$s9lNyEJ8IA9sS4ANySTZiICx.w1jmI6Eyim2zl4gf9s')]:
            user = sqlapi.RecordSet2("angestellter",
                                     "personalnummer='%s'" % login)
            if not user:
                # User missing: import the matching records from the
                # cs.metrics module's standard configuration export.
                m = modules.Module.ByKeys('cs.metrics')
                for rel, key in [('angestellter', 'personalnummer'),
                                 ('cdb_global_subj', 'subject_id')]:
                    content_filter = content.ModuleContentFilter([rel])
                    mc = modules.ModuleContent(m.module_id, m.std_conf_exp_dir, content_filter)
                    for mod_content in mc.getItems(rel).values():
                        if mod_content.getAttr(key) == login:
                            try:
                                mod_content.insertIntoDB()
                                user = sqlapi.RecordSet2("angestellter",
                                                         "personalnummer='%s'" % login)
                            except Exception:
                                # Already there
                                pass
            # The component architecture does not transport the password.
            # If the account still has no password, or still carries the
            # well-known shipped hash, replace it with a random secret so
            # the default credentials cannot be used to log in.
            if user and (not user[0].password or user[0].password == password):
                import cdbwrapc
                import os
                new_pw = cdbwrapc.get_crypted_password(login, os.urandom(32))
                user[0].update(password=new_pw)
class AddServiceUserOptions(object):
    """
    Add the new mandatory ``--user`` service option for the services
    cs.metrics.qc_engine.QCAggregationEngine and
    cs.metrics.qc_engine.QCComputationEngine.
    """

    def run(self):
        from cdb import sqlapi
        svc_names_and_login_pairs = [("cs.metrics.qc_engine.QCAggregationEngine",
                                      "qcaggregationengine"),
                                     ("cs.metrics.qc_engine.QCComputationEngine",
                                      "qccomputationengine")]
        for svcname, login in svc_names_and_login_pairs:
            svcs = sqlapi.RecordSet2("cdbus_svcs", "svcname='{}'".format(svcname))
            for svc in svcs:
                cond = "svcid='%s'" % sqlapi.quote(svc.svcid)
                # Names of options already configured for this service
                # instance, so re-running the update stays idempotent.
                opt_names = [svc_opt.name
                             for svc_opt in sqlapi.RecordSet2("cdbus_svcopts", cond)]
                new_opts = {
                    "--user": login
                }
                # Insert only the options that are not present yet.
                for name, val in new_opts.items():
                    if name not in opt_names:
                        sqlapi.Record("cdbus_svcopts",
                                      svcid=svc.svcid,
                                      name=name,
                                      value=val).insert()
# Update hook lists -- presumably consumed by the cs update framework:
# nothing runs before the schema migration; the service users and their
# options are created afterwards.  TODO confirm against the framework docs.
pre = []
post = [AddServiceUsers, AddServiceUserOptions]

if __name__ == "__main__":
    # Allow running the update steps manually from the command line.
    AddServiceUsers().run()
    AddServiceUserOptions().run()
| [
"PPR4COB@rbeigcn.com"
] | PPR4COB@rbeigcn.com |
01bb186bb65ce9f242c8fbdbaa54f37358929af9 | 69e318f2b60175108bc74ee669bfe16287a71cb6 | /plugins/modules/fortios_dlp_sensor.py | ef1a1661c8017f9c5ec6f59b63cdeb8688fb9e92 | [] | no_license | chillancezen/ansible-galaxy-fortios-collection | 5268a5fd97fb4594772349b8d89cb818ec54b3bd | 66a331cd4493d1b0f49798d5c2cd6ef5aeba84d3 | refs/heads/master | 2022-04-09T19:20:59.073193 | 2020-03-26T07:17:09 | 2020-03-26T07:17:09 | 250,185,374 | 0 | 0 | null | 2020-03-26T07:06:16 | 2020-03-26T07:06:16 | null | UTF-8 | Python | false | false | 22,093 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
# Standard Ansible module metadata: community-supported preview module.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_dlp_sensor
short_description: Configure DLP sensors in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify dlp feature and sensor category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
dlp_sensor:
description:
- Configure DLP sensors.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
comment:
description:
- Comment.
type: str
dlp_log:
description:
- Enable/disable DLP logging.
type: str
choices:
- enable
- disable
extended_log:
description:
- Enable/disable extended logging for data leak prevention.
type: str
choices:
- enable
- disable
filter:
description:
- Set up DLP filters for this sensor.
type: list
suboptions:
action:
description:
- Action to take with content that this DLP sensor matches.
type: str
choices:
- allow
- log-only
- block
- quarantine-ip
archive:
description:
- Enable/disable DLP archiving.
type: str
choices:
- disable
- enable
company_identifier:
description:
- Enter a company identifier watermark to match. Only watermarks that your company has placed on the files are matched.
type: str
expiry:
description:
- Quarantine duration in days, hours, minutes format (dddhhmm).
type: str
file_size:
description:
- Match files this size or larger (0 - 4294967295 kbytes).
type: int
file_type:
description:
- Select the number of a DLP file pattern table to match. Source dlp.filepattern.id.
type: int
filter_by:
description:
- Select the type of content to match.
type: str
choices:
- credit-card
- ssn
- regexp
- file-type
- file-size
- fingerprint
- watermark
- encrypted
fp_sensitivity:
description:
- Select a DLP file pattern sensitivity to match.
type: list
suboptions:
name:
description:
- Select a DLP sensitivity. Source dlp.fp-sensitivity.name.
required: true
type: str
id:
description:
- ID.
required: true
type: int
match_percentage:
description:
- Percentage of fingerprints in the fingerprint databases designated with the selected fp-sensitivity to match.
type: int
name:
description:
- Filter name.
type: str
proto:
description:
- Check messages or files over one or more of these protocols.
type: str
choices:
- smtp
- pop3
- imap
- http-get
- http-post
- ftp
- nntp
- mapi
- mm1
- mm3
- mm4
- mm7
regexp:
description:
- Enter a regular expression to match (max. 255 characters).
type: str
severity:
description:
- Select the severity or threat level that matches this filter.
type: str
choices:
- info
- low
- medium
- high
- critical
type:
description:
- Select whether to check the content of messages (an email message) or files (downloaded files or email attachments).
type: str
choices:
- file
- message
flow_based:
description:
- Enable/disable flow-based DLP.
type: str
choices:
- enable
- disable
full_archive_proto:
description:
- Protocols to always content archive.
type: str
choices:
- smtp
- pop3
- imap
- http-get
- http-post
- ftp
- nntp
- mapi
- mm1
- mm3
- mm4
- mm7
nac_quar_log:
description:
- Enable/disable NAC quarantine logging.
type: str
choices:
- enable
- disable
name:
description:
- Name of the DLP sensor.
required: true
type: str
options:
description:
- Configure DLP options.
type: str
replacemsg_group:
description:
- Replacement message group used by this DLP sensor. Source system.replacemsg-group.name.
type: str
summary_proto:
description:
- Protocols to always log summary.
type: str
choices:
- smtp
- pop3
- imap
- http-get
- http-post
- ftp
- nntp
- mapi
- mm1
- mm3
- mm4
- mm7
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure DLP sensors.
fortios_dlp_sensor:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
dlp_sensor:
comment: "Comment."
dlp_log: "enable"
extended_log: "enable"
filter:
-
action: "allow"
archive: "disable"
company_identifier: "myId_9"
expiry: "<your_own_value>"
file_size: "11"
file_type: "12 (source dlp.filepattern.id)"
filter_by: "credit-card"
fp_sensitivity:
-
name: "default_name_15 (source dlp.fp-sensitivity.name)"
id: "16"
match_percentage: "17"
name: "default_name_18"
proto: "smtp"
regexp: "<your_own_value>"
severity: "info"
type: "file"
flow_based: "enable"
full_archive_proto: "smtp"
nac_quar_log: "enable"
name: "default_name_26"
options: "<your_own_value>"
replacemsg_group: "<your_own_value> (source system.replacemsg-group.name)"
summary_proto: "smtp"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open an authenticated session on the FortiGate described by *data*.

    Expects the keys 'host', 'username', 'password' and 'ssl_verify';
    'https' is optional and defaults to HTTPS enabled.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS is on unless the caller explicitly disabled it.
    fos.https('off' if not data.get('https', True) else 'on')
    fos.login(host, username, password, verify=verify)
def filter_dlp_sensor_data(json):
    """Keep only the parameters the dlp/sensor endpoint understands.

    Unknown keys and keys whose value is None (unset module options) are
    dropped; insertion order follows the known-option list.
    """
    allowed = ('comment', 'dlp_log', 'extended_log',
               'filter', 'flow_based', 'full_archive_proto',
               'nac_quar_log', 'name', 'options',
               'replacemsg_group', 'summary_proto')
    return {key: json[key] for key in allowed
            if key in json and json[key] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to the hyphenated form
    expected by the FortiOS REST API.

    Scalars are returned unchanged.  Fixes the upstream bug where dicts
    nested inside lists were left unconverted: the old loop reassigned the
    converted element to the loop variable and discarded it, so list items
    never made it back into the payload.
    """
    if isinstance(data, list):
        # Rebuild the list so converted elements are actually kept.
        return [underscore_to_hyphen(elem) for elem in data]
    if isinstance(data, dict):
        return {k.replace('_', '-'): underscore_to_hyphen(v)
                for k, v in data.items()}
    return data
def dlp_sensor(data, fos):
    """Create/update ('present') or delete ('absent') a DLP sensor.

    The top-level 'state' parameter wins; otherwise the legacy per-object
    'state' inside the dlp_sensor dict is used.  Returns the raw FortiOS
    response dict (None when state is neither present nor absent).
    """
    vdom = data['vdom']
    sensor_params = data['dlp_sensor']

    # Resolve the desired state, preserving the legacy fallback behavior.
    if 'state' in data and data['state']:
        state = data['state']
    elif 'state' in sensor_params and sensor_params:
        state = sensor_params['state']
    else:
        state = True

    payload = underscore_to_hyphen(filter_dlp_sensor_data(sensor_params))

    if state == "present":
        return fos.set('dlp',
                       'sensor',
                       data=payload,
                       vdom=vdom)
    if state == "absent":
        return fos.delete('dlp',
                          'sensor',
                          mkey=payload['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True when the FortiOS call succeeded.

    A DELETE that hit a non-existent object (HTTP 404) also counts as a
    success: the object being already absent is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_dlp(data, fos):
    """Dispatch to the dlp_sensor handler and normalise its result.

    Returns (is_error, changed, raw_response).

    NOTE(review): if data['dlp_sensor'] is falsy no call is made and the
    final statement raises NameError on 'resp'; in practice main() only
    reaches this function with dlp_sensor populated -- confirm upstream.
    """
    if data['dlp_sensor']:
        resp = dlp_sensor(data, fos)

    failed = not is_successful_status(resp)
    changed = resp['status'] == "success"
    return failed, changed, resp
def main():
    """Ansible entry point.

    Declares the module argument spec, connects to the FortiGate (either
    through the httpapi connection socket or, in legacy mode, through the
    fortiosapi library), applies the dlp_sensor configuration and reports
    the result via exit_json/fail_json.
    """
    # Argument spec mirroring the DOCUMENTATION block above; 'dlp_sensor'
    # nests the full dlp/sensor endpoint schema.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "dlp_sensor": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "comment": {"required": False, "type": "str"},
                "dlp_log": {"required": False, "type": "str",
                            "choices": ["enable", "disable"]},
                "extended_log": {"required": False, "type": "str",
                                 "choices": ["enable", "disable"]},
                "filter": {"required": False, "type": "list",
                           "options": {
                               "action": {"required": False, "type": "str",
                                          "choices": ["allow", "log-only", "block",
                                                      "quarantine-ip"]},
                               "archive": {"required": False, "type": "str",
                                           "choices": ["disable", "enable"]},
                               "company_identifier": {"required": False, "type": "str"},
                               "expiry": {"required": False, "type": "str"},
                               "file_size": {"required": False, "type": "int"},
                               "file_type": {"required": False, "type": "int"},
                               "filter_by": {"required": False, "type": "str",
                                             "choices": ["credit-card", "ssn", "regexp",
                                                         "file-type", "file-size", "fingerprint",
                                                         "watermark", "encrypted"]},
                               "fp_sensitivity": {"required": False, "type": "list",
                                                  "options": {
                                                      "name": {"required": True, "type": "str"}
                                                  }},
                               "id": {"required": True, "type": "int"},
                               "match_percentage": {"required": False, "type": "int"},
                               "name": {"required": False, "type": "str"},
                               "proto": {"required": False, "type": "str",
                                         "choices": ["smtp", "pop3", "imap",
                                                     "http-get", "http-post", "ftp",
                                                     "nntp", "mapi", "mm1",
                                                     "mm3", "mm4", "mm7"]},
                               "regexp": {"required": False, "type": "str"},
                               "severity": {"required": False, "type": "str",
                                            "choices": ["info", "low", "medium",
                                                        "high", "critical"]},
                               "type": {"required": False, "type": "str",
                                        "choices": ["file", "message"]}
                           }},
                "flow_based": {"required": False, "type": "str",
                               "choices": ["enable", "disable"]},
                "full_archive_proto": {"required": False, "type": "str",
                                       "choices": ["smtp", "pop3", "imap",
                                                   "http-get", "http-post", "ftp",
                                                   "nntp", "mapi", "mm1",
                                                   "mm3", "mm4", "mm7"]},
                "nac_quar_log": {"required": False, "type": "str",
                                 "choices": ["enable", "disable"]},
                "name": {"required": True, "type": "str"},
                "options": {"required": False, "type": "str"},
                "replacemsg_group": {"required": False, "type": "str"},
                "summary_proto": {"required": False, "type": "str",
                                  "choices": ["smtp", "pop3", "imap",
                                              "http-get", "http-post", "ftp",
                                              "nntp", "mapi", "mm1",
                                              "mm3", "mm4", "mm7"]}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI: it is
    # selected when host/username/password are all supplied explicitly.
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # httpapi path: reuse the persistent connection socket Ansible opened.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_dlp(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: import lazily so the module loads without fortiosapi.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_dlp(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
| [
"fshen01@fortinet.com"
] | fshen01@fortinet.com |
96284c764d1e8cc52522bb858550adedcdd6e277 | 47b4a652bf47afbff07a7148c3b4a94b86f85bb2 | /swap_start/torch_train/1_torch/AIPlayer.py | fbeea3485720c25f45be34eccfce6f0b9b2912a7 | [
"MIT"
] | permissive | yudongqiu/gomoku | 3423253dcac52c0b738249900f1e86b31ca99524 | 4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06 | refs/heads/master | 2022-10-28T09:16:38.939698 | 2021-01-30T21:01:48 | 2021-01-30T21:01:48 | 84,468,572 | 3 | 1 | MIT | 2022-09-30T09:03:45 | 2017-03-09T17:12:22 | Python | UTF-8 | Python | false | false | 33,993 | py | #!/usr/bin/env python
from __future__ import print_function, division
import itertools, time, copy
import collections, random
import os, pickle
import numba
from numba import cuda
import numpy as np
# Gomoku board dimension (15x15) used throughout this module.
board_size = 15
# Debug flag; presumably toggles printing of predicted Q values -- it is
# not read anywhere in this file, verify before removing.
show_q = False
class AIPlayer:
    """Gomoku AI player.

    Performs a depth-limited search over "interesting" candidate moves
    (ranked by the numba helper ``find_interesting_moves``), scoring leaf
    positions with a neural network (``self.model``) and memoising results
    in a level-aware cache (``LeveledCache``, defined elsewhere).
    Board encoding: 1 = black stone, -1 = white stone, 0 = empty.
    """

    def __init__(self, name, model=None, level=0):
        self.name = name
        self.load_model(model)
        # Search depth before falling back to the DNN evaluation.
        self.level = level
        # state_id -> [state, value, n_visited]; training data accumulator.
        self.learndata = dict()
        self.opponent = None
        # Pre-allocated DNN input batch: (candidate move, plane, row, col)
        # with planes [own stones, opponent stones, current-player-is-black].
        self.all_interest_states = np.zeros(board_size**4 * 3, dtype=np.float32).reshape(board_size**2, 3, board_size, board_size)
        # Scratch board reused by find_interesting_moves to rank candidates.
        self.move_interest_values = np.zeros(board_size**2, dtype=np.float32).reshape(board_size,board_size)
        self.reset()
        self.reset_cache()

    def load_model(self, model):
        """Attach the evaluation model: None, a file path, or a model object."""
        # no data
        if model is None:
            self.model = None
        # if provided path
        elif isinstance(model, str):
            import torch
            self.model = torch.load(model)
        else:
            # provided model
            self.model = model

    def reset(self):
        """ Reset before a new game """
        self.hist_states = []
        self.surprised = False
        self.started_from_beginning = True

    def reset_cache(self):
        """ Reset cache before using new model """
        # Cache keyed by board bytes, valid up to self.level search depth.
        self.cache = LeveledCache(maxlevel=self.level, maxsize=1000000)

    def strategy(self, board_state, starting_level=0):
        """ AI's strategy
        Information provided to you:
        board_state = (board, last_move, playing, board_size)
        board = (x_stones, o_stones)
        stones is a set contains positions of one player's stones. e.g.
            x_stones = {(8,8), (8,9), (8,10), (8,11)}
        playing = 0|1, the current player's index
        Your strategy will return a position code for the next stone, e.g. (8,7)
        """
        # load input board_state
        board, last_move, playing, board_size = board_state
        self.playing_white = bool(playing)
        # build new state representation (matrix form, 0-indexed)
        state = np.zeros(board_size**2, dtype=np.int8).reshape(board_size, board_size)
        # put black stones, update index (input coordinates are 1-indexed)
        for br, bc in board[0]:
            state[br-1,bc-1] = 1
        # put white stones, update index
        for wr, wc in board[1]:
            state[wr-1,wc-1] = -1
        # update index 1 -> 0 for last_move
        last_move = (last_move[0]-1, last_move[1]-1)
        # prepare input for best_action_q; alpha/beta start outside [-1, 1]
        alpha = -2.0
        beta = 2.0
        empty_spots_left = board_size**2 - len(board[0]) - len(board[1])
        # predict next best action and q
        player = -1 if self.playing_white else 1
        # TODO: remove .copy()
        best_move, best_q = self.best_action_q(state.copy(), empty_spots_left, alpha, beta, player, level=starting_level)
        # save the winrate and the state
        self.update_if_game_finish(state, best_move, best_q, player)
        # return the best move (back to 1-indexed coordinates)
        return (best_move[0]+1, best_move[1]+1), best_q

    def best_action_q(self, state, empty_spots_left, alpha, beta, player, level=0):
        """
        Get the optimal action for a state and the predicted win rate for player

        Inputs
        ------
        state: np.ndarray of shape (15, 15)
            The current game state in a matrix. 1 = black, -1 = white, 0 = empty
        empty_spots_left: int
            How many empty spots are left, easy to keep track
        alpha: float
            Current alpha value in alpha-beta pruning, the running min of the max win rate
        beta: float
            Current beta value in alpha-beta pruning, the running max of the min win rate
        player: int
            The current player. 1 is black, -1 is white

        Returns
        -------
        best_move: tuple(int, int)
            The best move on the board, given by (r, c)
        best_q: float or None
            The value the best move. 1.0 means 100% win, -1.0 means 100% lose, 0 means draw

        NOTE(review): alpha/beta are threaded through the recursion but never
        updated or compared here, so the pruning appears inactive -- confirm.
        """
        if empty_spots_left == 0: # Board filled up, it's a tie
            return (0,0), 0.0
        verbose = False
        # Consider more candidates early in the game.
        n_moves = 40 if empty_spots_left > 200 else 20
        self.move_interest_values.fill(0) # reuse the same array to save init cost
        self.move_interest_values[4:11, 4:11] = 5.0 # manually assign higher interest in middle
        interested_moves = find_interesting_moves(state, empty_spots_left, self.move_interest_values, player, n_moves, verbose)

        #best_move = (-1,-1) # admit defeat if all moves have 0 win rate
        best_move = (interested_moves[0,0], interested_moves[0,1]) # continue to play even I'm losing

        # if there is only one move to place, directly return that move, use same level
        if len(interested_moves) == 1:
            # check if this move is known
            move, move_q, unknown_moves, unknown_move_ids = self.check_known(state, interested_moves, player, level)
            if move != None:
                best_q = move_q
            else:
                best_q = self.next_iter_winrate(state, empty_spots_left, best_move, alpha, beta, player, level)
            return best_move, best_q

        # if there are multiple moves to evaluate, check cache first
        best_move, max_q, unknown_moves, unknown_move_ids = self.check_known(state, interested_moves, player, level)
        if len(unknown_moves) > 0:
            # for unknown moves, if level has reached, evaluate with DNN model
            if level >= self.level:
                dnn_q_array = self.dnn_evaluate(state, unknown_moves, player)
                # store the values in cache
                for move_id, dnn_q in zip(unknown_move_ids, dnn_q_array):
                    self.cache.set(move_id, dnn_q, level)
                # find the best move from tf results
                dnn_best_move_idx = np.argmax(dnn_q_array)
                dnn_max_q = dnn_q_array[dnn_best_move_idx]
                # compare the tf results with cached results
                if dnn_max_q > max_q:
                    max_q = dnn_max_q
                    best_move = unknown_moves[dnn_best_move_idx]
            else:
                # if level has not reached yet, go deeper to the next level
                for move, move_id in zip(unknown_moves, unknown_move_ids):
                    q = self.next_iter_winrate(state, empty_spots_left, move, alpha, beta, player, level+1)
                    # store the result in cache
                    self.cache.set(move_id, q, level+1)
                    if q > max_q:
                        max_q = q
                        best_move = move
                    if max_q >= 1.0:
                        # early return: found a guaranteed win
                        break
        return best_move, max_q

    def next_iter_winrate(self, state, empty_spots_left, current_move, alpha, beta, player, level):
        """Execute the step of the player, then return the winrate by computing next step"""
        # update the stone down (mutated in place, restored below)
        state[current_move] = player
        # known moves were handled already, here we evaluate opponents winrate
        opponent_best_move, opponent_best_q = self.best_action_q(state, empty_spots_left-1, alpha, beta, -player, level)
        # recover state
        state[current_move] = 0
        # my winrate is opposite of opponents (zero-sum game)
        return -opponent_best_q

    def check_known(self, state, interested_moves, player, level):
        """
        Check which move in interested moves is known, using cache and ending condition

        Returns (best_move, max_q, unknown_moves, unknown_move_ids) where
        unknown_moves are candidates with no cached value and no forced
        outcome, and unknown_move_ids are their board-bytes cache keys.
        Early-exits on an immediate loss (q = -1.0) or win (q = 1.0).
        """
        max_q = -100
        best_move = None
        unknown_moves = []
        unknown_move_ids = []
        for move in interested_moves:
            this_move = (move[0], move[1])
            assert state[this_move] == 0 # interest move should be empty here
            # put down this move (restored before the next iteration)
            state[this_move] = player
            # if I lost, no need to compute others
            if i_lost(state, player):
                # restore state
                state[this_move] = 0
                # early return
                best_move = this_move
                max_q = -1.0
                unknown_moves = []
                unknown_move_ids = []
                break
            # compute cache key from the raw board bytes
            this_state_id = state.tobytes()
            # check if its cached
            q = self.cache.get(this_state_id, level)
            # if not cached, check if I will win
            if q is None:
                if i_will_win(state, this_move, player):
                    q = 1.0
            if q is not None:
                # early return when found winning move
                if q == 1.0:
                    # restore state
                    state[this_move] = 0
                    # early return
                    best_move = this_move
                    max_q = 1.0
                    unknown_moves = []
                    unknown_move_ids = []
                    break
                # compare with running max, update
                elif q > max_q:
                    max_q = q
                    best_move = this_move
            else:
                # q is not known
                unknown_moves.append(this_move)
                unknown_move_ids.append(this_state_id)
            # restore state
            state[this_move] = 0
        return best_move, max_q, unknown_moves, unknown_move_ids

    def dnn_evaluate(self, state, dnn_moves, player):
        """Batch-score candidate moves with the DNN; returns a flat array of
        predicted values, one per move (empty list for no moves)."""
        n_dnn = len(dnn_moves)
        if n_dnn > 0:
            all_interest_states = self.all_interest_states[:n_dnn] # we only need a slice of the big array
            all_interest_states[:,0,:,:] = (state == player) # player's stones
            all_interest_states[:,1,:,:] = (state == -player) # opponent stones
            all_interest_states[:,2,:,:] = 1 if player == 1 else 0 # if player is black, set 1 else 0
            for i,current_move in enumerate(dnn_moves):
                ci, cj = current_move
                all_interest_states[i,0,ci,cj] = 1 # put current move down
            predict_y = self.model.predict(all_interest_states)
            return predict_y.ravel()
        else:
            return []

    def update_if_game_finish(self, state, best_move, best_q, player):
        """Record this position (and its value, negated for the opponent's
        perspective) into the opponent's training data, when available."""
        # put down this step and record learn data
        # state[best_move] = player
        # state_id = state.tobytes()
        # self.learndata[state_id] = [state.copy(), best_q, 1]
        # state[best_move] = 0
        # store learn data for oppoenent, this helps improve the data
        state_id = state.tobytes()
        # if self.playing_white == False and best_q == -1.0:
        #     import IPython; IPython.embed()
        if hasattr(self, 'opponent') and hasattr(self.opponent, 'learndata'):
            self.opponent.learndata[state_id] = [state.copy(), -best_q, 1]
        # # record the history states
        # self.hist_states.append(opponent_state_id)
        # # check if game finish
        # state[best_move] = 1
        # game_result = None
        # new_u = 0
        # if i_win(state, best_move, 1):
        #     new_u = -1.0
        #     game_result = 'win'
        # elif i_lost(state, 1):
        #     new_u = 1.0
        #     game_result = 'lose'
        # elif empty_spots_left <= 2:
        #     new_u = 0
        #     game_result = 'draw'
        # if game_result and self.started_from_beginning is True:
        #     discount = 0.9
        #     for opponent_state_id in self.hist_states[::-1]:
        #         st, u, n_visited = self.opponent.learndata[opponent_state_id]
        #         n_visited += 1
        #         new_u = u + discount * (new_u - u) / n_visited**0.5 # this is the learning rate
        #         # surprise
        #         if (game_result == 'win' and new_u > 0.1) or (game_result == 'lose' and new_u < -0.1):
        #             self.surprised = True
        #         self.opponent.learndata[opponent_state_id] = (st, new_u, n_visited)
        #         print(f"Updated U from {u:9.6f} to {new_u:9.6f} [{n_visited}]")
        #     print(f"{self.name}: Updated win rate of {len(self.hist_states)} states")
        #     self.started_from_beginning = False # we only update once
# Below are utility functions
@numba.jit(nopython=True, nogil=True, cache=True)
def find_interesting_moves(state, empty_spots_left, move_interest_values, player, n_moves, verbose=False):
    """ Look at state and find the interesing n_move moves.
    input:
    -------
    state: numpy.array board_size x board_size, 1=black, -1=white, 0=empty
    empty_spots_left: number of empty spots on the board
    move_interest_values: board_size x board_size float array, accumulated in place
    player: current player to find interesting moves, 1=black, -1=white
    n_moves: int, desired number of interesing moves
    output:
    -------
    interested_moves: numpy.array final_n_moves x 2
        *note : final_n_moves = 1 if limited
        * else final_n_moves = n_moves + number of length-4 moves
        *note2: final_n_moves will not exceed empty_spots_left
    #suggested_n_moves: suggested number of moves to
    """
    # force_to_block: opponent has an immediate 5 threat we must block
    # exist_will_win_move: we have a double-hard-4 (guaranteed win) move
    force_to_block = False
    exist_will_win_move = False
    directions = ((1,1), (1,0), (0,1), (1,-1))
    final_single_move = np.zeros(2, dtype=np.int64).reshape(1,2) # for returning the single move
    for r in range(board_size):
        for c in range(board_size):
            if state[r,c] != 0: continue
            interest_value = 10 # as long as it's a valid point, this is for avoiding the taken spaces
            my_hard_4 = 0
            for dr, dc in directions:
                my_line_length = 1 # last_move
                opponent_line_length = 1
                # try to extend in the positive direction (max 5 times to check overline)
                ext_r = r
                ext_c = c
                skipped_1 = 0
                my_blocked = False
                opponent_blocked = False
                for i in range(5):
                    ext_r += dr
                    ext_c += dc
                    if ext_r < 0 or ext_r >= board_size or ext_c < 0 or ext_c >= board_size:
                        break
                    elif state[ext_r, ext_c] == player:
                        if my_blocked == True:
                            break
                        else:
                            my_line_length += 1
                            opponent_blocked = True
                    elif state[ext_r, ext_c] == -player:
                        if opponent_blocked == True:
                            break
                        else:
                            opponent_line_length += 1
                            my_blocked = True
                    elif skipped_1 == 0:
                        skipped_1 = i + 1 # allow one skip and record the position of the skip
                    else:
                        # peek at the next one and if it might be useful, add some interest
                        if ((state[ext_r+dr, ext_c+dc] == player) and (my_blocked == False)) or ((state[ext_r+dr, ext_c+dc] == -player) and (opponent_blocked == False)):
                            interest_value += 15
                        break
                # the backward counting starts at the furthest "unskipped" stone
                forward_my_open = False
                forward_opponent_open = False
                if skipped_1 == 0:
                    my_line_length_back = my_line_length
                    opponent_line_length_back = opponent_line_length
                elif skipped_1 == 1:
                    my_line_length_back = 1
                    opponent_line_length_back = 1
                    forward_my_open = True
                    forward_opponent_open = True
                else:
                    if my_blocked == False:
                        my_line_length_back = skipped_1
                        opponent_line_length_back = 1
                        forward_my_open = True
                    else:
                        my_line_length_back = 1
                        opponent_line_length_back = skipped_1
                        forward_opponent_open = True
                my_line_length_no_skip = my_line_length_back
                opponent_line_length_no_skip = opponent_line_length_back
                # backward is a little complicated, will try to extend my stones first
                ext_r = r
                ext_c = c
                skipped_2 = 0
                opponent_blocked = False
                for i in range(6-my_line_length_no_skip):
                    ext_r -= dr
                    ext_c -= dc
                    if ext_r < 0 or ext_r >= board_size or ext_c < 0 or ext_c >= board_size:
                        break
                    elif state[ext_r, ext_c] == player:
                        my_line_length_back += 1
                        opponent_blocked = True
                    elif state[ext_r, ext_c] == -player:
                        break
                    else:
                        if skipped_2 == 0:
                            skipped_2 = i + 1
                        else:
                            # peek at the next one and if it might be useful, add some interest
                            if state[ext_r-dr, ext_c-dc] == player:
                                interest_value += 15
                            break
                # see if i'm winning
                if my_line_length_back == 5:
                    # if there are 5 stones in backward counting, and it's not skipped in the middle
                    if skipped_2 == 0 or skipped_2 == (6-my_line_length_no_skip):
                        # i will win with this move, I will place the stone
                        final_single_move[0,0] = r
                        final_single_move[0,1] = c
                        return final_single_move
                # extend my forward line length to check if there is hard 4
                if skipped_2 == 0:
                    my_line_length += my_line_length_back - my_line_length_no_skip
                else:
                    my_line_length += skipped_2 - 1
                backward_my_open = True if skipped_2 > 0 else False
                backward_opponent_open = False
                # then try to extend the opponent
                if opponent_blocked == True:
                    if skipped_2 == 1:
                        backward_opponent_open = True
                    skipped_2 = 0 # reset the skipped_2 here to enable the check of opponent 5 later
                else:
                    ext_r = r
                    ext_c = c
                    skipped_2 = 0
                    for i in range(6-opponent_line_length_no_skip):
                        ext_r -= dr
                        ext_c -= dc
                        if ext_r < 0 or ext_r >= board_size or ext_c < 0 or ext_c >= board_size:
                            break
                        elif state[ext_r, ext_c] == player:
                            break
                        elif state[ext_r, ext_c] == -player:
                            opponent_line_length_back += 1
                        else:
                            if skipped_2 == 0:
                                skipped_2 = i + 1
                            else:
                                # peek at the next one and if it might be useful, add some interest
                                if state[ext_r-dr, ext_c-dc] == -player:
                                    interest_value += 15
                                break
                    # extend opponent forward line length to check if there is hard 4
                    if skipped_2 == 0:
                        opponent_line_length += opponent_line_length_back - opponent_line_length_no_skip
                    else:
                        opponent_line_length += skipped_2 - 1
                        backward_opponent_open = True
                # here if opponent_line_length_back == 5, skipped_2 will be 0 and this flag won't be True
                # but it do not affect our final result, because we have to block this no matter if it's open
                # check if we have to block this
                if opponent_line_length_back == 5:
                    if (skipped_2 == 0) or (skipped_2 == 6-opponent_line_length_no_skip):
                        final_single_move[0,0] = r
                        final_single_move[0,1] = c
                        force_to_block = True
                if force_to_block == False:
                    # if I will win after this move, I won't consider other moves
                    if forward_my_open == True and my_line_length == 4:
                        my_hard_4 += 1
                    if backward_my_open == True and my_line_length_back == 4:
                        my_hard_4 += 1
                    if my_hard_4 >= 2:
                        final_single_move[0,0] = r
                        final_single_move[0,1] = c
                        exist_will_win_move = True
                if force_to_block == False and exist_will_win_move == False:
                    # compute the interest_value for other moves
                    # if any line length >= 5, it's an overline so skipped
                    if (forward_my_open == True) and (my_line_length < 5):
                        interest_value += my_line_length ** 4
                    if (backward_my_open == True) and (my_line_length_back < 5):
                        interest_value += my_line_length_back ** 4
                    if (forward_opponent_open == True) and (opponent_line_length < 5):
                        interest_value += opponent_line_length ** 4
                    if (backward_opponent_open == True) and (opponent_line_length_back < 5):
                        interest_value += opponent_line_length_back ** 4
            # after looking at all directions, record the total interest_value of this move
            move_interest_values[r, c] += interest_value
            if interest_value > 256: # one (length_4) ** 4, highly interesting move
                n_moves += 1
    # all moves have been investigated now see if we have to block first
    if force_to_block == True or exist_will_win_move == True:
        if verbose == True:
            print(final_single_move[0,0], final_single_move[0,1], "Only One")
        return final_single_move
    else:
        flattened_interest = move_interest_values.ravel()
        # The interest value > 250 means at least one length_4 or three length_3 which make it highly interesting
        #n_high_interest_moves = np.sum(flattened_interest > 266) # did it in the loop
        if n_moves > empty_spots_left:
            n_moves = empty_spots_left
        # top-n_moves indices into the flattened board, highest interest first
        high_interest_idx = np.argsort(flattened_interest)[-n_moves:][::-1]
        interested_moves = np.empty(n_moves*2, dtype=np.int64).reshape(n_moves, 2)
        interested_moves[:,0] = high_interest_idx // board_size
        interested_moves[:,1] = high_interest_idx % board_size
        if verbose == True:
            print("There are", n_moves, "interested_moves")
            for i in range(n_moves):
                print(interested_moves[i,0],interested_moves[i,1],' : ', flattened_interest[high_interest_idx[i]])
        return interested_moves
@numba.jit(nopython=True, nogil=True)
def i_win(state, last_move, player):
    """ Return true if I just got 5-in-a-row with last_move """
    row, col = last_move
    # the four axis directions; their opposites are covered by the backward scan
    for dr, dc in ((1, 1), (1, 0), (0, 1), (1, -1)):
        count = 1  # the stone at last_move itself
        # scan forward along (dr, dc), at most 5 steps
        rr = row
        cc = col
        for _ in range(5):
            rr += dr
            cc += dc
            if rr < 0 or rr >= board_size or cc < 0 or cc >= board_size:
                break
            if state[rr, cc] != player:
                break
            count += 1
        # scan backward along (-dr, -dc) for the remaining span
        rr = row
        cc = col
        for _ in range(6 - count):
            rr -= dr
            cc -= dc
            if rr < 0 or rr >= board_size or cc < 0 or cc >= board_size:
                break
            if state[rr, cc] != player:
                break
            count += 1
        # exactly 5 wins; 6+ would be an overline and is not counted here
        if count == 5:
            return True
    return False
@numba.jit(nopython=True, nogil=True)
def i_lost(state, player):
    """Return True if the opponent can win by playing on some empty spot."""
    for row in range(board_size):
        for col in range(board_size):
            if state[row, col] != 0:
                continue
            if i_win(state, (row, col), -player):
                return True
    return False
@numba.jit(nopython=True, nogil=True)
def i_will_win(state, last_move, player):
    """ Return true if I will win next step if the opponent don't have 4-in-a-row.
    Winning Conditions:
        1. 5 in a row.
        2. 4 in a row with both end open. (free 4)
        3. 4 in a row with one missing stone x 2 (hard 4 x 2)

    state: board array (1 = black, -1 = white, 0 = empty)
    last_move: (row, col) of the stone just placed (assumed already on the board)
    player: 1 or -1, the side that played last_move
    """
    r, c = last_move
    # try all 4 directions, the other 4 is equivalent
    directions = [(1,1), (1,0), (0,1), (1,-1)]
    n_hard_4 = 0 # number of hard 4s found
    for dr, dc in directions:
        line_length = 1 # last_move
        # try to extend in the positive direction (max 5 times to check overline)
        ext_r = r
        ext_c = c
        skipped_1 = 0
        for i in range(5):
            ext_r += dr
            ext_c += dc
            if ext_r < 0 or ext_r >= board_size or ext_c < 0 or ext_c >= board_size:
                break
            elif state[ext_r, ext_c] == player:
                line_length += 1
            elif skipped_1 == 0 and state[ext_r, ext_c] == 0:
                skipped_1 = i+1 # allow one skip and record the position of the skip
            else:
                break
        # try to extend in the opposite direction
        ext_r = r
        ext_c = c
        skipped_2 = 0
        # the backward counting starts at the furthest "unskipped" stone
        if skipped_1 != 0:
            line_length_back = skipped_1
        else:
            line_length_back = line_length
        line_length_no_skip = line_length_back
        for i in range(6-line_length_back):
            ext_r -= dr
            ext_c -= dc
            if ext_r < 0 or ext_r >= board_size or ext_c < 0 or ext_c >= board_size:
                break
            elif state[ext_r, ext_c] == player:
                line_length_back += 1
            elif skipped_2 == 0 and state[ext_r, ext_c] == 0:
                skipped_2 = i + 1
            else:
                break
        if line_length_back == 6:
            # we found 6 stones in a row, this is overline, skip this entire line
            continue
        elif line_length_back == 5:
            if (skipped_2 == 0) or (skipped_2 == (6-line_length_no_skip)):
                # we found 5 stones in a row, because the backward counting is not skipped in the middle
                return True
            # else there is an empty spot in the middle of 6 stones, it's not a hard 4 any more
        elif line_length_back == 4:
            # here we have only 4 stones, if skipped in back count, it's a hard 4
            if skipped_2 != 0:
                n_hard_4 += 1 # backward hard 4
                if n_hard_4 == 2:
                    return True # two hard 4
        # here we check if there's a hard 4 in the forward direction
        # extend the forward line to the furthest "unskipped" stone
        if skipped_2 == 0:
            line_length += line_length_back - line_length_no_skip
        else:
            line_length += skipped_2 - 1
        # hard 4 only if forward length is 4, if forward reaches 5 or more, it's going to be overline
        if line_length == 4 and skipped_1 != 0:
            n_hard_4 += 1 # forward hard 4
            if n_hard_4 == 2:
                return True # two hard 4 or free 4
    return False
from collections import OrderedDict
class LRU(OrderedDict):
'Limit size, evicting the least recently looked-up key when full'
def __init__(self, maxsize=128):
self.maxsize = maxsize
super().__init__()
def __getitem__(self, key):
value = super().__getitem__(key)
self.move_to_end(key)
return value
def __setitem__(self, key, value):
if key in self:
self.move_to_end(key)
super().__setitem__(key, value)
if len(self) > self.maxsize:
oldest = next(iter(self))
del self[oldest]
class LeveledCache:
    """
    Cache with level system, level 0 has highest priority.

    When maxsize is reached, the oldest key from the lowest-priority
    (highest-numbered) non-empty level cache is evicted.
    """
    def __init__(self, maxlevel, maxsize=128):
        assert maxlevel >= 0
        self.maxlevel = maxlevel
        self.maxsize = maxsize
        # one insertion-ordered dict per level
        self.caches = [OrderedDict() for _ in range(maxlevel+1)]
        # total number of entries across all levels
        self.size = 0

    def get(self, key, max_accepted_level):
        """
        Go over each level in cache, find one value with key.
        If none found, return None.
        """
        for level in range(min(self.maxlevel, max_accepted_level)+1):
            # starting from level 0, look for cached value
            cache = self.caches[level]
            try:
                result = cache[key]
                # refresh recency within this level
                cache.move_to_end(key)
                return result
            except KeyError:
                pass
        return None

    def set(self, key, value, level):
        """
        Set a value in cache with level.

        Bug fix: updating an existing key no longer inflates ``self.size``
        (which previously caused premature evictions) and no longer evicts
        an unrelated entry when the cache is full.
        """
        assert level <= self.maxlevel
        cache = self.caches[level]
        if key in cache:
            # replacement: total size unchanged, just refresh recency/value
            cache.move_to_end(key)
            cache[key] = value
            return
        # new key: evict if full, otherwise grow
        if self.size >= self.maxsize:
            for l in range(self.maxlevel, -1, -1):
                victim_cache = self.caches[l]
                try:
                    oldest = next(iter(victim_cache))
                    del victim_cache[oldest]
                    break
                except StopIteration:
                    # if cache is empty skip and go to the next level cache
                    pass
        else:
            # update size
            self.size += 1
        cache[key] = value
def read_board_state(f):
    """Parse a printed board file (the format emitted by show_state).

    Lowercase 'x'/'o' are black/white stones; the single capitalized stone
    ('X' or 'O') marks the last move and decides who just played.

    Args:
        f: path of the board file; rows look like " 3| x - o ...".

    Returns:
        [board, last_move, playing, board_size] where board is
        [black_stones, white_stones] (lists of 0-indexed (row, col) tuples),
        last_move is the capitalized stone's position or None, and playing is
        0 if black played last, 1 if white did.
    """
    # default
    black_stones = []
    white_stones = []
    board = [black_stones, white_stones]
    last_move = None
    playing = 0
    # read and parse board; `with` closes the file deterministically
    # (the original `for line in open(f)` leaked the file handle)
    with open(f) as fh:
        for line in fh:
            if '|' not in line:
                continue
            line_idx, contents = line.split('|', maxsplit=1)
            row_i = int(line_idx)
            stones = contents.split()
            # only full board rows are parsed; anything else is ignored
            if len(stones) != board_size:
                continue
            for col_j, s in enumerate(stones):
                if s == 'x':
                    black_stones.append((row_i, col_j))
                elif s == 'X':
                    black_stones.append((row_i, col_j))
                    last_move = (row_i, col_j)
                    playing = 0
                elif s == 'o':
                    white_stones.append((row_i, col_j))
                elif s == 'O':
                    white_stones.append((row_i, col_j))
                    last_move = (row_i, col_j)
                    playing = 1
                elif s == '-':
                    pass
                else:
                    print(f'found unknown stone: {s}')
    board_state = [board, last_move, playing, board_size]
    return board_state
def convert_board_state(board_state):
    """Convert a parsed board_state into a board_size x board_size int8 array.

    Black stones become 1, white stones -1, empty cells 0.
    NOTE(review): stone coordinates are shifted by -1 here, i.e. the stones
    are treated as 1-indexed -- confirm against the producer of board_state
    (read_board_state emits 0-indexed coordinates).
    The unused ``playing_white`` local was removed; ``playing`` is otherwise
    ignored by this conversion.
    """
    board, last_move, playing, board_size = board_state
    # build new state representation
    state = np.zeros(board_size**2, dtype=np.int8).reshape(board_size, board_size)
    # put black stones, update index
    for br, bc in board[0]:
        state[br-1, bc-1] = 1
    # put white stones, update index
    for wr, wc in board[1]:
        state[wr-1, wc-1] = -1
    return state
def show_state(state):
    """Pretty-print a 15x15 board: x = black (1), o = white (-1), - = empty."""
    board_size = 15
    # column header (letters) and separator line
    print(' ' * 4 + ' '.join(chr(97 + j) for j in range(board_size)))
    print(' ' * 3 + '=' * (2 * board_size))
    for r in range(board_size):
        cells = ['%2s|' % r]
        for col in range(board_size):
            v = state[r, col]
            if v == 1:
                cells.append('x')
            elif v == -1:
                cells.append('o')
            else:
                cells.append('-')
        print(' '.join(cells))
def benchmark():
    """Time repeated calls to find_interesting_moves on a nearly empty board."""
    state = np.zeros(board_size**2, dtype=np.int8).reshape(board_size, board_size)
    state[8, 8] = 1  # single stone near the center
    move_interest_values = np.zeros(board_size**2, dtype=np.float32).reshape(board_size, board_size)
    player = 1
    n_moves = 20
    n_repeats = 1000
    start = time.time()
    for _ in range(n_repeats):
        find_interesting_moves(state, 224, move_interest_values, player, n_moves, verbose=False)
    print(f"{n_repeats} repeats took {time.time() - start:9.3f} s")
# Allow running this module directly as a quick performance check.
if __name__ == "__main__":
    benchmark()
"saviorbruce@gmail.com"
] | saviorbruce@gmail.com |
197cc624f47f8f44191b93e8603750c75b54ad13 | 6f2ae51bfa26f58cf0eccad67a563dc91f87e0ac | /oop18 (quick tips).py | 62cd87d8ff9bd42976ba810c827c9dfcd799132c | [
"MIT"
] | permissive | nicholaskarlson/Object-Oriented-Programming-in-Python | c830e9dc86df72ee6fbfd017cf05198e4664e2b6 | 9a51892a99f9920c20c3abf8342a060af94305e6 | refs/heads/master | 2022-12-03T03:27:12.282726 | 2020-08-22T12:17:27 | 2020-08-22T12:17:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | # OOP quick tips
# Tip 1
# printing the children classes of Parent class.
# Parent classes
class Father:
    """Parent class holding a single integer counter starting at 0.

    Bug fix: the original stored the counter as a bare local ``value``, so
    update()/renew()/show() all raised NameError; the counter now lives on
    the instance as ``self.value``.
    """
    def __init__(self):
        self.value = 0
    def update(self):
        # step the counter up by one
        self.value += 1
    def renew(self):
        # reset the counter
        self.value = 0
    def show(self):
        print(self.value)
class Mother:
    """Parent class holding a single integer counter starting at 1.

    Bug fix: the original stored the counter as a bare local ``value``, so
    update()/renew()/show() all raised NameError; the counter now lives on
    the instance as ``self.value``.
    """
    def __init__(self):
        self.value = 1
    def update(self):
        # step the counter down by one
        self.value -= 1
    def renew(self):
        # reset the counter
        self.value = 0
    def show(self):
        print(self.value)
# Children classes
class Child_1(Father):
    """Child of Father that steps the counter by +2 instead of +1."""
    def update(self):
        # matches the self.value fix applied on the parent classes
        self.value += 2
class Child_2(Mother):
    """Child of Mother that steps the counter by -2 instead of -1."""
    def update(self):
        # matches the self.value fix applied on the parent classes
        self.value -= 2
# the main function.
def interiors(*classx):
    """Return the set of all classes that (transitively) subclass the given classes."""
    found = set()
    pending = list(classx)
    # breadth-agnostic traversal: pop any pending class, record its new subclasses
    while pending:
        parent = pending.pop()
        for sub in parent.__subclasses__():
            if sub in found:
                continue
            found.add(sub)
            pending.append(sub)
    return found
# Demo: print every class derived (directly or indirectly) from Father or Mother.
print(interiors(Father,Mother))
| [
"ahammadshawki8@gmail.com"
] | ahammadshawki8@gmail.com |
b5aecc91b76a83f848608b2e1b3114abc8dd616c | b5921afe6ea5cd8b3dcfc83147ab5893134a93d0 | /tl/utils/lockmanager.py | 1fa59de415bd6e96b46201e2b870ea969f38ebeb | [
"LicenseRef-scancode-other-permissive"
] | permissive | techdragon/tl | aaeb46e18849c04ad436e0e786401621a4be82ee | 6aba8aeafbc92cabdfd7bec11964f7c3f9cb835d | refs/heads/master | 2021-01-17T16:13:18.636457 | 2012-11-02T10:08:10 | 2012-11-02T10:08:10 | 9,296,808 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,410 | py | # tl/utils/lockmanager.py
#
#
""" manages locks """
## basic imports
import _thread
import threading
import logging
## LockManager class
## LockManager class
class LockManager(object):
    """Registry of named ``_thread`` locks, created lazily on first use.

    Locks live in ``self.locks`` keyed by name so unrelated callers can
    coordinate through shared names.  Logging now uses lazy %-style
    arguments so messages are only formatted when DEBUG is enabled.
    """

    def __init__(self):
        # name -> lock mapping
        self.locks = {}

    def allocate(self, name):
        """ allocate a new lock """
        self.locks[name] = _thread.allocate_lock()
        logging.debug('lockmanager - allocated %s', name)

    def get(self, name):
        """ get lock """
        if name not in self.locks:
            self.allocate(name)
        return self.locks[name]

    def delete(self, name):
        """ delete lock """
        if name in self.locks:
            del self.locks[name]

    def acquire(self, name):
        """ acquire lock """
        if name not in self.locks:
            self.allocate(name)
        logging.debug('lockmanager - *acquire* %s', name)
        self.locks[name].acquire()

    def release(self, name):
        """ release lock """
        logging.debug('lockmanager - *releasing* %s', name)
        try:
            self.locks[name].release()
        except RuntimeError:
            # deliberately best-effort: releasing an unlocked lock is ignored
            pass
## RLockManager class
class RLockManager(LockManager):
    """LockManager variant backed by reentrant ``threading.RLock`` objects."""
    def allocate(self, name):
        """ allocate a new lock """
        self.locks[name] = threading.RLock()
        # lazy %-style args avoid formatting when DEBUG is disabled
        logging.debug('lockmanager - allocated RLock %s', name)
## global lockmanagers
# Module-level singletons shared by every importer of this module.
lockmanager = LockManager()
rlockmanager = RLockManager()
| [
"feedbackflow@gmail.com"
] | feedbackflow@gmail.com |
ba2789e49303e6988343f44d5c915447e9d9121e | 8e70df9b2363fd525691096d950fb72d9b33bdd4 | /test/test_user_signin_link_creation.py | 0e10dea654d63f48024f450e3e85c44942c86468 | [
"Apache-2.0"
] | permissive | FlatIO/api-client-python | d275b5905fdcf6c3b160f990dc08aa55d55be28f | 225c2cceaff762c88efd5cf0e64737df82ec6b27 | refs/heads/master | 2023-07-28T08:34:53.364428 | 2023-06-05T17:17:53 | 2023-06-05T17:17:53 | 87,713,658 | 8 | 2 | Apache-2.0 | 2023-08-23T20:23:09 | 2017-04-09T14:17:48 | Python | UTF-8 | Python | false | false | 2,421 | py | """
Flat API
The Flat API allows you to easily extend the abilities of the [Flat Platform](https://flat.io), with a wide range of use cases including the following: * Creating and importing new music scores using MusicXML, MIDI, Guitar Pro (GP3, GP4, GP5, GPX, GP), PowerTab, TuxGuitar and MuseScore files * Browsing, updating, copying, exporting the user's scores (for example in MP3, WAV or MIDI) * Managing educational resources with Flat for Education: creating & updating the organization accounts, the classes, rosters and assignments. The Flat API is built on HTTP. Our API is RESTful It has predictable resource URLs. It returns HTTP response codes to indicate errors. It also accepts and returns JSON in the HTTP body. The [schema](/swagger.yaml) of this API follows the [OpenAPI Initiative (OAI) specification](https://www.openapis.org/), you can use and work with [compatible Swagger tools](http://swagger.io/open-source-integrations/). This API features Cross-Origin Resource Sharing (CORS) implemented in compliance with [W3C spec](https://www.w3.org/TR/cors/). You can use your favorite HTTP/REST library for your programming language to use Flat's API. This specification and reference is [available on Github](https://github.com/FlatIO/api-reference). Getting Started and learn more: * [API Overview and introduction](https://flat.io/developers/docs/api/) * [Authentication (Personal Access Tokens or OAuth2)](https://flat.io/developers/docs/api/authentication.html) * [SDKs](https://flat.io/developers/docs/api/sdks.html) * [Rate Limits](https://flat.io/developers/docs/api/rate-limits.html) * [Changelog](https://flat.io/developers/docs/api/changelog.html) # noqa: E501
The version of the OpenAPI document: 2.18.0
Contact: developers@flat.io
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import flat_api
from flat_api.model.user_signin_link_creation import UserSigninLinkCreation
class TestUserSigninLinkCreation(unittest.TestCase):
    """Unit test stubs for the UserSigninLinkCreation model."""

    def setUp(self):
        # no fixtures required yet
        pass

    def tearDown(self):
        # nothing to clean up
        pass

    def testUserSigninLinkCreation(self):
        """Test UserSigninLinkCreation"""
        # FIXME: construct object with mandatory attributes with example values
        # model = UserSigninLinkCreation() # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| [
"vincent@giersch.fr"
] | vincent@giersch.fr |
fc19c5d307befafb06976f57b6ef089143644929 | 749bd7db8d1902274a47bb7d98b9d6ced3ef6b68 | /S-NET/nmt_snet_ans_syn/nmt/train.py | 1e6f3611588973d6649eda58278c7d78dcbeced2 | [
"Apache-2.0"
] | permissive | burglarhobbit/machine-reading-comprehension | 37582e0fdca4690bd55accf33987b5fce1f663ea | 04729af3d934a7696938f4079089b9b014c986aa | refs/heads/master | 2023-02-08T10:39:19.262900 | 2020-01-11T04:08:30 | 2020-01-11T04:08:30 | 114,113,176 | 29 | 15 | null | 2023-02-02T02:32:54 | 2017-12-13T11:34:36 | Python | UTF-8 | Python | false | false | 25,370 | py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""For training NMT models."""
from __future__ import print_function
import math
import os
import random
import time
import tensorflow as tf
from . import attention_model
from . import gnmt_model
from . import inference
from . import model as nmt_model
from . import model_helper
from .utils import misc_utils as utils
from .utils import nmt_utils
utils.check_tensorflow_version()
__all__ = [
"run_sample_decode", "run_internal_eval", "run_external_eval",
"run_avg_external_eval", "run_full_eval", "init_stats", "update_stats",
"print_step_info", "process_stats", "train"
]
def run_sample_decode(infer_model, infer_sess, model_dir, hparams,
                      summary_writer, src_data, tgt_data, ques_data,
                      feature_s_data, feature_e_data):
  """Sample decode a random sentence from src_data.

  Loads (or creates) the latest inference checkpoint from model_dir, then
  delegates to _sample_decode, which picks the sentence and writes the
  decoded result via summary_writer.  The extra ques/feature_s/feature_e
  inputs are the question and start/end feature streams specific to this
  S-NET answer-synthesis variant.
  """
  with infer_model.graph.as_default():
    loaded_infer_model, global_step = model_helper.create_or_load_model(
        infer_model.model, model_dir, infer_sess, "infer")
  _sample_decode(loaded_infer_model, global_step, infer_sess, hparams,
                 infer_model.iterator, src_data, tgt_data, ques_data,
                 feature_s_data, feature_e_data,
                 infer_model.src_placeholder, infer_model.ques_placeholder,
                 infer_model.feature_s_placeholder, infer_model.feature_e_placeholder,
                 infer_model.batch_size_placeholder, summary_writer)
def run_internal_eval(
    eval_model, eval_sess, model_dir, hparams, summary_writer,
    use_test_set=True):
  """Compute internal evaluation (perplexity) for both dev / test.

  Returns:
    (dev_ppl, test_ppl); test_ppl is None unless use_test_set is True and
    hparams.test_prefix is set.
  """
  with eval_model.graph.as_default():
    loaded_eval_model, global_step = model_helper.create_or_load_model(
        eval_model.model, model_dir, eval_sess, "eval")
  # dev inputs: source/target plus the question and start/end feature files
  # used by this S-NET answer-synthesis setup (fed as file paths)
  dev_src_file = "%s.%s" % (hparams.dev_prefix, hparams.src)
  dev_tgt_file = "%s.%s" % (hparams.dev_prefix, hparams.tgt)
  dev_ques_file = "%s.%s" % ("ques_dev", hparams.src)
  dev_feature_file_s = "%s.%s" % ("feature_s_dev", hparams.src)
  dev_feature_file_e = "%s.%s" % ("feature_e_dev", hparams.src)
  dev_eval_iterator_feed_dict = {
      eval_model.src_file_placeholder: dev_src_file,
      eval_model.tgt_file_placeholder: dev_tgt_file,
      eval_model.ques_file_placeholder: dev_ques_file,
      eval_model.feature_file_s_placeholder: dev_feature_file_s,
      eval_model.feature_file_e_placeholder: dev_feature_file_e,
  }
  dev_ppl = _internal_eval(loaded_eval_model, global_step, eval_sess,
                           eval_model.iterator, dev_eval_iterator_feed_dict,
                           summary_writer, "dev")
  test_ppl = None
  if use_test_set and hparams.test_prefix:
    # same wiring for the test split
    test_src_file = "%s.%s" % (hparams.test_prefix, hparams.src)
    test_tgt_file = "%s.%s" % (hparams.test_prefix, hparams.tgt)
    test_ques_file = "%s.%s" % ("ques_test", hparams.src)
    test_feature_file_s = "%s.%s" % ("feature_s_test", hparams.src)
    test_feature_file_e = "%s.%s" % ("feature_e_test", hparams.src)
    test_eval_iterator_feed_dict = {
        eval_model.src_file_placeholder: test_src_file,
        eval_model.tgt_file_placeholder: test_tgt_file,
        eval_model.ques_file_placeholder: test_ques_file,
        eval_model.feature_file_s_placeholder: test_feature_file_s,
        eval_model.feature_file_e_placeholder: test_feature_file_e,
    }
    test_ppl = _internal_eval(loaded_eval_model, global_step, eval_sess,
                              eval_model.iterator, test_eval_iterator_feed_dict,
                              summary_writer, "test")
  return dev_ppl, test_ppl
def run_external_eval(infer_model, infer_sess, model_dir, hparams,
                      summary_writer, save_best_dev=True, use_test_set=True,
                      avg_ckpts=False):
  """Compute external evaluation (bleu, rouge, etc.) for both dev / test.

  Args:
    save_best_dev: keep a copy of the checkpoint that scores best on dev.
    use_test_set: also evaluate on the test split when hparams.test_prefix
      is set.
    avg_ckpts: mark these scores as coming from an averaged checkpoint
      (affects summary naming downstream).

  Returns:
    (dev_scores, test_scores, global_step); test_scores is None unless a
    test set is evaluated.
  """
  with infer_model.graph.as_default():
    loaded_infer_model, global_step = model_helper.create_or_load_model(
        infer_model.model, model_dir, infer_sess, "infer")
  # dev inputs: unlike internal eval, inference feeds the *contents* of the
  # files (via inference.load_data) through placeholders
  dev_src_file = "%s.%s" % (hparams.dev_prefix, hparams.src)
  dev_tgt_file = "%s.%s" % (hparams.dev_prefix, hparams.tgt)
  dev_ques_file = "%s.%s" % ("ques_dev", hparams.src)
  dev_feature_s_file = "%s.%s" % ("feature_s_dev", hparams.src)
  dev_feature_e_file = "%s.%s" % ("feature_e_dev", hparams.src)
  dev_infer_iterator_feed_dict = {
      infer_model.src_placeholder: inference.load_data(dev_src_file),
      infer_model.ques_placeholder: inference.load_data(dev_ques_file),
      infer_model.feature_s_placeholder: inference.load_data(dev_feature_s_file),
      infer_model.feature_e_placeholder: inference.load_data(dev_feature_e_file),
      infer_model.batch_size_placeholder: hparams.infer_batch_size,
  }
  dev_scores = _external_eval(
      loaded_infer_model,
      global_step,
      infer_sess,
      hparams,
      infer_model.iterator,
      dev_infer_iterator_feed_dict,
      dev_tgt_file,
      "dev",
      summary_writer,
      save_on_best=save_best_dev,
      avg_ckpts=avg_ckpts)
  test_scores = None
  if use_test_set and hparams.test_prefix:
    # same wiring for the test split; never saves "best" from test scores
    test_src_file = "%s.%s" % (hparams.test_prefix, hparams.src)
    test_tgt_file = "%s.%s" % (hparams.test_prefix, hparams.tgt)
    test_ques_file = "%s.%s" % ("ques_test", hparams.src)
    test_feature_s_file = "%s.%s" % ("feature_s_test", hparams.src)
    test_feature_e_file = "%s.%s" % ("feature_e_test", hparams.src)
    test_infer_iterator_feed_dict = {
        infer_model.src_placeholder: inference.load_data(test_src_file),
        infer_model.ques_placeholder: inference.load_data(test_ques_file),
        infer_model.feature_s_placeholder: inference.load_data(test_feature_s_file),
        infer_model.feature_e_placeholder: inference.load_data(test_feature_e_file),
        infer_model.batch_size_placeholder: hparams.infer_batch_size,
    }
    test_scores = _external_eval(
        loaded_infer_model,
        global_step,
        infer_sess,
        hparams,
        infer_model.iterator,
        test_infer_iterator_feed_dict,
        test_tgt_file,
        "test",
        summary_writer,
        save_on_best=False,
        avg_ckpts=avg_ckpts)
  return dev_scores, test_scores, global_step
def run_avg_external_eval(infer_model, infer_sess, model_dir, hparams,
                          summary_writer, global_step):
  """Creates an averaged checkpoint and run external eval with it."""
  avg_dev_scores, avg_test_scores = None, None
  if not hparams.avg_ckpts:
    return avg_dev_scores, avg_test_scores
  # strip the ":0" suffix: VariableName:0 -> VariableName
  step_var_name = infer_model.model.global_step.name.split(":")[0]
  avg_model_dir = model_helper.avg_checkpoints(
      model_dir, hparams.num_keep_ckpts, global_step, step_var_name)
  if avg_model_dir:
    avg_dev_scores, avg_test_scores, _ = run_external_eval(
        infer_model,
        infer_sess,
        avg_model_dir,
        hparams,
        summary_writer,
        avg_ckpts=True)
  return avg_dev_scores, avg_test_scores
def run_full_eval(model_dir, infer_model, infer_sess, eval_model, eval_sess,
                  hparams, summary_writer, sample_src_data, sample_tgt_data,
                  sample_ques_data, sample_feature_s_data, sample_feature_e_data,
                  avg_ckpts=False):
  """Wrapper for running sample_decode, internal_eval and external_eval.

  Returns:
    (result_summary, global_step, metrics): a formatted summary string, the
    checkpoint's global step, and a metrics dict holding dev/test
    perplexities and external scores (plus avg_* entries when avg_ckpts).
  """
  run_sample_decode(infer_model, infer_sess, model_dir, hparams, summary_writer,
                    sample_src_data, sample_tgt_data, sample_ques_data,
                    sample_feature_s_data, sample_feature_e_data)
  dev_ppl, test_ppl = run_internal_eval(
      eval_model, eval_sess, model_dir, hparams, summary_writer)
  dev_scores, test_scores, global_step = run_external_eval(
      infer_model, infer_sess, model_dir, hparams, summary_writer)
  metrics = {
      "dev_ppl": dev_ppl,
      "test_ppl": test_ppl,
      "dev_scores": dev_scores,
      "test_scores": test_scores,
  }
  avg_dev_scores, avg_test_scores = None, None
  if avg_ckpts:
    # additionally evaluate an averaged checkpoint
    avg_dev_scores, avg_test_scores = run_avg_external_eval(
        infer_model, infer_sess, model_dir, hparams, summary_writer,
        global_step)
    metrics["avg_dev_scores"] = avg_dev_scores
    metrics["avg_test_scores"] = avg_test_scores
  result_summary = _format_results("dev", dev_ppl, dev_scores, hparams.metrics)
  if avg_dev_scores:
    result_summary += ", " + _format_results("avg_dev", None, avg_dev_scores,
                                             hparams.metrics)
  if hparams.test_prefix:
    result_summary += ", " + _format_results("test", test_ppl, test_scores,
                                             hparams.metrics)
    if avg_test_scores:
      result_summary += ", " + _format_results("avg_test", None,
                                               avg_test_scores, hparams.metrics)
  return result_summary, global_step, metrics
def init_stats():
  """Initialize statistics that we want to accumulate."""
  keys = ("step_time", "loss", "predict_count", "total_count", "grad_norm")
  return {key: 0.0 for key in keys}
def update_stats(stats, start_time, step_result):
  """Fold one training step's result into the running ``stats`` accumulators.

  Returns:
    (global_step, learning_rate, step_summary) extracted from ``step_result``
    so the caller can log and write summaries.
  """
  (_, step_loss, step_predict_count, step_summary, global_step,
   step_word_count, batch_size, grad_norm, learning_rate) = step_result

  # Accumulate; loss is scaled back up by batch size so the later average
  # is per-prediction rather than per-batch.
  elapsed = time.time() - start_time
  stats["step_time"] = stats["step_time"] + elapsed
  stats["loss"] = stats["loss"] + step_loss * batch_size
  stats["predict_count"] = stats["predict_count"] + step_predict_count
  stats["total_count"] = stats["total_count"] + float(step_word_count)
  stats["grad_norm"] = stats["grad_norm"] + grad_norm
  return global_step, learning_rate, step_summary
def print_step_info(prefix, global_step, info, result_summary, log_f):
  """Log a single consolidated status line for the current global step."""
  status_line = (
      "%sstep %d lr %g step-time %.2fs wps %.2fK ppl %.2f gN %.2f %s, %s" %
      (prefix, global_step, info["learning_rate"], info["avg_step_time"],
       info["speed"], info["train_ppl"], info["avg_grad_norm"],
       result_summary, time.ctime()))
  utils.print_out(status_line, log_f)
def process_stats(stats, info, global_step, steps_per_stats, log_f):
  """Refresh the derived metrics in ``info`` from the raw ``stats``.

  Returns:
    True when the training perplexity has overflowed (NaN/inf/huge), which
    signals the caller to stop training early.
  """
  # Derive per-interval averages from the accumulated raw statistics.
  info["avg_step_time"] = stats["step_time"] / steps_per_stats
  info["avg_grad_norm"] = stats["grad_norm"] / steps_per_stats
  info["train_ppl"] = utils.safe_exp(stats["loss"] / stats["predict_count"])
  info["speed"] = stats["total_count"] / (1000 * stats["step_time"])

  # Check for overflow
  train_ppl = info["train_ppl"]
  is_overflow = (math.isnan(train_ppl) or math.isinf(train_ppl)
                 or train_ppl > 1e20)
  if is_overflow:
    utils.print_out("  step %d overflow, stop early" % global_step,
                    log_f)
  return is_overflow
def before_train(loaded_train_model, train_model, train_sess, global_step,
                 hparams, log_f):
  """Misc tasks to do before training.

  Initializes the stats/info accumulators, logs the starting step and
  learning rate, and (re)initializes the training iterator, skipping the
  batches already consumed in the current epoch (resume support).

  Returns:
    (stats, info, start_train_time) consumed by the training loop.
  """
  stats = init_stats()
  # "learning_rate" is read from the live graph so a restored model reports
  # its actual (possibly decayed) rate rather than the hparams default.
  info = {"train_ppl": 0.0, "speed": 0.0, "avg_step_time": 0.0,
          "avg_grad_norm": 0.0,
          "learning_rate": loaded_train_model.learning_rate.eval(
              session=train_sess)}
  start_train_time = time.time()
  utils.print_out("# Start step %d, lr %g, %s" %
                  (global_step, info["learning_rate"], time.ctime()), log_f)

  # Initialize all of the iterators; skip the elements already seen this
  # epoch so a restarted job resumes at the same position in the data.
  skip_count = hparams.batch_size * hparams.epoch_step
  utils.print_out("# Init train iterator, skipping %d elements" % skip_count)
  train_sess.run(
      train_model.iterator.initializer,
      feed_dict={train_model.skip_count_placeholder: skip_count})
  return stats, info, start_train_time
def train(hparams, scope=None, target_session=""):
  """Train a translation model.

  Builds separate train/eval/infer graphs and sessions, then loops over
  training steps, periodically printing statistics, saving checkpoints and
  running sample decoding plus internal (perplexity) and external (metric)
  evaluation. When training ends, the best checkpoints saved per metric are
  re-evaluated.

  Args:
    hparams: hyperparameters; also carries mutable state (epoch_step,
      best-metric values and directories).
    scope: optional variable scope passed to the model builders.
    target_session: TensorFlow session target (for distributed training).

  Returns:
    (final_eval_metrics, global_step) after training finishes.
  """
  log_device_placement = hparams.log_device_placement
  out_dir = hparams.out_dir
  num_train_steps = hparams.num_train_steps
  steps_per_stats = hparams.steps_per_stats
  steps_per_external_eval = hparams.steps_per_external_eval
  steps_per_eval = 10 * steps_per_stats
  avg_ckpts = hparams.avg_ckpts

  if not steps_per_external_eval:
    steps_per_external_eval = 5 * steps_per_eval

  # Pick the model class from the attention configuration.
  if not hparams.attention:
    model_creator = nmt_model.Model
  else:  # Attention
    if (hparams.encoder_type == "gnmt" or
        hparams.attention_architecture in ["gnmt", "gnmt_v2"]):
      model_creator = gnmt_model.GNMTModel
    elif hparams.attention_architecture == "standard":
      model_creator = attention_model.AttentionModel
    else:
      raise ValueError("Unknown attention architecture %s" %
                       hparams.attention_architecture)

  train_model = model_helper.create_train_model(model_creator, hparams, scope)
  eval_model = model_helper.create_eval_model(model_creator, hparams, scope)
  infer_model = model_helper.create_infer_model(model_creator, hparams, scope)

  # Preload data for sample decoding.
  dev_src_file = "%s.%s" % (hparams.dev_prefix, hparams.src)
  dev_tgt_file = "%s.%s" % (hparams.dev_prefix, hparams.tgt)
  dev_ques_file = "%s.%s" % ("ques_dev", hparams.src)
  dev_feature_s_file = "%s.%s" % ("feature_s_dev", hparams.src)
  dev_feature_e_file = "%s.%s" % ("feature_e_dev", hparams.src)
  sample_src_data = inference.load_data(dev_src_file)
  sample_tgt_data = inference.load_data(dev_tgt_file)
  sample_ques_data = inference.load_data(dev_ques_file)
  sample_feature_s_data = inference.load_data(dev_feature_s_file)
  sample_feature_e_data = inference.load_data(dev_feature_e_file)

  summary_name = "train_log"
  model_dir = hparams.out_dir

  # Log and output files
  log_file = os.path.join(out_dir, "log_%d" % time.time())
  log_f = tf.gfile.GFile(log_file, mode="a")
  utils.print_out("# log_file=%s" % log_file, log_f)
  utils.print_out("# VISIBLE GPUS=%s" % hparams.gpu_ids)
  utils.print_out("# VISIBLE GPUS=%s" % hparams.gpu_ids1)

  # TensorFlow model
  config_proto = utils.get_config_proto(
      log_device_placement=log_device_placement,
      num_intra_threads=hparams.num_intra_threads,
      num_inter_threads=hparams.num_inter_threads,
      gpu_ids=hparams.gpu_ids)
  train_sess = tf.Session(
      target=target_session, config=config_proto, graph=train_model.graph)
  eval_sess = tf.Session(
      target=target_session, config=config_proto, graph=eval_model.graph)
  infer_sess = tf.Session(
      target=target_session, config=config_proto, graph=infer_model.graph)

  with train_model.graph.as_default():
    loaded_train_model, global_step = model_helper.create_or_load_model(
        train_model.model, model_dir, train_sess, "train")

  # Summary writer
  summary_writer = tf.summary.FileWriter(
      os.path.join(out_dir, summary_name), train_model.graph)

  # First evaluation.
  # Bug fix: sample_ques_data was previously omitted from this call, which
  # shifted every later positional argument by one slot (the avg_ckpts flag
  # ended up bound to the sample_feature_e_data parameter).
  run_full_eval(
      model_dir, infer_model, infer_sess,
      eval_model, eval_sess, hparams,
      summary_writer, sample_src_data,
      sample_tgt_data, sample_ques_data,
      sample_feature_s_data, sample_feature_e_data, avg_ckpts)

  last_stats_step = global_step
  last_eval_step = global_step
  last_external_eval_step = global_step

  # This is the training loop.
  stats, info, start_train_time = before_train(
      loaded_train_model, train_model, train_sess, global_step, hparams, log_f)
  while global_step < num_train_steps:
    ### Run a step ###
    start_time = time.time()
    try:
      step_result = loaded_train_model.train(train_sess)
      hparams.epoch_step += 1
    except tf.errors.OutOfRangeError:
      # Finished going through the training dataset. Go to next epoch.
      hparams.epoch_step = 0
      utils.print_out(
          "# Finished an epoch, step %d. Perform external evaluation" %
          global_step)
      run_sample_decode(infer_model, infer_sess, model_dir, hparams,
                        summary_writer, sample_src_data, sample_tgt_data,
                        sample_ques_data, sample_feature_s_data,
                        sample_feature_e_data)
      run_external_eval(infer_model, infer_sess, model_dir, hparams,
                        summary_writer)
      if avg_ckpts:
        run_avg_external_eval(infer_model, infer_sess, model_dir, hparams,
                              summary_writer, global_step)
      train_sess.run(
          train_model.iterator.initializer,
          feed_dict={train_model.skip_count_placeholder: 0})
      continue

    # Process step_result, accumulate stats, and write summary
    global_step, info["learning_rate"], step_summary = update_stats(
        stats, start_time, step_result)
    summary_writer.add_summary(step_summary, global_step)

    # Once in a while, we print statistics.
    if global_step - last_stats_step >= steps_per_stats:
      last_stats_step = global_step
      is_overflow = process_stats(
          stats, info, global_step, steps_per_stats, log_f)
      print_step_info("  ", global_step, info, _get_best_results(hparams),
                      log_f)
      if is_overflow:
        break
      # Reset statistics
      stats = init_stats()

    if global_step - last_eval_step >= steps_per_eval:
      last_eval_step = global_step
      utils.print_out("# Save eval, global step %d" % global_step)
      utils.add_summary(summary_writer, global_step, "train_ppl",
                        info["train_ppl"])
      # Save checkpoint
      loaded_train_model.saver.save(
          train_sess,
          os.path.join(out_dir, "translate.ckpt"),
          global_step=global_step)
      # Evaluate on dev/test
      run_sample_decode(infer_model, infer_sess,
                        model_dir, hparams, summary_writer, sample_src_data,
                        sample_tgt_data, sample_ques_data,
                        sample_feature_s_data, sample_feature_e_data)
      run_internal_eval(
          eval_model, eval_sess, model_dir, hparams, summary_writer)

    if global_step - last_external_eval_step >= steps_per_external_eval:
      last_external_eval_step = global_step
      # Save checkpoint
      loaded_train_model.saver.save(
          train_sess,
          os.path.join(out_dir, "translate.ckpt"),
          global_step=global_step)
      run_sample_decode(infer_model, infer_sess,
                        model_dir, hparams, summary_writer, sample_src_data,
                        sample_tgt_data, sample_ques_data,
                        sample_feature_s_data, sample_feature_e_data)
      run_external_eval(
          infer_model, infer_sess, model_dir,
          hparams, summary_writer)
      if avg_ckpts:
        run_avg_external_eval(infer_model, infer_sess, model_dir, hparams,
                              summary_writer, global_step)

  # Done training
  loaded_train_model.saver.save(
      train_sess,
      os.path.join(out_dir, "translate.ckpt"),
      global_step=global_step)

  (result_summary, _, final_eval_metrics) = (
      run_full_eval(
          model_dir, infer_model, infer_sess, eval_model, eval_sess, hparams,
          summary_writer, sample_src_data, sample_tgt_data, sample_ques_data,
          sample_feature_s_data, sample_feature_e_data, avg_ckpts))
  print_step_info("# Final, ", global_step, info, result_summary, log_f)
  utils.print_time("# Done training!", start_train_time)

  summary_writer.close()

  # Re-evaluate the per-metric best checkpoints saved during training.
  utils.print_out("# Start evaluating saved best models.")
  for metric in hparams.metrics:
    best_model_dir = getattr(hparams, "best_" + metric + "_dir")
    summary_writer = tf.summary.FileWriter(
        os.path.join(best_model_dir, summary_name), infer_model.graph)
    result_summary, best_global_step, _ = run_full_eval(
        best_model_dir, infer_model, infer_sess, eval_model, eval_sess, hparams,
        summary_writer, sample_src_data, sample_tgt_data, sample_ques_data,
        sample_feature_s_data, sample_feature_e_data)
    print_step_info("# Best %s, " % metric, best_global_step, info,
                    result_summary, log_f)
    summary_writer.close()

    if avg_ckpts:
      best_model_dir = getattr(hparams, "avg_best_" + metric + "_dir")
      summary_writer = tf.summary.FileWriter(
          os.path.join(best_model_dir, summary_name), infer_model.graph)
      result_summary, best_global_step, _ = run_full_eval(
          best_model_dir, infer_model, infer_sess, eval_model, eval_sess,
          hparams, summary_writer, sample_src_data, sample_tgt_data,
          sample_ques_data, sample_feature_s_data, sample_feature_e_data)
      print_step_info("# Averaged Best %s, " % metric, best_global_step, info,
                      result_summary, log_f)
      summary_writer.close()

  return final_eval_metrics, global_step
def _format_results(name, ppl, scores, metrics):
"""Format results."""
result_str = ""
if ppl:
result_str = "%s ppl %.2f" % (name, ppl)
if scores:
for metric in metrics:
if result_str:
result_str += ", %s %s %.1f" % (name, metric, scores[metric])
else:
result_str = "%s %s %.1f" % (name, metric, scores[metric])
return result_str
def _get_best_results(hparams):
"""Summary of the current best results."""
tokens = []
for metric in hparams.metrics:
tokens.append("%s %.2f" % (metric, getattr(hparams, "best_" + metric)))
return ", ".join(tokens)
def _internal_eval(model, global_step, sess, iterator, iterator_feed_dict,
                   summary_writer, label):
  """Initialize the eval iterator, compute perplexity and write its summary."""
  sess.run(iterator.initializer, feed_dict=iterator_feed_dict)
  perplexity = model_helper.compute_perplexity(model, sess, label)
  # Record the score under "<label>_ppl" (e.g. "dev_ppl") for TensorBoard.
  utils.add_summary(summary_writer, global_step, "%s_ppl" % label, perplexity)
  return perplexity
def _sample_decode(model, global_step, sess, hparams, iterator, src_data,
                   tgt_data, ques_data, feature_s_data, feature_e_data,
                   iterator_src_placeholder, iterator_ques_placeholder,
                   iterator_feature_s_placeholder, iterator_feature_e_placeholder,
                   iterator_batch_size_placeholder, summary_writer):
  """Pick one random sentence, decode it and print src/ques/ref/nmt lines.

  Every ``*_data`` argument is a list indexed by sentence id; the selected
  example is fed through the infer iterator placeholders with batch size 1.
  """
  decode_id = random.randint(0, len(src_data) - 1)
  utils.print_out("  # %d" % decode_id)
  utils.print_out("  feature_s_data: %s" % feature_s_data[decode_id])
  # Bug fix: feature_e_data was previously never used -- feature_s_data was
  # both printed under this label and fed to the feature_e placeholder.
  utils.print_out("  feature_e_data: %s" % feature_e_data[decode_id])

  iterator_feed_dict = {
      iterator_src_placeholder: [src_data[decode_id]],
      iterator_ques_placeholder: [ques_data[decode_id]],
      iterator_feature_s_placeholder: [feature_s_data[decode_id]],
      iterator_feature_e_placeholder: [feature_e_data[decode_id]],
      iterator_batch_size_placeholder: 1,
  }
  sess.run(iterator.initializer, feed_dict=iterator_feed_dict)

  nmt_outputs, attention_summary = model.decode(sess)

  if hparams.beam_width > 0:
    # get the top translation.
    nmt_outputs = nmt_outputs[0]

  translation = nmt_utils.get_translation(
      nmt_outputs,
      sent_id=0,
      tgt_eos=hparams.eos,
      subword_option=hparams.subword_option)
  utils.print_out("    src: %s" % src_data[decode_id])
  utils.print_out("    ques: %s" % ques_data[decode_id])
  utils.print_out("    ref: %s" % tgt_data[decode_id])
  utils.print_out(b"    nmt: " + translation)

  # Summary (attention image), when the model produced one.
  if attention_summary is not None:
    summary_writer.add_summary(attention_summary, global_step)
def _external_eval(model, global_step, sess, hparams, iterator,
                   iterator_feed_dict, tgt_file, label, summary_writer,
                   save_on_best, avg_ckpts=False):
  """External evaluation such as BLEU and ROUGE scores.

  Decodes the full eval set, scores it against ``tgt_file`` for every metric
  in hparams.metrics, writes each score to TensorBoard and, when
  ``save_on_best`` is set, checkpoints the model into the per-metric "best"
  directory whenever a metric improves.

  Returns:
    Dict mapping metric name to score.
  """
  out_dir = hparams.out_dir
  # global_step == 0 means the model is untrained; skip decoding in that case.
  decode = global_step > 0
  if avg_ckpts:
    # Averaged-checkpoint results are tracked under a separate label/attrs.
    label = "avg_" + label
  if decode:
    utils.print_out("# External evaluation, global step %d" % global_step)
  sess.run(iterator.initializer, feed_dict=iterator_feed_dict)
  output = os.path.join(out_dir, "output_%s" % label)
  scores = nmt_utils.decode_and_evaluate(
      label,
      model,
      sess,
      output,
      ref_file=tgt_file,
      metrics=hparams.metrics,
      subword_option=hparams.subword_option,
      beam_width=hparams.beam_width,
      tgt_eos=hparams.eos,
      decode=decode)
  # Save on best metrics
  if decode:
    for metric in hparams.metrics:
      if avg_ckpts:
        best_metric_label = "avg_best_" + metric
      else:
        best_metric_label = "best_" + metric
      utils.add_summary(summary_writer, global_step, "%s_%s" % (label, metric),
                        scores[metric])
      # metric: larger is better
      if save_on_best and scores[metric] > getattr(hparams, best_metric_label):
        # New best: remember the score on hparams and checkpoint the model
        # into the metric-specific best-model directory.
        setattr(hparams, best_metric_label, scores[metric])
        model.saver.save(
            sess,
            os.path.join(
                getattr(hparams, best_metric_label + "_dir"), "translate.ckpt"),
            global_step=model.global_step)
    # Persist the updated best-metric values alongside the model.
    utils.save_hparams(out_dir, hparams)
  return scores
| [
"bhavyapatwa007@gmail.com"
] | bhavyapatwa007@gmail.com |
4fd7d2bf8eaab733ceafeabe6a8c3ece9b28126e | e641bd95bff4a447e25235c265a58df8e7e57c84 | /third_party/blink/renderer/modules/webcodecs/DEPS | f79eda8dda151643478fa884e467ea8d8ff57eff | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft"
] | permissive | zaourzag/chromium | e50cb6553b4f30e42f452e666885d511f53604da | 2370de33e232b282bd45faa084e5a8660cb396ed | refs/heads/master | 2023-01-02T08:48:14.707555 | 2020-11-13T13:47:30 | 2020-11-13T13:47:30 | 312,600,463 | 0 | 0 | BSD-3-Clause | 2022-12-23T17:01:30 | 2020-11-13T14:39:10 | null | UTF-8 | Python | false | false | 942 | include_rules = [
"+base/threading/thread_task_runner_handle.h",
"+components/viz/common/gpu/raster_context_provider.h",
"+components/viz/common/resources/single_release_callback.h",
"+gpu/command_buffer/client/shared_image_interface.h",
"+media/base",
"+media/filters",
"+media/formats/mp4/box_definitions.h",
"+media/media_buildflags.h",
"+media/mojo",
"+media/renderers",
"+media/video",
"+third_party/libyuv",
"+ui/gfx/color_space.h",
"+ui/gfx/geometry/rect.h",
"+ui/gfx/geometry/size.h",
"+ui/gfx/gpu_memory_buffer.h",
]
# Per-file overrides: these test files may additionally depend on base
# threading/run-loop helpers and GPU mailbox headers beyond include_rules.
specific_include_rules = {
  "video_track_reader_writer_test\.cc": [
    "+base/run_loop.h",
  ],
  "video_decoder_broker_test\.cc": [
    "+base/run_loop.h",
    "+base/threading/thread.h",
    "+gpu/command_buffer/common/mailbox_holder.h",
  ],
  "audio_decoder_broker_test\.cc": [
    "+base/run_loop.h",
    "+base/files/file_util.h",
  ],
}
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org | |
c0a00ebc3b8a77426bb463375c0e5a7233203829 | 4ac23b2633321df48a896180e6205dfc17ad5746 | /scratch09/ex04.py | b5ece5306f502f6fa8319f3759496fd815d500bf | [] | no_license | lee-saint/lab-python | f051a544ed97956f9725bb6f4a080bdc65c7e1ad | a425c173c379dda0de21eec538195ded17d31697 | refs/heads/master | 2020-11-30T12:24:12.614119 | 2019-12-27T07:35:50 | 2019-12-27T07:35:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,246 | py | """
pandas 패키지를 사용한 csv 파일 읽기
"""
import os
import pandas as pd
import matplotlib.pyplot as plt

# Build the path to the sample data set relative to this script.
file_path = os.path.join('..', 'scratch08', 'mpg.csv')
df = pd.read_csv(file_path)
print(df.head())  # print the first few rows of the DataFrame
print('shape:', df.shape)  # 234 observations, 11 variables
print('dtypes:', df.dtypes)
# DataFrame.dtypes: data type of each column (variable)
# pandas data types: object (string), float (real number), int (integer)
print(df.describe())  # descriptive summary statistics
displ = df['displ']
print(displ)
cty = df['cty']
plt.scatter(displ, cty)
plt.show()
# Selecting rows from a DataFrame: df.iloc[row number (index)], df.loc[row label]
print(df.iloc[0])
print(df.iloc[0:3])  # select rows with row index >= 0 and < 3
# Selecting multiple columns (variables) from a DataFrame
cols = ['displ', 'cty', 'hwy']  # []: list
print(df[cols])  # []: index operator
# Selecting multiple rows (observations) and columns (variables) at once
# df.loc[row_labels, col_labels]: labels (names) of rows and columns
# df.iloc[row_indices, col_indices]: numeric indices of rows and columns
print(df.loc[0:3, cols])
print(df.iloc[0:3, 0:3])
| [
"plutorian131@gmail.com"
] | plutorian131@gmail.com |
9e8e3a770c962073cf83f0c6dfa97a5803dfcdfe | 90f2cbe1c940a20dcc893837b6033a51d3233931 | /Learn_Flas/flask_study/lab-4-movie/app/home/forms.py | e82f1fe123a80255955704bfb4bcbf1116930796 | [] | no_license | MaxNcu/Learn_Python | 71501f38f6442f3ff2a1de1ff685b8975e50af20 | 5a1c6edf353ed7447b2ffd4126ad7668d8c5a407 | refs/heads/master | 2022-01-15T18:56:04.814476 | 2019-07-20T03:02:02 | 2019-07-20T03:02:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | # -*- coding: utf-8 -*-
# @Time : 2018/8/15 0015 13:48
# @Author : Langzi
# @Blog : www.langzi.fun
# @File : forms.py
# @Software: PyCharm
import sys
reload(sys)
sys.setdefaultencoding('utf-8') | [
"982722261@qq.com"
] | 982722261@qq.com |
5c55b42e4c4e1fb43abd86684c0665b0d4446b63 | d3d53fd1fb10e3895495066c3cc7b5529dfb2e27 | /main/migrations/0007_partenaire.py | 4e88e863572660060118dbc8e3e939a601dac5f3 | [] | no_license | miyou995/msenergy | c517b05cb3b28e9bbe3a5e668990ea96951f3fb7 | 8f635d1e19f8d91bffe7490cc88e25aa9b65e410 | refs/heads/master | 2023-01-13T06:50:10.379344 | 2020-10-20T10:41:05 | 2020-10-20T10:41:05 | 299,890,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | # Generated by Django 3.0.7 on 2020-07-27 09:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Partenaire (partner) model: name, logo image, optional URL."""

    dependencies = [
        ('main', '0006_contactform_date_added'),
    ]

    operations = [
        migrations.CreateModel(
            name='Partenaire',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                # Logos are uploaded under MEDIA_ROOT/part/.
                ('logo', models.ImageField(upload_to='part/')),
                ('url_marque', models.URLField(blank=True)),
            ],
        ),
    ]
| [
"inter.taki@gmail.com"
] | inter.taki@gmail.com |
7ea82c6443323fd95423f77eaa3d686341b30664 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /KQe5w8AdSLbweW8ck_5.py | e974521972362f31814cf4c82cbb398519e4a242 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,010 | py | """
Create a function that returns the characters from a list or string `r` on odd
or even positions, depending on the specifier `s`. The specifier will be
**"odd"** for items on _odd positions_ (1, 3, 5, ...) and **"even"** for items
on _even positions_ (2, 4, 6, ...).
### Examples
char_at_pos([2, 4, 6, 8, 10], "even") ➞ [4, 8]
# 4 & 8 occupy the 2nd & 4th positions
char_at_pos("EDABIT", "odd") ➞ "EAI"
# "E", "A" and "I" occupy the 1st, 3rd and 5th positions
char_at_pos(["A", "R", "B", "I", "T", "R", "A", "R", "I", "L", "Y"], "odd") ➞ ["A", "B", "T", "A", "I", "Y"]
### Notes
* Lists are zero-indexed, so, index+1 = position or position-1 = index.
* There will not be an empty string or an empty list.
* ( **Optional** ) Try solving this challenge in a single-line lambda function.
* A more advanced version of this challenge can be [found here](https://edabit.com/challenge/72KukSssxk2eHrWqx).
"""
def char_at_pos(r, s):
    """Return the items of ``r`` found at odd or even 1-based positions.

    Args:
        r: a list or string (never empty).
        s: specifier; "even" selects positions 2, 4, 6, ..., anything
           else ("odd") selects positions 1, 3, 5, ...

    Returns:
        The selected items, same type as ``r`` (slicing preserves it).
    """
    # Positions are 1-based, so "odd" positions map to indices 0, 2, 4, ...
    # "even" is detected via the letter 'e', which "odd" does not contain.
    return r[1::2] if 'e' in s else r[::2]
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
51d3c030d8d2022184af7c47d538156b2dbd5e13 | 738ac84e16f206e417399e96702b04433b2f286f | /setup.py | 1713fc06cbf62f9b132418e455eae59f547c13d0 | [] | no_license | belonesox/pg2bcolz | d732f7575c061f82b0097487bce29ec0ec8d89ae | 56964fca6256a9afa3051782fadea8708a56cce4 | refs/heads/master | 2020-07-05T05:17:33.306526 | 2019-08-15T12:11:03 | 2019-08-15T12:11:03 | 202,534,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,812 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of pg2bcolz.
# https://github.com/belonesox/pg2bcolz
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2018, Stas Fomin <stas-fomin@yandex.ru>
from setuptools import setup, find_packages
from pg2bcolz import __version__
# Extra dependencies needed only for running the test suite and building docs.
tests_require = [
    'mock',
    'nose',
    'coverage',
    'yanc',
    'preggy',
    'tox',
    'ipdb',
    'coveralls',
    'sphinx',
]

setup(
    name='pg2bcolz',
    version=__version__,
    description='Fast and optimized loading of large bcolz from postgres DB',
    long_description='''
Fast and optimized loading of large bcolz tables from postgres DB
''',
    keywords='Bcolz Postgres',
    author='Stas Fomin',
    author_email='stas-fomin@yandex.ru',
    url='https://github.com/belonesox/pg2bcolz',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: Unix',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Operating System :: OS Independent',
    ],
    packages=find_packages(),
    include_package_data=False,
    # Runtime dependencies: postgres driver plus the numeric/bcolz stack.
    install_requires=[
        "psycopg2",
        # "pandas",
        "numpy",
        "bcolz",
        # add your dependencies here
        # remember to use 'package-name>=x.y.z,<x.y+1.0' notation (this way you get bugfixes)
    ],
    extras_require={
        'tests': tests_require,
    },
    entry_points={
        'console_scripts': [
            # add cli scripts here in this form:
            # 'bcolz=bcolz.cli:main',
        ],
    },
)
| [
"stas-fomin@yandex.ru"
] | stas-fomin@yandex.ru |
053a6f94449e06bd7c27c4b37e61028d442c5162 | 837b411d23a63771ed9eac61671e7967b5026c05 | /run.py | ba02ccaa81f5aa89c9d91a7a8808134d3ca14866 | [] | no_license | stsh1119/Request_trap_app | fd17cb688d0346e84f670bfcf32a03f724f20e04 | bdb2bfd36dcabf700d87988412d3468135abcfbb | refs/heads/main | 2023-03-12T01:10:48.536099 | 2021-03-01T19:27:02 | 2021-03-01T19:27:02 | 331,733,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | from request_trap import app
if __name__ == '__main__':
app.run()
| [
"stshlaptop@gmail.com"
] | stshlaptop@gmail.com |
40c82ae4f2841d3d7fe415a716b29527eac584bb | 2b0eab74af8d23244ff11699830f9bb10fbd717a | /accounts/migrations/0015_auto_20190527_1010.py | 7e0ed91e427e416ba95a9cf189e3a066bef8f203 | [] | no_license | alexandrenorman/mixeur | c7e25cd20b03c78b361cb40e3e359a6dc5d9b06b | 95d21cd6036a99c5f399b700a5426e9e2e17e878 | refs/heads/main | 2023-03-13T23:50:11.800627 | 2021-03-07T15:49:15 | 2021-03-07T15:49:15 | 345,384,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 441 | py | # Generated by Django 2.2 on 2019-05-27 08:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename the RGPD consent flag so 'ademe' follows lowercase field naming."""

    dependencies = [
        ('accounts', '0014_user_phone_cache'),
    ]

    operations = [
        migrations.RenameField(
            model_name='rgpdconsent',
            old_name='allow_to_share_my_information_with_ADEME',
            new_name='allow_to_share_my_information_with_ademe',
        ),
    ]
| [
"norman@xael.org"
] | norman@xael.org |
427d19c541374cbceef49ce9c683561c00b10ed6 | d05a59feee839a4af352b7ed2fd6cf10a288a3cb | /xlsxwriter/test/worksheet/test_write_filter.py | cf9f4989a1a59d294da00df1dafaf3b81e3ff4fb | [
"BSD-2-Clause-Views"
] | permissive | elessarelfstone/XlsxWriter | 0d958afd593643f990373bd4d8a32bafc0966534 | bb7b7881c7a93c89d6eaac25f12dda08d58d3046 | refs/heads/master | 2020-09-24T06:17:20.840848 | 2019-11-24T23:43:01 | 2019-11-24T23:43:01 | 225,685,272 | 1 | 0 | NOASSERTION | 2019-12-03T18:09:06 | 2019-12-03T18:09:05 | null | UTF-8 | Python | false | false | 743 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteFilter(unittest.TestCase):
    """
    Test the Worksheet _write_filter() method.
    """

    def setUp(self):
        # Route the worksheet's XML output into an in-memory buffer so the
        # generated markup can be inspected without touching the filesystem.
        self.fh = StringIO()
        self.worksheet = Worksheet()
        self.worksheet._set_filehandle(self.fh)

    def test_write_filter(self):
        """Test the _write_filter() method"""

        self.worksheet._write_filter('East')

        # The method should emit a single self-closing <filter> element.
        exp = """<filter val="East"/>"""
        got = self.fh.getvalue()

        self.assertEqual(got, exp)
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
2591a5d573367c1b0e87394c9bd88b7c4525efbb | 292cec77b5003a2f80360d0aee77556d12d990f7 | /src/bentoml_cli/worker/http_dev_api_server.py | 669023f72a5ef4abf765eec7ab835a72487e137f | [
"Apache-2.0"
] | permissive | yubozhao/BentoML | 194a6ec804cc1c6dbe7930c49948b6707cbc3c5f | d4bb5cbb90f9a8ad162a417103433b9c33b39c84 | refs/heads/master | 2022-12-17T00:18:55.555897 | 2022-12-06T00:11:39 | 2022-12-06T00:11:39 | 178,978,385 | 3 | 0 | Apache-2.0 | 2020-12-01T18:17:15 | 2019-04-02T01:53:53 | Python | UTF-8 | Python | false | false | 3,932 | py | from __future__ import annotations
import socket
import click
@click.command()
@click.argument("bento_identifier", type=click.STRING, required=False, default=".")
@click.option("--fd", type=click.INT, required=True)
@click.option("--working-dir", required=False, type=click.Path(), default=None)
@click.option("--backlog", type=click.INT, default=2048)
@click.option(
    "--prometheus-dir",
    type=click.Path(exists=True),
    help="Required by prometheus to pass the metrics in multi-process mode",
)
@click.option(
    "--ssl-certfile",
    type=str,
    default=None,
    help="SSL certificate file",
)
@click.option(
    "--ssl-keyfile",
    type=str,
    default=None,
    help="SSL key file",
)
@click.option(
    "--ssl-keyfile-password",
    type=str,
    default=None,
    help="SSL keyfile password",
)
@click.option(
    "--ssl-version",
    type=int,
    default=None,
    help="SSL version to use (see stdlib 'ssl' module)",
)
@click.option(
    "--ssl-cert-reqs",
    type=int,
    default=None,
    help="Whether client certificate is required (see stdlib 'ssl' module)",
)
@click.option(
    "--ssl-ca-certs",
    type=str,
    default=None,
    help="CA certificates file",
)
@click.option(
    "--ssl-ciphers",
    type=str,
    default=None,
    help="Ciphers to use (see stdlib 'ssl' module)",
)
def main(
    bento_identifier: str,
    fd: int,
    working_dir: str | None,
    backlog: int,
    prometheus_dir: str | None,
    ssl_certfile: str | None,
    ssl_keyfile: str | None,
    ssl_keyfile_password: str | None,
    ssl_version: int | None,
    ssl_cert_reqs: int | None,
    ssl_ca_certs: str | None,
    ssl_ciphers: str | None,
):
    """
    Start a development server for the BentoML service.

    The server binds to an already-open socket passed in via the ``--fd``
    file descriptor and serves the service's ASGI app with a single uvicorn
    worker. SSL options are forwarded to uvicorn only when a certificate
    file is supplied.
    """
    # Imports are deferred to the function body so the CLI entry point can be
    # loaded without pulling in the full server stack.
    import psutil
    import uvicorn

    from bentoml import load
    from bentoml._internal.log import configure_server_logging
    from bentoml._internal.context import component_context
    from bentoml._internal.configuration.containers import BentoMLContainer

    component_context.component_type = "dev_api_server"
    configure_server_logging()
    if prometheus_dir is not None:
        BentoMLContainer.prometheus_multiproc_dir.set(prometheus_dir)
    svc = load(bento_identifier, working_dir=working_dir, standalone_load=True)

    # setup context: record the service/bento identity for logging & metrics.
    component_context.component_name = svc.name
    if svc.tag is None:
        # No tag available (e.g. loaded straight from source) — fall back to
        # the service name and a placeholder version.
        component_context.bento_name = svc.name
        component_context.bento_version = "not available"
    else:
        component_context.bento_name = svc.tag.name
        component_context.bento_version = svc.tag.version or "not available"

    # Adopt the pre-opened listening socket handed over by the parent process.
    sock = socket.socket(fileno=fd)

    uvicorn_options = {
        "backlog": backlog,
        "log_config": None,
        "workers": 1,
        "lifespan": "on",
    }
    # SSL settings (and their fallback defaults) are applied only when a
    # certificate file was provided; otherwise the server runs plain HTTP.
    if ssl_certfile:
        import ssl

        uvicorn_options["ssl_certfile"] = ssl_certfile
        if ssl_keyfile:
            uvicorn_options["ssl_keyfile"] = ssl_keyfile
        if ssl_keyfile_password:
            uvicorn_options["ssl_keyfile_password"] = ssl_keyfile_password
        if ssl_ca_certs:
            uvicorn_options["ssl_ca_certs"] = ssl_ca_certs

        # NOTE(review): unset options fall back to values mirroring uvicorn's
        # own defaults (PROTOCOL_TLS_SERVER / CERT_NONE / "TLSv1").
        if not ssl_version:
            ssl_version = ssl.PROTOCOL_TLS_SERVER
            uvicorn_options["ssl_version"] = ssl_version
        if not ssl_cert_reqs:
            ssl_cert_reqs = ssl.CERT_NONE
            uvicorn_options["ssl_cert_reqs"] = ssl_cert_reqs
        if not ssl_ciphers:
            ssl_ciphers = "TLSv1"
            uvicorn_options["ssl_ciphers"] = ssl_ciphers

    if psutil.WINDOWS:
        # uvicorn's default loop is unavailable on Windows; force asyncio
        # with the selector-based event loop policy.
        uvicorn_options["loop"] = "asyncio"
        import asyncio

        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())  # type: ignore

    config = uvicorn.Config(svc.asgi_app, **uvicorn_options)
    uvicorn.Server(config).run(sockets=[sock])
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter
| [
"noreply@github.com"
] | yubozhao.noreply@github.com |
d7ad5a84b638fc0540e7f580dc4f50df4fde635c | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/lib/surface/iam/service_accounts/sign_blob.py | e33878bf514e44c4212e0e1d81cd6185617a051e | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 2,830 | py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for signing blobs for service accounts."""
import textwrap
from apitools.base.py import exceptions
from googlecloudsdk.command_lib.iam import base_classes
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import log
class SignBlob(base_classes.BaseIamCommand):
  """Sign a blob with a managed service account key.

  This command signs a file containing arbitrary binary data (a blob) using a
  system-managed service account key.
  """

  detailed_help = {
      'DESCRIPTION': '{description}',
      'EXAMPLES': textwrap.dedent("""\
          To sign a blob file with a system-managed service account key,
          run:
            $ {command} --iam-account my-account@somedomain.com input.bin output.bin
          """),
      'SEE ALSO': textwrap.dedent("""\
          For more information on how this command ties into the wider cloud
          infrastructure, please see
          [](https://cloud.google.com/appengine/docs/java/appidentity/)
          """),
  }

  @staticmethod
  def Args(parser):
    """Register the command's flags and positional arguments."""
    parser.add_argument('--iam-account',
                        required=True,
                        help='The service account to sign as.')

    parser.add_argument('input',
                        metavar='INPUT-FILE',
                        help='A path to the blob file to be signed.')

    parser.add_argument('output',
                        metavar='OUTPUT-FILE',
                        help='A path the resulting signed blob will be '
                        'written to.')

  def Run(self, args):
    """Sign args.input with the service account's key; write to args.output."""
    try:
      response = self.iam_client.projects_serviceAccounts.SignBlob(
          self.messages.IamProjectsServiceAccountsSignBlobRequest(
              name=iam_util.EmailToAccountResourceName(args.iam_account),
              signBlobRequest=self.messages.SignBlobRequest(
                  bytesToSign=self.ReadFile(args.input))))

      self.WriteFile(args.output, response.signature)
      log.status.Print(
          'signed blob [{0}] as [{1}] for [{2}] using key [{3}]'.format(
              args.input, args.output, args.iam_account, response.keyId))
    except exceptions.HttpError as error:
      # Bug fix: this command defines --iam-account; the previous code passed
      # args.account (the global gcloud user account) to the error converter,
      # producing a misleading error about the wrong identity.
      raise iam_util.ConvertToServiceAccountException(error, args.iam_account)
| [
"toork@uw.edu"
] | toork@uw.edu |
66cc401c0e1112684bdbcf769d7b8f85b3ad00b6 | 916480ae24345193efa95df013f637e0a115653b | /web/transiq/driver/migrations/0022_driver_pan.py | ee0c60713f9da7066133b323920f9e578f38b391 | [
"Apache-2.0"
] | permissive | manibhushan05/tms | 50e289c670e1615a067c61a051c498cdc54958df | 763fafb271ce07d13ac8ce575f2fee653cf39343 | refs/heads/master | 2022-12-11T07:59:30.297259 | 2021-09-08T03:24:59 | 2021-09-08T03:24:59 | 210,017,184 | 0 | 0 | Apache-2.0 | 2022-12-08T02:35:01 | 2019-09-21T16:23:57 | Python | UTF-8 | Python | false | false | 403 | py | # Generated by Django 2.0.5 on 2018-08-28 16:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``pan`` CharField (max 11 chars) to the Driver model."""
    # Must be applied on top of the previous driver-app migration.
    dependencies = [
        ('driver', '0021_auto_20180702_1554'),
    ]
    operations = [
        migrations.AddField(
            model_name='driver',
            name='pan',
            # blank=True/null=True makes the field optional at both the form
            # and database level.
            field=models.CharField(blank=True, max_length=11, null=True),
        ),
    ]
| [
"mani@myhost.local"
] | mani@myhost.local |
0891b4eb3464f0cd9a8d15ce215aa9850f292e7a | f0a4708890bcd40171b6b884970b7e9fa349c213 | /tasks/TaskComparison.py | 3e9b0bf7ea45971d3cf8ef708661368549aaf021 | [
"MIT"
] | permissive | kosmitive/qlearn | 257c07c6eb37368f7f7a73d7cd56c54517823b53 | a15180267a88e32fac7d944940caf79c5bdec536 | refs/heads/master | 2021-09-06T05:55:02.791505 | 2018-02-02T22:58:55 | 2018-02-02T22:58:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,129 | py | import os
import matplotlib
matplotlib.use("Agg")
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import time
from agents.QLearningAgent import QLearningAgent
from collection.ColorCollection import ColorCollection
from collection.PolicyCollection import PolicyCollection
from environments.GridWorld import GridWorld
from environments.BinaryFlipEnvironment import BinaryFlipEnvironment
from environments.DeepSeaExploration import DeepSeaExploration
from environments.DeepSeaExplorationTwo import DeepSeaExplorationTwo
from environments.DeepSeaExplorationThree import DeepSeaExplorationThree
from environments.DeepSeaExplorationFour import DeepSeaExplorationFour
from environments.ExplorationChain import ExplorationChain
from environments.ExplorationTree import ExplorationTree
from environments.SharedLearningChain import SharedLearningChain
from manager.DirectoryManager import DirectoryManager
from plots.MultiDimensionalHeatMap import MultiDimensionalHeatmap
# ------------------------------ SETTINGS ------------------------------------
# Each entry of `run` pairs a list of environment specs with the policy
# batches to evaluate on them.  An environment spec is
# [env classes, problem sizes, size->steps fn, num parallel models, name];
# a policy batch entry is [batch name, indices of policies to record plots for].
run = list()
new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
                   ['boltzmann', []], ['cb_pseudo_count', []],
                   ['optimistic', []], ['ucb', []],
                   ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []], ['deterministic_bootstrapped_cb_pseudo_count', []]]
# fb selects which predefined experiment configuration to run; it also seeds TF.
fb = 2
seed = fb
if fb == 0:
    new_batch_names = [['optimistic', []], ['ucb', []], ['boltzmann', []], ['bootstrapped', []], ['cb_pseudo_count', []],
                       ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []], ['deterministic_bootstrapped_cb_pseudo_count', []]]
    new_envs = [[[ExplorationChain], [200], lambda n: n + 9, 1500, "exp_chain"]]
    run.append([new_envs, new_batch_names])
elif fb == 1:
    new_batch_names = [['optimistic', []], ['ucb', []], ['cb_pseudo_count', []],
                       ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []], ['deterministic_bootstrapped_cb_pseudo_count', []]]
    new_envs = [[[SharedLearningChain], [133], lambda n: n, 1500, "shared_chain"]]
    run.append([new_envs, new_batch_names])
elif fb == 2:
    new_batch_names = [['deterministic_bootstrapped_cb_pseudo_count', []]]
    new_envs = [[[DeepSeaExplorationTwo], [19], lambda n: n, 750, "deep_sea_two"]]
    run.append([new_envs, new_batch_names])
# The bin_flip experiment is always appended, regardless of fb.
new_batch_names = [['ucb_infogain', []],
                   ['deterministic_bootstrapped_cb_pseudo_count', []]]
new_envs = [[[BinaryFlipEnvironment], [6], lambda n: 8 * n, 1500, "bin_flip"]]
run.append([new_envs, new_batch_names])
# new_envs = [[BinaryFlipEnvironment, [6], lambda n: n ** 2, 2500]]
# new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
#                    ['boltzmann', []], ['cb_pseudo_count', []],
#                    ['optimistic', []], ['ucb', []],
#                    ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []],
#                    ['pc_pseudo_count', []], ['deterministic_bootstrapped_cb_pseudo_count',[]]]
# run.append([new_envs, new_batch_names])
#
# new_envs = [[GridWorld, [10], lambda n: 2 * n, 2500]]
# new_batch_names = [['eps_greedy', []], ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []]]
# run.append([new_envs, new_batch_names])
#
# new_envs = [[DeepSeaExploration, [20], lambda n: n, 625]]
# new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
#                    ['boltzmann', []], ['cb_pseudo_count', []],
#                    ['optimistic', []], ['ucb', []],
#                    ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []],
#                    ['pc_pseudo_count', []], ['deterministic_bootstrapped_cb_pseudo_count',[]]]
# run.append([new_envs, new_batch_names])
#
# new_envs = [[DeepSeaExplorationTwo, [20], lambda n: n, 625]]
# new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
#                    ['boltzmann', []], ['cb_pseudo_count', []],
#                    ['optimistic', []], ['ucb', []],
#                    ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []],
#                    ['pc_pseudo_count', []], ['deterministic_bootstrapped_cb_pseudo_count',[]]]
# run.append([new_envs, new_batch_names])
#
# new_envs = [[DeepSeaExplorationThree, [20], lambda n: n, 625]]
# new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
#                    ['boltzmann', []], ['cb_pseudo_count', []],
#                    ['optimistic', []], ['ucb', []],
#                    ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []],
#                    ['pc_pseudo_count', []], ['deterministic_bootstrapped_cb_pseudo_count',[]]]
# run.append([new_envs, new_batch_names])
#
# new_envs = [[DeepSeaExplorationFour, [20], lambda n: n, 625]]
# new_batch_names = [['eps_greedy', []], ['shared_bootstrap', []], ['bootstrapped', []],
#                    ['boltzmann', []], ['cb_pseudo_count', []],
#                    ['optimistic', []], ['ucb', []],
#                    ['bootstrapped_heads_per_sample', []], ['ucb_infogain', []],
#                    ['pc_pseudo_count', []], ['deterministic_bootstrapped_cb_pseudo_count',[]]]
# run.append([new_envs, new_batch_names])
# Global output/recording settings.
save_directory = "run/RunBiggerProblem"
#num_models = 1000
num_episodes = 7000
#record_indices = [] # 0, 1, 2, 3]
plot_models = 1
plot_heads = 5
save_frame = 1
fps = 15
# Main experiment loop: for every (environment, size, policy batch) combination
# build a fresh TF graph, train all policies of the batch in parallel for
# num_episodes episodes, optionally record Q/density heatmap videos, and save
# the normalized training curves.
for [all_envs, batch_names] in run:
    for [env_build, problem_sizes, problem_to_step, num_models, env_name] in all_envs:
        for N in problem_sizes:
            for [batch_name, record_indices] in batch_names:
                # define the different policies you want to try out
                dir_manager = DirectoryManager(save_directory, "{}_{}".format(env_name, N), batch_name)
                # get policy collection
                policies = PolicyCollection.get_batch(batch_name)
                # define the evaluation rewards
                m = len(env_build)
                training_rewards = np.empty((num_episodes + 1, m, len(policies), num_models))
                training_mean = np.empty((num_episodes + 1, m, len(policies)))
                training_var = np.empty((num_episodes + 1, m, len(policies)))
                # set value for first episode
                training_rewards[0, :, :, :] = 0
                training_mean[0, :, :] = 0
                training_var[0, :, :] = 0
                min_rew = 10000000
                max_rew = -min_rew
                for bi in range(len(env_build)):
                    build_env = env_build[bi]
                    # --------
                    # create variable for the steps and do this amount of steps.
                    num_steps = problem_to_step(N)
                    tf.set_random_seed(seed)
                    graph = tf.Graph()
                    with graph.as_default():
                        tf_config = tf.ConfigProto(log_device_placement=True)
                        tf_config.intra_op_parallelism_threads = 8
                        tf_config.inter_op_parallelism_threads = 8
                        tf_config.gpu_options.allow_growth=True
                        with tf.Session(graph=graph, config=tf_config) as sess:
                            env = build_env("test", [num_models], N)
                            state_space = env.state_space
                            action_space = env.action_space
                            log_action_size = action_space.get_log2_size()
                            time_frame = 20
                            color_pool = ColorCollection.get_colors()
                            # --------------------- Determine the optimal reward --------------------
                            # Determine the agent count
                            num_policies = len(policies)
                            optimal_ih_rew, minimal_ih_rew, min_q, max_q, _ = env.get_optimal(num_steps, 0.99)
                            min_rew = np.minimum(minimal_ih_rew, min_rew)
                            max_rew = np.maximum(optimal_ih_rew, max_rew)
                            # --------------------------------------------------------------------------
                            # Iterate over all policies and create an agent using that specific policy
                            agents = list()
                            q_plots = list()
                            density_plots = list()
                            environments = list()
                            densities = list()
                            q_functions = list()
                            get_best_shared = list()
                            shared_steps = list()
                            for pol_num in range(num_policies):
                                # Get policies and unique name
                                pe = policies[pol_num]
                                unique_name = str(pol_num)
                                # extract important fields
                                policy = pe[1]
                                policy_config = pe[2]
                                policy_config['num_models'] = num_models
                                policy_config['min_q'] = min_q
                                policy_config['max_q'] = max_q
                                policy_config['action_space'] = action_space
                                current_env = env.clone(unique_name)
                                environments.append(current_env)
                                agent = QLearningAgent(sess, unique_name, current_env, policy, policy_config)
                                agents.append(agent)
                                if 'shared_learning' in policy_config:
                                    shared_steps.append(policy_config['shared_steps'])
                                if plot_models > 0 and pol_num in record_indices:
                                    # setup densities
                                    if 'pseudo_count_type' in policy_config and policy_config['pseudo_count_type']:
                                        num_densities = 2
                                        densities.append([agent.cb_complete_densities, agent.ref_complete_densities])
                                    else:
                                        num_densities = 1
                                        densities.append([agent.ref_complete_densities])
                                    # setup q functions
                                    q_functions.append([agent.q_tensor])
                                    # get the learn operations
                                    q_plots.append(
                                        MultiDimensionalHeatmap("q_func_{}".format(pol_num), 1,
                                                                [plot_models, np.minimum(policy_config['num_heads'], plot_heads),
                                                                 state_space.get_size(), action_space.get_size()],
                                                                0.8, 'viridis'))
                                    density_plots.append(
                                        MultiDimensionalHeatmap("density_{}".format(pol_num), num_densities,
                                                                [plot_models, np.minimum(policy_config['num_heads'], plot_heads),
                                                                 state_space.get_size(), action_space.get_size()],
                                                                0.8, 'inferno'))
                                if 'shared_learning' in policy_config and policy_config['shared_learning']:
                                    get_best_shared.append(agent.get_best_heads)
                            # init variables
                            init = tf.global_variables_initializer()
                            sess.run(init)
                            feed_dict = {}
                            for agent in agents:
                                feed_dict[agent.use_best] = True
                            # retrieve the learn operations
                            update_and_receive_rewards = [agent.q_tensor_update for agent in agents]
                            perform_ops = [agent.apply_actions for agent in agents]
                            reset_ops = [envs.reset_op for envs in environments]
                            cum_rew_ops = [envs.cum_rewards for envs in environments]
                            # start the recording
                            for i in range(len(q_plots)):
                                q_plots[i].start_recording(dir_manager.agent_root, fps)
                                density_plots[i].start_recording(dir_manager.agent_root, fps)
                            # iterate over episodes
                            for episode in range(1, num_episodes + 1):
                                start = time.time()
                                # reset all environments
                                sess.run(reset_ops)
                                # for each agent sample a new head
                                state_dict = {}
                                for k in range(num_policies):
                                    agents[k].sample_head()
                                    state_dict[agents[k].use_best] = False
                                # repeat this for the number of steps
                                for k in range(num_steps):
                                    shd_stp_lst = list()
                                    for m in range(num_policies):
                                        if 'shared_steps' in policies[m][2]:
                                            if shared_steps[m] > 0 and (k + (episode * num_steps)) % shared_steps[m] == 0:
                                                shd_stp_lst.append(agents[m].get_best_heads)
                                    # receive rewards and add
                                    sess.run(update_and_receive_rewards, feed_dict=state_dict)
                                    sess.run(shd_stp_lst, feed_dict=state_dict)
                                # copy values
                                # normalize cumulative rewards to [0, 1] relative to the worst/best
                                # infinite-horizon rewards of this environment
                                training_rewards[episode, bi, :, :] = (sess.run(cum_rew_ops) - minimal_ih_rew) / (optimal_ih_rew - minimal_ih_rew)
                                # when a frame should be recorded
                                if len(record_indices) > 0 and (episode - 1) % save_frame == 0:
                                    feed_dict = {}
                                    for agent in agents:
                                        feed_dict[agent.use_best] = True
                                    res_q_functions, res_densities = sess.run([q_functions, densities], feed_dict)
                                    for i in range(len(record_indices)):
                                        # store the q plot
                                        q_plots[i].plot(res_q_functions[i])
                                        q_plots[i].store_frame()
                                        # store the density
                                        density_plots[i].plot(res_densities[i])
                                        density_plots[i].store_frame()
                                print("\tEpisode {} finished after {} ms".format(episode, round((time.time() - start) * 1000, 2)))
                            # stop the recording
                            for i in range(len(q_plots)):
                                q_plots[i].stop_recording()
                                density_plots[i].stop_recording()
                # --------------------------------------------
                # save the plots with all errors
                # --------------------------------------------
                # determine mean and variance
                training_mean = np.mean(training_rewards, axis=(1,3))
                training_var = np.var(training_rewards, axis=(1,3))
                dir_manager.save_tr_va_plots(training_mean, None, [policy[0] for policy in policies], "all_policies.eps")
                # of course print the best policy with variance
                ##cum_mean = np.sum(training_mean, axis=0)
                #best_policy = np.argmax(cum_mean)
                #dir_manager.save_tr_va_plots(training_mean[:, best_policy:best_policy+1], None, policies[best_policy][0], "best_policy.eps")
                # --------------------------------------------
                # Store the rewards etc.
                # --------------------------------------------
                agent_root = dir_manager.root
                np.savetxt(os.path.join(agent_root, "tr_rewards_mean.np"), training_mean)
np.savetxt(os.path.join(agent_root, "tr_rewards_var.np"), training_var) | [
"m.semmler.94@googlemail.com"
] | m.semmler.94@googlemail.com |
863992dedce0ea6eb717daa4383103ec807197fe | 732d62bb15200a8977874c36e0c2624ea45f8a90 | /parser/graph_outputs.py | dedf80165e5544877ee864968672e42d771fcceb | [
"Apache-2.0"
] | permissive | maltintas45/Second_Order_SDP | 16e73d52c4e1d7d030e200eaa34a5c2b9b14b422 | d8e90aef1ff9bade86d602790adf08e37ed4c746 | refs/heads/master | 2022-11-09T01:52:39.787996 | 2020-06-29T14:02:54 | 2020-06-29T14:02:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,396 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright 2017 Timothy Dozat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import os
try:
import cPickle as pkl
except ImportError:
import pickle as pkl
import curses
import time
import numpy as np
import tensorflow as tf
from parser.neural import nn
from scripts.chuliu_edmonds import chuliu_edmonds_one_root
import pdb
#***************************************************************
class GraphOutputs(object):
""""""
_dataset = None
#UF1 unlabeled score, OLS labeled score, LF1 total score
_print_mapping = [('form', 'Form'),
('lemma', 'Lemma'),
('upos', 'UPOS'),
('xpos', 'XPOS'),
('frame', 'UFeat'),
('dephead', 'UAS'),
('deprel', 'OLS'),
('deptree', 'LAS'),
('semhead', 'UF1'),
('semrel', 'OLS'),
('semgraph', 'LF1'),
('attribute','AF1'),
('label','NF1')]
#=============================================================
  def __init__(self, outputs, tokens, load=False, evals=None, factored_deptree=None, factored_semgraph=None, config=None):
    """Build loss/accuracy bookkeeping for a set of network outputs.

    Args:
      outputs: dict mapping field name to a dict of tensors containing at
        least 'loss' and 'probabilities'; 'probabilities' is popped here and
        the remaining entries are kept as accuracy statistics.
      tokens: token-count value(s) stored under the 'total' key of the
        accuracy dict (assumed to be a tensor — TODO confirm against caller).
      load: if True and a history pickle exists for this dataset in
        self.save_dir, resume from it instead of starting fresh.
      evals: field names included in the aggregate score; defaults to all
        output fields.  Each must be a key of _print_mapping.
      factored_deptree: whether the tree loss is factored into unlabeled
        head + label parts (adds a separate 'deprel' history entry).
      factored_semgraph: same for the semantic graph loss ('semrel' entry).
      config: configuration object; stored but not otherwise used in the
        code visible here.
    """
    self.outputs=outputs
    self._factored_deptree = factored_deptree
    self._factored_semgraph = factored_semgraph
    self._config = config
    self._evals = evals or list(outputs.keys())
    #self._evals = config.getlist(self, 'evals')
    valid_evals = set([print_map[0] for print_map in self._print_mapping])
    #pdb.set_trace()
    for eval_ in list(self._evals):
      assert eval_ in valid_evals
    # pdb.set_trace()
    # if 'attribute' in outputs:
    #   self._loss=outputs['attribute']['loss']
    # else:
    # Total loss: sum of per-field losses, with non-finite losses zeroed out.
    self._loss = tf.add_n([tf.where(tf.is_finite(output['loss']), output['loss'], 0.) for output in outputs.values()])
    self._accuracies = {'total': tokens}
    self._probabilities = {}
    self.time = None
    #-----------------------------------------------------------
    for field in outputs:
      self._probabilities[field] = outputs[field].pop('probabilities')
      self._accuracies[field] = outputs[field]
    #-----------------------------------------------------------
    filename = os.path.join(self.save_dir, '{}.pkl'.format(self.dataset))
    # TODO make a separate History object
    if load and os.path.exists(filename):
      with open(filename, 'rb') as f:
        self.history = pkl.load(f)
    else:
      # Fresh history: per-field lists hold one entry per reporting period;
      # scalar counters (fp_tokens, n_edges, ...) accumulate within a period.
      self.history = {
        'total': {'n_batches' : 0,
                  'n_tokens': 0,
                  'n_sequences': 0,
                  'total_time': 0},
        'speed': {'toks/sec': [],
                  'seqs/sec': [],
                  'bats/sec': []}
      }
      for field in self._accuracies:
        if field == 'semgraph':
          for string in ('head', 'graph'):
            self.history['sem'+string] = {
              'loss': [0],
              'tokens': [0],
              'fp_tokens': 0,
              'fn_tokens': 0,
              'sequences': [0]
            }
          if self._factored_semgraph:
            self.history['semrel'] = {
              'loss': [0],
              'tokens': [0],
              'n_edges': 0,
              'sequences': [0]
            }
        elif field == 'attribute':
          self.history[field] = {
            'loss': [0],
            'tokens': [0],
            'fp_tokens': 0,
            'fn_tokens': 0,
            'sequences': [0]
          }
        elif field == 'deptree':
          for string in ('head', 'tree'):
            self.history['dep'+string] = {
              'loss': [0],
              'tokens': [0],
              'sequences': [0]
            }
          if self._factored_deptree:
            self.history['deprel'] = {
              'loss': [0],
              'tokens': [0],
              'sequences': [0]
            }
        elif field == 'label':
          self.history[field] ={
            'loss': [0],
            'tokens': [0],
            'n_tokens': [0],
            'sequences': [0]
          }
        elif field not in ('speed', 'total'):
          self.history[field] ={
            'loss': [0],
            'tokens': [0],
            'sequences': [0]
          }
    self.predictions = {'indices': []}
    return
#=============================================================
  def probs_to_preds(self, probabilities, lengths, force_MST=False, get_argmax=False):
    """Convert per-field probability arrays into discrete predictions.

    Args:
      probabilities: dict mapping field name to a numpy probability array
        (for 'form' optionally a (samples, probs) pair; for 'xpos'
        optionally a list of per-sub-tag arrays).
      lengths: per-sentence token counts; restrict the MST decoder to the
        valid sub-matrix of each sentence.
      force_MST: if True, decode 'semgraph' scores as a single-rooted tree
        via Chu-Liu/Edmonds (like 'deptree') instead of thresholded edges.
      get_argmax: if True, keep only the single highest-scoring head per
        token (still masked to positive scores) rather than every edge with
        probability >= .5.

    Returns:
      dict of numpy predictions per field; 'semrel'/'semhead' and 'attr'
      are nested python lists ('semrel' holds (head, label) pairs).
    """
    predictions = {}
    if 'form' in probabilities:
      form_probs = probabilities['form']
      if isinstance(form_probs, (tuple, list)):
        form_samples, form_probs = form_probs
        form_preds = np.argmax(form_probs, axis=-1)
        predictions['form'] = form_samples[np.arange(len(form_preds)), form_preds]
      else:
        form_preds = np.argmax(form_probs, axis=-1)
        predictions['form'] = form_preds
    if 'lemma' in probabilities:
      lemma_probs = probabilities['lemma']
      lemma_preds = np.argmax(lemma_probs, axis=-1)
      predictions['lemma'] = lemma_preds
    if 'upos' in probabilities:
      upos_probs = probabilities['upos']
      upos_preds = np.argmax(upos_probs, axis=-1)
      predictions['upos'] = upos_preds
    if 'xpos' in probabilities:
      xpos_probs = probabilities['xpos']
      if isinstance(xpos_probs, (tuple, list)):
        xpos_preds = np.concatenate([np.argmax(xpos_prob_mat, axis=-1)[:,:,None] for xpos_prob_mat in xpos_probs], axis=-1)
      else:
        xpos_preds = np.argmax(xpos_probs, axis=-1)
      predictions['xpos'] = xpos_preds
    if 'frame' in probabilities:
      frame_probs = probabilities['frame']
      frame_preds = np.argmax(frame_probs, axis=-1)
      predictions['frame'] = frame_preds
    #if 'head' in probabilities: # TODO MST algorithms
    #  head_probs = probabilities['head']
    #  head_preds = np.argmax(head_probs, axis=-1)
    #  predictions['head'] = head_preds
    if 'deptree' in probabilities:
      # (n x m x m x c)
      deptree_probs = probabilities['deptree']
      if self._factored_deptree:
        # (n x m x m x c) -> (n x m x m)
        dephead_probs = deptree_probs.sum(axis=-1)
        # (n x m x m) -> (n x m)
        #dephead_preds = np.argmax(dephead_probs, axis=-1)
        dephead_preds = np.zeros(dephead_probs.shape[:2], dtype=np.int32)
        # Decode each sentence separately so the MST only sees real tokens.
        for i, (_dephead_probs, length) in enumerate(zip(dephead_probs, lengths)):
          #print(_dephead_probs)
          #input()
          cle = chuliu_edmonds_one_root(_dephead_probs[:length, :length])
          dephead_preds[i, :length] = cle
        # ()
        bucket_size = dephead_preds.shape[1]
        # (n x m) -> (n x m x m)
        one_hot_dephead_preds = (np.arange(bucket_size) == dephead_preds[...,None]).astype(int)
        # (n x m x m) * (n x m x m x c) -> (n x m x c)
        deprel_probs = np.einsum('ijk,ijkl->ijl', one_hot_dephead_preds, deptree_probs)
        # (n x m x c) -> (n x m)
        deprel_preds = np.argmax(deprel_probs, axis=-1)
      else:
        # Unfactored: pick the best joint (head, label) cell directly.
        # (), ()
        bucket_size, n_classes = deptree_probs.shape[-2:]
        # (n x m x m x c) -> (n x m x mc)
        deptree_probs = deptree_probs.reshape([-1, bucket_size, bucket_size*n_classes])
        # (n x m x mc) -> (n x m)
        deptree_preds = np.argmax(deptree_probs, axis=-1)
        # (n x m) -> (n x m)
        dephead_preds = deptree_preds // bucket_size
        deprel_preds = deptree_preds % n_classes
      predictions['dephead'] = dephead_preds
      predictions['deprel'] = deprel_preds
    if 'semgraph' in probabilities:
      if force_MST:
        # Tree-constrained decoding of the semantic scores; mirrors the
        # factored 'deptree' branch above.
        #pdb.set_trace()
        deptree_probs = probabilities['semgraph']
        # (n x m x m x c) -> (n x m x m)
        dephead_probs = deptree_probs.sum(axis=-1)
        # (n x m x m) -> (n x m)
        #dephead_preds = np.argmax(dephead_probs, axis=-1)
        dephead_preds = np.zeros(dephead_probs.shape[:2], dtype=np.int32)
        for i, (_dephead_probs, length) in enumerate(zip(dephead_probs, lengths)):
          #print(_dephead_probs)
          #input()
          cle = chuliu_edmonds_one_root(_dephead_probs[:length, :length])
          dephead_preds[i, :length] = cle
        # ()
        bucket_size = dephead_preds.shape[1]
        # (n x m) -> (n x m x m)
        one_hot_dephead_preds = (np.arange(bucket_size) == dephead_preds[...,None]).astype(int)
        # (n x m x m) * (n x m x m x c) -> (n x m x c)
        deprel_probs = np.einsum('ijk,ijkl->ijl', one_hot_dephead_preds, deptree_probs)
        # (n x m x c) -> (n x m)
        deprel_preds = np.argmax(deprel_probs, axis=-1)
        predictions['dephead'] = dephead_preds
        predictions['deprel'] = deprel_preds
      else:
        #pdb.set_trace()
        # (n x m x m x c)
        semgraph_probs = probabilities['semgraph']
        if self._factored_semgraph:
          # (n x m x m x c) -> (n x m x m)
          semhead_probs = semgraph_probs.sum(axis=-1)
          if get_argmax:
            #pdb.set_trace()
            #semhead_preds = np.argmax(semhead_probs,axis=-1)
            semhead_preds=semhead_probs.max(axis=-1,keepdims=1) == semhead_probs
            semhead_preds*=semhead_probs>0
          else:
            # (n x m x m) -> (n x m x m)
            semhead_preds = np.where(semhead_probs >= .5, 1, 0)
          # (n x m x m x c) -> (n x m x m)
          semrel_preds = np.argmax(semgraph_probs, axis=-1)
          # (n x m x m) (*) (n x m x m) -> (n x m x m)
          semgraph_preds = semhead_preds * semrel_preds
        else:
          # (n x m x m x c) -> (n x m x m)
          semgraph_preds = np.argmax(semgraph_probs, axis=-1)
        # NOTE: label id 0 is treated as "no edge" here, since only nonzero
        # entries of semgraph_preds are emitted.
        predictions['semrel'] = sparse_semgraph_preds = []
        predictions['semhead'] = []
        for i in range(len(semgraph_preds)):
          sparse_semgraph_preds.append([])
          for j in range(len(semgraph_preds[i])):
            sparse_semgraph_preds[-1].append([])
            for k, pred in enumerate(semgraph_preds[i,j]):
              if pred:
                sparse_semgraph_preds[-1][-1].append((k, semgraph_preds[i,j,k]))
    if 'attribute' in probabilities:
      #pdb.set_trace()
      attr_probs = probabilities['attribute']
      attr_preds = np.where(attr_probs >= .5, 1, 0)
      predictions['attr'] = attribute_preds = []
      for i in range(len(attr_preds)):
        attribute_preds.append([])
        for j in range(len(attr_preds[i])):
          attribute_preds[-1].append([])
          for k, pred in enumerate(attr_preds[i,j]):
            if pred:
              attribute_preds[-1][-1].append(k)
    return predictions
#=============================================================
def cache_predictions(self, tokens, indices):
""""""
self.predictions['indices'].extend(indices)
for field in tokens:
if field not in self.predictions:
self.predictions[field] = []
self.predictions[field].extend(tokens[field])
return
#=============================================================
def print_current_predictions(self):
""""""
order = np.argsort(self.predictions['indices'])
fields = ['form', 'lemma', 'upos', 'xpos', 'frame', 'dephead', 'deprel', 'semrel', 'attr']
for i in order:
j = 1
token = []
while j < len(self.predictions['id'][i]):
token = [self.predictions['id'][i][j]]
for field in fields:
if field in self.predictions:
token.append(self.predictions[field][i][j])
else:
token.append('_')
print(u'\t'.join(token))
j += 1
print('')
self.predictions = {'indices': []}
return
#=============================================================
def dump_current_predictions(self, f):
""""""
order = np.argsort(self.predictions['indices'])
fields = ['form', 'lemma', 'upos', 'xpos', 'frame', 'dephead', 'deprel', 'semrel', 'attr']
for idx, i in enumerate(order):
j = 1
token = []
try:
f.write(self.id_buff[idx]+'\n')
except:
pass
while j < len(self.predictions['id'][i]):
token = [self.predictions['id'][i][j]]
for field in fields:
if field in self.predictions:
token.append(self.predictions[field][i][j])
else:
token.append('_')
f.write('\t'.join(token)+'\n')
j += 1
f.write('\n')
self.predictions = {'indices': []}
return
#=============================================================
def compute_token_accuracy(self, field):
""""""
return self.history[field]['tokens'][-1] / (self.history['total']['n_tokens'] + 1e-12)
#=============================================================
def compute_node_accuracy(self, field):
""""""
return self.history[field]['tokens'][-1] / (self.history[field]['n_tokens'][-1] + 1e-12)
def compute_token_F1(self, field):
""""""
precision = self.history[field]['tokens'][-1] / (self.history[field]['tokens'][-1] + self.history[field]['fp_tokens'] + 1e-12)
# if self.compare_precision:
# #print('use precision for comparing model')
# #return self.compute_token_accuracy(field)
# return precision
recall = self.history[field]['tokens'][-1] / (self.history[field]['tokens'][-1] + self.history[field]['fn_tokens'] + 1e-12)
return 2 * (precision * recall) / (precision + recall + 1e-12)
def compute_sequence_accuracy(self, field):
""""""
return self.history[field]['sequences'][-1] / self.history['total']['n_sequences']
#=============================================================
def get_current_accuracy(self):
""""""
token_accuracy = 0
for field in self.history:
if field in self.evals:
if field.startswith('sem'):
token_accuracy += np.log(self.compute_token_F1(field)+1e-12)
elif field == 'attribute':
token_accuracy += np.log(self.compute_token_F1(field)+1e-12)
elif field == 'label':
token_accuracy += np.log(self.compute_node_accuracy(field)+1e-12)
else:
token_accuracy += np.log(self.compute_token_accuracy(field)+1e-12)
token_accuracy /= len(self.evals)
return np.exp(token_accuracy) * 100
#=============================================================
def get_current_geometric_accuracy(self):
""""""
token_accuracy = 0
for field in self.history:
if field in self.evals:
if field.startswith('sem'):
token_accuracy += np.log(self.compute_token_F1(field)+1e-12)
elif field == 'attribute':
token_accuracy += np.log(self.compute_token_F1(field)+1e-12)
elif field == 'label':
token_accuracy += np.log(self.compute_node_accuracy(field)+1e-12)
else:
token_accuracy += np.log(self.compute_token_accuracy(field)+1e-12)
token_accuracy /= len(self.evals)
return np.exp(token_accuracy) * 100
#=============================================================
def restart_timer(self):
""""""
self.time = time.time()
return
#=============================================================
  def update_history(self, outputs):
    """Fold one batch's evaluated output statistics into the running history.

    Expects restart_timer() to have been called before the batch ran; the
    elapsed time since then is added to the running total.  Factored
    'semgraph'/'deptree' outputs are split into unlabeled ('semhead'/
    'dephead'), label-only ('semrel'/'deprel') and combined entries.

    Args:
      outputs: dict mapping field name to that field's evaluated statistics
        (plus a 'total' entry with 'n_tokens'/'n_sequences' counts), as
        produced by running the accuracy tensors — shapes/keys assumed from
        usage below, TODO confirm against the session-run caller.
    """
    self.history['total']['total_time'] += time.time() - self.time
    self.time = None
    self.history['total']['n_batches'] += 1
    self.history['total']['n_tokens'] += outputs['total']['n_tokens']
    self.history['total']['n_sequences'] += outputs['total']['n_sequences']
    for field, output in six.iteritems(outputs):
      #here is how calculate the semrel ...
      #So semhead is unlabeled loss, semgraph is total loss? semrel is the labeled loss
      if field == 'semgraph':
        if self._factored_semgraph:
          self.history['semrel']['loss'][-1] += output['label_loss']
          self.history['semrel']['tokens'][-1] += output['n_correct_label_tokens']
          self.history['semrel']['n_edges'] += output['n_true_positives'] + output['n_false_negatives']
          self.history['semrel']['sequences'][-1] += output['n_correct_label_sequences']
        self.history['semhead']['loss'][-1] += output['unlabeled_loss']
        self.history['semhead']['tokens'][-1] += output['n_unlabeled_true_positives']
        self.history['semhead']['fp_tokens'] += output['n_unlabeled_false_positives']
        self.history['semhead']['fn_tokens'] += output['n_unlabeled_false_negatives']
        self.history['semhead']['sequences'][-1] += output['n_correct_unlabeled_sequences']
        self.history['semgraph']['loss'][-1] += output['loss']
        self.history['semgraph']['tokens'][-1] += output['n_true_positives']
        self.history['semgraph']['fp_tokens'] += output['n_false_positives']
        self.history['semgraph']['fn_tokens'] += output['n_false_negatives']
        self.history['semgraph']['sequences'][-1] += output['n_correct_sequences']
      elif field == 'attribute':
        #pdb.set_trace()
        self.history[field]['loss'][-1] += output['attribute_loss']
        self.history[field]['tokens'][-1] += output['n_true_positives']
        self.history[field]['fp_tokens'] += output['n_false_positives']
        self.history[field]['fn_tokens'] += output['n_false_negatives']
        self.history[field]['sequences'][-1] += output['n_correct_sequences']
      elif field == 'deptree':
        if self._factored_deptree:
          self.history['deprel']['loss'][-1] += output['label_loss']
          self.history['deprel']['tokens'][-1] += output['n_correct_label_tokens']
          self.history['deprel']['sequences'][-1] += output['n_correct_label_sequences']
        self.history['dephead']['loss'][-1] += output['unlabeled_loss']
        self.history['dephead']['tokens'][-1] += output['n_correct_unlabeled_tokens']
        self.history['dephead']['sequences'][-1] += output['n_correct_unlabeled_sequences']
        self.history['deptree']['loss'][-1] += output['loss']
        self.history['deptree']['tokens'][-1] += output['n_correct_tokens']
        self.history['deptree']['sequences'][-1] += output['n_correct_sequences']
      elif field == 'label':
        self.history[field]['loss'][-1] += output['loss']
        self.history[field]['tokens'][-1] += output['n_correct_tokens']
        self.history[field]['sequences'][-1] += output['n_correct_sequences']
        self.history[field]['n_tokens'][-1] += output['n_tokens']
      elif field != 'total':
        # pdb.set_trace()
        # Default case: simple loss/correct-token/correct-sequence counters.
        self.history[field]['loss'][-1] += output['loss']
        self.history[field]['tokens'][-1] += output['n_correct_tokens']
        self.history[field]['sequences'][-1] += output['n_correct_sequences']
    return
#=============================================================
  def print_recent_history(self, stdscr=None, dataprint=False):
    """Summarize the accumulated history for the current period and reset it.

    Converts raw counts into percentages/F1 scores *in place* (the raw
    counts for the period are destroyed), prints one row per output field —
    to a curses screen when ``stdscr`` is given, bare numbers only when
    ``dataprint`` is True, plain stdout otherwise — records throughput,
    pickles the history into the save directory, and appends fresh zero
    entries for the next period.
    """
    n_batches = self.history['total']['n_batches']
    n_tokens = self.history['total']['n_tokens']
    n_sequences = self.history['total']['n_sequences']
    total_time = self.history['total']['total_time']
    self.history['total']['n_batches'] = 0
    self.history['total']['n_tokens'] = 0
    self.history['total']['n_sequences'] = 0
    self.history['total']['total_time'] = 0
    #-----------------------------------------------------------
    if stdscr is not None:
      stdscr.addstr('{:5}\n'.format(self.dataset.title()), curses.color_pair(1) | curses.A_BOLD)
      stdscr.clrtoeol()
    elif dataprint:
      pass
    else:
      print('{:5}\n'.format(self.dataset.title()), end='')
    #semhead semgraph semrel
    for field, string in self._print_mapping:
      if field in self.history:
        # NOTE(review): `tokens`/`tp` below are assigned but never read —
        # kept as-is since this is a documentation-only pass.
        tokens = self.history[field]['tokens'][-1]
        if field in ('semgraph', 'semhead'):
          tp = self.history[field]['tokens'][-1]
          self.history[field]['tokens'][-1] = self.compute_token_F1(field) * 100
        elif field == 'attribute':
          tp = self.history[field]['tokens'][-1]
          self.history[field]['tokens'][-1] = self.compute_token_F1(field) * 100
        elif field == 'label':
          tp = self.history[field]['tokens'][-1]
          self.history[field]['tokens'][-1] = self.compute_node_accuracy(field) * 100
        elif field == 'semrel':
          n_edges = self.history[field]['n_edges']
          self.history[field]['tokens'][-1] *= 100 / n_edges
          self.history[field]['n_edges'] = 0
        else:
          self.history[field]['tokens'][-1] *= 100 / n_tokens
        self.history[field]['loss'][-1] /= n_batches
        self.history[field]['sequences'][-1] *= 100 / n_sequences
        loss = self.history[field]['loss'][-1]
        acc = self.history[field]['tokens'][-1]
        acc_seq = self.history[field]['sequences'][-1]
        if stdscr is not None:
          stdscr.addstr('{:5}'.format(string), curses.color_pair(6) | curses.A_BOLD)
          stdscr.addstr(' | ')
          stdscr.addstr('Loss: {:.2e}'.format(loss), curses.color_pair(3) | curses.A_BOLD)
          stdscr.addstr(' | ')
          stdscr.addstr('Acc: {:5.2f}'.format(acc), curses.color_pair(4) | curses.A_BOLD)
          stdscr.addstr(' | ')
          stdscr.addstr('Seq: {:5.2f}\n'.format(acc_seq), curses.color_pair(4) | curses.A_BOLD)
          stdscr.clrtoeol()
        elif dataprint:
          #print('{:5}'.format(string), end='\t')
          print('{:5.2f}'.format(acc), end=' ')
        else:
          print('{:5}'.format(string), end='')
          print(' | ', end='')
          print('Loss: {:.2e}'.format(loss), end='')
          print(' | ', end='')
          print('Acc: {:5.2f}'.format(acc), end='')
          print(' | ', end='')
          print('Seq: {:5.2f}\n'.format(acc_seq), end='')
        # Roll the field over to the next period: list-valued stats get a new
        # zero entry appended, scalar counters are reset to 0.
        for key, value in six.iteritems(self.history[field]):
          if hasattr(value, 'append'):
            value.append(0)
          else:
            self.history[field][key] = 0
    self.history['speed']['toks/sec'].append(n_tokens / total_time)
    self.history['speed']['seqs/sec'].append(n_sequences / total_time)
    self.history['speed']['bats/sec'].append(n_batches / total_time)
    tps = self.history['speed']['toks/sec'][-1]
    sps = self.history['speed']['seqs/sec'][-1]
    bps = self.history['speed']['bats/sec'][-1]
    if stdscr is not None:
      stdscr.clrtoeol()
      stdscr.addstr('Speed', curses.color_pair(6) | curses.A_BOLD)
      stdscr.addstr(' | ')
      stdscr.addstr('Seqs/sec: {:6.1f}'.format(sps), curses.color_pair(5) | curses.A_BOLD)
      stdscr.addstr(' | ')
      stdscr.addstr('Bats/sec: {:4.2f}\n'.format(bps), curses.color_pair(5) | curses.A_BOLD)
      stdscr.clrtoeol()
      stdscr.addstr('Count', curses.color_pair(6) | curses.A_BOLD)
      stdscr.addstr(' | ')
      stdscr.addstr('Toks: {:6d}'.format(n_tokens), curses.color_pair(7) | curses.A_BOLD)
      stdscr.addstr(' | ')
      stdscr.addstr('Seqs: {:5d}\n'.format(n_sequences), curses.color_pair(7) | curses.A_BOLD)
    elif dataprint:
      pass
    else:
      print('Speed', end='')
      print(' | ', end='')
      print('Seqs/sec: {:6.1f}'.format(sps), end='')
      print(' | ', end='')
      print('Bats/sec: {:4.2f}\n'.format(bps), end='')
      print('Count', end='')
      print(' | ', end='')
      print('Toks: {:6d}'.format(n_tokens), end='')
      print(' | ', end='')
      print('Seqs: {:5d}\n'.format(n_sequences), end='')
    # Persist the rolled-over history so a later run can resume with load=True.
    filename = os.path.join(self.save_dir, '{}.pkl'.format(self.dataset))
    with open(filename, 'wb') as f:
      pkl.dump(self.history, f, protocol=pkl.HIGHEST_PROTOCOL)
    return
#=============================================================
@property
def evals(self):
return self._evals
@property
def accuracies(self):
return dict(self._accuracies)
@property
def probabilities(self):
return dict(self._probabilities)
@property
def loss(self):
return self._loss
@property
def save_dir(self):
return self._config.getstr(self, 'save_dir')
@property
def compare_precision(self):
#pdb.set_trace()
try:
if self._config.getstr(self, 'tb')=='ptb' or self._config.getstr(self, 'tb')=='ctb':
return True
else:
return False
except:
return False
@property
def dataset(self):
return self._dataset
@property
def get_print_dict(self):
evals=self.outputs
printdict={}
if 'semgraph' in evals:
if 'printdata' in evals['semgraph']:
printdict=evals['semgraph']
if 'attribute' in evals:
printdict['attribute']=evals['attribute']
if 'deptree' in evals:
# pdb.set_trace()
if 'printdata' in evals['deptree']:
printdict=evals['deptree']
return printdict
#***************************************************************
class TrainOutputs(GraphOutputs):
  """GraphOutputs specialization bound to the training split."""
  _dataset = 'train'  # returned by the base `dataset` property
class DevOutputs(GraphOutputs):
  """GraphOutputs specialization bound to the development split."""
  _dataset = 'dev'  # returned by the base `dataset` property
| [
"wangxy1@shanghaitech.edu.cn"
] | wangxy1@shanghaitech.edu.cn |
ccf31740e2e3925f9c02344a9356aa29e35987da | bbe447a740929eaee1955bd9c1517cf760dd5cb9 | /keygrabber/adwords/adwords_api_python_14.2.1/build/lib.linux-x86_64-2.7/adspygoogle/adwords/zsi/v201101/AdGroupCriterionService_services_types.py | 38d1e18fac7bc8783b52434f5ad41e6b4ac4852e | [
"Apache-2.0"
] | permissive | MujaahidSalie/aranciulla | f3d32e7dd68ecfca620fe4d3bf22ecb4762f5893 | 34197dfbdb01479f288611a0cb700e925c4e56ce | refs/heads/master | 2020-09-07T02:16:25.261598 | 2011-11-01T21:20:46 | 2011-11-01T21:20:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132,423 | py | ##################################################
# AdGroupCriterionService_services_types.py
# generated by ZSI.generate.wsdl2python
##################################################
import ZSI
import ZSI.TCcompound
from ZSI.schema import LocalElementDeclaration, ElementDeclaration, TypeDefinition, GTD, GED
##############################
# targetNamespace
# https://adwords.google.com/api/adwords/cm/v201101
##############################
class ns0:
targetNamespace = "https://adwords.google.com/api/adwords/cm/v201101"
    class AdGroupCriterion_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'AdGroupCriterion'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterion")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.AdGroupCriterion_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"adGroupId"), aname="_adGroupId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","CriterionUse",lazy=False)(pname=(ns,"criterionUse"), aname="_criterionUse", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Criterion",lazy=False)(pname=(ns,"criterion"), aname="_criterion", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"AdGroupCriterion.Type"), aname="_AdGroupCriterion_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._adGroupId = None
                    self._criterionUse = None
                    self._criterion = None
                    self._AdGroupCriterion_Type = None
                    return
            Holder.__name__ = "AdGroupCriterion_Holder"
            self.pyclass = Holder
    class AdGroupCriterionBids_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'AdGroupCriterionBids'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.AdGroupCriterionBids_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"AdGroupCriterionBids.Type"), aname="_AdGroupCriterionBids_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._AdGroupCriterionBids_Type = None
                    return
            Holder.__name__ = "AdGroupCriterionBids_Holder"
            self.pyclass = Holder
    class AdGroupCriterionError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AdGroupCriterionError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # Splice the base typecode class into __bases__ lazily, on first instantiation.
            if ns0.ApiError_Def not in ns0.AdGroupCriterionError_Def.__bases__:
                bases = list(ns0.AdGroupCriterionError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.AdGroupCriterionError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class AdGroupCriterionExperimentBidMultiplier_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionExperimentBidMultiplier'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionExperimentBidMultiplier")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.AdGroupCriterionExperimentBidMultiplier_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"AdGroupCriterionExperimentBidMultiplier.Type"), aname="_AdGroupCriterionExperimentBidMultiplier_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._AdGroupCriterionExperimentBidMultiplier_Type = None
                    return
            Holder.__name__ = "AdGroupCriterionExperimentBidMultiplier_Holder"
            self.pyclass = Holder
    class AdGroupCriterionLimitExceeded_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionLimitExceeded' (extends EntityCountLimitExceeded); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionLimitExceeded")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AdGroupCriterionLimitExceeded_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionLimitExceeded.CriteriaLimitType",lazy=False)(pname=(ns,"limitType"), aname="_limitType", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.EntityCountLimitExceeded_Def not in ns0.AdGroupCriterionLimitExceeded_Def.__bases__:
                bases = list(ns0.AdGroupCriterionLimitExceeded_Def.__bases__)
                bases.insert(0, ns0.EntityCountLimitExceeded_Def)
                ns0.AdGroupCriterionLimitExceeded_Def.__bases__ = tuple(bases)
            ns0.EntityCountLimitExceeded_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class AdGroupCriterionOperation_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionOperation' (extends Operation); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionOperation")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AdGroupCriterionOperation_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterion",lazy=False)(pname=(ns,"operand"), aname="_operand", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ExemptionRequest",lazy=False)(pname=(ns,"exemptionRequests"), aname="_exemptionRequests", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.Operation_Def not in ns0.AdGroupCriterionOperation_Def.__bases__:
                bases = list(ns0.AdGroupCriterionOperation_Def.__bases__)
                bases.insert(0, ns0.Operation_Def)
                ns0.AdGroupCriterionOperation_Def.__bases__ = tuple(bases)
            ns0.Operation_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class AdGroupCriterionPage_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionPage' (extends Page); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionPage")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AdGroupCriterionPage_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterion",lazy=False)(pname=(ns,"entries"), aname="_entries", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.Page_Def not in ns0.AdGroupCriterionPage_Def.__bases__:
                bases = list(ns0.AdGroupCriterionPage_Def.__bases__)
                bases.insert(0, ns0.Page_Def)
                ns0.AdGroupCriterionPage_Def.__bases__ = tuple(bases)
            ns0.Page_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class AdGroupCriterionReturnValue_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AdGroupCriterionReturnValue' (extends ListReturnValue); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AdGroupCriterionReturnValue")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AdGroupCriterionReturnValue_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterion",lazy=False)(pname=(ns,"value"), aname="_value", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ApiError",lazy=False)(pname=(ns,"partialFailureErrors"), aname="_partialFailureErrors", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ListReturnValue_Def not in ns0.AdGroupCriterionReturnValue_Def.__bases__:
                bases = list(ns0.AdGroupCriterionReturnValue_Def.__bases__)
                bases.insert(0, ns0.ListReturnValue_Def)
                ns0.AdGroupCriterionReturnValue_Def.__bases__ = tuple(bases)
            ns0.ListReturnValue_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ApiError_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'ApiError'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ApiError")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.ApiError_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"fieldPath"), aname="_fieldPath", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"trigger"), aname="_trigger", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"errorString"), aname="_errorString", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"ApiError.Type"), aname="_ApiError_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._fieldPath = None
                    self._trigger = None
                    self._errorString = None
                    self._ApiError_Type = None
                    return
            Holder.__name__ = "ApiError_Holder"
            self.pyclass = Holder
    class ApiException_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'ApiException' (extends ApplicationException); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ApiException")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ApiException_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","ApiError",lazy=False)(pname=(ns,"errors"), aname="_errors", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApplicationException_Def not in ns0.ApiException_Def.__bases__:
                bases = list(ns0.ApiException_Def.__bases__)
                bases.insert(0, ns0.ApplicationException_Def)
                ns0.ApiException_Def.__bases__ = tuple(bases)
            ns0.ApplicationException_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ApplicationException_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'ApplicationException'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ApplicationException")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.ApplicationException_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"message"), aname="_message", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"ApplicationException.Type"), aname="_ApplicationException_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._message = None
                    self._ApplicationException_Type = None
                    return
            Holder.__name__ = "ApplicationException_Holder"
            self.pyclass = Holder
    class AuthenticationError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AuthenticationError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AuthenticationError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AuthenticationError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AuthenticationError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.AuthenticationError_Def.__bases__:
                bases = list(ns0.AuthenticationError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.AuthenticationError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class AuthorizationError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'AuthorizationError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "AuthorizationError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.AuthorizationError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AuthorizationError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.AuthorizationError_Def.__bases__:
                bases = list(ns0.AuthorizationError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.AuthorizationError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Bid_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'Bid'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Bid")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.Bid_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"amount"), aname="_amount", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._amount = None
                    return
            Holder.__name__ = "Bid_Holder"
            self.pyclass = Holder
    class BidMultiplier_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'BidMultiplier'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "BidMultiplier")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.BidMultiplier_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"multiplier"), aname="_multiplier", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"multipliedBid"), aname="_multipliedBid", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._multiplier = None
                    self._multipliedBid = None
                    return
            Holder.__name__ = "BidMultiplier_Holder"
            self.pyclass = Holder
    class BiddableAdGroupCriterion_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'BiddableAdGroupCriterion' (extends AdGroupCriterion); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "BiddableAdGroupCriterion")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.BiddableAdGroupCriterion_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","UserStatus",lazy=False)(pname=(ns,"userStatus"), aname="_userStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","SystemServingStatus",lazy=False)(pname=(ns,"systemServingStatus"), aname="_systemServingStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ApprovalStatus",lazy=False)(pname=(ns,"approvalStatus"), aname="_approvalStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"destinationUrl"), aname="_destinationUrl", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionBids",lazy=False)(pname=(ns,"bids"), aname="_bids", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","BiddableAdGroupCriterionExperimentData",lazy=False)(pname=(ns,"experimentData"), aname="_experimentData", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"firstPageCpc"), aname="_firstPageCpc", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","QualityInfo",lazy=False)(pname=(ns,"qualityInfo"), aname="_qualityInfo", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Stats",lazy=False)(pname=(ns,"stats"), aname="_stats", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.AdGroupCriterion_Def not in ns0.BiddableAdGroupCriterion_Def.__bases__:
                bases = list(ns0.BiddableAdGroupCriterion_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterion_Def)
                ns0.BiddableAdGroupCriterion_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class BiddableAdGroupCriterionExperimentData_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for 'BiddableAdGroupCriterionExperimentData'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "BiddableAdGroupCriterionExperimentData")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.BiddableAdGroupCriterionExperimentData_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"experimentId"), aname="_experimentId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ExperimentDeltaStatus",lazy=False)(pname=(ns,"experimentDeltaStatus"), aname="_experimentDeltaStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ExperimentDataStatus",lazy=False)(pname=(ns,"experimentDataStatus"), aname="_experimentDataStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionExperimentBidMultiplier",lazy=False)(pname=(ns,"experimentBidMultiplier"), aname="_experimentBidMultiplier", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._experimentId = None
                    self._experimentDeltaStatus = None
                    self._experimentDataStatus = None
                    self._experimentBidMultiplier = None
                    return
            Holder.__name__ = "BiddableAdGroupCriterionExperimentData_Holder"
            self.pyclass = Holder
    class BiddingError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'BiddingError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "BiddingError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.BiddingError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","BiddingError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.BiddingError_Def.__bases__:
                bases = list(ns0.BiddingError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.BiddingError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class BudgetOptimizerAdGroupCriterionBids_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'BudgetOptimizerAdGroupCriterionBids' (extends AdGroupCriterionBids); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "BudgetOptimizerAdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.BudgetOptimizerAdGroupCriterionBids_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"proxyBid"), aname="_proxyBid", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"enhancedCpcEnabled"), aname="_enhancedCpcEnabled", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.AdGroupCriterionBids_Def not in ns0.BudgetOptimizerAdGroupCriterionBids_Def.__bases__:
                bases = list(ns0.BudgetOptimizerAdGroupCriterionBids_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionBids_Def)
                ns0.BudgetOptimizerAdGroupCriterionBids_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionBids_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ClientTermsError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'ClientTermsError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ClientTermsError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ClientTermsError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","ClientTermsError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.ClientTermsError_Def.__bases__:
                bases = list(ns0.ClientTermsError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.ClientTermsError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ComparableValue_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'ComparableValue'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ComparableValue")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.ComparableValue_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"ComparableValue.Type"), aname="_ComparableValue_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._ComparableValue_Type = None
                    return
            Holder.__name__ = "ComparableValue_Holder"
            self.pyclass = Holder
    class ConversionOptimizerAdGroupCriterionBids_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'ConversionOptimizerAdGroupCriterionBids' (extends AdGroupCriterionBids, adds no elements); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ConversionOptimizerAdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ConversionOptimizerAdGroupCriterionBids_Def.schema
            TClist = []
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.AdGroupCriterionBids_Def not in ns0.ConversionOptimizerAdGroupCriterionBids_Def.__bases__:
                bases = list(ns0.ConversionOptimizerAdGroupCriterionBids_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionBids_Def)
                ns0.ConversionOptimizerAdGroupCriterionBids_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionBids_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Criterion_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Auto-generated ZSI typecode for WSDL complexType 'Criterion'; do not hand-edit."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Criterion")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.Criterion_Def.schema
            TClist = [ZSI.TC.String(pname=(ns,"id"), aname="_id", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"Criterion.Type"), aname="_Criterion_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._id = None
                    self._Criterion_Type = None
                    return
            Holder.__name__ = "Criterion_Holder"
            self.pyclass = Holder
    class CriterionError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'CriterionError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "CriterionError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.CriterionError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","CriterionError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.CriterionError_Def.__bases__:
                bases = list(ns0.CriterionError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.CriterionError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class CriterionPolicyError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'CriterionPolicyError' (extends PolicyViolationError, adds no elements); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "CriterionPolicyError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.CriterionPolicyError_Def.schema
            TClist = []
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.PolicyViolationError_Def not in ns0.CriterionPolicyError_Def.__bases__:
                bases = list(ns0.CriterionPolicyError_Def.__bases__)
                bases.insert(0, ns0.PolicyViolationError_Def)
                ns0.CriterionPolicyError_Def.__bases__ = tuple(bases)
            ns0.PolicyViolationError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class DatabaseError_Def(TypeDefinition):
        """Auto-generated ZSI typecode for 'DatabaseError' (extends ApiError); do not hand-edit."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "DatabaseError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.DatabaseError_Def.schema
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","DatabaseError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            if ns0.ApiError_Def not in ns0.DatabaseError_Def.__bases__:
                bases = list(ns0.DatabaseError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.DatabaseError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class DateError_Def(TypeDefinition):
        """Typecode for complexType "DateError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "DateError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.DateError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","DateError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.DateError_Def.__bases__:
                bases = list(ns0.DateError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.DateError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class DateRange_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "DateRange"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "DateRange")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.DateRange_Def.schema
            # Typecodes for this type's child elements.
            TClist = [ZSI.TC.String(pname=(ns,"min"), aname="_min", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"max"), aname="_max", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._min = None
                    self._max = None
                    return
            Holder.__name__ = "DateRange_Holder"
            self.pyclass = Holder
    class DistinctError_Def(TypeDefinition):
        """Typecode for complexType "DistinctError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "DistinctError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.DistinctError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","DistinctError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.DistinctError_Def.__bases__:
                bases = list(ns0.DistinctError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.DistinctError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class DoubleValue_Def(TypeDefinition):
        """Typecode for complexType "DoubleValue"; runtime-extends NumberValue_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "DoubleValue")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.DoubleValue_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [ZSI.TC.String(pname=(ns,"number"), aname="_number", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.NumberValue_Def not in ns0.DoubleValue_Def.__bases__:
                bases = list(ns0.DoubleValue_Def.__bases__)
                bases.insert(0, ns0.NumberValue_Def)
                ns0.DoubleValue_Def.__bases__ = tuple(bases)
            ns0.NumberValue_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class EntityAccessDenied_Def(TypeDefinition):
        """Typecode for complexType "EntityAccessDenied"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "EntityAccessDenied")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.EntityAccessDenied_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","EntityAccessDenied.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.EntityAccessDenied_Def.__bases__:
                bases = list(ns0.EntityAccessDenied_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.EntityAccessDenied_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class EntityCountLimitExceeded_Def(TypeDefinition):
        """Typecode for complexType "EntityCountLimitExceeded"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "EntityCountLimitExceeded")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.EntityCountLimitExceeded_Def.schema
            # Typecodes for this type's own child elements (reason, enclosingId, limit).
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","EntityCountLimitExceeded.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"enclosingId"), aname="_enclosingId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"limit"), aname="_limit", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.EntityCountLimitExceeded_Def.__bases__:
                bases = list(ns0.EntityCountLimitExceeded_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.EntityCountLimitExceeded_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class EntityNotFound_Def(TypeDefinition):
        """Typecode for complexType "EntityNotFound"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "EntityNotFound")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.EntityNotFound_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","EntityNotFound.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.EntityNotFound_Def.__bases__:
                bases = list(ns0.EntityNotFound_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.EntityNotFound_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ExemptionRequest_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "ExemptionRequest"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ExemptionRequest")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.ExemptionRequest_Def.schema
            # Typecodes for this type's child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","PolicyViolationKey",lazy=False)(pname=(ns,"key"), aname="_key", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._key = None
                    return
            Holder.__name__ = "ExemptionRequest_Holder"
            self.pyclass = Holder
    class InternalApiError_Def(TypeDefinition):
        """Typecode for complexType "InternalApiError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "InternalApiError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.InternalApiError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","InternalApiError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.InternalApiError_Def.__bases__:
                bases = list(ns0.InternalApiError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.InternalApiError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Keyword_Def(TypeDefinition):
        """Typecode for complexType "Keyword"; runtime-extends Criterion_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Keyword")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.Keyword_Def.schema
            # Typecodes for this type's own child elements (text, matchType).
            TClist = [ZSI.TC.String(pname=(ns,"text"), aname="_text", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","KeywordMatchType",lazy=False)(pname=(ns,"matchType"), aname="_matchType", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.Criterion_Def not in ns0.Keyword_Def.__bases__:
                bases = list(ns0.Keyword_Def.__bases__)
                bases.insert(0, ns0.Criterion_Def)
                ns0.Keyword_Def.__bases__ = tuple(bases)
            ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ListReturnValue_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "ListReturnValue"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ListReturnValue")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.ListReturnValue_Def.schema
            # Typecodes for this type's child elements.
            TClist = [ZSI.TC.String(pname=(ns,"ListReturnValue.Type"), aname="_ListReturnValue_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._ListReturnValue_Type = None
                    return
            Holder.__name__ = "ListReturnValue_Holder"
            self.pyclass = Holder
    class LongValue_Def(TypeDefinition):
        """Typecode for complexType "LongValue"; runtime-extends NumberValue_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "LongValue")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.LongValue_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [ZSI.TC.String(pname=(ns,"number"), aname="_number", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.NumberValue_Def not in ns0.LongValue_Def.__bases__:
                bases = list(ns0.LongValue_Def.__bases__)
                bases.insert(0, ns0.NumberValue_Def)
                ns0.LongValue_Def.__bases__ = tuple(bases)
            ns0.NumberValue_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ManualCPCAdGroupCriterionBids_Def(TypeDefinition):
        """Typecode for complexType "ManualCPCAdGroupCriterionBids"; runtime-extends AdGroupCriterionBids_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ManualCPCAdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ManualCPCAdGroupCriterionBids_Def.schema
            # Typecodes for this type's own child elements (maxCpc, bidSource, positionPreferenceBids, enhancedCpcEnabled).
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"maxCpc"), aname="_maxCpc", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","BidSource",lazy=False)(pname=(ns,"bidSource"), aname="_bidSource", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","PositionPreferenceAdGroupCriterionBids",lazy=False)(pname=(ns,"positionPreferenceBids"), aname="_positionPreferenceBids", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"enhancedCpcEnabled"), aname="_enhancedCpcEnabled", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.AdGroupCriterionBids_Def not in ns0.ManualCPCAdGroupCriterionBids_Def.__bases__:
                bases = list(ns0.ManualCPCAdGroupCriterionBids_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionBids_Def)
                ns0.ManualCPCAdGroupCriterionBids_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionBids_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ManualCPCAdGroupCriterionExperimentBidMultiplier_Def(TypeDefinition):
        """Typecode for complexType "ManualCPCAdGroupCriterionExperimentBidMultiplier"; runtime-extends AdGroupCriterionExperimentBidMultiplier_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ManualCPCAdGroupCriterionExperimentBidMultiplier")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ManualCPCAdGroupCriterionExperimentBidMultiplier_Def.schema
            # Typecodes for this type's own child elements (maxCpcMultiplier, multiplierSource).
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","BidMultiplier",lazy=False)(pname=(ns,"maxCpcMultiplier"), aname="_maxCpcMultiplier", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","MultiplierSource",lazy=False)(pname=(ns,"multiplierSource"), aname="_multiplierSource", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.AdGroupCriterionExperimentBidMultiplier_Def not in ns0.ManualCPCAdGroupCriterionExperimentBidMultiplier_Def.__bases__:
                bases = list(ns0.ManualCPCAdGroupCriterionExperimentBidMultiplier_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionExperimentBidMultiplier_Def)
                ns0.ManualCPCAdGroupCriterionExperimentBidMultiplier_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionExperimentBidMultiplier_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class ManualCPMAdGroupCriterionBids_Def(TypeDefinition):
        """Typecode for complexType "ManualCPMAdGroupCriterionBids"; runtime-extends AdGroupCriterionBids_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "ManualCPMAdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.ManualCPMAdGroupCriterionBids_Def.schema
            # Typecodes for this type's own child elements (maxCpm, bidSource).
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"maxCpm"), aname="_maxCpm", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","BidSource",lazy=False)(pname=(ns,"bidSource"), aname="_bidSource", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.AdGroupCriterionBids_Def not in ns0.ManualCPMAdGroupCriterionBids_Def.__bases__:
                bases = list(ns0.ManualCPMAdGroupCriterionBids_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionBids_Def)
                ns0.ManualCPMAdGroupCriterionBids_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionBids_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Money_Def(TypeDefinition):
        """Typecode for complexType "Money"; runtime-extends ComparableValue_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Money")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.Money_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [ZSI.TC.String(pname=(ns,"microAmount"), aname="_microAmount", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ComparableValue_Def not in ns0.Money_Def.__bases__:
                bases = list(ns0.Money_Def.__bases__)
                bases.insert(0, ns0.ComparableValue_Def)
                ns0.Money_Def.__bases__ = tuple(bases)
            ns0.ComparableValue_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NegativeAdGroupCriterion_Def(TypeDefinition):
        """Typecode for complexType "NegativeAdGroupCriterion"; runtime-extends AdGroupCriterion_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NegativeAdGroupCriterion")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NegativeAdGroupCriterion_Def.schema
            # This type declares no child elements of its own.
            TClist = []
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.AdGroupCriterion_Def not in ns0.NegativeAdGroupCriterion_Def.__bases__:
                bases = list(ns0.NegativeAdGroupCriterion_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterion_Def)
                ns0.NegativeAdGroupCriterion_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NewEntityCreationError_Def(TypeDefinition):
        """Typecode for complexType "NewEntityCreationError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NewEntityCreationError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NewEntityCreationError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","NewEntityCreationError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.NewEntityCreationError_Def.__bases__:
                bases = list(ns0.NewEntityCreationError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.NewEntityCreationError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NotEmptyError_Def(TypeDefinition):
        """Typecode for complexType "NotEmptyError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NotEmptyError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NotEmptyError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","NotEmptyError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.NotEmptyError_Def.__bases__:
                bases = list(ns0.NotEmptyError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.NotEmptyError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NotWhitelistedError_Def(TypeDefinition):
        """Typecode for complexType "NotWhitelistedError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NotWhitelistedError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NotWhitelistedError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","NotWhitelistedError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.NotWhitelistedError_Def.__bases__:
                bases = list(ns0.NotWhitelistedError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.NotWhitelistedError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NullError_Def(TypeDefinition):
        """Typecode for complexType "NullError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NullError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NullError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","NullError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.NullError_Def.__bases__:
                bases = list(ns0.NullError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.NullError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class NumberValue_Def(TypeDefinition):
        """Typecode for complexType "NumberValue"; runtime-extends ComparableValue_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "NumberValue")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.NumberValue_Def.schema
            # This type declares no child elements of its own.
            TClist = []
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ComparableValue_Def not in ns0.NumberValue_Def.__bases__:
                bases = list(ns0.NumberValue_Def.__bases__)
                bases.insert(0, ns0.ComparableValue_Def)
                ns0.NumberValue_Def.__bases__ = tuple(bases)
            ns0.ComparableValue_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Operation_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "Operation"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Operation")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.Operation_Def.schema
            # Typecodes for this type's child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Operator",lazy=False)(pname=(ns,"operator"), aname="_operator", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"Operation.Type"), aname="_Operation_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._operator = None
                    self._Operation_Type = None
                    return
            Holder.__name__ = "Operation_Holder"
            self.pyclass = Holder
    class OperationAccessDenied_Def(TypeDefinition):
        """Typecode for complexType "OperationAccessDenied"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "OperationAccessDenied")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.OperationAccessDenied_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","OperationAccessDenied.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.OperationAccessDenied_Def.__bases__:
                bases = list(ns0.OperationAccessDenied_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.OperationAccessDenied_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class OrderBy_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "OrderBy"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "OrderBy")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.OrderBy_Def.schema
            # Typecodes for this type's child elements.
            TClist = [ZSI.TC.String(pname=(ns,"field"), aname="_field", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","SortOrder",lazy=False)(pname=(ns,"sortOrder"), aname="_sortOrder", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._field = None
                    self._sortOrder = None
                    return
            Holder.__name__ = "OrderBy_Holder"
            self.pyclass = Holder
    class Page_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "Page"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Page")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.Page_Def.schema
            # Typecodes for this type's child elements.
            TClist = [ZSI.TC.String(pname=(ns,"totalNumEntries"), aname="_totalNumEntries", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"Page.Type"), aname="_Page_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._totalNumEntries = None
                    self._Page_Type = None
                    return
            Holder.__name__ = "Page_Holder"
            self.pyclass = Holder
    class Paging_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
        """Typecode for complexType "Paging"; deserializes into a generated Holder pyclass."""
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Paging")
        def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
            ns = ns0.Paging_Def.schema
            # Typecodes for this type's child elements.
            TClist = [ZSI.TC.String(pname=(ns,"startIndex"), aname="_startIndex", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"numberResults"), aname="_numberResults", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
            # Holder receives deserialized values; `typecode = self` captures
            # this typecode instance via the closure over __init__'s `self`.
            class Holder:
                typecode = self
                def __init__(self):
                    # pyclass
                    self._startIndex = None
                    self._numberResults = None
                    return
            Holder.__name__ = "Paging_Holder"
            self.pyclass = Holder
    class PagingError_Def(TypeDefinition):
        """Typecode for complexType "PagingError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "PagingError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.PagingError_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","PagingError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.PagingError_Def.__bases__:
                bases = list(ns0.PagingError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.PagingError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class PercentCPAAdGroupCriterionBids_Def(TypeDefinition):
        """Typecode for complexType "PercentCPAAdGroupCriterionBids"; runtime-extends AdGroupCriterionBids_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "PercentCPAAdGroupCriterionBids")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.PercentCPAAdGroupCriterionBids_Def.schema
            # Typecodes for this type's own child elements (percentCpa, source).
            TClist = [ZSI.TC.String(pname=(ns,"percentCpa"), aname="_percentCpa", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","BidSource",lazy=False)(pname=(ns,"source"), aname="_source", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.AdGroupCriterionBids_Def not in ns0.PercentCPAAdGroupCriterionBids_Def.__bases__:
                bases = list(ns0.PercentCPAAdGroupCriterionBids_Def.__bases__)
                bases.insert(0, ns0.AdGroupCriterionBids_Def)
                ns0.PercentCPAAdGroupCriterionBids_Def.__bases__ = tuple(bases)
            ns0.AdGroupCriterionBids_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class Placement_Def(TypeDefinition):
        """Typecode for complexType "Placement"; runtime-extends Criterion_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "Placement")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.Placement_Def.schema
            # Typecodes for this type's own child elements.
            TClist = [ZSI.TC.String(pname=(ns,"url"), aname="_url", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.Criterion_Def not in ns0.Placement_Def.__bases__:
                bases = list(ns0.Placement_Def.__bases__)
                bases.insert(0, ns0.Criterion_Def)
                ns0.Placement_Def.__bases__ = tuple(bases)
            ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
    class PolicyViolationError_Def(TypeDefinition):
        """Typecode for complexType "PolicyViolationError"; runtime-extends ApiError_Def."""
        #complexType/complexContent extension
        schema = "https://adwords.google.com/api/adwords/cm/v201101"
        type = (schema, "PolicyViolationError")
        def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
            ns = ns0.PolicyViolationError_Def.schema
            # Typecodes for this type's own child elements; violatingParts is unbounded (a list).
            TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","PolicyViolationKey",lazy=False)(pname=(ns,"key"), aname="_key", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"externalPolicyName"), aname="_externalPolicyName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"externalPolicyUrl"), aname="_externalPolicyUrl", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"externalPolicyDescription"), aname="_externalPolicyDescription", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"isExemptable"), aname="_isExemptable", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","PolicyViolationError.Part",lazy=False)(pname=(ns,"violatingParts"), aname="_violatingParts", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
            attributes = self.attribute_typecode_dict = attributes or {}
            if extend: TClist += ofwhat
            if restrict: TClist = ofwhat
            # One-time rebase so the Python class hierarchy mirrors the XSD extension.
            if ns0.ApiError_Def not in ns0.PolicyViolationError_Def.__bases__:
                bases = list(ns0.PolicyViolationError_Def.__bases__)
                bases.insert(0, ns0.ApiError_Def)
                ns0.PolicyViolationError_Def.__bases__ = tuple(bases)
            ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class PolicyViolationError_Part_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the PolicyViolationError.Part complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "PolicyViolationError.Part")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.PolicyViolationError_Part_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"index"), aname="_index", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"length"), aname="_length", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: instantiated on deserialization, one underscore
        # attribute per child element; Holder closes over this typecode (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._index = None
                self._length = None
                return
        Holder.__name__ = "PolicyViolationError.Part_Holder"
        self.pyclass = Holder
class PolicyViolationKey_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the PolicyViolationKey complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "PolicyViolationKey")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.PolicyViolationKey_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"policyName"), aname="_policyName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"violatingText"), aname="_violatingText", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per child element;
        # Holder closes over this typecode instance (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._policyName = None
                self._violatingText = None
                return
        Holder.__name__ = "PolicyViolationKey_Holder"
        self.pyclass = Holder
class PositionPreferenceAdGroupCriterionBids_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the PositionPreferenceAdGroupCriterionBids complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "PositionPreferenceAdGroupCriterionBids")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.PositionPreferenceAdGroupCriterionBids_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Bid",lazy=False)(pname=(ns,"proxyMaxCpc"), aname="_proxyMaxCpc", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"preferredPosition"), aname="_preferredPosition", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"bottomPosition"), aname="_bottomPosition", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per child element;
        # Holder closes over this typecode instance (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._proxyMaxCpc = None
                self._preferredPosition = None
                self._bottomPosition = None
                return
        Holder.__name__ = "PositionPreferenceAdGroupCriterionBids_Holder"
        self.pyclass = Holder
class Predicate_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the Predicate complexType (field/operator/values)."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "Predicate")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.Predicate_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"field"), aname="_field", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Predicate.Operator",lazy=False)(pname=(ns,"operator"), aname="_operator", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"values"), aname="_values", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass; _values starts as a list because the element is
        # maxOccurs="unbounded".
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._field = None
                self._operator = None
                self._values = []
                return
        Holder.__name__ = "Predicate_Holder"
        self.pyclass = Holder
class Product_Def(TypeDefinition):
    """Generated ZSI typecode for the Product complexType
    (complexContent extension of Criterion)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "Product")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.Product_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","ProductCondition",lazy=False)(pname=(ns,"conditions"), aname="_conditions", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"text"), aname="_text", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.Criterion_Def not in ns0.Product_Def.__bases__:
            bases = list(ns0.Product_Def.__bases__)
            bases.insert(0, ns0.Criterion_Def)
            ns0.Product_Def.__bases__ = tuple(bases)
        ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class ProductCondition_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the ProductCondition complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "ProductCondition")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.ProductCondition_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"argument"), aname="_argument", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","ProductConditionOperand",lazy=False)(pname=(ns,"operand"), aname="_operand", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per child element;
        # Holder closes over this typecode instance (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._argument = None
                self._operand = None
                return
        Holder.__name__ = "ProductCondition_Holder"
        self.pyclass = Holder
class ProductConditionOperand_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the ProductConditionOperand complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "ProductConditionOperand")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.ProductConditionOperand_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"operand"), aname="_operand", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass with a single element attribute.
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._operand = None
                return
        Holder.__name__ = "ProductConditionOperand_Holder"
        self.pyclass = Holder
class QualityInfo_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the QualityInfo complexType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "QualityInfo")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.QualityInfo_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"isKeywordAdRelevanceAcceptable"), aname="_isKeywordAdRelevanceAcceptable", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"isLandingPageQualityAcceptable"), aname="_isLandingPageQualityAcceptable", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"isLandingPageLatencyAcceptable"), aname="_isLandingPageLatencyAcceptable", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"qualityScore"), aname="_qualityScore", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per child element;
        # Holder closes over this typecode instance (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._isKeywordAdRelevanceAcceptable = None
                self._isLandingPageQualityAcceptable = None
                self._isLandingPageLatencyAcceptable = None
                self._qualityScore = None
                return
        Holder.__name__ = "QualityInfo_Holder"
        self.pyclass = Holder
class QuotaCheckError_Def(TypeDefinition):
    """Generated ZSI typecode for the QuotaCheckError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "QuotaCheckError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.QuotaCheckError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","QuotaCheckError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.QuotaCheckError_Def.__bases__:
            bases = list(ns0.QuotaCheckError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.QuotaCheckError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RangeError_Def(TypeDefinition):
    """Generated ZSI typecode for the RangeError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "RangeError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.RangeError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","RangeError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.RangeError_Def.__bases__:
            bases = list(ns0.RangeError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.RangeError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RateExceededError_Def(TypeDefinition):
    """Generated ZSI typecode for the RateExceededError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "RateExceededError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.RateExceededError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","RateExceededError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"rateName"), aname="_rateName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"rateScope"), aname="_rateScope", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"retryAfterSeconds"), aname="_retryAfterSeconds", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.RateExceededError_Def.__bases__:
            bases = list(ns0.RateExceededError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.RateExceededError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class ReadOnlyError_Def(TypeDefinition):
    """Generated ZSI typecode for the ReadOnlyError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "ReadOnlyError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.ReadOnlyError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","ReadOnlyError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.ReadOnlyError_Def.__bases__:
            bases = list(ns0.ReadOnlyError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.ReadOnlyError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RejectedError_Def(TypeDefinition):
    """Generated ZSI typecode for the RejectedError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "RejectedError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.RejectedError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","RejectedError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.RejectedError_Def.__bases__:
            bases = list(ns0.RejectedError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.RejectedError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RequestError_Def(TypeDefinition):
    """Generated ZSI typecode for the RequestError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "RequestError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.RequestError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","RequestError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.RequestError_Def.__bases__:
            bases = list(ns0.RequestError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.RequestError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RequiredError_Def(TypeDefinition):
    """Generated ZSI typecode for the RequiredError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "RequiredError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.RequiredError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","RequiredError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.RequiredError_Def.__bases__:
            bases = list(ns0.RequiredError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.RequiredError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class Selector_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the Selector complexType
    (fields, predicates, dateRange, ordering, paging)."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "Selector")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.Selector_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"fields"), aname="_fields", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Predicate",lazy=False)(pname=(ns,"predicates"), aname="_predicates", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","DateRange",lazy=False)(pname=(ns,"dateRange"), aname="_dateRange", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","OrderBy",lazy=False)(pname=(ns,"ordering"), aname="_ordering", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Paging",lazy=False)(pname=(ns,"paging"), aname="_paging", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass; list-valued attributes correspond to the
        # maxOccurs="unbounded" elements above.
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._fields = []
                self._predicates = []
                self._dateRange = None
                self._ordering = []
                self._paging = None
                return
        Holder.__name__ = "Selector_Holder"
        self.pyclass = Holder
class SelectorError_Def(TypeDefinition):
    """Generated ZSI typecode for the SelectorError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "SelectorError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.SelectorError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","SelectorError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.SelectorError_Def.__bases__:
            bases = list(ns0.SelectorError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.SelectorError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class SizeLimitError_Def(TypeDefinition):
    """Generated ZSI typecode for the SizeLimitError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "SizeLimitError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.SizeLimitError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","SizeLimitError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.SizeLimitError_Def.__bases__:
            bases = list(ns0.SizeLimitError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.SizeLimitError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class SoapHeader_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the SoapHeader complexType
    (request header: auth token, client ids, developer token, flags)."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "SoapHeader")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.SoapHeader_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"authToken"), aname="_authToken", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"clientCustomerId"), aname="_clientCustomerId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"clientEmail"), aname="_clientEmail", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"developerToken"), aname="_developerToken", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"userAgent"), aname="_userAgent", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"validateOnly"), aname="_validateOnly", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"partialFailure"), aname="_partialFailure", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per header element.
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._authToken = None
                self._clientCustomerId = None
                self._clientEmail = None
                self._developerToken = None
                self._userAgent = None
                self._validateOnly = None
                self._partialFailure = None
                return
        Holder.__name__ = "SoapHeader_Holder"
        self.pyclass = Holder
class SoapResponseHeader_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the SoapResponseHeader complexType
    (response header: requestId, operations, responseTime, units)."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "SoapResponseHeader")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.SoapResponseHeader_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"requestId"), aname="_requestId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"operations"), aname="_operations", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"responseTime"), aname="_responseTime", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"units"), aname="_units", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per header element.
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._requestId = None
                self._operations = None
                self._responseTime = None
                self._units = None
                return
        Holder.__name__ = "SoapResponseHeader_Holder"
        self.pyclass = Holder
class Stats_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
    """Generated ZSI typecode for the Stats complexType (campaign
    performance statistics: clicks, impressions, cost, conversions, ...)."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "Stats")
    def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
        ns = ns0.Stats_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"startDate"), aname="_startDate", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"endDate"), aname="_endDate", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Stats.Network",lazy=False)(pname=(ns,"network"), aname="_network", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"clicks"), aname="_clicks", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"impressions"), aname="_impressions", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"cost"), aname="_cost", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"averagePosition"), aname="_averagePosition", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"averageCpc"), aname="_averageCpc", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"averageCpm"), aname="_averageCpm", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"ctr"), aname="_ctr", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"conversions"), aname="_conversions", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"conversionRate"), aname="_conversionRate", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), 
            GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"costPerConversion"), aname="_costPerConversion", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"conversionsManyPerClick"), aname="_conversionsManyPerClick", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"conversionRateManyPerClick"), aname="_conversionRateManyPerClick", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","Money",lazy=False)(pname=(ns,"costPerConversionManyPerClick"), aname="_costPerConversionManyPerClick", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"viewThroughConversions"), aname="_viewThroughConversions", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"totalConvValue"), aname="_totalConvValue", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"valuePerConv"), aname="_valuePerConv", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"valuePerConvManyPerClick"), aname="_valuePerConvManyPerClick", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"invalidClicks"), aname="_invalidClicks", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"invalidClickRate"), aname="_invalidClickRate", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"Stats.Type"), aname="_Stats_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
        # Generated pyclass: one underscore attribute per child element;
        # Holder closes over this typecode instance (self).
        class Holder:
            typecode = self
            def __init__(self):
                # pyclass
                self._startDate = None
                self._endDate = None
                self._network = None
                self._clicks = None
                self._impressions = None
                self._cost = None
                self._averagePosition = None
                self._averageCpc = None
                self._averageCpm = None
                self._ctr = None
                self._conversions = None
                self._conversionRate = None
                self._costPerConversion = None
                self._conversionsManyPerClick = None
                self._conversionRateManyPerClick = None
                self._costPerConversionManyPerClick = None
                self._viewThroughConversions = None
                self._totalConvValue = None
                self._valuePerConv = None
                self._valuePerConvManyPerClick = None
                self._invalidClicks = None
                self._invalidClickRate = None
                self._Stats_Type = None
                return
        Holder.__name__ = "Stats_Holder"
        self.pyclass = Holder
class StatsQueryError_Def(TypeDefinition):
    """Generated ZSI typecode for the StatsQueryError complexType
    (complexContent extension of ApiError)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "StatsQueryError")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.StatsQueryError_Def.schema
        TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","StatsQueryError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.ApiError_Def not in ns0.StatsQueryError_Def.__bases__:
            bases = list(ns0.StatsQueryError_Def.__bases__)
            bases.insert(0, ns0.ApiError_Def)
            ns0.StatsQueryError_Def.__bases__ = tuple(bases)
        ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class CriterionUserInterest_Def(TypeDefinition):
    """Generated ZSI typecode for the CriterionUserInterest complexType
    (complexContent extension of Criterion)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "CriterionUserInterest")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.CriterionUserInterest_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"userInterestId"), aname="_userInterestId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"userInterestName"), aname="_userInterestName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.Criterion_Def not in ns0.CriterionUserInterest_Def.__bases__:
            bases = list(ns0.CriterionUserInterest_Def.__bases__)
            bases.insert(0, ns0.Criterion_Def)
            ns0.CriterionUserInterest_Def.__bases__ = tuple(bases)
        ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class CriterionUserList_Def(TypeDefinition):
    """Generated ZSI typecode for the CriterionUserList complexType
    (complexContent extension of Criterion)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "CriterionUserList")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.CriterionUserList_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"userListId"), aname="_userListId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"userListName"), aname="_userListName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), GTD("https://adwords.google.com/api/adwords/cm/v201101","CriterionUserList.MembershipStatus",lazy=False)(pname=(ns,"userListMembershipStatus"), aname="_userListMembershipStatus", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.Criterion_Def not in ns0.CriterionUserList_Def.__bases__:
            bases = list(ns0.CriterionUserList_Def.__bases__)
            bases.insert(0, ns0.Criterion_Def)
            ns0.CriterionUserList_Def.__bases__ = tuple(bases)
        ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class Vertical_Def(TypeDefinition):
    """Generated ZSI typecode for the Vertical complexType
    (complexContent extension of Criterion)."""
    #complexType/complexContent extension
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "Vertical")
    def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
        ns = ns0.Vertical_Def.schema
        TClist = [ZSI.TC.String(pname=(ns,"path"), aname="_path", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
        attributes = self.attribute_typecode_dict = attributes or {}
        if extend: TClist += ofwhat
        if restrict: TClist = ofwhat
        # One-time runtime splice of the base typecode class into __bases__,
        # mirroring the schema's complexContent extension in the Python MRO.
        if ns0.Criterion_Def not in ns0.Vertical_Def.__bases__:
            bases = list(ns0.Vertical_Def.__bases__)
            bases.insert(0, ns0.Criterion_Def)
            ns0.Vertical_Def.__bases__ = tuple(bases)
        ns0.Criterion_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class AdGroupCriterionError_Reason_Def(ZSI.TC.String, TypeDefinition):
    """Generated ZSI typecode for the AdGroupCriterionError.Reason string simpleType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "AdGroupCriterionError.Reason")
    def __init__(self, pname, **kw):
        ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
        # pyclass subclasses str, so deserialized values behave as plain strings.
        class Holder(str):
            typecode = self
        self.pyclass = Holder
class AdGroupCriterionLimitExceeded_CriteriaLimitType_Def(ZSI.TC.String, TypeDefinition):
    """Generated ZSI typecode for the AdGroupCriterionLimitExceeded.CriteriaLimitType string simpleType."""
    schema = "https://adwords.google.com/api/adwords/cm/v201101"
    type = (schema, "AdGroupCriterionLimitExceeded.CriteriaLimitType")
    def __init__(self, pname, **kw):
        ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
        # pyclass subclasses str, so deserialized values behave as plain strings.
        class Holder(str):
            typecode = self
        self.pyclass = Holder
class ApprovalStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "ApprovalStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class AuthenticationError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "AuthenticationError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class AuthorizationError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "AuthorizationError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class BidSource_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "BidSource")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class BiddingError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "BiddingError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ClientTermsError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "ClientTermsError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class CriterionError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "CriterionError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class CriterionUse_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "CriterionUse")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class DatabaseError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "DatabaseError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class DateError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "DateError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class DistinctError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "DistinctError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class EntityAccessDenied_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "EntityAccessDenied.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class EntityCountLimitExceeded_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "EntityCountLimitExceeded.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class EntityNotFound_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "EntityNotFound.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ExperimentDataStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "ExperimentDataStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ExperimentDeltaStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "ExperimentDeltaStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class InternalApiError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "InternalApiError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class KeywordMatchType_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "KeywordMatchType")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class MultiplierSource_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "MultiplierSource")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class NewEntityCreationError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "NewEntityCreationError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class NotEmptyError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "NotEmptyError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class NotWhitelistedError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "NotWhitelistedError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class NullError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "NullError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class OperationAccessDenied_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "OperationAccessDenied.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class Operator_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "Operator")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class PagingError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "PagingError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class Predicate_Operator_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "Predicate.Operator")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class QuotaCheckError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "QuotaCheckError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RangeError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "RangeError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RateExceededError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "RateExceededError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ReadOnlyError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "ReadOnlyError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RejectedError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "RejectedError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RequestError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "RequestError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RequiredError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "RequiredError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class SelectorError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "SelectorError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class SizeLimitError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "SizeLimitError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class SortOrder_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "SortOrder")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class Stats_Network_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "Stats.Network")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class StatsQueryError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "StatsQueryError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class SystemServingStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "SystemServingStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class CriterionUserList_MembershipStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "CriterionUserList.MembershipStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class UserStatus_Def(ZSI.TC.String, TypeDefinition):
schema = "https://adwords.google.com/api/adwords/cm/v201101"
type = (schema, "UserStatus")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class getAdGroupCriterion_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getAdGroupCriterion"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
ns = ns0.getAdGroupCriterion_Dec.schema
TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","Selector",lazy=False)(pname=(ns,"serviceSelector"), aname="_serviceSelector", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","get")
kw["aname"] = "_get"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._serviceSelector = None
return
Holder.__name__ = "get_Holder"
self.pyclass = Holder
class getAdGroupCriterionResponse_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getAdGroupCriterionResponse"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
ns = ns0.getAdGroupCriterionResponse_Dec.schema
TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionPage",lazy=False)(pname=(ns,"rval"), aname="_rval", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","getResponse")
kw["aname"] = "_getResponse"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._rval = None
return
Holder.__name__ = "getAdGroupCriterionResponse_Holder"
self.pyclass = Holder
class ApiExceptionFault_Dec(ElementDeclaration):
literal = "ApiExceptionFault"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","ApiExceptionFault")
kw["aname"] = "_ApiExceptionFault"
if ns0.ApiException_Def not in ns0.ApiExceptionFault_Dec.__bases__:
bases = list(ns0.ApiExceptionFault_Dec.__bases__)
bases.insert(0, ns0.ApiException_Def)
ns0.ApiExceptionFault_Dec.__bases__ = tuple(bases)
ns0.ApiException_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "ApiExceptionFault_Dec_Holder"
class mutateAdGroupCriterion_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "mutateAdGroupCriterion"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
ns = ns0.mutateAdGroupCriterion_Dec.schema
TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionOperation",lazy=False)(pname=(ns,"operations"), aname="_operations", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","mutate")
kw["aname"] = "_mutate"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._operations = []
return
Holder.__name__ = "mutate_Holder"
self.pyclass = Holder
class mutateAdGroupCriterionResponse_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "mutateAdGroupCriterionResponse"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
ns = ns0.mutateAdGroupCriterionResponse_Dec.schema
TClist = [GTD("https://adwords.google.com/api/adwords/cm/v201101","AdGroupCriterionReturnValue",lazy=False)(pname=(ns,"rval"), aname="_rval", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","mutateResponse")
kw["aname"] = "_mutateResponse"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._rval = None
return
Holder.__name__ = "mutateAdGroupCriterionResponse_Holder"
self.pyclass = Holder
class RequestHeader_Dec(ElementDeclaration):
literal = "RequestHeader"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","RequestHeader")
kw["aname"] = "_RequestHeader"
if ns0.SoapHeader_Def not in ns0.RequestHeader_Dec.__bases__:
bases = list(ns0.RequestHeader_Dec.__bases__)
bases.insert(0, ns0.SoapHeader_Def)
ns0.RequestHeader_Dec.__bases__ = tuple(bases)
ns0.SoapHeader_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "RequestHeader_Dec_Holder"
class ResponseHeader_Dec(ElementDeclaration):
literal = "ResponseHeader"
schema = "https://adwords.google.com/api/adwords/cm/v201101"
def __init__(self, **kw):
kw["pname"] = ("https://adwords.google.com/api/adwords/cm/v201101","ResponseHeader")
kw["aname"] = "_ResponseHeader"
if ns0.SoapResponseHeader_Def not in ns0.ResponseHeader_Dec.__bases__:
bases = list(ns0.ResponseHeader_Dec.__bases__)
bases.insert(0, ns0.SoapResponseHeader_Def)
ns0.ResponseHeader_Dec.__bases__ = tuple(bases)
ns0.SoapResponseHeader_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "ResponseHeader_Dec_Holder"
# end class ns0 (tns: https://adwords.google.com/api/adwords/cm/v201101)
| [
"vincenzo.ampolo@gmail.com"
] | vincenzo.ampolo@gmail.com |
61b0ab1ae3ca11fcf2581a67933b5271749e9756 | ef0f2c4557db4388f6ddf46c94abb0ef9c78813d | /main/uppath.py | 725351ab623335d2d4ee859971f2f16eed7c820a | [] | no_license | baopingli/HAttMatting | c3382ccbdcb77419cdfc615eb7380a5388c09419 | 851aba6d1acbcf41749508b8c9cb7d007e4837c2 | refs/heads/main | 2023-02-18T04:10:57.184518 | 2021-01-11T02:51:20 | 2021-01-11T02:51:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | import math
import cv2
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import pytorch_lightning as pl
from .conv_batchnorm_relu import ConvBatchNormRelu
class UpPath(pl.LightningModule):
def __init__(self, *args, **kwargs):
super(UpPath, self).__init__()
self.conv = ConvBatchNormRelu(*args, **kwargs)
self.unpool = nn.MaxUnpool2d(kernel_size=(2, 2), stride=(2, 2))
def forward(self, x, after_pool_feature, indices, output_size, return_conv_result=False):
# print("--------------------------------")
# print(x.shape)
# print(after_pool_feature.shape)
# print(indices.shape)
# print(output_size)
# print("--------------------------------")
if return_conv_result:
conv_result = torch.add(self.conv(x), after_pool_feature)
return self.unpool(conv_result, indices, output_size=output_size), conv_result
return self.unpool(torch.add(self.conv(x), after_pool_feature), indices, output_size=output_size)
| [
"vietnamican@gmail.com"
] | vietnamican@gmail.com |
cc454eceed7f736e10e8d949d9ebe9daf2909b01 | f5f62ed1f738f1f7a863468c7d6c0cf20af490f3 | /Vorlesung/src/Basic/date_class.py | 665b8a0a90ebb42f77ecf18b59b03b61caceeea7 | [] | no_license | AndrejLehmann/my_pfn_2019 | e0ceb7e0b999abb978c10bc15c7e05f31739b8ca | fe4d4ddb7192ecd2c0e0dbe043d72485c5ed9810 | refs/heads/master | 2020-08-31T01:53:37.486097 | 2020-04-20T20:48:08 | 2020-04-20T20:48:08 | 218,550,745 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,014 | py | #!/usr/bin/env python3
import sys, re, argparse
def is_leap_year(year):
return year % 400 == 0 or (year % 4 == 0 and year % 100 != 0)
class Date:
daysinmonth = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6:30,
7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12:31}
def __init__(self,dstring):
mo = re.search(r'(\d{2})\.(\d{2})\.(\d{4})',dstring)
if mo:
self._day = int(mo.group(1))
self._month = int(mo.group(2))
self._year = int(mo.group(3))
else:
mo = re.search(r'(\d{4})-(\d{2})-(\d{2})',dstring)
if mo:
self._year = int(mo.group(1))
self._month = int(mo.group(2))
self._day = int(mo.group(3))
else:
raise Exception('"{}" is not a valid date'.format(dstring))
def date2number(self):
dayofyear = 0
assert self._month <= 12
for m in range(1,self._month):
dayofyear += Date.daysinmonth[m]
if m == 2 and is_leap_year(self._year):
dayofyear += 1
dayofyear += self._day
return dayofyear
def __str__(self):
return '{:02d}.{:02d}.{}'.format(self._day,self._month,self._year)
def parse_arguments():
p = argparse.ArgumentParser(description='parse dates and output')
p.add_argument('-d','--day2number',action='store_true',default=False,
help='show day of date in year')
p.add_argument('--inputfile',type=str,default='../../../../exercises/programmierung/python/Datetonumber/randomdates.csv',help='specify input file')
return p.parse_args()
if __name__ == '__main__':
args = parse_arguments()
try:
stream = open(args.inputfile,'r')
except IOError as err:
sys.stderr.write('{}: {}\n'.format(sys.argv[0],err))
exit(1)
for line in stream:
line = line.rstrip()
try:
dt = Date(line)
except Exception as err:
sys.stderr.write('{}: {}\n'.format(sys.argv[0],err))
exit(1)
values = [str(dt)]
if args.day2number:
values.append(str(dt.date2number()))
print('\t'.join(values))
stream.close
| [
"alehmann@physnet.uni-hamburg.de"
] | alehmann@physnet.uni-hamburg.de |
69f61deaa21aac62110923ba4cf7ab0bb97e8230 | 8f8e378c0ce4224244582c506c268edda3cc3b30 | /Common/DL/Day2/New/regression3.py | 3d37f5c556a00de3cebaf683e24d265254ed448a | [] | no_license | srsapireddy/Diploma-in-AI_NIELIT_Files | 223318319b2d4b8647d77b99d1ba03f0d6e15cf6 | 9e2ed78fbe03369ebef1aa81f3417fc21bdd4107 | refs/heads/master | 2021-05-17T14:28:00.059617 | 2020-03-29T09:28:04 | 2020-03-29T09:28:04 | 250,820,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | # mlp for regression with mse loss function
from sklearn.datasets import make_regression
from sklearn.preprocessing import StandardScaler
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from matplotlib import pyplot
# generate regression dataset
X, y = make_regression(n_samples=1000, n_features=20, noise=0.1)
# standardize dataset
X = StandardScaler().fit_transform(X)
print(y.shape)
y = StandardScaler().fit_transform(y.reshape(len(y),1))[:,0]
print(y.shape)
# split into train and test
n_train = 500
trainX, testX = X[:n_train, :], X[n_train:, :]
trainy, testy = y[:n_train], y[n_train:]
# define model
model = Sequential()
model.add(Dense(25, input_dim=20, activation='relu'))
model.add(Dense(1, activation='linear'))
opt = SGD(lr=0.01, momentum=0.9)
model.compile(loss='mean_squared_logarithmic_error', optimizer=opt,metrics=['mse'])
# fit model
history = model.fit(trainX, trainy, validation_data=(testX, testy), epochs=100, verbose=0)
# evaluate the model
_,train_mse = model.evaluate(trainX, trainy, verbose=0)
_,test_mse = model.evaluate(testX, testy, verbose=0)
print('Train: %.3f, Test: %.3f' % (train_mse, test_mse))
# plot loss during training
pyplot.title('Loss / Mean Squared Error')
pyplot.plot(history.history['loss'], label='train')
pyplot.plot(history.history['val_loss'], label='test')
pyplot.legend()
pyplot.show()
pyplot.plot(history.history['mean_squared_error'], label='train')
pyplot.plot(history.history['val_mean_squared_error'], label='test')
pyplot.legend()
pyplot.show()
| [
"sapireddyrahul@gmail.com"
] | sapireddyrahul@gmail.com |
029528a291961da7c442a5c7e15d8be06bc73bb3 | ddbb2bab9c96705c4ceb9e0d92e285d24d6acc0e | /Basic/chap08.py | 1b344e337ba1816ff33cd4ccd0aff94e429ba1e1 | [] | no_license | louisuss/CrawlingStudy | e25ca256be7c3c0bda026f1d404a9d91c36f9dd7 | 78f33939865bb8eba5adc042d63fe3def67382f2 | refs/heads/master | 2022-12-15T17:54:44.209175 | 2020-08-23T12:27:06 | 2020-08-23T12:27:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,545 | py | # 다음 주식 정보 가져오기
import json
import urllib.request as req
from fake_useragent import UserAgent
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
# Fake Header 정보 (가상으로 UserAgent 생성)
ua = UserAgent()
# print(ua.chrome)
# print(ua.safari)
# print(ua.random)
# 헤더 정보
headers = {
'User-agent': ua.safari,
'referer': 'http://finance.daum.net/'
}
# 다음 주식 요청 URL
url = 'http://finance.daum.net/api/search/ranks?limit=10'
# 요청
# Request() 객체 클래스 안에 url, headers 정보 입력
res = req.urlopen(req.Request(url, headers=headers)).read().decode('UTF-8')
# 응답 데이터 확인 (Json Data)
# print('res', res)
# 응답 데이터 str -> json 변환 및 data 값 출력
rank_json = json.loads(res)['data']
# 중간 확인
# print('중간 확인: \n',rank_json)
# print()
for data in rank_json:
print('순위: {}, 금액: {}, 회사명: {}'.format(
data['rank'], data['tradePrice'], data['name']))
# 순위: 1, 금액: 24800, 회사명: 노터스
# 순위: 2, 금액: 328000, 회사명: 셀트리온
# 순위: 3, 금액: 73000, 회사명: 신풍제약
# 순위: 4, 금액: 325000, 회사명: 카카오
# 순위: 5, 금액: 54400, 회사명: 삼성전자
# 순위: 6, 금액: 117500, 회사명: 현대차
# 순위: 7, 금액: 191000, 회사명: SK바이오팜
# 순위: 8, 금액: 69200, 회사명: 일양약품
# 순위: 9, 금액: 106500, 회사명: 셀트리온헬스케어
# 순위: 10, 금액: 175500, 회사명: 씨젠
| [
"dobi1115@gmail.com"
] | dobi1115@gmail.com |
edd2294d74c818df111097d4dda5b7c88d015b4f | 0410c8e7db491283fe1383731aa2f130fccfaff1 | /main.py | 1d2b7f0dbb572a7087c63387a33e8b920811d516 | [] | no_license | simplifies/PassGetter | e1af86cfed2012cbea76c90c3c4a0b0ac9b7a4fa | d4ded9b966f18c6f7d3fe5718662ea02b2e1e3b3 | refs/heads/master | 2022-12-08T23:30:18.342699 | 2020-09-16T15:20:28 | 2020-09-16T15:20:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,687 | py | import os
import sys
import xml.etree.ElementTree as ET
import time
import sockets.client as client
import glob
from platform import system
system = system()
if system == "Windows":
import win32com.shell.shell as shell
else:
pass
def admin():
if system == "Windows":
if sys.argv[-1] != 'asadmin':
script = os.path.abspath(sys.argv[0])
params = ' '.join([script] + sys.argv[1:] + ['asadmin'])
shell.ShellExecuteEx(lpVerb='runas', lpFile=sys.executable, lpParameters=params)
else:
pass
elif system == "Linux":
os.system('xdg-mime query default x-scheme-handler/http > browser.txt')
def detect_browser():
if system == "Windows":
os.system('dism /online /Export-DefaultAppAssociations:"%UserProfile%\Desktop\FileAssociations.xml"')
time.sleep(5)
root = ET.parse("C:" + os.getenv('HOMEPATH') + r'\Desktop\FileAssociations.xml').getroot()
for type_tag in root:
value = type_tag.get('Identifier')
if value == "https":
browser = type_tag.get("ApplicationName")
os.remove("C:" + os.getenv('HOMEPATH') + r'\Desktop\FileAssociations.xml')
return browser
elif system == "Linux":
with open('browser.txt', 'r') as f:
browser = f.read()
os.remove('browser.txt')
return browser
def run_wizard(browser):
if system == "Windows":
from browser_windows.win_operagx import windows_opera
from browser_windows.win_chrome import windows
import browser_windows.win_firefox as win_firefox
NSS = win_firefox.NSSDecoder()
if "Opera" in browser:
windows_opera()
elif "Chrome" in browser:
windows()
elif "Firefox" in browser:
win_firefox.decrypt_passwords()
else:
print("The browser is not supported")
elif system == "Linux":
from browsers_linux.linux_chrome import main
import browsers_linux.linux_firefox as linux_firefox
NSS = linux_firefox.NSSDecoder()
if 'Firefox' or 'firefox' in browser:
linux_firefox.decrypt_passwords()
elif 'chrome' or 'Chrome' in browser:
main()
else:
print('the browser is not supported')
if __name__ == '__main__':
admin()
browser = detect_browser()
run_wizard(browser)
filename = ["pass.db", "firepass.db", "operagx.db"]
host = ""
port = 5001
for files in filename:
if files in glob.glob('*.db'):
client.send_file(files, host, port)
else:
pass
| [
"noreply@github.com"
] | simplifies.noreply@github.com |
44018e28e2d265972adab72159e70fac25faa7de | 4786216d2a8e9221cc3624366152f47ae513e5c7 | /宁波.py | ff724a08671aa54838c2f79d8e16fb9c9f4ede8f | [] | no_license | injuredangel/- | b6a2502ee026320b96947d41c223edebe3ec65cc | 7988c6aa5e825504ff59b006c37d4383b3bb1da8 | refs/heads/master | 2020-05-25T02:21:15.654253 | 2019-05-20T06:27:42 | 2019-05-20T06:27:42 | 187,575,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,300 | py | import urllib.request
import re
import csv
headers = {
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.26 Safari/537.36 Core/1.63.6756.400 QQBrowser/10.3.2545.400',
'Connection':'keep-alive',
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Cookie':'_DM_SID_=2ee7424b9d76336f549c5910ff652279; Hm_lvt_5185a335802fb72073721d2bb161cd94=1540209209; _Z3nY0d4C_=37XgPK9h; JSESSIONID=B344BDEBBCD779A68507944A40EB76AC; f9big=ningbo1166; _DM_S_=233c9eb1959baa3e4ddd77aa01a77ca3; screen=1218; f19big=ip49; _dm_userinfo=%7B%22uid%22%3A0%2C%22stage%22%3A%22%22%2C%22city%22%3A%22%E6%B5%99%E6%B1%9F%3A%E6%9D%AD%E5%B7%9E%22%2C%22ip%22%3A%22115.192.32.72%22%2C%22sex%22%3A%222%22%2C%22frontdomain%22%3A%22ningbo.19lou.com%22%2C%22category%22%3A%22%E6%88%BF%E4%BA%A7%2C%E6%B1%82%E8%81%8C%2C%E6%97%B6%E5%B0%9A%2C%E6%95%B0%E7%A0%81%22%7D; pm_count=%7B%22pc_allCity_threadView_button_adv_190x205_1%22%3A13%7D; dayCount=%5B%7B%22id%22%3A78784%2C%22count%22%3A1%7D%5D; Hm_lvt_768876c24ee8384562c8812bd6191b4f=1540209215,1540274300,1540291740,1540362056; _dm_tagnames=%5B%7B%22k%22%3A%22%E5%AE%81%E6%B3%A2%E7%A7%9F%E6%88%BF%E8%AE%BA%E5%9D%9B%22%2C%22c%22%3A13%7D%2C%7B%22k%22%3A%22%E7%A7%9F%E6%88%BF%E7%BB%8F%E9%AA%8C%E5%88%86%E4%BA%AB%22%2C%22c%22%3A13%7D%2C%7B%22k%22%3A%221%E5%AE%A4%201%E5%8E%85%201%E5%8D%AB%201%E9%98%B3%E5%8F%B0%20%22%2C%22c%22%3A7%7D%2C%7B%22k%22%3A%2220%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%22%E4%B8%AD%E5%B1%B1%E4%B8%9C%E8%B7%AF%E7%A6%8F%E6%98%8E%E8%B7%AF%E4%BA%A4%E5%8F%89%E5%8F%A3%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%2215%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%22%E6%B1%9F%E4%B8%9C%E4%B8%96%E7%BA%AA%E5%A4%A7%E9%81%93%E5%92%8C%E6%83%8A%E9%A9%BE%E8%B7%AF%E8%B7%AF%E5%8F%A3%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%22cc%22%2C%22c%22%3A5%7D%2C%7B%22k%22%3A%22%E5%90%88%E7%A7%9F%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%22%E5%BA%8A%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%22%E9%9B%86%E7%BB%93%E5%8F%B7%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%22cc%22%2C%22c%22%3A4%7D%2C%7B%22k%22%3A%22%E7%A7%9F%E6%88%BF%22%2C%22c%22%3A3%7D%2C%7B%22k%22%3A%22spa%22%2C%22c%22%3A3%7D%2C%7B%22k%22%3A%22%E6%8B%9B%E8%81%98%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%22Calvin%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%22%E7%BF%BB%E6%96%B0%E6%9C%BA%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%22ck%22%2C%
22c%22%3A1%7D%2C%7B%22k%22%3A%22%E4%BA%A4%E6%98%93%22%2C%22c%22%3A1%7D%2C%7B%22k%22%3A%224%E5%AE%A4%202%E5%8E%85%202%E5%8D%AB%202%E9%98%B3%E5%8F%B0%20%22%2C%22c%22%3A1%7D%5D; Hm_lpvt_768876c24ee8384562c8812bd6191b4f=1540362059',
'Host':'ningbo.19lou.com',
'Referer':'https://ningbo.19lou.com/forum-1996-thread-183351540274298747-1-1.html',
'Upgrade-Insecure-Requests':1
}
# Module-level accumulators shared by the crawl functions below:
second_href = []   # thread-detail URLs scraped from each forum listing page
second_href1 = []  # field values extracted from one thread's detail table
def create_request(url):
    '''Build and return a urllib Request for *url* carrying the
    module-level ``headers`` (fake browser identity + session cookie).'''
    return urllib.request.Request(url=url, headers=headers)
def send_request(request):
    '''Send *request* over the network and return the raw response body.

    :param request: a prepared ``urllib.request.Request`` object
    :return: the response payload as ``bytes`` (caller decodes it)

    Fix: the original called ``urlopen(request).read()`` and never closed
    the response object, leaking the underlying socket on every page
    fetched; the context manager releases it deterministically.
    '''
    with urllib.request.urlopen(request) as response:
        return response.read()
def write_html_page(response,page_name):
'''这个函数用来将爬取的网页写入文件'''
# response = response.decode('gbk','ignore')
with open('./detail/diyiji/%s'%(page_name),'wb+') as file:
file.write(response)
def write_html_page1(response,page_name):
'''这个函数用来将爬取的网页写入文件'''
# response = response.decode('gbk','ignore')
with open('./detail/xianqing/%s'%(page_name),'wb+') as file:
file.write(response)
def tiqu_href(html):
'''这个函数专门用来负责二级页面的超链接提取'''
html = html.decode('utf-8','ignore')
obj = re.findall(r' <a id=".*" class=".*" style=".*" href="(.*?)" itemprop=".*" target=".*" title=".*">', html)
# print(obj)
second_href.extend(obj)
# print(second_href)
def tiqu_href2(html):
'''这个函数专门用来负责二级页面的数据的提取'''
html = html.decode('gbk','ignore')
obj = re.findall(r'<tr><th>.*?</th><td>\s+(.*?)\s+(.*?)\s+</td></tr>', html)
try :
for i in obj:
for x in i:
spar2_list = x.__str__()
# print(type(spar2_list))
second_href1.append(spar2_list)
if '' in second_href1:
second_href1.remove('')
second_href1[1] = second_href1[1] + second_href1[2]
del second_href1[2]
second_href1[3] = second_href1[3] + second_href1[4]
del second_href1[4]
second_href1[11] = second_href1[11] + second_href1[12]
del second_href1[12]
except Exception as e:
print(e)
def write_msg(data):
'''这个函数用来将爬取的数据写入文件'''
with open("./detail/xianqing/test1.csv", 'a+', newline='') as f:
wf = csv.writer(f)
wf.writerow(data)
if __name__ == '__main__':
headers1 = ['房型', '户型', '区域', '建筑面积', '具体地址', '楼层', '朝向', '有效期', '租赁方式', '性别要求','最短租期', '租金', '付款要求', '押金要求','房屋来源', '建筑年代', '装修程度', '配套设施', '可入住时间', '特殊情况说明', '周边公交站点', '房源特点']
with open('./detail/xianqing/test1.csv', 'a+') as f:
wf = csv.writer(f)
wf.writerow(headers1)
f.close()
for n in range(1,80):
url = 'https://ningbo.19lou.com/forum-1996-'+str(n)+'.html'
print(url)
request = create_request(url)
response = send_request(request)
tiqu_href(response)
write_html_page(response,'这是首页的内容')
print(len(second_href))
for i in range(0,len(second_href)):
# print(second_href)
url1 = 'http:'+str(second_href[i])
print(url1)
request1 = create_request(url1)
response1 = send_request(request1)
# print(type(response1))
tiqu_href2(response1)
# write_html_page1(response1, '这是%d页第%d条的内容'%(n,i))
print('这是%d页第%d条的内容'%(n,i))
write_msg(second_href1)
second_href1.clear()
second_href.clear()
| [
"you@example.com"
] | you@example.com |
1fc6b77c43cbb4d11e20de82d9d104daac283aeb | f43d3731a21ee5df09298f5541b52484f408e010 | /spider/news/MongoPipeline.py | 687daf1f028f475fc148563dcbcad22e96ec0d0c | [] | no_license | cash2one/wechat_admin | 2ba8c35deffff37c263b7091229ba2d86f2aaeaf | af0712fdad867d76dcee2092abcf32cada49d075 | refs/heads/master | 2021-05-04T22:22:53.514787 | 2017-09-25T10:03:07 | 2017-09-25T10:03:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 374 | py | import json
from spider.loggers.log import crawler
from spider.news.Pipeline import Pipeline
from spider.util.MongoUtil import MongoUtil
class MongoPipeline(Pipeline):
def __init__(self, collection):
self.collection = collection
def put(self, item):
json_obj = item.to_dict()
MongoUtil.save(self.collection, json_obj)
return item
| [
"“545314690@qq.com”"
] | “545314690@qq.com” |
6c6da653ecf4ee54417895649167041626d316a4 | 5ad9f0e5602c9986c190215c0e5957a35d1a43cb | /venv/Lib/site-packages/nltk/sem/linearlogic.py | 38457a72187cd70e2365263f952bf2f4e4a5eeb4 | [
"MIT"
] | permissive | wlsl4239/Tacotron-Wavenet-Vocoder | cef6606a113add0391f467e102b4a6736d94e2fd | afc60aac989f1fed827e9cf8f7df0c0c05c67885 | refs/heads/master | 2020-07-17T10:18:03.653974 | 2019-11-20T12:40:43 | 2019-11-20T12:40:43 | 205,998,408 | 2 | 0 | MIT | 2019-09-03T05:47:35 | 2019-09-03T05:47:35 | null | UTF-8 | Python | false | false | 16,633 | py | # Natural Language Toolkit: Linear Logic
#
# Author: Dan Garrette <dhgarrette@gmail.com>
#
# Copyright (C) 2001-2017 NLTK Project
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals
from six import string_types
from nltk.internals import Counter
from nltk.compat import python_2_unicode_compatible
from nltk.sem.logic import LogicParser, APP
_counter = Counter()
class Tokens(object):
#Punctuation
OPEN = '('
CLOSE = ')'
#Operations
IMP = '-o'
PUNCT = [OPEN, CLOSE]
TOKENS = PUNCT + [IMP]
class LinearLogicParser(LogicParser):
"""A linear logic expression parser."""
def __init__(self):
LogicParser.__init__(self)
self.operator_precedence = {APP: 1, Tokens.IMP: 2, None: 3}
self.right_associated_operations += [Tokens.IMP]
def get_all_symbols(self):
return Tokens.TOKENS
def handle(self, tok, context):
if tok not in Tokens.TOKENS:
return self.handle_variable(tok, context)
elif tok == Tokens.OPEN:
return self.handle_open(tok, context)
def get_BooleanExpression_factory(self, tok):
if tok == Tokens.IMP:
return ImpExpression
else:
return None
def make_BooleanExpression(self, factory, first, second):
return factory(first, second)
def attempt_ApplicationExpression(self, expression, context):
"""Attempt to make an application expression. If the next tokens
are an argument in parens, then the argument expression is a
function being applied to the arguments. Otherwise, return the
argument expression."""
if self.has_priority(APP, context):
if self.inRange(0) and self.token(0) == Tokens.OPEN:
self.token() #swallow then open paren
argument = self.process_next_expression(APP)
self.assertNextToken(Tokens.CLOSE)
expression = ApplicationExpression(expression, argument, None)
return expression
def make_VariableExpression(self, name):
if name[0].isupper():
return VariableExpression(name)
else:
return ConstantExpression(name)
@python_2_unicode_compatible
class Expression(object):
_linear_logic_parser = LinearLogicParser()
@classmethod
def fromstring(cls, s):
return cls._linear_logic_parser.parse(s)
def applyto(self, other, other_indices=None):
return ApplicationExpression(self, other, other_indices)
def __call__(self, other):
return self.applyto(other)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self)
@python_2_unicode_compatible
class AtomicExpression(Expression):
def __init__(self, name, dependencies=None):
"""
:param name: str for the constant name
:param dependencies: list of int for the indices on which this atom is dependent
"""
assert isinstance(name, string_types)
self.name = name
if not dependencies:
dependencies = []
self.dependencies = dependencies
def simplify(self, bindings=None):
"""
If 'self' is bound by 'bindings', return the atomic to which it is bound.
Otherwise, return self.
:param bindings: ``BindingDict`` A dictionary of bindings used to simplify
:return: ``AtomicExpression``
"""
if bindings and self in bindings:
return bindings[self]
else:
return self
def compile_pos(self, index_counter, glueFormulaFactory):
"""
From Iddo Lev's PhD Dissertation p108-109
:param index_counter: ``Counter`` for unique indices
:param glueFormulaFactory: ``GlueFormula`` for creating new glue formulas
:return: (``Expression``,set) for the compiled linear logic and any newly created glue formulas
"""
self.dependencies = []
return (self, [])
def compile_neg(self, index_counter, glueFormulaFactory):
"""
From Iddo Lev's PhD Dissertation p108-109
:param index_counter: ``Counter`` for unique indices
:param glueFormulaFactory: ``GlueFormula`` for creating new glue formulas
:return: (``Expression``,set) for the compiled linear logic and any newly created glue formulas
"""
self.dependencies = []
return (self, [])
def initialize_labels(self, fstruct):
self.name = fstruct.initialize_label(self.name.lower())
def __eq__(self, other):
return self.__class__ == other.__class__ and self.name == other.name
def __ne__(self, other):
return not self == other
def __str__(self):
accum = self.name
if self.dependencies:
accum += "%s" % self.dependencies
return accum
def __hash__(self):
return hash(self.name)
class ConstantExpression(AtomicExpression):
def unify(self, other, bindings):
"""
If 'other' is a constant, then it must be equal to 'self'. If 'other' is a variable,
then it must not be bound to anything other than 'self'.
:param other: ``Expression``
:param bindings: ``BindingDict`` A dictionary of all current bindings
:return: ``BindingDict`` A new combined dictionary of of 'bindings' and any new binding
:raise UnificationException: If 'self' and 'other' cannot be unified in the context of 'bindings'
"""
assert isinstance(other, Expression)
if isinstance(other, VariableExpression):
try:
return bindings + BindingDict([(other, self)])
except VariableBindingException:
pass
elif self == other:
return bindings
raise UnificationException(self, other, bindings)
class VariableExpression(AtomicExpression):
def unify(self, other, bindings):
"""
'self' must not be bound to anything other than 'other'.
:param other: ``Expression``
:param bindings: ``BindingDict`` A dictionary of all current bindings
:return: ``BindingDict`` A new combined dictionary of of 'bindings' and the new binding
:raise UnificationException: If 'self' and 'other' cannot be unified in the context of 'bindings'
"""
assert isinstance(other, Expression)
try:
if self == other:
return bindings
else:
return bindings + BindingDict([(self, other)])
except VariableBindingException:
raise UnificationException(self, other, bindings)
@python_2_unicode_compatible
class ImpExpression(Expression):
def __init__(self, antecedent, consequent):
"""
:param antecedent: ``Expression`` for the antecedent
:param consequent: ``Expression`` for the consequent
"""
assert isinstance(antecedent, Expression)
assert isinstance(consequent, Expression)
self.antecedent = antecedent
self.consequent = consequent
def simplify(self, bindings=None):
return self.__class__(self.antecedent.simplify(bindings), self.consequent.simplify(bindings))
def unify(self, other, bindings):
"""
Both the antecedent and consequent of 'self' and 'other' must unify.
:param other: ``ImpExpression``
:param bindings: ``BindingDict`` A dictionary of all current bindings
:return: ``BindingDict`` A new combined dictionary of of 'bindings' and any new bindings
:raise UnificationException: If 'self' and 'other' cannot be unified in the context of 'bindings'
"""
assert isinstance(other, ImpExpression)
try:
return bindings + self.antecedent.unify(other.antecedent, bindings) + self.consequent.unify(other.consequent, bindings)
except VariableBindingException:
raise UnificationException(self, other, bindings)
def compile_pos(self, index_counter, glueFormulaFactory):
"""
From Iddo Lev's PhD Dissertation p108-109
:param index_counter: ``Counter`` for unique indices
:param glueFormulaFactory: ``GlueFormula`` for creating new glue formulas
:return: (``Expression``,set) for the compiled linear logic and any newly created glue formulas
"""
(a, a_new) = self.antecedent.compile_neg(index_counter, glueFormulaFactory)
(c, c_new) = self.consequent.compile_pos(index_counter, glueFormulaFactory)
return (ImpExpression(a,c), a_new + c_new)
def compile_neg(self, index_counter, glueFormulaFactory):
"""
From Iddo Lev's PhD Dissertation p108-109
:param index_counter: ``Counter`` for unique indices
:param glueFormulaFactory: ``GlueFormula`` for creating new glue formulas
:return: (``Expression``,list of ``GlueFormula``) for the compiled linear logic and any newly created glue formulas
"""
(a, a_new) = self.antecedent.compile_pos(index_counter, glueFormulaFactory)
(c, c_new) = self.consequent.compile_neg(index_counter, glueFormulaFactory)
fresh_index = index_counter.get()
c.dependencies.append(fresh_index)
new_v = glueFormulaFactory('v%s' % fresh_index, a, set([fresh_index]))
return (c, a_new + c_new + [new_v])
def initialize_labels(self, fstruct):
self.antecedent.initialize_labels(fstruct)
self.consequent.initialize_labels(fstruct)
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.antecedent == other.antecedent and self.consequent == other.consequent
def __ne__(self, other):
return not self == other
def __str__(self):
return "%s%s %s %s%s" % (
Tokens.OPEN, self.antecedent, Tokens.IMP, self.consequent, Tokens.CLOSE)
def __hash__(self):
return hash('%s%s%s' % (hash(self.antecedent), Tokens.IMP, hash(self.consequent)))
@python_2_unicode_compatible
class ApplicationExpression(Expression):
def __init__(self, function, argument, argument_indices=None):
"""
:param function: ``Expression`` for the function
:param argument: ``Expression`` for the argument
:param argument_indices: set for the indices of the glue formula from which the argument came
:raise LinearLogicApplicationException: If 'function' cannot be applied to 'argument' given 'argument_indices'.
"""
function_simp = function.simplify()
argument_simp = argument.simplify()
assert isinstance(function_simp, ImpExpression)
assert isinstance(argument_simp, Expression)
bindings = BindingDict()
try:
if isinstance(function, ApplicationExpression):
bindings += function.bindings
if isinstance(argument, ApplicationExpression):
bindings += argument.bindings
bindings += function_simp.antecedent.unify(argument_simp, bindings)
except UnificationException as e:
raise LinearLogicApplicationException('Cannot apply %s to %s. %s' % (function_simp, argument_simp, e))
# If you are running it on complied premises, more conditions apply
if argument_indices:
# A.dependencies of (A -o (B -o C)) must be a proper subset of argument_indices
if not set(function_simp.antecedent.dependencies) < argument_indices:
raise LinearLogicApplicationException('Dependencies unfulfilled when attempting to apply Linear Logic formula %s to %s' % (function_simp, argument_simp))
if set(function_simp.antecedent.dependencies) == argument_indices:
raise LinearLogicApplicationException('Dependencies not a proper subset of indices when attempting to apply Linear Logic formula %s to %s' % (function_simp, argument_simp))
self.function = function
self.argument = argument
self.bindings = bindings
def simplify(self, bindings=None):
"""
Since function is an implication, return its consequent. There should be
no need to check that the application is valid since the checking is done
by the constructor.
:param bindings: ``BindingDict`` A dictionary of bindings used to simplify
:return: ``Expression``
"""
if not bindings:
bindings = self.bindings
return self.function.simplify(bindings).consequent
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.function == other.function and self.argument == other.argument
def __ne__(self, other):
return not self == other
def __str__(self):
return "%s" % self.function + Tokens.OPEN + "%s" % self.argument + Tokens.CLOSE
def __hash__(self):
return hash('%s%s%s' % (hash(self.antecedent), Tokens.OPEN, hash(self.consequent)))
@python_2_unicode_compatible
class BindingDict(object):
def __init__(self, bindings=None):
"""
:param bindings:
list [(``VariableExpression``, ``AtomicExpression``)] to initialize the dictionary
dict {``VariableExpression``: ``AtomicExpression``} to initialize the dictionary
"""
self.d = {}
if isinstance(bindings, dict):
bindings = bindings.items()
if bindings:
for (v, b) in bindings:
self[v] = b
def __setitem__(self, variable, binding):
"""
A binding is consistent with the dict if its variable is not already bound, OR if its
variable is already bound to its argument.
:param variable: ``VariableExpression`` The variable bind
:param binding: ``Expression`` The expression to which 'variable' should be bound
:raise VariableBindingException: If the variable cannot be bound in this dictionary
"""
assert isinstance(variable, VariableExpression)
assert isinstance(binding, Expression)
assert variable != binding
existing = self.d.get(variable, None)
if not existing or binding == existing:
self.d[variable] = binding
else:
raise VariableBindingException('Variable %s already bound to another value' % (variable))
def __getitem__(self, variable):
"""
Return the expression to which 'variable' is bound
"""
assert isinstance(variable, VariableExpression)
intermediate = self.d[variable]
while intermediate:
try:
intermediate = self.d[intermediate]
except KeyError:
return intermediate
def __contains__(self, item):
return item in self.d
def __add__(self, other):
"""
:param other: ``BindingDict`` The dict with which to combine self
:return: ``BindingDict`` A new dict containing all the elements of both parameters
:raise VariableBindingException: If the parameter dictionaries are not consistent with each other
"""
try:
combined = BindingDict()
for v in self.d:
combined[v] = self.d[v]
for v in other.d:
combined[v] = other.d[v]
return combined
except VariableBindingException:
raise VariableBindingException('Attempting to add two contradicting'\
' VariableBindingsLists: %s, %s' % (self, other))
def __ne__(self, other):
return not self == other
def __eq__(self, other):
if not isinstance(other, BindingDict):
raise TypeError
return self.d == other.d
def __str__(self):
return '{' + ', '.join('%s: %s' % (v, self.d[v]) for v in self.d) + '}'
def __repr__(self):
return 'BindingDict: %s' % self
class VariableBindingException(Exception):
pass
class UnificationException(Exception):
def __init__(self, a, b, bindings):
Exception.__init__(self, 'Cannot unify %s with %s given %s' % (a, b, bindings))
class LinearLogicApplicationException(Exception):
pass
def demo():
lexpr = Expression.fromstring
print(lexpr(r'f'))
print(lexpr(r'(g -o f)'))
print(lexpr(r'((g -o G) -o G)'))
print(lexpr(r'g -o h -o f'))
print(lexpr(r'(g -o f)(g)').simplify())
print(lexpr(r'(H -o f)(g)').simplify())
print(lexpr(r'((g -o G) -o G)((g -o f))').simplify())
print(lexpr(r'(H -o H)((g -o f))').simplify())
if __name__ == '__main__':
demo()
| [
"wlsl4239@naver.com"
] | wlsl4239@naver.com |
c503f0ccea6b194021cfd21c8b840271f6821aaa | 6faedd57c152156ca508be06b15bcd04e27e974b | /peeringdb_server/management/commands/pdb_org_cleanup.py | ad73a8fda226376e7b8a26fe16491ec0c89c9c71 | [
"BSD-2-Clause"
] | permissive | grizz/peeringdb | 1dca75ac7cbb357ff6166285fb89de07a0a1ed5d | 355fff42924a62b1920bd1f263d83e696e96b74b | refs/heads/master | 2023-01-06T14:07:13.176255 | 2022-11-15T18:34:50 | 2022-11-15T18:34:50 | 164,004,395 | 0 | 0 | NOASSERTION | 2019-01-03T17:41:55 | 2019-01-03T17:41:54 | null | UTF-8 | Python | false | false | 1,617 | py | from django.core.management.base import BaseCommand
from peeringdb_server.models import Organization
class Command(BaseCommand):
help = "Cleanup deleted Organization objects"
def add_arguments(self, parser):
parser.add_argument(
"--commit",
action="store_true",
help="commit changes, otherwise run in pretend mode",
)
def log(self, msg):
if not self.commit:
self.stdout.write(f"[pretend] {msg}")
else:
self.stdout.write(msg)
def handle(self, *args, **options):
self.commit = options.get("commit")
orgs = Organization.objects.filter(status="deleted")
# Confirm if user wants to continue via prompt
for org in orgs:
self.log(
f"Cleaning up Organization {org.id} - {org.name} ({org.admin_usergroup.user_set.all().count() + org.usergroup.user_set.all().count()} users)"
)
if self.commit:
# Remove users from user and admin usergroups
aug = org.admin_usergroup.user_set
for user in aug.all():
aug.remove(user)
user.save()
ug = org.usergroup.user_set
for user in ug.all():
ug.remove(user)
user.save()
# Remove all affiliation requests
for affiliation in org.affiliation_requests.filter(status="pending"):
affiliation.cancel()
self.log(f"Removed all users from deleted organization {org.id}")
| [
"noreply@github.com"
] | grizz.noreply@github.com |
96fda186839f0a1f402245e72b52cf206fc61584 | 112882b8d6c5071e7d2610c595bfca9210c79a0a | /tools/leetcode.066.Plus One/leetcode.066.Plus One.submission7.py | f1fb451aca083ed7e1b363869a2b2d3005f58d12 | [
"MIT"
] | permissive | tedye/leetcode | 193b1900d98e35d5c402013cbe3bc993d0235da2 | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | refs/heads/master | 2021-01-01T19:06:06.408135 | 2015-10-24T06:44:40 | 2015-10-24T06:44:40 | 41,804,923 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | class Solution:
# @param digits, a list of integer digits
# @return a list of integer digits
def plusOne(self, digits):
if digits[-1] < 9:
digits[-1] += 1
return digits
pos = len(digits) - 1
overflow = False
while digits[pos] == 9:
digits[pos] = 0
if pos == 0:
overflow = True
break
else:
pos -= 1
if overflow:
results = [1] + digits
return results
else:
digits[pos]+=1
return digits | [
"tedye@bu.edu"
] | tedye@bu.edu |
aaebaa9fae2392777c866f60bc43f48468fef328 | cd4eb25911d3e3b092aa97aaa7b8fbba6c3a0704 | /lang/python/gevent/testSigQuit.py | 266a8c3f7e07ad1e923b90327d24877fd4efdf12 | [
"MIT"
] | permissive | liuyang1/test | 29bb142982d2ef0d79b71e8fe5f5e0d51ec5258e | 9a154e0161a1a33baad53f7223ee72e702532001 | refs/heads/master | 2023-08-05T08:56:50.526414 | 2023-07-21T05:49:53 | 2023-07-21T11:16:09 | 26,949,326 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | import gevent
import signal
def run_forever():
gevent.sleep(100)
if __name__ == "__main__":
gevent.signal(signal.SIGQUIT, gevent.shutdown)
thread = gevent.spawn(run_forever)
thread.join()
| [
"liuyang1@mail.ustc.edu.cn"
] | liuyang1@mail.ustc.edu.cn |
d6e5f838cf0ff1735ab58b6a342ba0064ed99f4a | b31f44faa4ff1b462585130aff31de959a3e1623 | /Python/Data Structure/Linear List/Sort Array By Parity II.py | 969ae2d0c825bea00b3571a4ee45cadff6fd370b | [] | no_license | fagan2888/Coding-Interview | ac66b1fc33aecdbc2f1e1ec66491561c424e3024 | fe7afbead2f1e252f4bc5692e0f94a6ce32f3c44 | refs/heads/master | 2021-04-21T05:16:34.002298 | 2020-02-02T15:41:05 | 2020-02-02T15:41:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | class Solution:
def sortArrayByParityII(self, A: List[int]) -> List[int]:
# two pointers
j = 1
for i in range(0, len(A), 2): #even
if A[i] % 2:
while A[j] % 2:
j += 2
A[i], A[j] = A[j], A[i]
return A | [
"LIUXinhe@outlook.com"
] | LIUXinhe@outlook.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.