| hexsha (string, length 40) | size (int64, 4 to 1.02M) | ext (string, 8 classes) | lang (string, 1 class) | max_stars_repo_path (string, length 4 to 209) | max_stars_repo_name (string, length 5 to 121) | max_stars_repo_head_hexsha (string, length 40) | max_stars_repo_licenses (list, length 1 to 10) | max_stars_count (int64, 1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime (string, length 24, nullable) | max_stars_repo_stars_event_max_datetime (string, length 24, nullable) | max_issues_repo_path (string, length 4 to 209) | max_issues_repo_name (string, length 5 to 121) | max_issues_repo_head_hexsha (string, length 40) | max_issues_repo_licenses (list, length 1 to 10) | max_issues_count (int64, 1 to 67k, nullable) | max_issues_repo_issues_event_min_datetime (string, length 24, nullable) | max_issues_repo_issues_event_max_datetime (string, length 24, nullable) | max_forks_repo_path (string, length 4 to 209) | max_forks_repo_name (string, length 5 to 121) | max_forks_repo_head_hexsha (string, length 40) | max_forks_repo_licenses (list, length 1 to 10) | max_forks_count (int64, 1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime (string, length 24, nullable) | max_forks_repo_forks_event_max_datetime (string, length 24, nullable) | content (string, length 4 to 1.02M) | avg_line_length (float64, 1.07 to 66.1k) | max_line_length (int64, 4 to 266k) | alphanum_fraction (float64, 0.01 to 1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
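For orientation, the sketch below rewrites the first data row's metadata as a flat Python dict; the dict form is purely illustrative (it is not the dataset's native format) and the full `content` string is elided.

# Illustrative only: the metadata of the first row below as a flat Python dict.
example_row = {
    "hexsha": "029e0bfc2e4cbbb8148fbd64db58750bb974195c",
    "size": 4014,
    "ext": "py",
    "lang": "Python",
    "max_stars_repo_path": "python/tests/test-crystal_diffraction.py",
    "max_stars_repo_name": "mschollmeier/xraylib",
    "max_stars_repo_licenses": ["BSD-3-Clause"],
    "max_stars_count": 85,
    "content": "...",  # full source text of the file, elided here
    "avg_line_length": 34.603448,
    "max_line_length": 98,
    "alphanum_fraction": 0.64275,
}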
| 029e0bfc2e4cbbb8148fbd64db58750bb974195c | 4,014 | py | Python | python/tests/test-crystal_diffraction.py | mschollmeier/xraylib | abf5005cd2a9c8d42d7d70a0f1ef21b6c6bd7ccd | ["BSD-3-Clause"] | 85 | 2015-03-10T07:21:36.000Z | 2022-03-15T18:08:13.000Z | python/tests/test-crystal_diffraction.py | mschollmeier/xraylib | abf5005cd2a9c8d42d7d70a0f1ef21b6c6bd7ccd | ["BSD-3-Clause"] | 107 | 2015-01-30T13:50:38.000Z | 2022-03-20T17:56:52.000Z | python/tests/test-crystal_diffraction.py | mschollmeier/xraylib | abf5005cd2a9c8d42d7d70a0f1ef21b6c6bd7ccd | ["BSD-3-Clause"] | 49 | 2015-01-06T20:14:34.000Z | 2022-02-05T18:02:15.000Z |
import unittest
import xraylib
import math
class TestCrystalDiffraction(unittest.TestCase):
def test_crystal_diffraction(self):
crystals_list = xraylib.Crystal_GetCrystalsList()
self.assertEqual(len(crystals_list), 38)
for crystal_name in crystals_list:
cs = xraylib.Crystal_GetCrystal(crystal_name)
self.assertEqual(crystal_name, cs['name'])
with self.assertRaises(ValueError):
cs = xraylib.Crystal_GetCrystal(None)
with self.assertRaises(ValueError):
cs = xraylib.Crystal_GetCrystal("non-existent-crystal")
cs = xraylib.Crystal_GetCrystal("Diamond")
cs_copy = xraylib.Crystal_MakeCopy(cs)
with self.assertRaises(ValueError):
xraylib.Crystal_AddCrystal(cs)
with self.assertRaises(ValueError):
xraylib.Crystal_AddCrystal(cs_copy)
cs_copy['name'] = "Diamond-copy"
xraylib.Crystal_AddCrystal(cs_copy)
cs_copy['name'] = 20012016
with self.assertRaises(TypeError):
xraylib.Crystal_AddCrystal(cs_copy)
cs_copy['name'] = "Diamond-copy"
cs_copy['atom'] = list()
with self.assertRaises(TypeError):
xraylib.Crystal_AddCrystal(cs_copy)
cs_copy['atom'] = (25, "jkewjfpwejffj", None, )
with self.assertRaises(TypeError):
xraylib.Crystal_AddCrystal(cs_copy)
del cs_copy['atom']
with self.assertRaises(KeyError):
xraylib.Crystal_AddCrystal(cs_copy)
crystals_list = xraylib.Crystal_GetCrystalsList()
self.assertEqual(len(crystals_list), 39)
for crystal_name in crystals_list:
cs = xraylib.Crystal_GetCrystal(crystal_name)
self.assertEqual(crystal_name, cs['name'])
current_ncrystals = len(crystals_list)
for i in range(xraylib.CRYSTALARRAY_MAX):
cs_copy = xraylib.Crystal_MakeCopy(cs)
cs_copy['name'] = "Diamond copy {}".format(i)
if current_ncrystals < xraylib.CRYSTALARRAY_MAX:
xraylib.Crystal_AddCrystal(cs_copy)
current_ncrystals = current_ncrystals + 1
self.assertEqual(len(xraylib.Crystal_GetCrystalsList()), current_ncrystals)
else:
with self.assertRaises(RuntimeError):
xraylib.Crystal_AddCrystal(cs_copy)
self.assertEqual(len(xraylib.Crystal_GetCrystalsList()), xraylib.CRYSTALARRAY_MAX)
cs = xraylib.Crystal_GetCrystal("Diamond")
# Bragg angle
angle = xraylib.Bragg_angle(cs, 10.0, 1, 1, 1)
self.assertAlmostEqual(angle, 0.3057795845795849)
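        # Cross-check of the expected value (approximate, using hc = 12.3984 keV*Angstrom):
        # lambda(10 keV) = 1.23984 Angstrom and d(111) = 2.05929 Angstrom (see the
        # d-spacing test below), so theta = asin(lambda / (2 * d)) = 0.30578 rad.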
with self.assertRaises(TypeError):
angle = xraylib.Bragg_angle(None, 10.0, 1, 1, 1)
with self.assertRaises(ValueError):
angle = xraylib.Bragg_angle(cs, -10.0, 1, 1, 1)
with self.assertRaises(TypeError):
angle = xraylib.Bragg_angle(cs, 1, 1, 1)
# Q_scattering_amplitude
tmp = xraylib.Q_scattering_amplitude(cs, 10.0, 1, 1, 1, math.pi/4.0)
self.assertAlmostEqual(tmp, 0.19184445408324474)
tmp = xraylib.Q_scattering_amplitude(cs, 10.0, 0, 0, 0, math.pi/4.0)
self.assertEqual(tmp, 0.0)
# Atomic factors
(f0, f_prime, f_prime2) = xraylib.Atomic_Factors(26, 10.0, 1.0, 10.0)
self.assertAlmostEqual(f0, 65.15)
self.assertAlmostEqual(f_prime, -0.22193271025027966)
self.assertAlmostEqual(f_prime2, 22.420270655080493)
with self.assertRaises(ValueError):
(f0, f_prime, f_prime2) = xraylib.Atomic_Factors(-10, 10.0, 1.0, 10.0)
# unit cell volume
tmp = xraylib.Crystal_UnitCellVolume(cs)
self.assertAlmostEqual(tmp, 45.376673902751)
# crystal dspacing
tmp = xraylib.Crystal_dSpacing(cs, 1, 1, 1)
self.assertAlmostEqual(tmp, 2.0592870875248344)
del cs
if __name__ == '__main__':
unittest.main(verbosity=2)
| 34.603448 | 98 | 0.64275 |
| dcd276f7b519265600b14188f55104e6a831da2c | 6,799 | py | Python | torchvision/prototype/datasets/_builtin/caltech.py | mechaot/vision | 8dcb5b810d85bd42edf73280db1ece38c487004c | ["BSD-3-Clause"] | 2 | 2021-04-01T17:19:21.000Z | 2021-04-01T18:04:08.000Z | torchvision/prototype/datasets/_builtin/caltech.py | ejguan/vision | aedd39792d07af58e55ec028ed344d63debbd281 | ["BSD-3-Clause"] | null | null | null | torchvision/prototype/datasets/_builtin/caltech.py | ejguan/vision | aedd39792d07af58e55ec028ed344d63debbd281 | ["BSD-3-Clause"] | null | null | null |
import io
import pathlib
import re
from typing import Any, Callable, Dict, List, Optional, Tuple
import numpy as np
import torch
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
TarArchiveReader,
Shuffler,
Filter,
IterKeyZipper,
)
from torchvision.prototype.datasets.utils import (
Dataset,
DatasetConfig,
DatasetInfo,
HttpResource,
OnlineResource,
DatasetType,
)
from torchvision.prototype.datasets.utils._internal import INFINITE_BUFFER_SIZE, read_mat
from torchvision.prototype.features import Label, BoundingBox
class Caltech101(Dataset):
def _make_info(self) -> DatasetInfo:
return DatasetInfo(
"caltech101",
type=DatasetType.IMAGE,
homepage="http://www.vision.caltech.edu/Image_Datasets/Caltech101",
)
def resources(self, config: DatasetConfig) -> List[OnlineResource]:
images = HttpResource(
"http://www.vision.caltech.edu/Image_Datasets/Caltech101/101_ObjectCategories.tar.gz",
sha256="af6ece2f339791ca20f855943d8b55dd60892c0a25105fcd631ee3d6430f9926",
)
anns = HttpResource(
"http://www.vision.caltech.edu/Image_Datasets/Caltech101/Annotations.tar",
sha256="1717f4e10aa837b05956e3f4c94456527b143eec0d95e935028b30aff40663d8",
)
return [images, anns]
_IMAGES_NAME_PATTERN = re.compile(r"image_(?P<id>\d+)[.]jpg")
_ANNS_NAME_PATTERN = re.compile(r"annotation_(?P<id>\d+)[.]mat")
_ANNS_CATEGORY_MAP = {
"Faces_2": "Faces",
"Faces_3": "Faces_easy",
"Motorbikes_16": "Motorbikes",
"Airplanes_Side_2": "airplanes",
}
def _is_not_background_image(self, data: Tuple[str, Any]) -> bool:
path = pathlib.Path(data[0])
return path.parent.name != "BACKGROUND_Google"
def _is_ann(self, data: Tuple[str, Any]) -> bool:
path = pathlib.Path(data[0])
return bool(self._ANNS_NAME_PATTERN.match(path.name))
def _images_key_fn(self, data: Tuple[str, Any]) -> Tuple[str, str]:
path = pathlib.Path(data[0])
category = path.parent.name
id = self._IMAGES_NAME_PATTERN.match(path.name).group("id") # type: ignore[union-attr]
return category, id
def _anns_key_fn(self, data: Tuple[str, Any]) -> Tuple[str, str]:
path = pathlib.Path(data[0])
category = path.parent.name
if category in self._ANNS_CATEGORY_MAP:
category = self._ANNS_CATEGORY_MAP[category]
id = self._ANNS_NAME_PATTERN.match(path.name).group("id") # type: ignore[union-attr]
return category, id
def _collate_and_decode_sample(
self,
data: Tuple[Tuple[str, str], Tuple[Tuple[str, io.IOBase], Tuple[str, io.IOBase]]],
*,
decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
) -> Dict[str, Any]:
key, (image_data, ann_data) = data
category, _ = key
image_path, image_buffer = image_data
ann_path, ann_buffer = ann_data
label = self.info.categories.index(category)
image = decoder(image_buffer) if decoder else image_buffer
ann = read_mat(ann_buffer)
bbox = BoundingBox(ann["box_coord"].astype(np.int64).squeeze()[[2, 0, 3, 1]], format="xyxy")
contour = torch.tensor(ann["obj_contour"].T)
return dict(
category=category,
label=label,
image=image,
image_path=image_path,
bbox=bbox,
contour=contour,
ann_path=ann_path,
)
def _make_datapipe(
self,
resource_dps: List[IterDataPipe],
*,
config: DatasetConfig,
decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
) -> IterDataPipe[Dict[str, Any]]:
images_dp, anns_dp = resource_dps
images_dp = TarArchiveReader(images_dp)
images_dp = Filter(images_dp, self._is_not_background_image)
images_dp = Shuffler(images_dp, buffer_size=INFINITE_BUFFER_SIZE)
anns_dp = TarArchiveReader(anns_dp)
anns_dp = Filter(anns_dp, self._is_ann)
dp = IterKeyZipper(
images_dp,
anns_dp,
key_fn=self._images_key_fn,
ref_key_fn=self._anns_key_fn,
buffer_size=INFINITE_BUFFER_SIZE,
keep_key=True,
)
return Mapper(dp, self._collate_and_decode_sample, fn_kwargs=dict(decoder=decoder))
def _generate_categories(self, root: pathlib.Path) -> List[str]:
dp = self.resources(self.default_config)[0].to_datapipe(pathlib.Path(root) / self.name)
dp = TarArchiveReader(dp)
dp = Filter(dp, self._is_not_background_image)
return sorted({pathlib.Path(path).parent.name for path, _ in dp})
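# A hypothetical illustration of the key matching used by Caltech101 above: the
# example paths below are not real files, they merely fit _IMAGES_NAME_PATTERN and
# _ANNS_NAME_PATTERN, so both reduce to the same (category, id) key and are paired
# by IterKeyZipper.
#   "101_ObjectCategories/ant/image_0042.jpg"  -> ("ant", "0042")
#   "Annotations/ant/annotation_0042.mat"      -> ("ant", "0042")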
class Caltech256(Dataset):
def _make_info(self) -> DatasetInfo:
return DatasetInfo(
"caltech256",
type=DatasetType.IMAGE,
homepage="http://www.vision.caltech.edu/Image_Datasets/Caltech256",
)
def resources(self, config: DatasetConfig) -> List[OnlineResource]:
return [
HttpResource(
"http://www.vision.caltech.edu/Image_Datasets/Caltech256/256_ObjectCategories.tar",
sha256="08ff01b03c65566014ae88eb0490dbe4419fc7ac4de726ee1163e39fd809543e",
)
]
def _is_not_rogue_file(self, data: Tuple[str, Any]) -> bool:
path = pathlib.Path(data[0])
return path.name != "RENAME2"
def _collate_and_decode_sample(
self,
data: Tuple[str, io.IOBase],
*,
decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
) -> Dict[str, Any]:
path, buffer = data
dir_name = pathlib.Path(path).parent.name
label_str, category = dir_name.split(".")
label = Label(int(label_str), category=category)
return dict(label=label, image=decoder(buffer) if decoder else buffer)
def _make_datapipe(
self,
resource_dps: List[IterDataPipe],
*,
config: DatasetConfig,
decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
) -> IterDataPipe[Dict[str, Any]]:
dp = resource_dps[0]
dp = TarArchiveReader(dp)
dp = Filter(dp, self._is_not_rogue_file)
dp = Shuffler(dp, buffer_size=INFINITE_BUFFER_SIZE)
return Mapper(dp, self._collate_and_decode_sample, fn_kwargs=dict(decoder=decoder))
def _generate_categories(self, root: pathlib.Path) -> List[str]:
dp = self.resources(self.default_config)[0].to_datapipe(pathlib.Path(root) / self.name)
dp = TarArchiveReader(dp)
dir_names = {pathlib.Path(path).parent.name for path, _ in dp}
return [name.split(".")[1] for name in sorted(dir_names)]
| 34.51269 | 100 | 0.639947 |
| 8caa85f62e8cdc3d5f65a7734dad58265e46a2e1 | 2,200 | py | Python | git/db.py | yarikoptic/GitPython | 7576b282013249a2b20ccda4acacefd5e625ea39 | ["BSD-3-Clause"] | 1 | 2020-10-15T06:16:48.000Z | 2020-10-15T06:16:48.000Z | git/db.py | yarikoptic/GitPython | 7576b282013249a2b20ccda4acacefd5e625ea39 | ["BSD-3-Clause"] | null | null | null | git/db.py | yarikoptic/GitPython | 7576b282013249a2b20ccda4acacefd5e625ea39 | ["BSD-3-Clause"] | null | null | null |
"""Module with our own gitdb implementation - it uses the git command"""
from exc import (
GitCommandError,
BadObject
)
from gitdb.base import (
OInfo,
OStream
)
from gitdb.util import (
bin_to_hex,
hex_to_bin
)
from gitdb.db import GitDB
from gitdb.db import LooseObjectDB
__all__ = ('GitCmdObjectDB', 'GitDB' )
#class GitCmdObjectDB(CompoundDB, ObjectDBW):
class GitCmdObjectDB(LooseObjectDB):
"""A database representing the default git object store, which includes loose
objects, pack files and an alternates file
It will create objects only in the loose object database.
    :note: for now, we use the git command to do all the lookup, just until we
        have packs and the other implementations
"""
def __init__(self, root_path, git):
"""Initialize this instance with the root and a git command"""
super(GitCmdObjectDB, self).__init__(root_path)
self._git = git
def info(self, sha):
hexsha, typename, size = self._git.get_object_header(bin_to_hex(sha))
return OInfo(hex_to_bin(hexsha), typename, size)
def stream(self, sha):
"""For now, all lookup is done by git itself"""
hexsha, typename, size, stream = self._git.stream_object_data(bin_to_hex(sha))
return OStream(hex_to_bin(hexsha), typename, size, stream)
# { Interface
def partial_to_complete_sha_hex(self, partial_hexsha):
""":return: Full binary 20 byte sha from the given partial hexsha
:raise AmbiguousObjectName:
:raise BadObject:
:note: currently we only raise BadObject as git does not communicate
AmbiguousObjects separately"""
try:
hexsha, typename, size = self._git.get_object_header(partial_hexsha)
return hex_to_bin(hexsha)
except (GitCommandError, ValueError):
raise BadObject(partial_hexsha)
# END handle exceptions
#} END interface
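A minimal usage sketch for the class above, assuming GitPython is installed; the repository path is hypothetical, and `Repo.git_dir` plus `Repo.git` supply the two constructor arguments:

import os
from git import Repo

repo = Repo("/path/to/some/repo")                      # hypothetical repository path
odb = GitCmdObjectDB(os.path.join(repo.git_dir, "objects"), repo.git)
binsha = hex_to_bin(repo.head.commit.hexsha)           # 20-byte binary sha of HEAD
info = odb.info(binsha)                                # OInfo(binsha, typename, size)
print(bin_to_hex(info.binsha), info.type, info.size)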
| 35.483871 | 86 | 0.606818 |
| a25e9c1e0f3ef758f86d24de8f9a827dccf79515 | 5,472 | py | Python | tests/apps/courses/test_templates_blogpost_detail.py | openfun/richie | 185b2be74ce75de8de1da159773afc43e365df3e | ["MIT"] | 174 | 2018-04-14T23:36:01.000Z | 2022-03-10T09:27:01.000Z | tests/apps/courses/test_templates_blogpost_detail.py | EDUlib/richie | 3f9b14b929641f9392e54ba9badbb7a9a9fe7d44 | ["MIT"] | 631 | 2018-04-04T11:28:53.000Z | 2022-03-31T11:18:31.000Z | tests/apps/courses/test_templates_blogpost_detail.py | EDUlib/richie | 3f9b14b929641f9392e54ba9badbb7a9a9fe7d44 | ["MIT"] | 64 | 2018-06-27T08:35:01.000Z | 2022-03-10T09:27:43.000Z |
"""
End-to-end tests for the blogpost detail view
"""
import re
from datetime import datetime
from unittest import mock
from django.utils import timezone
from cms.test_utils.testcases import CMSTestCase
from richie.apps.core.factories import UserFactory
from richie.apps.courses.factories import (
BlogPostFactory,
CategoryFactory,
PersonFactory,
)
class DetailBlogPostCMSTestCase(CMSTestCase):
"""
End-to-end test suite to validate the content and Ux of the blogpost detail view
"""
def test_templates_blogpost_detail_cms_published_content(self):
"""
Validate that the important elements are displayed on a published blogpost page
"""
author = PersonFactory(
page_title={"en": "Comte de Saint-Germain"}, should_publish=True
)
blogpost = BlogPostFactory(
page_title="Preums", fill_cover=True, fill_body=True, fill_author=[author]
)
page = blogpost.extended_object
# The page should not be visible before it is published
url = page.get_absolute_url()
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
# Publish the blogpost with a past date and ensure the content is correct
with mock.patch(
"cms.models.pagemodel.now",
return_value=datetime(2019, 11, 27, tzinfo=timezone.utc),
):
page.publish("en")
response = self.client.get(url)
self.assertContains(
response, "<title>Preums</title>", html=True, status_code=200
)
self.assertContains(
response, '<h1 class="blogpost-detail__title">Preums</h1>', html=True
)
self.assertNotContains(response, "Comte de Saint-Germain", html=True)
self.assertContains(
response,
'<p class="blogpost-detail__pubdate">11/27/2019</p>',
html=True,
)
def test_templates_blogpost_detail_cms_draft_content(self):
"""
A staff user should see a draft blogpost including only its published linked objects.
"""
user = UserFactory(is_staff=True, is_superuser=True)
self.client.login(username=user.username, password="password")
category = CategoryFactory()
published_category = CategoryFactory(should_publish=True)
author = PersonFactory(
page_title={"en": "Comte de Saint-Germain"}, should_publish=True
)
blogpost = BlogPostFactory(
page_title="Preums",
fill_cover=True,
fill_body=True,
fill_categories=[category, published_category],
fill_author=[author],
)
page = blogpost.extended_object
# The page should be visible as draft to the staff user
url = page.get_absolute_url()
response = self.client.get(url)
self.assertContains(
response, "<title>Preums</title>", html=True, status_code=200
)
self.assertContains(
response, '<h1 class="blogpost-detail__title">Preums</h1>', html=True
)
self.assertContains(response, "Comte de Saint-Germain", html=True)
self.assertContains(
response,
(
# pylint: disable=consider-using-f-string
'<a class="category-tag" '
'href="{:s}"><span class="category-tag__title">{:s}</span></a>'
).format(
published_category.extended_object.get_absolute_url(),
published_category.extended_object.get_title(),
),
html=True,
)
self.assertContains(
response,
(
# pylint: disable=consider-using-f-string
'<a class="category-tag category-tag--draft" '
'href="{:s}"><span class="category-tag__title">{:s}</span></a>'
).format(
category.extended_object.get_absolute_url(),
category.extended_object.get_title(),
),
html=True,
)
self.assertContains(
response,
'<p class="blogpost-detail__pubdate">Not published yet</p>',
html=True,
)
def test_templates_blogpost_detail_cms_published_content_opengraph(self):
"""The blogpost logo should be used as opengraph image."""
blogpost = BlogPostFactory(
fill_cover={
"original_filename": "cover.jpg",
"default_alt_text": "my cover",
},
should_publish=True,
)
url = blogpost.extended_object.get_absolute_url()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<meta property="og:type" content="article" />')
self.assertContains(
response, f'<meta property="og:url" content="http://example.com{url:s}" />'
)
pattern = (
r'<meta property="og:image" content="http://example.com'
r"/media/filer_public_thumbnails/filer_public/.*cover\.jpg__845x500"
)
self.assertIsNotNone(re.search(pattern, str(response.content)))
self.assertContains(
response, '<meta property="og:image:width" content="845" />'
)
self.assertContains(
response, '<meta property="og:image:height" content="500" />'
)
| 35.076923 | 93 | 0.600694 |
| 85ff46e77ee7d9a888d38e84484db32234aac3bc | 184 | py | Python | quantecon/util/__init__.py | bbardoczy/QuantEcon.py | 2c3b081b3373183be5b4c5393e176ed66feec1e3 | ["BSD-3-Clause"] | 1 | 2022-03-09T14:43:35.000Z | 2022-03-09T14:43:35.000Z | quantecon/util/__init__.py | bbardoczy/QuantEcon.py | 2c3b081b3373183be5b4c5393e176ed66feec1e3 | ["BSD-3-Clause"] | null | null | null | quantecon/util/__init__.py | bbardoczy/QuantEcon.py | 2c3b081b3373183be5b4c5393e176ed66feec1e3 | ["BSD-3-Clause"] | null | null | null |
"""
API for QuantEcon Utilities
"""
from .array import searchsorted
from .external import jit, numba_installed
from .random import check_random_state
from .timing import tic, tac, toc
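A small usage sketch for the timing helpers re-exported above; the timed expression is arbitrary and purely illustrative:

from quantecon.util import tic, toc

tic()                                    # start the timer
total = sum(i * i for i in range(10**6)) # arbitrary work to time
toc()                                    # report the elapsed time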
| 23 | 42 | 0.793478 |
| 2fa9d45a8b2e41194d22b72c8dac9d7bffc393a8 | 3,198 | py | Python | homeassistant/components/no_ip/__init__.py | alemuro/home-assistant | 9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90 | ["Apache-2.0"] | 2 | 2019-10-19T15:07:32.000Z | 2022-01-29T10:33:20.000Z | homeassistant/components/no_ip/__init__.py | alemuro/home-assistant | 9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90 | ["Apache-2.0"] | 4 | 2021-02-08T21:05:14.000Z | 2021-09-08T02:57:03.000Z | homeassistant/components/no_ip/__init__.py | alemuro/home-assistant | 9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90 | ["Apache-2.0"] | 2 | 2019-01-21T05:49:23.000Z | 2019-02-19T16:30:48.000Z |
"""Integrate with NO-IP Dynamic DNS service."""
import asyncio
import base64
from datetime import timedelta
import logging
import aiohttp
from aiohttp.hdrs import USER_AGENT, AUTHORIZATION
import async_timeout
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_TIMEOUT, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import SERVER_SOFTWARE
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "no_ip"
# We should set a dedicated address for the user agent.
EMAIL = "hello@home-assistant.io"
INTERVAL = timedelta(minutes=5)
DEFAULT_TIMEOUT = 10
NO_IP_ERRORS = {
"nohost": "Hostname supplied does not exist under specified account",
"badauth": "Invalid username password combination",
"badagent": "Client disabled",
"!donator": "An update request was sent with a feature that is not available",
"abuse": "Username is blocked due to abuse",
"911": "A fatal error on NO-IP's side such as a database outage",
}
UPDATE_URL = "https://dynupdate.noip.com/nic/update"
HA_USER_AGENT = "{} {}".format(SERVER_SOFTWARE, EMAIL)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize the NO-IP component."""
domain = config[DOMAIN].get(CONF_DOMAIN)
user = config[DOMAIN].get(CONF_USERNAME)
password = config[DOMAIN].get(CONF_PASSWORD)
timeout = config[DOMAIN].get(CONF_TIMEOUT)
auth_str = base64.b64encode("{}:{}".format(user, password).encode("utf-8"))
session = hass.helpers.aiohttp_client.async_get_clientsession()
result = await _update_no_ip(hass, session, domain, auth_str, timeout)
if not result:
return False
async def update_domain_interval(now):
"""Update the NO-IP entry."""
await _update_no_ip(hass, session, domain, auth_str, timeout)
hass.helpers.event.async_track_time_interval(update_domain_interval, INTERVAL)
return True
async def _update_no_ip(hass, session, domain, auth_str, timeout):
"""Update NO-IP."""
url = UPDATE_URL
params = {"hostname": domain}
headers = {
AUTHORIZATION: "Basic {}".format(auth_str.decode("utf-8")),
USER_AGENT: HA_USER_AGENT,
}
try:
with async_timeout.timeout(timeout):
resp = await session.get(url, params=params, headers=headers)
body = await resp.text()
if body.startswith("good") or body.startswith("nochg"):
return True
_LOGGER.warning(
"Updating NO-IP failed: %s => %s", domain, NO_IP_ERRORS[body.strip()]
)
except aiohttp.ClientError:
_LOGGER.warning("Can't connect to NO-IP API")
except asyncio.TimeoutError:
_LOGGER.warning("Timeout from NO-IP API for domain: %s", domain)
return False
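A minimal sketch of a configuration dict that CONFIG_SCHEMA above would accept; the domain and credentials are placeholders, and the timeout is optional (it defaults to DEFAULT_TIMEOUT):

example_config = {
    DOMAIN: {
        "domain": "example.ddns.net",       # CONF_DOMAIN, placeholder hostname
        "username": "user@example.com",     # CONF_USERNAME, placeholder
        "password": "secret",               # CONF_PASSWORD, placeholder
        # "timeout": 10,                    # CONF_TIMEOUT, optional
    }
}
validated = CONFIG_SCHEMA(example_config)   # raises voluptuous.Invalid on bad input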
| 29.33945 | 87 | 0.676048 |
| c77eecb59b15b1d2526750aff5e40a8f058a53a3 | 1,025 | py | Python | tango_with_django_project/tango_with_django_project/urls.py | shrishabh/rangoProject | 8ed4d46ecb546f1e3d6418a5d5a7b65421cfff6a | ["MIT"] | null | null | null | tango_with_django_project/tango_with_django_project/urls.py | shrishabh/rangoProject | 8ed4d46ecb546f1e3d6418a5d5a7b65421cfff6a | ["MIT"] | null | null | null | tango_with_django_project/tango_with_django_project/urls.py | shrishabh/rangoProject | 8ed4d46ecb546f1e3d6418a5d5a7b65421cfff6a | ["MIT"] | null | null | null |
"""tango_with_django_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import include
from django.conf import settings
from django.conf.urls.static import static
from rango import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^rango/',include('rango.urls')),
] + static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
| 36.607143 | 79 | 0.730732 |
| ee335c8acb66a1269e23ef6d44e0f2708d5b2cbe | 11,394 | py | Python | hrl_dynamic_mpc/src/var_comp_optimize.py | gt-ros-pkg/hrl-haptic-manip | 6458187075033ecd3a22fbcdc1a632df39b0cba1 | ["Apache-2.0"] | 1 | 2017-07-13T14:58:35.000Z | 2017-07-13T14:58:35.000Z | hrl_dynamic_mpc/src/var_comp_optimize.py | gt-ros-pkg/hrl-haptic-manip | 6458187075033ecd3a22fbcdc1a632df39b0cba1 | ["Apache-2.0"] | null | null | null | hrl_dynamic_mpc/src/var_comp_optimize.py | gt-ros-pkg/hrl-haptic-manip | 6458187075033ecd3a22fbcdc1a632df39b0cba1 | ["Apache-2.0"] | 2 | 2017-03-08T14:44:22.000Z | 2019-07-15T23:46:35.000Z |
#
# Copyright (c) 2013, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \authors: Marc Killpack (Healthcare Robotics Lab, Georgia Tech.)
# \adviser: Charles Kemp (Healthcare Robotics Lab, Georgia Tech.)
# import epc_skin as es
# import scipy.optimize as so
# import threading
# import roslib; roslib.load_manifest('darpa_m3')
# import rospy
# import software_simulation.ode_sim_arms as osa
# from darpa_m3.msg import SkinContact
# import copy
# import numpy as np
# import itertools as it
# #######################################
# # this is used to do variable impedance
# from darpa_m3.msg import MechanicalImpedanceParams
# class ode_SkinClient(es.SkinClient):
# def __init__(self, skin_topic):
# es.SkinClient.__init__(self, skin_topic)
# def force_normal_loc_joint_list(self, normal_component_only,
# return_time=False):
# self.lock.acquire()
# f_l = copy.copy(self.force_list)
# n_l = copy.copy(self.normal_list)
# nm_l = copy.copy(self.link_names)
# loc_l = copy.copy(self.loc_list)
# stamp = copy.copy(self.stamp)
# self.lock.release()
# jt_l = []
# for i in range(len(f_l)):
# f = f_l[i]
# n = n_l[i]
# if normal_component_only:
# f_l[i] = n * np.linalg.norm(f)
# nm = nm_l[i]
# if 'link1' in nm:
# jt_num = 0
# elif 'link2' in nm:
# jt_num = 1
# elif 'link3' in nm:
# jt_num = 2
# jt_l.append(jt_num)
# if return_time:
# return f_l, n_l, loc_l, jt_l, stamp
# else:
# return f_l, n_l, loc_l, jt_l
# class VarCompOpt():
# def __init__(self, robot, skin_topic):
# rospy.init_node('variable_compliance_optimizer')
# self.robot = robot
# self.scl = ode_SkinClient(skin_topic)
# self.var_imped_pub = rospy.Publisher('/sim_arm/command/joint_impedance', MechanicalImpedanceParams)
# self.var_imped_sub = rospy.Subscriber('/sim_arm/joint_impedance', MechanicalImpedanceParams, self.get_impedance)
# self.impedance = MechanicalImpedanceParams()
# self.k_p = None
# self.k_d = None
# self.k_p_buf = None
# self.k_d_buf = None
# self.lock = threading.RLock()
# while self.k_p_buf == None:
# rospy.sleep(0.01)
# self.k_p_nom = list(self.k_p_buf)
# self.k_d_nom = list(self.k_d_buf)
# def optimize(self, Jc_l, f_l, k_bounds, f_thresh):
# k_p,_,_ = so.fmin_tnc(self.opt_func, self.k_p_nom, args=(Jc_l, f_l, k_bounds, f_thresh), approx_grad=True, bounds=k_bounds, messages = 0)
# return k_p.tolist(), self.k_d_nom
# def opt_func(self, k_p, *args):
# Jc_l = args[0]
# f_l = args[1]
# k_bounds = args[2]
# f_thresh = args[3]
# cost = 0.0
# K = np.matrix(np.diag(k_p))
# for i in xrange(len(Jc_l)):
# J = np.matrix(Jc_l[i])
# f = f_l[i]
# f_mag = np.linalg.norm(f)
# cost = cost - f.T*J*np.linalg.inv(K)*J.T*f
# # if f_mag > f_thresh:
# # cost = cost - f.T*J*np.linalg.inv(K)*J.T*f
# # cost = cost + 0.0001*((K.diagonal()-np.matrix(self.k_p_nom))*(K.diagonal()-np.matrix(self.k_p_nom)).T)[0,0] #this is because the answer is a matrix still
# print "cost is : ", cost
# return cost
# def get_impedance(self, msg):
# self.lock.acquire()
# self.k_p_buf = list(msg.k_p.data)
# self.k_d_buf = list(msg.k_d.data)
# self.lock.release()
# def run_opt(self):
# #######################need a common place for controller dictionary####################
# #######################right now this is defined within Charlie's QP controller.########
# #######################This is referrring to "normal_component_only"####################
# k_bounds = [(1, 60), (1, 40), (1, 30)]
# f_thresh = 5.0
# while not rospy.is_shutdown():
# f_l, n_l, loc_l, jt_l, time_stamp = self.scl.force_normal_loc_joint_list(
# normal_component_only = True,
# return_time = True)
# n = len(n_l)
# # f_mag_list is a list of the magnitudes of the contact
# # forces
# f_mag_list = [np.linalg.norm(f_vec) for f_vec in f_l]
# # stop if a contact force is too high
# q = self.robot.get_joint_angles()
# ####################this needs to defined elsewhere as the number of links##############
# control_point_joint_num = 3
# ########################################################################################
# x_h = self.robot.kinematics.FK(q, control_point_joint_num)[0]
# J_all = self.robot.kinematics.jacobian(q, x_h)
# active_jt_idxs = [0, 1, 2] #not sure this right [jt_index_dict[jt] for jt in active_joints]
# # J_h = Jacobian for the hand (end effector)
# J_h = J_all[0:3, active_jt_idxs]
# J_h[:, control_point_joint_num:] = 0.
# jep = np.array(self.robot.get_ep())
# # phi_curr = phi[t] = current equilibrium angles for
# # the virtual springs on the robot's arm
# #phi_curr = np.matrix(jep[active_jt_idxs]).T
# # compute contact Jacobians
# Jc_l = []
# for jt_li, loc_li in it.izip(jt_l, loc_l):
# Jc = self.robot.kinematics.jacobian(q, loc_li)
# Jc[:, jt_li+1:] = 0.0
# Jc = Jc[0:3, active_jt_idxs]
# Jc_l.append(Jc)
# if f_mag_list == []:
# print "not changing the stiffnesses, could do something intelligent later"
# k_p = self.k_p_nom
# self.impedance.k_p.data = k_p
# diff=1
# while (diff > 0.001):
# self.var_imped_pub.publish(self.impedance)
# diff = np.linalg.norm(np.array(k_p) - np.array(self.k_p_buf))
# rospy.sleep(0.001)
# elif max(f_mag_list) > 100.:
# k_p = [1, 1, 1]
# self.impedance.k_p.data = k_p
# diff = 1
# while (diff > 0.001):
# self.var_imped_pub.publish(self.impedance)
# diff = np.linalg.norm(np.array(k_p) - np.array(self.k_p_buf))
# rospy.sleep(0.001)
# self.lock.acquire()
# #self.k_p = list(self.k_p_buf)
# #self.k_d = list(self.k_d_buf)
# self.lock.release()
# else:
# ########DO OPTIMIZATION HERE######################
# k_p, k_d = self.optimize(Jc_l, f_l, k_bounds, f_thresh)
# self.impedance.k_p.data = k_p
# self.impedance.k_d.data = k_d
# print "here is the k_p and k_d values:"
# print k_p
# print k_d
# diff = 1
# while (diff > 0.001):
# self.var_imped_pub.publish(self.impedance)
# diff = np.linalg.norm(np.array(k_p) - np.array(self.k_p_buf))
# rospy.sleep(0.001)
# self.lock.acquire()
# self.k_p = list(self.k_p_buf)
# self.k_d = list(self.k_d_buf)
# self.lock.release()
# def pub_once(self, k_p, k_d):
# self.impedance.k_p.data = k_p
# self.impedance.k_d.data = k_d
# diff = 1
# while (diff > 0.001):
# self.var_imped_pub.publish(self.impedance)
# diff = np.linalg.norm(np.array(k_p) - np.array(self.k_p_buf))
# rospy.sleep(0.001)
if __name__ == '__main__':
    print('this file is broken with respect to new controller structure, however it could serve as reference for new start on variable impedance control')
# import optparse
# p = optparse.OptionParser()
# p.add_option('--optimize', action='store_true', dest='optimize',
# default=False, help='run continuous optimization to vary compliance')
# p.add_option('--pub_once', action='store', dest='pub_once',
# default=None, help='publish impedance until it takes, make sure you included [[kp1, kp2, kp3 ...],[kd1, kd2, kd3, ...]] as an argument')
# p.add_option('--pub_once_alpha', action='store', dest='pub_once_alpha',
# default=None, help='change compliance parameters by a single gain, make sure you included alpha argument')
# opt, args = p.parse_args()
# skin_topic = '/skin/contacts'
# robot = osa.ODESimArm()
# variable_compliance = VarCompOpt(robot, skin_topic)
# if opt.optimize == True:
# variable_compliance.run_opt()
# elif opt.pub_once != None:
# gains = eval(opt.pub_once)
# variable_compliance.pub_once(gains[0], gains[1])
# elif opt.pub_once_alpha != None:
# alpha = eval(opt.pub_once_alpha)/100.0
# new_kp = (alpha*np.array(variable_compliance.k_p_nom)).tolist()
# #THIS IS A FIXED NOMINAL DAMPING WHICH DOESN'T MAKE MUCH SENSE
# #new_kd = variable_compliance.k_d_nom
# #this damping is varying to keep the damping ratio for each joint constant
# # k_d_nom * sqrt(k_new/k_nom) for each joint individually
# new_kd = (np.array(variable_compliance.k_d_nom)*np.sqrt(np.array(new_kp)/np.array(variable_compliance.k_p_nom))).tolist()
# variable_compliance.pub_once(new_kp, new_kd)
# else:
# print "you didn't specify an option, try the --help command"
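A self-contained sketch of the stiffness optimization that the commented-out opt_func/optimize methods describe; the contact Jacobian and contact force below are made-up placeholders, and only the cost structure and the stiffness bounds are taken from the comments above:

import numpy as np
import scipy.optimize as so

k_p_nom = np.array([30.0, 20.0, 15.0])      # nominal joint stiffnesses (placeholder values)
k_bounds = [(1, 60), (1, 40), (1, 30)]      # bounds as in run_opt above
Jc_l = [np.eye(3)]                          # one placeholder 3x3 contact Jacobian
f_l = [np.array([1.0, 0.5, 0.0])]           # one placeholder contact force

def opt_func(k_p, Jc_l, f_l):
    # cost = -sum_i f_i^T Jc_i K^-1 Jc_i^T f_i  (i.e. maximize force attenuation)
    K_inv = np.diag(1.0 / np.asarray(k_p))
    return -sum(f @ J @ K_inv @ J.T @ f for J, f in zip(Jc_l, f_l))

k_p_opt, _, _ = so.fmin_tnc(opt_func, k_p_nom, args=(Jc_l, f_l),
                            approx_grad=True, bounds=k_bounds, messages=0)
print("optimized joint stiffnesses:", k_p_opt)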
| 43.159091 | 165 | 0.572494 |
| 10187b69a8a2227b118f111bf71bd21b001b5850 | 366 | py | Python | python/anyascii/_data/_302.py | casept/anyascii | d4f426b91751254b68eaa84c6cd23099edd668e6 | ["ISC"] | null | null | null | python/anyascii/_data/_302.py | casept/anyascii | d4f426b91751254b68eaa84c6cd23099edd668e6 | ["ISC"] | null | null | null | python/anyascii/_data/_302.py | casept/anyascii | d4f426b91751254b68eaa84c6cd23099edd668e6 | ["ISC"] | null | null | null |
b=' Lan Chang Dian Chen Lan Me Kwaak Za Mo Yan Za Wei Gang Xian Yi Jia Que Ye Jian Xi Zhi Zhan Qiang Xian Kui Man Yan Qian Lou Dang Zhuan Yan Bi'
| 366 | 366 | 0.297814 |
| 41368932968194fefeac4e68932a20d663219207 | 311 | py | Python | app.py | geekcell/geekle-api | 0264a67f9652de408af2c91acaada6980d86ab57 | ["MIT"] | null | null | null | app.py | geekcell/geekle-api | 0264a67f9652de408af2c91acaada6980d86ab57 | ["MIT"] | null | null | null | app.py | geekcell/geekle-api | 0264a67f9652de408af2c91acaada6980d86ab57 | ["MIT"] | null | null | null |
import os
from flask import Flask
from geekle_api import api_blueprint, current_game
app = Flask(__name__)
app.register_blueprint(api_blueprint)
app.logger.info("Word of the day: " + current_game.get_word_of_the_day())
if __name__ == "__main__":
debug = os.environ.get("DEBUG", False)
    # pass debug as a keyword: the first positional argument of app.run() is the host
    app.run(debug=debug)
| 23.923077 | 73 | 0.755627 |
| 1cb47fc283c10418d648fd970721350ac09a2227 | 3,070 | py | Python | L1TriggerConfig/L1GtConfigProducers/test/L1GtTriggerMenuOnlineTest_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | ["Apache-2.0"] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | L1TriggerConfig/L1GtConfigProducers/test/L1GtTriggerMenuOnlineTest_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | ["Apache-2.0"] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | L1TriggerConfig/L1GtConfigProducers/test/L1GtTriggerMenuOnlineTest_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | ["Apache-2.0"] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z |
from __future__ import print_function
# cfg file to test the online producer of L1GtTriggerMenuRcd
import sys
import FWCore.ParameterSet.Config as cms
process = cms.Process("L1ConfigWritePayloadDummy")
###################### user choices ######################
useKey = 'ObjectKey'
#useKey = 'SubsystemKey'
#useKey = 'TscKey'
###################### end user choices ###################
# number of events and source
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.source = cms.Source("EmptyIOVSource",
timetype = cms.string('runnumber'),
firstValue = cms.uint64(1),
lastValue = cms.uint64(1),
interval = cms.uint64(1)
)
# Generate dummy L1TriggerKeyList
process.load("CondTools.L1Trigger.L1TriggerKeyListDummy_cff")
# Get configuration data from OMDS. This is the subclass of L1ConfigOnlineProdBase.
process.load("L1TriggerConfig.L1GtConfigProducers.l1GtTriggerMenuOnline_cfi")
if (useKey == 'ObjectKey') :
process.load("CondTools.L1Trigger.L1TriggerKeyDummy_cff")
process.L1TriggerKeyDummy.objectKeys = cms.VPSet(cms.PSet(
record = cms.string('L1GtTriggerMenuRcd'),
type = cms.string('L1GtTriggerMenu'),
key = cms.string('L1Menu_Commissioning2009_v7/L1T_Scales_20080926_startup/Imp0/0x100f')
))
elif (useKey == 'SubsystemKey') :
process.load("CondTools.L1Trigger.L1TriggerKeyDummy_cff")
process.L1TriggerKeyDummy.objectKeys = cms.VPSet()
process.L1TriggerKeyDummy.label = cms.string('SubsystemKeysOnly')
# xxxKey = csctfKey, dttfKey, rpcKey, gmtKey, rctKey, gctKey, gtKey, or tsp0Key
process.L1TriggerKeyDummy.gtKey = cms.string('gt_2009_test_1')
# Subclass of L1ObjectKeysOnlineProdBase.
process.load("L1TriggerConfig.L1GtConfigProducers.l1GtTscObjectKeysOnline_cfi")
process.l1GtTscObjectKeysOnline.systemLabel = cms.string('')
elif (useKey == 'TscKey') :
# TSC key
process.load("CondTools.L1Trigger.L1SubsystemKeysOnline_cfi")
#process.L1SubsystemKeysOnline.tscKey = cms.string( 'TSC_000618_090304_MIDWEEK2008_GTgt20090bst30_GMTstartup3_DTTF_DT_MI' )
process.L1SubsystemKeysOnline.tscKey = \
cms.string( 'TSC_000990_090723_CRAFT_GTgt200911_GMTsynctf02ro3rpc2_GCT_RCT_DTTF_CSCTF_HCAL_DT_RPC_MI')
# Subclass of L1ObjectKeysOnlineProdBase.
process.load("L1TriggerConfig.L1GtConfigProducers.l1GtTscObjectKeysOnline_cfi")
process.l1GtTscObjectKeysOnline.systemLabel = cms.string('')
else :
print('Error: no such key type ', useKey)
sys.exit()
process.getter = cms.EDAnalyzer("EventSetupRecordDataGetter",
toGet = cms.VPSet(cms.PSet(
record = cms.string('L1GtTriggerMenuRcd'),
data = cms.vstring('L1GtTriggerMenu')
)),
verbose = cms.untracked.bool(True)
)
process.p = cms.Path(process.getter)
# Message Logger
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.cout.placeholder = cms.untracked.bool(False)
process.MessageLogger.cout.threshold = cms.untracked.string('DEBUG')
process.MessageLogger.debugModules = cms.untracked.vstring('*')
| 35.287356 | 127 | 0.738436 |
| ca1acf4b504aafa96a913959e256e428595fc9a4 | 28,412 | py | Python | test/functional/rpc_fundrawtransaction.py | SegHaxx/jokecoin | 5cbd75ff506f2197ad5ea6f9e04d31216ab98e80 | ["MIT"] | 8 | 2021-02-21T22:47:09.000Z | 2021-07-27T03:38:59.000Z | test/functional/rpc_fundrawtransaction.py | SegHaxx/jokecoin | 5cbd75ff506f2197ad5ea6f9e04d31216ab98e80 | ["MIT"] | 1 | 2021-02-24T03:58:10.000Z | 2021-02-24T06:05:21.000Z | test/functional/rpc_fundrawtransaction.py | clowncrew/jokecoin | 1547238819f85a76623fc1c356bc32a62a785075 | ["MIT"] | 6 | 2021-02-24T02:13:19.000Z | 2021-05-31T07:53:50.000Z |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def get_unspent(listunspent, amount):
for utx in listunspent:
if utx['amount'] == amount:
return utx
raise AssertionError('Could not find unspent with amount={}'.format(amount))
class RawTransactionsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
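        # Worked example with a hypothetical relay fee: if min_relay_tx_fee were
        # 0.00001 BTC/kB, feeTolerance = 2 * 0.00001 / 1000 = 0.00000002 BTC.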
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction with which will not get a change output #
#####################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
####################################################
# test a fundrawtransaction with an invalid option #
####################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'foo': 'bar'})
raise AssertionError("Accepted invalid option foo")
except JSONRPCException as e:
assert("Unexpected key foo" in e.error['message'])
############################################################
# test a fundrawtransaction with an invalid change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': 'foobar'})
raise AssertionError("Accepted invalid jokecoin address")
except JSONRPCException as e:
assert("changeAddress must be a valid jokecoin address" in e.error['message'])
############################################################
# test a fundrawtransaction with a provided change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 2})
except JSONRPCException as e:
assert('changePosition out of bounds' == e.error['message'])
else:
assert(False)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0];
assert_equal(change, out['scriptPubKey']['addresses'][0])
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
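        # (4 + 1 + 36) bytes = 41 bytes = 82 hex characters precede the scriptSig length
        # byte, so this swaps its "00" (empty script) for "0100": a 1-byte script 0x00.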
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
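        # raw tx below: version 1, no inputs, one zero-value output carrying OP_RETURN "test"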
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
# Backward compatibility test (2nd param is includeWatching)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
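        # the watchonly input covers the payment in full, so fee plus change must equal
        # the second (spendable) input, i.e. watchonly_amount / 10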
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
#######################
# Test feeRate option #
#######################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[2].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
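        # the fee per byte should scale with the requested feeRate: roughly 2x and 10x the base rate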
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
if __name__ == '__main__':
RawTransactionsTest().main()
| 42.724812
| 223
| 0.562438
|
ee198718985b866b0210330bc030070a40d47e22
| 1,161
|
py
|
Python
|
statping/statping.py
|
danielpalstra/pystatping
|
eb6325229d45cd452528007b440ca545bacc3e04
|
[
"Apache-2.0"
] | null | null | null |
statping/statping.py
|
danielpalstra/pystatping
|
eb6325229d45cd452528007b440ca545bacc3e04
|
[
"Apache-2.0"
] | null | null | null |
statping/statping.py
|
danielpalstra/pystatping
|
eb6325229d45cd452528007b440ca545bacc3e04
|
[
"Apache-2.0"
] | null | null | null |
import requests
# from . import exceptions
import statping.connection as connection
import statping.miscellaneous as miscellaneous
import statping.services as services
import statping.incidents as incidents
import statping.groups as groups
import statping.users as users
import statping.notifiers as notifiers
import statping.messages as messages
import statping.checkins as checkins
import statping.theme as theme
import statping.oauth as oauth
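# Thin facade: a single StatpingConnection is created once and shared by every
# per-resource API wrapper assigned below.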
class Statping(object):
def __init__(self, url, **kwargs):
self.connection = connection.StatpingConnection(url, **kwargs)
self.miscellaneous = miscellaneous.Miscellaneous(self.connection)
self.services = services.Services(self.connection)
self.incidents = incidents.Incidents(self.connection)
self.groups = groups.Groups(self.connection)
self.users = users.Users(self.connection)
self.notifiers = notifiers.Notifiers(self.connection)
self.messages = messages.Messages(self.connection)
self.checkins = checkins.Checkins(self.connection)
self.theme = theme.Theme(self.connection)
self.oauth = oauth.Oauth(self.connection)
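# Example construction (illustrative sketch only; the URL is a placeholder and any
# authentication keyword arguments depend on StatpingConnection, which is defined
# in statping.connection, not in this file):
#
#     api = Statping("https://statping.example.com")
#     # the per-resource clients are then available as api.services, api.groups,
#     # api.users, api.messages, api.checkins, api.notifiers, ...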
| 38.7
| 73
| 0.761413
|
901e19d6478b4a59455b6153c9b92e362c98fc44
| 3,146
|
py
|
Python
|
sdk/servicebus/azure-servicebus/samples/async_samples/mgmt_topic_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/servicebus/azure-servicebus/samples/async_samples/mgmt_topic_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/servicebus/azure-servicebus/samples/async_samples/mgmt_topic_async.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
"""
Example to show managing topic entities under a ServiceBus Namespace, including
- Create a topic
- Get topic properties and runtime information
- Update a topic
- Delete a topic
- List topics under the given ServiceBus Namespace
"""
# pylint: disable=C0111
import os
import asyncio
import uuid
import datetime
from azure.servicebus.aio.management import ServiceBusAdministrationClient
CONNECTION_STR = os.environ['SERVICE_BUS_CONNECTION_STR']
TOPIC_NAME = "sb_mgmt_topic" + str(uuid.uuid4())
async def create_topic(servicebus_mgmt_client):
print("-- Create Topic")
await servicebus_mgmt_client.create_topic(TOPIC_NAME)
print("Topic {} is created.".format(TOPIC_NAME))
print("")
async def delete_topic(servicebus_mgmt_client):
print("-- Delete Topic")
await servicebus_mgmt_client.delete_topic(TOPIC_NAME)
print("Topic {} is deleted.".format(TOPIC_NAME))
print("")
async def list_topics(servicebus_mgmt_client):
print("-- List Topics")
async for topic_properties in servicebus_mgmt_client.list_topics():
print("Topic Name:", topic_properties.name)
print("")
async def get_and_update_topic(servicebus_mgmt_client):
print("-- Get and Update Topic")
topic_properties = await servicebus_mgmt_client.get_topic(TOPIC_NAME)
print("Topic Name:", topic_properties.name)
    print("Please refer to TopicProperties for complete available settings.")
print("")
# update by updating the properties in the model
topic_properties.default_message_time_to_live = datetime.timedelta(minutes=10)
await servicebus_mgmt_client.update_topic(topic_properties)
# update by passing keyword arguments
topic_properties = await servicebus_mgmt_client.get_topic(TOPIC_NAME)
await servicebus_mgmt_client.update_topic(
topic_properties,
default_message_time_to_live=datetime.timedelta(minutes=15)
)
async def get_topic_runtime_properties(servicebus_mgmt_client):
print("-- Get Topic Runtime Properties")
get_topic_runtime_properties = await servicebus_mgmt_client.get_topic_runtime_properties(TOPIC_NAME)
print("Topic Name:", get_topic_runtime_properties.name)
    print("Please refer to TopicRuntimeProperties for complete available runtime properties.")
print("")
async def main():
async with ServiceBusAdministrationClient.from_connection_string(CONNECTION_STR) as servicebus_mgmt_client:
await create_topic(servicebus_mgmt_client)
await list_topics(servicebus_mgmt_client)
await get_and_update_topic(servicebus_mgmt_client)
await get_topic_runtime_properties(servicebus_mgmt_client)
await delete_topic(servicebus_mgmt_client)
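# Note: on Python 3.7+ the two event-loop lines below could equivalently be written as asyncio.run(main()).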
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| 36.581395
| 111
| 0.723776
|
bc0622cbda68adc09182aac60b6b195b42802ce7
| 5,285
|
py
|
Python
|
run.py
|
G4Y8u9/Auto-Lianliankan
|
72cd5d7c28cbd344ea15cff44bfb7feacd18398e
|
[
"Apache-2.0"
] | 12
|
2018-06-23T00:42:08.000Z
|
2020-03-05T08:12:56.000Z
|
run.py
|
G4Y8u9/Auto-Lianliankan
|
72cd5d7c28cbd344ea15cff44bfb7feacd18398e
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
G4Y8u9/Auto-Lianliankan
|
72cd5d7c28cbd344ea15cff44bfb7feacd18398e
|
[
"Apache-2.0"
] | 4
|
2018-06-08T16:33:34.000Z
|
2019-04-09T10:04:12.000Z
|
import metching
import cv2
import numpy as np
import win32api
import win32gui
import win32con
from PIL import ImageGrab
import time
from config import *
import numpy
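# Locate the game window by WINDOW_TITLE (retrying every 3 seconds), bring it to the
# foreground and return its top-left screen coordinates.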
def getGameWindowPosition():
window = win32gui.FindWindow(None, WINDOW_TITLE)
while not window:
print('unable to find window, try in 3 secs...')
time.sleep(3)
window = win32gui.FindWindow(None, WINDOW_TITLE)
win32gui.SetForegroundWindow(window)
pos = win32gui.GetWindowRect(window)
print("Window found at:" + str(pos))
return pos[0], pos[1]
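# Take a full-screen screenshot, save it as screen.png and reload it as an OpenCV (BGR) image.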
def getScreenImage():
print('capturing screenshot...')
scim = ImageGrab.grab()
scim.save('screen.png')
return cv2.imread("screen.png")
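# Slice the captured screen into H_NUM x V_NUM tiles (column by column) and keep only
# each tile's inner SUB_* region for matching.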
def getAllSquare(screen_image, game_pos):
print('cutting pics...')
game_x = game_pos[0] + MARGIN_LEFT
game_y = game_pos[1] + MARGIN_HEIGHT
all_square = []
for x in range(0, H_NUM):
for y in range(0, V_NUM):
square = screen_image[game_y + y*SQUARE_HEIGHT:
game_y + (y+1)*SQUARE_HEIGHT,
game_x + x*SQUARE_WIDTH:
game_x + (x+1)*SQUARE_WIDTH]
all_square.append(square)
return list(map(lambda square:
square[SUB_LT_Y:SUB_RB_Y, SUB_LT_X:SUB_RB_X], all_square))
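# Return True if img is pixel-for-pixel identical to any image already in img_list.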
def isImageExist(img, img_list):
for existed_img in img_list:
b = np.subtract(existed_img, img)
if not np.any(b):
return True
else:
continue
return False
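# Collect one representative image per distinct tile type, seeded with empty.png as the blank cell.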
def getAllSquareTypes(all_square):
print("sorting pics...")
types = []
empty_img = cv2.imread('empty.png')
types.append(empty_img)
for square in all_square:
if not isImageExist(square, types):
types.append(square)
return types
all_avail_count = 0
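# Map every tile to the index of its matching type, one list of length V_NUM per board
# column; also count the removable pairs (non-empty tiles / 2) into all_avail_count.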
def getAllSquareRecord(all_square_list, types):
print("turning pics to matrix...")
record = []
line = []
for square in all_square_list:
num = 0
for type in types:
res = cv2.subtract(square, type)
if not np.any(res):
line.append(num)
break
num += 1
if len(line) == V_NUM:
record.append(line)
line = []
print(record)
record_numpy = numpy.array(record, dtype=numpy.int16)
global all_avail_count
for each in record_numpy.reshape(-1):
if each != 0:
all_avail_count += 1
all_avail_count = all_avail_count / 2
print('%d pairs found' % all_avail_count)
return record
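# Scan the matrix for a removable pair (as judged by metching.canConnect), click both
# tiles with small delays, zero them in the matrix and return True; return False if
# no pair could be removed.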
def autoRelease(result, game_x, game_y, num, all_):
for i in range(0, len(result)):
for j in range(0, len(result[0])):
if result[i][j] != 0:
for m in range(0, len(result)):
for n in range(0, len(result[0])):
if result[m][n] != 0:
if metching.canConnect(i, j, m, n, result):
result[i][j] = 0
result[m][n] = 0
x1 = game_x + j*SQUARE_WIDTH
y1 = game_y + i*SQUARE_HEIGHT
x2 = game_x + n*SQUARE_WIDTH
y2 = game_y + m*SQUARE_HEIGHT
win32api.SetCursorPos((x1 + 15, y1 + 18))
win32api.mouse_event(
win32con.MOUSEEVENTF_LEFTDOWN, x1+15,
y1+18, 0, 0)
win32api.mouse_event(
win32con.MOUSEEVENTF_LEFTUP, x1+15,
y1+18, 0, 0)
ti = TIME_INTERVAL_1()
time.sleep(ti)
print('\tdelay 1:%.3f' % ti, end='')
win32api.SetCursorPos((x2 + 15, y2 + 18))
win32api.mouse_event(
win32con.MOUSEEVENTF_LEFTDOWN, x2 + 15,
y2 + 18, 0, 0)
win32api.mouse_event(
win32con.MOUSEEVENTF_LEFTUP, x2 + 15,
y2 + 18, 0, 0)
ti = TIME_INTERVAL(num/all_)
time.sleep(ti)
print('\tdelay 2:%.3f' % ti)
return True
return False
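# Remove pairs one at a time until the precomputed all_avail_count pairs are cleared.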
def autoRemove(squares, game_pos):
game_x = game_pos[0] + MARGIN_LEFT
game_y = game_pos[1] + MARGIN_HEIGHT
for i in range(0, int(all_avail_count)):
print('Pair %d\tRemain %d\t' % ((i+1), all_avail_count-i-1), end='')
autoRelease(squares, game_x, game_y, all_avail_count-i, all_avail_count)
if __name__ == '__main__':
game_pos = getGameWindowPosition()
time.sleep(1)
screen_image = getScreenImage()
all_square_list = getAllSquare(screen_image, game_pos)
types = getAllSquareTypes(all_square_list)
result = np.transpose(getAllSquareRecord(all_square_list, types))
autoRemove(result, game_pos)
cv2.waitKey(0)
cv2.destroyAllWindows()
| 34.096774
| 80
| 0.517124
|
95d24b56dd0285be47aed8e774f5acc321641730
| 3,628
|
py
|
Python
|
Worker/models.py
|
mbilalshafiq9/DigitalWorkers
|
6a204329ec97f5a3859ef7bf91a59164bdca5fbc
|
[
"MIT"
] | 1
|
2021-11-23T17:21:03.000Z
|
2021-11-23T17:21:03.000Z
|
Worker/models.py
|
mbilalshafiq9/DigitalWorkers
|
6a204329ec97f5a3859ef7bf91a59164bdca5fbc
|
[
"MIT"
] | null | null | null |
Worker/models.py
|
mbilalshafiq9/DigitalWorkers
|
6a204329ec97f5a3859ef7bf91a59164bdca5fbc
|
[
"MIT"
] | null | null | null |
from django.db import models
# from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.fields import BooleanField
from Buyer.models import Work, Buyer
# Create your models here.
class Worker(models.Model):
id = models.BigAutoField(primary_key=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)
phoneno = models.IntegerField( null=True)
cnic = models.IntegerField( null=True)
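    # NOTE: 11-digit phone numbers and 13-digit CNICs overflow a 32-bit IntegerField on
    # most database backends; CharField or BigIntegerField would be safer here.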
gender = models.CharField(max_length=60, null=True)
city = models.CharField(max_length=60, null=True)
address = models.CharField(max_length=120, null=True)
skills = models.TextField()
cnic_back_pic = models.ImageField(null=True, upload_to="uploads/")
cnic_front_pic = models.ImageField(null=True, upload_to="uploads/")
is_verified=models.BooleanField(default=False)
def __str__(self):
worker=self.user.name+" - "+self.user.email
return worker
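# A worker's bid on a buyer's Work item: proposed budget, due date and description,
# plus whether the buyer accepted it.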
class Offer(models.Model):
id = models.BigAutoField(primary_key=True)
budget = models.IntegerField( null=True)
due_date = models.DateTimeField( null=True)
description = models.TextField( null=True)
offer_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)
work = models.ForeignKey(Work, on_delete=models.CASCADE, null=True)
offer_at = models.DateTimeField(auto_now_add=True)
offer_to = models.CharField(max_length=120, null=True)
is_accepted=models.BooleanField( default=False)
def __str__(self):
offer=self.description
return offer
class Order(models.Model):
id = models.BigAutoField(primary_key=True)
offer = models.ForeignKey(Offer, on_delete=models.CASCADE, null=True)
order_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True,related_name='order_by')
order_to = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True,related_name='order_to')
ordered_at=models.DateTimeField(auto_now_add=True)
status=models.CharField(max_length=60, null=True, default='active')
def __str__(self):
order=self.offer.description
return order
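# Commission amounts recorded against a worker; Commission_Paid below stores the
# payments workers make towards them.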
class Commission(models.Model):
id = models.BigAutoField(primary_key=True)
worker = models.ForeignKey(Worker, on_delete=models.CASCADE, null=True,related_name='worker_com')
amount = models.IntegerField(null=True)
created_at=models.DateTimeField(auto_now_add=True)
def __str__(self):
commission=self.worker.user.name +' - Rs.'+ str(self.amount)
return commission
class Commission_Paid(models.Model):
id = models.BigAutoField(primary_key=True)
paid_by = models.ForeignKey(Worker, on_delete=models.CASCADE, null=True,related_name='paid_by')
pay_amount = models.IntegerField(null=True)
paid_at=models.DateTimeField(auto_now_add=True)
def __str__(self):
pay=self.paid_by.user.name +' - Rs.'+ str(self.pay_amount)
return pay
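# A buyer's rating and written feedback for a worker, tied to a specific Order.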
class Review(models.Model):
id = models.BigAutoField(primary_key=True)
worker = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, related_name="worker_re")
buyer = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True , related_name="buyer_re")
order = models.ForeignKey(Order, on_delete=models.CASCADE, null=True , related_name="order")
rating = models.IntegerField( null=True)
description = models.TextField( null=True)
created_at=models.DateTimeField(auto_now_add=True)
def __str__(self):
review=self.description
return review
| 42.682353
| 119
| 0.742558
|
9afae0cb326bb6219bfcd7837661dac7e686d8f7
| 55,219
|
py
|
Python
|
mavsdk/mission_raw_pb2.py
|
potaito/MAVSDK-Python
|
35a4b25c999b1864518ac16cb023a22014ad43e9
|
[
"BSD-3-Clause"
] | null | null | null |
mavsdk/mission_raw_pb2.py
|
potaito/MAVSDK-Python
|
35a4b25c999b1864518ac16cb023a22014ad43e9
|
[
"BSD-3-Clause"
] | null | null | null |
mavsdk/mission_raw_pb2.py
|
potaito/MAVSDK-Python
|
35a4b25c999b1864518ac16cb023a22014ad43e9
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mission_raw/mission_raw.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import mavsdk_options_pb2 as mavsdk__options__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='mission_raw/mission_raw.proto',
package='mavsdk.rpc.mission_raw',
syntax='proto3',
serialized_options=b'\n\025io.mavsdk.mission_rawB\017MissionRawProto',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1dmission_raw/mission_raw.proto\x12\x16mavsdk.rpc.mission_raw\x1a\x14mavsdk_options.proto\"R\n\x14UploadMissionRequest\x12:\n\rmission_items\x18\x01 \x03(\x0b\x32#.mavsdk.rpc.mission_raw.MissionItem\"]\n\x15UploadMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"\x1c\n\x1a\x43\x61ncelMissionUploadRequest\"c\n\x1b\x43\x61ncelMissionUploadResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"\x18\n\x16\x44ownloadMissionRequest\"\x9b\x01\n\x17\x44ownloadMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\x12:\n\rmission_items\x18\x02 \x03(\x0b\x32#.mavsdk.rpc.mission_raw.MissionItem\"\x1e\n\x1c\x43\x61ncelMissionDownloadRequest\"e\n\x1d\x43\x61ncelMissionDownloadResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"\x15\n\x13StartMissionRequest\"\\\n\x14StartMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"\x15\n\x13PauseMissionRequest\"\\\n\x14PauseMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"\x15\n\x13\x43learMissionRequest\"\\\n\x14\x43learMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"-\n\x1cSetCurrentMissionItemRequest\x12\r\n\x05index\x18\x01 \x01(\x05\"e\n\x1dSetCurrentMissionItemResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\"!\n\x1fSubscribeMissionProgressRequest\"\\\n\x17MissionProgressResponse\x12\x41\n\x10mission_progress\x18\x01 \x01(\x0b\x32\'.mavsdk.rpc.mission_raw.MissionProgress\" \n\x1eSubscribeMissionChangedRequest\"1\n\x16MissionChangedResponse\x12\x17\n\x0fmission_changed\x18\x01 \x01(\x08\";\n\"ImportQgroundcontrolMissionRequest\x12\x15\n\rqgc_plan_path\x18\x01 \x01(\t\"\xb3\x01\n#ImportQgroundcontrolMissionResponse\x12\x44\n\x12mission_raw_result\x18\x01 \x01(\x0b\x32(.mavsdk.rpc.mission_raw.MissionRawResult\x12\x46\n\x13mission_import_data\x18\x02 \x01(\x0b\x32).mavsdk.rpc.mission_raw.MissionImportData\"1\n\x0fMissionProgress\x12\x0f\n\x07\x63urrent\x18\x01 \x01(\x05\x12\r\n\x05total\x18\x02 \x01(\x05\"\xd8\x01\n\x0bMissionItem\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\r\n\x05\x66rame\x18\x02 \x01(\r\x12\x0f\n\x07\x63ommand\x18\x03 \x01(\r\x12\x0f\n\x07\x63urrent\x18\x04 \x01(\r\x12\x14\n\x0c\x61utocontinue\x18\x05 \x01(\r\x12\x0e\n\x06param1\x18\x06 \x01(\x02\x12\x0e\n\x06param2\x18\x07 \x01(\x02\x12\x0e\n\x06param3\x18\x08 \x01(\x02\x12\x0e\n\x06param4\x18\t \x01(\x02\x12\t\n\x01x\x18\n \x01(\x05\x12\t\n\x01y\x18\x0b \x01(\x05\x12\t\n\x01z\x18\x0c \x01(\x02\x12\x14\n\x0cmission_type\x18\r \x01(\r\"\xc6\x01\n\x11MissionImportData\x12:\n\rmission_items\x18\x01 \x03(\x0b\x32#.mavsdk.rpc.mission_raw.MissionItem\x12;\n\x0egeofence_items\x18\x02 \x03(\x0b\x32#.mavsdk.rpc.mission_raw.MissionItem\x12\x38\n\x0brally_items\x18\x03 \x03(\x0b\x32#.mavsdk.rpc.mission_raw.MissionItem\"\xb2\x03\n\x10MissionRawResult\x12?\n\x06result\x18\x01 \x01(\x0e\x32/.mavsdk.rpc.mission_raw.MissionRawResult.Result\x12\x12\n\nresult_str\x18\x02 
\x01(\t\"\xc8\x02\n\x06Result\x12\x12\n\x0eRESULT_UNKNOWN\x10\x00\x12\x12\n\x0eRESULT_SUCCESS\x10\x01\x12\x10\n\x0cRESULT_ERROR\x10\x02\x12!\n\x1dRESULT_TOO_MANY_MISSION_ITEMS\x10\x03\x12\x0f\n\x0bRESULT_BUSY\x10\x04\x12\x12\n\x0eRESULT_TIMEOUT\x10\x05\x12\x1b\n\x17RESULT_INVALID_ARGUMENT\x10\x06\x12\x16\n\x12RESULT_UNSUPPORTED\x10\x07\x12\x1f\n\x1bRESULT_NO_MISSION_AVAILABLE\x10\x08\x12\x1d\n\x19RESULT_TRANSFER_CANCELLED\x10\t\x12\"\n\x1eRESULT_FAILED_TO_OPEN_QGC_PLAN\x10\n\x12#\n\x1fRESULT_FAILED_TO_PARSE_QGC_PLAN\x10\x0b\x32\x93\x0b\n\x11MissionRawService\x12n\n\rUploadMission\x12,.mavsdk.rpc.mission_raw.UploadMissionRequest\x1a-.mavsdk.rpc.mission_raw.UploadMissionResponse\"\x00\x12\x84\x01\n\x13\x43\x61ncelMissionUpload\x12\x32.mavsdk.rpc.mission_raw.CancelMissionUploadRequest\x1a\x33.mavsdk.rpc.mission_raw.CancelMissionUploadResponse\"\x04\x80\xb5\x18\x01\x12t\n\x0f\x44ownloadMission\x12..mavsdk.rpc.mission_raw.DownloadMissionRequest\x1a/.mavsdk.rpc.mission_raw.DownloadMissionResponse\"\x00\x12\x8a\x01\n\x15\x43\x61ncelMissionDownload\x12\x34.mavsdk.rpc.mission_raw.CancelMissionDownloadRequest\x1a\x35.mavsdk.rpc.mission_raw.CancelMissionDownloadResponse\"\x04\x80\xb5\x18\x01\x12k\n\x0cStartMission\x12+.mavsdk.rpc.mission_raw.StartMissionRequest\x1a,.mavsdk.rpc.mission_raw.StartMissionResponse\"\x00\x12k\n\x0cPauseMission\x12+.mavsdk.rpc.mission_raw.PauseMissionRequest\x1a,.mavsdk.rpc.mission_raw.PauseMissionResponse\"\x00\x12k\n\x0c\x43learMission\x12+.mavsdk.rpc.mission_raw.ClearMissionRequest\x1a,.mavsdk.rpc.mission_raw.ClearMissionResponse\"\x00\x12\x86\x01\n\x15SetCurrentMissionItem\x12\x34.mavsdk.rpc.mission_raw.SetCurrentMissionItemRequest\x1a\x35.mavsdk.rpc.mission_raw.SetCurrentMissionItemResponse\"\x00\x12\x88\x01\n\x18SubscribeMissionProgress\x12\x37.mavsdk.rpc.mission_raw.SubscribeMissionProgressRequest\x1a/.mavsdk.rpc.mission_raw.MissionProgressResponse\"\x00\x30\x01\x12\x89\x01\n\x17SubscribeMissionChanged\x12\x36.mavsdk.rpc.mission_raw.SubscribeMissionChangedRequest\x1a..mavsdk.rpc.mission_raw.MissionChangedResponse\"\x04\x80\xb5\x18\x00\x30\x01\x12\x9c\x01\n\x1bImportQgroundcontrolMission\x12:.mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionRequest\x1a;.mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionResponse\"\x04\x80\xb5\x18\x01\x42(\n\x15io.mavsdk.mission_rawB\x0fMissionRawProtob\x06proto3'
,
dependencies=[mavsdk__options__pb2.DESCRIPTOR,])
_MISSIONRAWRESULT_RESULT = _descriptor.EnumDescriptor(
name='Result',
full_name='mavsdk.rpc.mission_raw.MissionRawResult.Result',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='RESULT_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_SUCCESS', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_ERROR', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_TOO_MANY_MISSION_ITEMS', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_BUSY', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_TIMEOUT', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_INVALID_ARGUMENT', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_UNSUPPORTED', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_NO_MISSION_AVAILABLE', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_TRANSFER_CANCELLED', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_FAILED_TO_OPEN_QGC_PLAN', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESULT_FAILED_TO_PARSE_QGC_PLAN', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=2244,
serialized_end=2572,
)
_sym_db.RegisterEnumDescriptor(_MISSIONRAWRESULT_RESULT)
_UPLOADMISSIONREQUEST = _descriptor.Descriptor(
name='UploadMissionRequest',
full_name='mavsdk.rpc.mission_raw.UploadMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_items', full_name='mavsdk.rpc.mission_raw.UploadMissionRequest.mission_items', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=79,
serialized_end=161,
)
_UPLOADMISSIONRESPONSE = _descriptor.Descriptor(
name='UploadMissionResponse',
full_name='mavsdk.rpc.mission_raw.UploadMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.UploadMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=256,
)
_CANCELMISSIONUPLOADREQUEST = _descriptor.Descriptor(
name='CancelMissionUploadRequest',
full_name='mavsdk.rpc.mission_raw.CancelMissionUploadRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=258,
serialized_end=286,
)
_CANCELMISSIONUPLOADRESPONSE = _descriptor.Descriptor(
name='CancelMissionUploadResponse',
full_name='mavsdk.rpc.mission_raw.CancelMissionUploadResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.CancelMissionUploadResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=288,
serialized_end=387,
)
_DOWNLOADMISSIONREQUEST = _descriptor.Descriptor(
name='DownloadMissionRequest',
full_name='mavsdk.rpc.mission_raw.DownloadMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=389,
serialized_end=413,
)
_DOWNLOADMISSIONRESPONSE = _descriptor.Descriptor(
name='DownloadMissionResponse',
full_name='mavsdk.rpc.mission_raw.DownloadMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.DownloadMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mission_items', full_name='mavsdk.rpc.mission_raw.DownloadMissionResponse.mission_items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=416,
serialized_end=571,
)
_CANCELMISSIONDOWNLOADREQUEST = _descriptor.Descriptor(
name='CancelMissionDownloadRequest',
full_name='mavsdk.rpc.mission_raw.CancelMissionDownloadRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=573,
serialized_end=603,
)
_CANCELMISSIONDOWNLOADRESPONSE = _descriptor.Descriptor(
name='CancelMissionDownloadResponse',
full_name='mavsdk.rpc.mission_raw.CancelMissionDownloadResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.CancelMissionDownloadResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=605,
serialized_end=706,
)
_STARTMISSIONREQUEST = _descriptor.Descriptor(
name='StartMissionRequest',
full_name='mavsdk.rpc.mission_raw.StartMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=708,
serialized_end=729,
)
_STARTMISSIONRESPONSE = _descriptor.Descriptor(
name='StartMissionResponse',
full_name='mavsdk.rpc.mission_raw.StartMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.StartMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=731,
serialized_end=823,
)
_PAUSEMISSIONREQUEST = _descriptor.Descriptor(
name='PauseMissionRequest',
full_name='mavsdk.rpc.mission_raw.PauseMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=825,
serialized_end=846,
)
_PAUSEMISSIONRESPONSE = _descriptor.Descriptor(
name='PauseMissionResponse',
full_name='mavsdk.rpc.mission_raw.PauseMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.PauseMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=848,
serialized_end=940,
)
_CLEARMISSIONREQUEST = _descriptor.Descriptor(
name='ClearMissionRequest',
full_name='mavsdk.rpc.mission_raw.ClearMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=942,
serialized_end=963,
)
_CLEARMISSIONRESPONSE = _descriptor.Descriptor(
name='ClearMissionResponse',
full_name='mavsdk.rpc.mission_raw.ClearMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.ClearMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=965,
serialized_end=1057,
)
_SETCURRENTMISSIONITEMREQUEST = _descriptor.Descriptor(
name='SetCurrentMissionItemRequest',
full_name='mavsdk.rpc.mission_raw.SetCurrentMissionItemRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='mavsdk.rpc.mission_raw.SetCurrentMissionItemRequest.index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1059,
serialized_end=1104,
)
_SETCURRENTMISSIONITEMRESPONSE = _descriptor.Descriptor(
name='SetCurrentMissionItemResponse',
full_name='mavsdk.rpc.mission_raw.SetCurrentMissionItemResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.SetCurrentMissionItemResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1106,
serialized_end=1207,
)
_SUBSCRIBEMISSIONPROGRESSREQUEST = _descriptor.Descriptor(
name='SubscribeMissionProgressRequest',
full_name='mavsdk.rpc.mission_raw.SubscribeMissionProgressRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1209,
serialized_end=1242,
)
_MISSIONPROGRESSRESPONSE = _descriptor.Descriptor(
name='MissionProgressResponse',
full_name='mavsdk.rpc.mission_raw.MissionProgressResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_progress', full_name='mavsdk.rpc.mission_raw.MissionProgressResponse.mission_progress', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1244,
serialized_end=1336,
)
_SUBSCRIBEMISSIONCHANGEDREQUEST = _descriptor.Descriptor(
name='SubscribeMissionChangedRequest',
full_name='mavsdk.rpc.mission_raw.SubscribeMissionChangedRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1338,
serialized_end=1370,
)
_MISSIONCHANGEDRESPONSE = _descriptor.Descriptor(
name='MissionChangedResponse',
full_name='mavsdk.rpc.mission_raw.MissionChangedResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_changed', full_name='mavsdk.rpc.mission_raw.MissionChangedResponse.mission_changed', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1372,
serialized_end=1421,
)
_IMPORTQGROUNDCONTROLMISSIONREQUEST = _descriptor.Descriptor(
name='ImportQgroundcontrolMissionRequest',
full_name='mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='qgc_plan_path', full_name='mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionRequest.qgc_plan_path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1423,
serialized_end=1482,
)
_IMPORTQGROUNDCONTROLMISSIONRESPONSE = _descriptor.Descriptor(
name='ImportQgroundcontrolMissionResponse',
full_name='mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_raw_result', full_name='mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionResponse.mission_raw_result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mission_import_data', full_name='mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionResponse.mission_import_data', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1485,
serialized_end=1664,
)
_MISSIONPROGRESS = _descriptor.Descriptor(
name='MissionProgress',
full_name='mavsdk.rpc.mission_raw.MissionProgress',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='current', full_name='mavsdk.rpc.mission_raw.MissionProgress.current', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total', full_name='mavsdk.rpc.mission_raw.MissionProgress.total', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1666,
serialized_end=1715,
)
_MISSIONITEM = _descriptor.Descriptor(
name='MissionItem',
full_name='mavsdk.rpc.mission_raw.MissionItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='seq', full_name='mavsdk.rpc.mission_raw.MissionItem.seq', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frame', full_name='mavsdk.rpc.mission_raw.MissionItem.frame', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='command', full_name='mavsdk.rpc.mission_raw.MissionItem.command', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='current', full_name='mavsdk.rpc.mission_raw.MissionItem.current', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autocontinue', full_name='mavsdk.rpc.mission_raw.MissionItem.autocontinue', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='param1', full_name='mavsdk.rpc.mission_raw.MissionItem.param1', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='param2', full_name='mavsdk.rpc.mission_raw.MissionItem.param2', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='param3', full_name='mavsdk.rpc.mission_raw.MissionItem.param3', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='param4', full_name='mavsdk.rpc.mission_raw.MissionItem.param4', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='x', full_name='mavsdk.rpc.mission_raw.MissionItem.x', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y', full_name='mavsdk.rpc.mission_raw.MissionItem.y', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='z', full_name='mavsdk.rpc.mission_raw.MissionItem.z', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mission_type', full_name='mavsdk.rpc.mission_raw.MissionItem.mission_type', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1718,
serialized_end=1934,
)
_MISSIONIMPORTDATA = _descriptor.Descriptor(
name='MissionImportData',
full_name='mavsdk.rpc.mission_raw.MissionImportData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mission_items', full_name='mavsdk.rpc.mission_raw.MissionImportData.mission_items', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geofence_items', full_name='mavsdk.rpc.mission_raw.MissionImportData.geofence_items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rally_items', full_name='mavsdk.rpc.mission_raw.MissionImportData.rally_items', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1937,
serialized_end=2135,
)
_MISSIONRAWRESULT = _descriptor.Descriptor(
name='MissionRawResult',
full_name='mavsdk.rpc.mission_raw.MissionRawResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='mavsdk.rpc.mission_raw.MissionRawResult.result', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result_str', full_name='mavsdk.rpc.mission_raw.MissionRawResult.result_str', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_MISSIONRAWRESULT_RESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2138,
serialized_end=2572,
)
_UPLOADMISSIONREQUEST.fields_by_name['mission_items'].message_type = _MISSIONITEM
_UPLOADMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_CANCELMISSIONUPLOADRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_DOWNLOADMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_DOWNLOADMISSIONRESPONSE.fields_by_name['mission_items'].message_type = _MISSIONITEM
_CANCELMISSIONDOWNLOADRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_STARTMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_PAUSEMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_CLEARMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_SETCURRENTMISSIONITEMRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_MISSIONPROGRESSRESPONSE.fields_by_name['mission_progress'].message_type = _MISSIONPROGRESS
_IMPORTQGROUNDCONTROLMISSIONRESPONSE.fields_by_name['mission_raw_result'].message_type = _MISSIONRAWRESULT
_IMPORTQGROUNDCONTROLMISSIONRESPONSE.fields_by_name['mission_import_data'].message_type = _MISSIONIMPORTDATA
_MISSIONIMPORTDATA.fields_by_name['mission_items'].message_type = _MISSIONITEM
_MISSIONIMPORTDATA.fields_by_name['geofence_items'].message_type = _MISSIONITEM
_MISSIONIMPORTDATA.fields_by_name['rally_items'].message_type = _MISSIONITEM
_MISSIONRAWRESULT.fields_by_name['result'].enum_type = _MISSIONRAWRESULT_RESULT
_MISSIONRAWRESULT_RESULT.containing_type = _MISSIONRAWRESULT
DESCRIPTOR.message_types_by_name['UploadMissionRequest'] = _UPLOADMISSIONREQUEST
DESCRIPTOR.message_types_by_name['UploadMissionResponse'] = _UPLOADMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['CancelMissionUploadRequest'] = _CANCELMISSIONUPLOADREQUEST
DESCRIPTOR.message_types_by_name['CancelMissionUploadResponse'] = _CANCELMISSIONUPLOADRESPONSE
DESCRIPTOR.message_types_by_name['DownloadMissionRequest'] = _DOWNLOADMISSIONREQUEST
DESCRIPTOR.message_types_by_name['DownloadMissionResponse'] = _DOWNLOADMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['CancelMissionDownloadRequest'] = _CANCELMISSIONDOWNLOADREQUEST
DESCRIPTOR.message_types_by_name['CancelMissionDownloadResponse'] = _CANCELMISSIONDOWNLOADRESPONSE
DESCRIPTOR.message_types_by_name['StartMissionRequest'] = _STARTMISSIONREQUEST
DESCRIPTOR.message_types_by_name['StartMissionResponse'] = _STARTMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['PauseMissionRequest'] = _PAUSEMISSIONREQUEST
DESCRIPTOR.message_types_by_name['PauseMissionResponse'] = _PAUSEMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['ClearMissionRequest'] = _CLEARMISSIONREQUEST
DESCRIPTOR.message_types_by_name['ClearMissionResponse'] = _CLEARMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['SetCurrentMissionItemRequest'] = _SETCURRENTMISSIONITEMREQUEST
DESCRIPTOR.message_types_by_name['SetCurrentMissionItemResponse'] = _SETCURRENTMISSIONITEMRESPONSE
DESCRIPTOR.message_types_by_name['SubscribeMissionProgressRequest'] = _SUBSCRIBEMISSIONPROGRESSREQUEST
DESCRIPTOR.message_types_by_name['MissionProgressResponse'] = _MISSIONPROGRESSRESPONSE
DESCRIPTOR.message_types_by_name['SubscribeMissionChangedRequest'] = _SUBSCRIBEMISSIONCHANGEDREQUEST
DESCRIPTOR.message_types_by_name['MissionChangedResponse'] = _MISSIONCHANGEDRESPONSE
DESCRIPTOR.message_types_by_name['ImportQgroundcontrolMissionRequest'] = _IMPORTQGROUNDCONTROLMISSIONREQUEST
DESCRIPTOR.message_types_by_name['ImportQgroundcontrolMissionResponse'] = _IMPORTQGROUNDCONTROLMISSIONRESPONSE
DESCRIPTOR.message_types_by_name['MissionProgress'] = _MISSIONPROGRESS
DESCRIPTOR.message_types_by_name['MissionItem'] = _MISSIONITEM
DESCRIPTOR.message_types_by_name['MissionImportData'] = _MISSIONIMPORTDATA
DESCRIPTOR.message_types_by_name['MissionRawResult'] = _MISSIONRAWRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
UploadMissionRequest = _reflection.GeneratedProtocolMessageType('UploadMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _UPLOADMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.UploadMissionRequest)
})
_sym_db.RegisterMessage(UploadMissionRequest)
UploadMissionResponse = _reflection.GeneratedProtocolMessageType('UploadMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _UPLOADMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.UploadMissionResponse)
})
_sym_db.RegisterMessage(UploadMissionResponse)
CancelMissionUploadRequest = _reflection.GeneratedProtocolMessageType('CancelMissionUploadRequest', (_message.Message,), {
'DESCRIPTOR' : _CANCELMISSIONUPLOADREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.CancelMissionUploadRequest)
})
_sym_db.RegisterMessage(CancelMissionUploadRequest)
CancelMissionUploadResponse = _reflection.GeneratedProtocolMessageType('CancelMissionUploadResponse', (_message.Message,), {
'DESCRIPTOR' : _CANCELMISSIONUPLOADRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.CancelMissionUploadResponse)
})
_sym_db.RegisterMessage(CancelMissionUploadResponse)
DownloadMissionRequest = _reflection.GeneratedProtocolMessageType('DownloadMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _DOWNLOADMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.DownloadMissionRequest)
})
_sym_db.RegisterMessage(DownloadMissionRequest)
DownloadMissionResponse = _reflection.GeneratedProtocolMessageType('DownloadMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _DOWNLOADMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.DownloadMissionResponse)
})
_sym_db.RegisterMessage(DownloadMissionResponse)
CancelMissionDownloadRequest = _reflection.GeneratedProtocolMessageType('CancelMissionDownloadRequest', (_message.Message,), {
'DESCRIPTOR' : _CANCELMISSIONDOWNLOADREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.CancelMissionDownloadRequest)
})
_sym_db.RegisterMessage(CancelMissionDownloadRequest)
CancelMissionDownloadResponse = _reflection.GeneratedProtocolMessageType('CancelMissionDownloadResponse', (_message.Message,), {
'DESCRIPTOR' : _CANCELMISSIONDOWNLOADRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.CancelMissionDownloadResponse)
})
_sym_db.RegisterMessage(CancelMissionDownloadResponse)
StartMissionRequest = _reflection.GeneratedProtocolMessageType('StartMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _STARTMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.StartMissionRequest)
})
_sym_db.RegisterMessage(StartMissionRequest)
StartMissionResponse = _reflection.GeneratedProtocolMessageType('StartMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _STARTMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.StartMissionResponse)
})
_sym_db.RegisterMessage(StartMissionResponse)
PauseMissionRequest = _reflection.GeneratedProtocolMessageType('PauseMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _PAUSEMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.PauseMissionRequest)
})
_sym_db.RegisterMessage(PauseMissionRequest)
PauseMissionResponse = _reflection.GeneratedProtocolMessageType('PauseMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _PAUSEMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.PauseMissionResponse)
})
_sym_db.RegisterMessage(PauseMissionResponse)
ClearMissionRequest = _reflection.GeneratedProtocolMessageType('ClearMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _CLEARMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.ClearMissionRequest)
})
_sym_db.RegisterMessage(ClearMissionRequest)
ClearMissionResponse = _reflection.GeneratedProtocolMessageType('ClearMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _CLEARMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.ClearMissionResponse)
})
_sym_db.RegisterMessage(ClearMissionResponse)
SetCurrentMissionItemRequest = _reflection.GeneratedProtocolMessageType('SetCurrentMissionItemRequest', (_message.Message,), {
'DESCRIPTOR' : _SETCURRENTMISSIONITEMREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.SetCurrentMissionItemRequest)
})
_sym_db.RegisterMessage(SetCurrentMissionItemRequest)
SetCurrentMissionItemResponse = _reflection.GeneratedProtocolMessageType('SetCurrentMissionItemResponse', (_message.Message,), {
'DESCRIPTOR' : _SETCURRENTMISSIONITEMRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.SetCurrentMissionItemResponse)
})
_sym_db.RegisterMessage(SetCurrentMissionItemResponse)
SubscribeMissionProgressRequest = _reflection.GeneratedProtocolMessageType('SubscribeMissionProgressRequest', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIBEMISSIONPROGRESSREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.SubscribeMissionProgressRequest)
})
_sym_db.RegisterMessage(SubscribeMissionProgressRequest)
MissionProgressResponse = _reflection.GeneratedProtocolMessageType('MissionProgressResponse', (_message.Message,), {
'DESCRIPTOR' : _MISSIONPROGRESSRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionProgressResponse)
})
_sym_db.RegisterMessage(MissionProgressResponse)
SubscribeMissionChangedRequest = _reflection.GeneratedProtocolMessageType('SubscribeMissionChangedRequest', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIBEMISSIONCHANGEDREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.SubscribeMissionChangedRequest)
})
_sym_db.RegisterMessage(SubscribeMissionChangedRequest)
MissionChangedResponse = _reflection.GeneratedProtocolMessageType('MissionChangedResponse', (_message.Message,), {
'DESCRIPTOR' : _MISSIONCHANGEDRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionChangedResponse)
})
_sym_db.RegisterMessage(MissionChangedResponse)
ImportQgroundcontrolMissionRequest = _reflection.GeneratedProtocolMessageType('ImportQgroundcontrolMissionRequest', (_message.Message,), {
'DESCRIPTOR' : _IMPORTQGROUNDCONTROLMISSIONREQUEST,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionRequest)
})
_sym_db.RegisterMessage(ImportQgroundcontrolMissionRequest)
ImportQgroundcontrolMissionResponse = _reflection.GeneratedProtocolMessageType('ImportQgroundcontrolMissionResponse', (_message.Message,), {
'DESCRIPTOR' : _IMPORTQGROUNDCONTROLMISSIONRESPONSE,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.ImportQgroundcontrolMissionResponse)
})
_sym_db.RegisterMessage(ImportQgroundcontrolMissionResponse)
MissionProgress = _reflection.GeneratedProtocolMessageType('MissionProgress', (_message.Message,), {
'DESCRIPTOR' : _MISSIONPROGRESS,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionProgress)
})
_sym_db.RegisterMessage(MissionProgress)
MissionItem = _reflection.GeneratedProtocolMessageType('MissionItem', (_message.Message,), {
'DESCRIPTOR' : _MISSIONITEM,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionItem)
})
_sym_db.RegisterMessage(MissionItem)
MissionImportData = _reflection.GeneratedProtocolMessageType('MissionImportData', (_message.Message,), {
'DESCRIPTOR' : _MISSIONIMPORTDATA,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionImportData)
})
_sym_db.RegisterMessage(MissionImportData)
MissionRawResult = _reflection.GeneratedProtocolMessageType('MissionRawResult', (_message.Message,), {
'DESCRIPTOR' : _MISSIONRAWRESULT,
'__module__' : 'mission_raw.mission_raw_pb2'
# @@protoc_insertion_point(class_scope:mavsdk.rpc.mission_raw.MissionRawResult)
})
_sym_db.RegisterMessage(MissionRawResult)
DESCRIPTOR._options = None
_MISSIONRAWSERVICE = _descriptor.ServiceDescriptor(
name='MissionRawService',
full_name='mavsdk.rpc.mission_raw.MissionRawService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2575,
serialized_end=4002,
methods=[
_descriptor.MethodDescriptor(
name='UploadMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.UploadMission',
index=0,
containing_service=None,
input_type=_UPLOADMISSIONREQUEST,
output_type=_UPLOADMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CancelMissionUpload',
full_name='mavsdk.rpc.mission_raw.MissionRawService.CancelMissionUpload',
index=1,
containing_service=None,
input_type=_CANCELMISSIONUPLOADREQUEST,
output_type=_CANCELMISSIONUPLOADRESPONSE,
serialized_options=b'\200\265\030\001',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DownloadMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.DownloadMission',
index=2,
containing_service=None,
input_type=_DOWNLOADMISSIONREQUEST,
output_type=_DOWNLOADMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CancelMissionDownload',
full_name='mavsdk.rpc.mission_raw.MissionRawService.CancelMissionDownload',
index=3,
containing_service=None,
input_type=_CANCELMISSIONDOWNLOADREQUEST,
output_type=_CANCELMISSIONDOWNLOADRESPONSE,
serialized_options=b'\200\265\030\001',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StartMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.StartMission',
index=4,
containing_service=None,
input_type=_STARTMISSIONREQUEST,
output_type=_STARTMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='PauseMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.PauseMission',
index=5,
containing_service=None,
input_type=_PAUSEMISSIONREQUEST,
output_type=_PAUSEMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ClearMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.ClearMission',
index=6,
containing_service=None,
input_type=_CLEARMISSIONREQUEST,
output_type=_CLEARMISSIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetCurrentMissionItem',
full_name='mavsdk.rpc.mission_raw.MissionRawService.SetCurrentMissionItem',
index=7,
containing_service=None,
input_type=_SETCURRENTMISSIONITEMREQUEST,
output_type=_SETCURRENTMISSIONITEMRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubscribeMissionProgress',
full_name='mavsdk.rpc.mission_raw.MissionRawService.SubscribeMissionProgress',
index=8,
containing_service=None,
input_type=_SUBSCRIBEMISSIONPROGRESSREQUEST,
output_type=_MISSIONPROGRESSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubscribeMissionChanged',
full_name='mavsdk.rpc.mission_raw.MissionRawService.SubscribeMissionChanged',
index=9,
containing_service=None,
input_type=_SUBSCRIBEMISSIONCHANGEDREQUEST,
output_type=_MISSIONCHANGEDRESPONSE,
serialized_options=b'\200\265\030\000',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ImportQgroundcontrolMission',
full_name='mavsdk.rpc.mission_raw.MissionRawService.ImportQgroundcontrolMission',
index=10,
containing_service=None,
input_type=_IMPORTQGROUNDCONTROLMISSIONREQUEST,
output_type=_IMPORTQGROUNDCONTROLMISSIONRESPONSE,
serialized_options=b'\200\265\030\001',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_MISSIONRAWSERVICE)
DESCRIPTOR.services_by_name['MissionRawService'] = _MISSIONRAWSERVICE
# @@protoc_insertion_point(module_scope)
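# Illustrative use of the generated classes (not part of the generated file;
# only UploadMissionRequest.mission_items and the service methods registered
# above are taken from the descriptors, everything else here is hypothetical):
#
#     from mission_raw import mission_raw_pb2
#     item = mission_raw_pb2.MissionItem()                       # raw MAVLink-style item
#     request = mission_raw_pb2.UploadMissionRequest(mission_items=[item])
#     payload = request.SerializeToString()                      # protobuf wire format
#     decoded = mission_raw_pb2.UploadMissionRequest.FromString(payload)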
| 40.483138 | 5,640 | 0.783987 |
f28f0fa75a8a96ec3f09620e838f8dd5572029d9 | 43,689 | py | Python | src/sardana/macroserver/macros/standard.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | ["CC-BY-3.0"] | null | null | null | src/sardana/macroserver/macros/standard.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | ["CC-BY-3.0"] | null | null | null | src/sardana/macroserver/macros/standard.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | ["CC-BY-3.0"] | null | null | null |
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""This is the standard macro module"""
__all__ = ["ct", "mstate", "mv", "mvr", "pwa", "pwm", "repeat", "set_lim",
"set_lm", "set_pos", "settimer", "uct", "umv", "umvr", "wa", "wm",
"tw", "logmacro", "newfile", "plotselect", "pic", "cen",
"where"]
__docformat__ = 'restructuredtext'
import datetime
import os
import numpy as np
from taurus import Device
from taurus.console.table import Table
import PyTango
from PyTango import DevState
from sardana.macroserver.macro import Macro, macro, Type, ViewOption, \
iMacro, Hookable
from sardana.macroserver.msexception import StopException, UnknownEnv
from sardana.macroserver.scan.scandata import Record
from sardana.macroserver.macro import Optional
from sardana import sardanacustomsettings
##########################################################################
#
# Motion related macros
#
##########################################################################
class _wm(Macro):
"""Show motor positions"""
param_def = [
['motor_list', [['motor', Type.Moveable, None, 'Motor to move']],
None, 'List of motor to show'],
]
def run(self, motor_list):
show_dial = self.getViewOption(ViewOption.ShowDial)
show_ctrlaxis = self.getViewOption(ViewOption.ShowCtrlAxis)
pos_format = self.getViewOption(ViewOption.PosFormat)
motor_width = 9
motors = {} # dict(motor name: motor obj)
requests = {} # dict(motor name: request id)
data = {} # dict(motor name: list of motor data)
# sending asynchronous requests: neither Taurus nor Sardana extensions
# allow asynchronous requests - use PyTango asynchronous request model
for motor in motor_list:
name = motor.getName()
motors[name] = motor
args = ('position',)
if show_dial:
args += ('dialposition',)
_id = motor.read_attributes_asynch(args)
requests[name] = _id
motor_width = max(motor_width, len(name))
data[name] = []
# get additional motor information (ctrl name & axis)
if show_ctrlaxis:
for name, motor in motors.items():
ctrl_name = self.getController(motor.controller).name
axis_nb = str(getattr(motor, "axis"))
data[name].extend((ctrl_name, axis_nb))
motor_width = max(motor_width, len(ctrl_name), len(axis_nb))
# collect asynchronous replies
while len(requests) > 0:
req2delete = []
for name, _id in requests.items():
motor = motors[name]
try:
attrs = motor.read_attributes_reply(_id)
for attr in attrs:
value = attr.value
if value is None:
value = float('NaN')
if attr.name == 'dialposition':
value = motor.getDialPosition()
if value is None:
value = float('NaN')
data[name].append(value)
req2delete.append(name)
except PyTango.AsynReplyNotArrived:
continue
except PyTango.DevFailed:
data[name].append(float('NaN'))
if show_dial:
data[name].append(float('NaN'))
req2delete.append(name)
self.debug('Error when reading %s position(s)' % name)
self.debug('Details:', exc_info=1)
continue
            # removing motors that already replied
for name in req2delete:
requests.pop(name)
# define format for numerical values
fmt = '%c*.%df' % ('%', motor_width - 5)
if pos_format > -1:
fmt = '%c*.%df' % ('%', int(pos_format))
# prepare row headers and formats
row_headers = []
t_format = []
if show_ctrlaxis:
row_headers += ['Ctrl', 'Axis']
t_format += ['%*s', '%*s']
row_headers.append('User')
t_format.append(fmt)
if show_dial:
row_headers.append('Dial')
t_format.append(fmt)
# sort the data dict by keys
col_headers = []
values = []
for mot_name, mot_values in sorted(data.items()):
col_headers.append([mot_name]) # convert name to list
values.append(mot_values)
# create and print table
table = Table(values, elem_fmt=t_format,
col_head_str=col_headers, col_head_width=motor_width,
row_head_str=row_headers)
for line in table.genOutput():
self.output(line)
class _wum(Macro):
"""Show user motor positions"""
param_def = [
['motor_list', [['motor', Type.Moveable, None, 'Motor to move']],
None, 'List of motor to show'],
]
def prepare(self, motor_list, **opts):
self.table_opts = {}
def run(self, motor_list):
motor_width = 9
motor_names = []
motor_pos = []
motor_list = sorted(motor_list)
pos_format = self.getViewOption(ViewOption.PosFormat)
for motor in motor_list:
name = motor.getName()
motor_names.append([name])
pos = motor.getPosition(force=True)
if pos is None:
pos = float('NAN')
motor_pos.append((pos,))
motor_width = max(motor_width, len(name))
fmt = '%c*.%df' % ('%', motor_width - 5)
if pos_format > -1:
fmt = '%c*.%df' % ('%', int(pos_format))
table = Table(motor_pos, elem_fmt=[fmt],
col_head_str=motor_names, col_head_width=motor_width,
**self.table_opts)
for line in table.genOutput():
self.output(line)
class wu(Macro):
"""Show all user motor positions"""
def prepare(self, **opts):
self.all_motors = self.findObjs('.*', type_class=Type.Moveable)
self.table_opts = {}
def run(self):
nr_motors = len(self.all_motors)
if nr_motors == 0:
self.output('No motor defined')
return
self.output('Current positions (user) on %s' %
datetime.datetime.now().isoformat(' '))
self.output('')
self.execMacro('_wum', self.all_motors, **self.table_opts)
class wa(Macro):
"""Show all motor positions"""
# TODO: duplication of the default value definition is a workaround
# for #427. See commit message cc3331a for more details.
param_def = [
['filter', [['filter', Type.String, '.*',
'a regular expression filter'], {'min': 1}],
['.*'], 'a regular expression filter'],
]
def prepare(self, filter, **opts):
self.all_motors = self.findObjs(filter, type_class=Type.Moveable)
self.table_opts = {}
def run(self, filter):
nr_motors = len(self.all_motors)
if nr_motors == 0:
self.output('No motor defined')
return
show_dial = self.getViewOption(ViewOption.ShowDial)
if show_dial:
self.output('Current positions (user, dial) on %s' %
datetime.datetime.now().isoformat(' '))
else:
self.output('Current positions (user) on %s' %
datetime.datetime.now().isoformat(' '))
self.output('')
self.execMacro('_wm', self.all_motors, **self.table_opts)
class pwa(Macro):
"""Show all motor positions in a pretty table"""
# TODO: duplication of the default value definition is a workaround
# for #427. See commit message cc3331a for more details.
param_def = [
['filter', [['filter', Type.String, '.*',
'a regular expression filter'], {'min': 1}],
['.*'], 'a regular expression filter'],
]
def run(self, filter):
self.execMacro('wa', filter, **Table.PrettyOpts)
class set_lim(Macro):
"""Sets the software limits on the specified motor hello"""
param_def = [
["motor", Type.Moveable, None, "Motor name"],
["low", Type.Float, None, "lower limit"],
["high", Type.Float, None, "upper limit"],
]
def run(self, motor, low, high):
name = motor.getName()
self.debug("Setting user limits for %s" % name)
if low > high:
raise ValueError("lower limit must be lower than the upper limit")
motor.getPositionObj().setLimits(low, high)
self.output(
"%s limits set to %.4f %.4f (user units)" % (name, low, high)
)
class set_lm(Macro):
"""Sets the dial limits on the specified motor"""
param_def = [
["motor", Type.Motor, None, "Motor name"],
["low", Type.Float, None, "lower limit"],
["high", Type.Float, None, "upper limit"],
]
def run(self, motor, low, high):
name = motor.getName()
self.debug("Setting dial limits for %s" % name)
if low > high:
raise Exception("lower limit must be lower than the upper limit")
motor.getDialPositionObj().setLimits(low, high)
self.output(
"%s limits set to %.4f %.4f (dial units)" % (name, low, high)
)
class set_pos(Macro):
"""Sets the position of the motor to the specified value"""
param_def = [
['motor', Type.Motor, None, 'Motor name'],
['pos', Type.Float, None, 'Position to move to']
]
def run(self, motor, pos):
name = motor.getName()
old_pos = motor.getPosition(force=True)
motor.definePosition(pos)
self.output("%s reset from %.4f to %.4f" % (name, old_pos, pos))
class set_user_pos(Macro):
"""Sets the USER position of the motor to the specified value (by
changing OFFSET and keeping DIAL)"""
param_def = [
['motor', Type.Motor, None, 'Motor name'],
['pos', Type.Float, None, 'Position to move to']
]
def run(self, motor, pos):
name = motor.getName()
old_pos = motor.getPosition(force=True)
offset_attr = motor.getAttribute('Offset')
old_offset = offset_attr.read().rvalue.magnitude
new_offset = pos - (old_pos - old_offset)
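        # user and dial positions are related through the offset (user = dial
        # + offset in the simplest sign convention), so keeping dial fixed the
        # requested user position is reached purely by shifting the offset:
        #   new_offset = pos - dial = pos - (old_pos - old_offset)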
offset_attr.write(new_offset)
msg = "%s reset from %.4f (offset %.4f) to %.4f (offset %.4f)" % (
name, old_pos, old_offset, pos, new_offset)
self.output(msg)
class wm(Macro):
"""Show the position of the specified motors."""
param_def = [
['motor_list', [['motor', Type.Moveable, None,
'Motor to see where it is']],
None, 'List of motor to show'],
]
@staticmethod
def format_value(fmt, str_fmt, value):
"""
Formats given value following the fmt and/or str_fmt rules.
Parameters
----------
fmt : str
The value format.
str_fmt : str
The string format.
value : float
The value to be formatted.
Returns
-------
str
The string formatted value.
"""
if fmt is not None:
fmt_value = fmt % value
fmt_value = str_fmt % fmt_value
else:
fmt_value = str_fmt % value
return fmt_value
def prepare(self, motor_list, **opts):
self.table_opts = {}
def run(self, motor_list):
motor_width = 10
motor_names = []
motor_pos = []
show_dial = self.getViewOption(ViewOption.ShowDial)
show_ctrlaxis = self.getViewOption(ViewOption.ShowCtrlAxis)
pos_format = self.getViewOption(ViewOption.PosFormat)
for motor in motor_list:
max_len = 0
if show_ctrlaxis:
axis_nb = getattr(motor, "axis")
ctrl_name = self.getController(motor.controller).name
max_len = max(max_len, len(ctrl_name), len(str(axis_nb)))
name = motor.getName()
max_len = max(max_len, len(name))
max_len = max_len + 5
if max_len < 14:
max_len = 14 # Length of 'Not specified'
str_fmt = "%c%ds" % ('%', int(max_len))
name = str_fmt % name
motor_names.append([name])
posObj = motor.getPositionObj()
if pos_format > -1:
fmt = '%c.%df' % ('%', int(pos_format))
else:
fmt = None
val1 = motor.getPosition(force=True)
val1 = self.format_value(fmt, str_fmt, val1)
val2 = posObj.getMaxRange().magnitude
val2 = self.format_value(fmt, str_fmt, val2)
val3 = posObj.getMinRange().magnitude
val3 = self.format_value(fmt, str_fmt, val3)
if show_ctrlaxis:
valctrl = str_fmt % (ctrl_name)
valaxis = str_fmt % str(axis_nb)
upos = list(map(str, [valctrl, valaxis, ' ', val2, val1,
val3]))
else:
upos = list(map(str, ['', val2, val1, val3]))
pos_data = upos
if show_dial:
try:
val1 = fmt % motor.getDialPosition(force=True)
val1 = str_fmt % val1
except Exception:
val1 = str_fmt % motor.getDialPosition(force=True)
dPosObj = motor.getDialPositionObj()
val2 = str_fmt % dPosObj.getMaxRange().magnitude
val3 = str_fmt % dPosObj.getMinRange().magnitude
dpos = list(map(str, [val2, val1, val3]))
pos_data += [''] + dpos
motor_pos.append(pos_data)
elem_fmt = (['%*s'] + ['%*s'] * 5) * 2
row_head_str = []
if show_ctrlaxis:
row_head_str += ['Ctrl', 'Axis']
row_head_str += ['User', ' High', ' Current', ' Low']
if show_dial:
row_head_str += ['Dial', ' High', ' Current', ' Low']
table = Table(motor_pos, elem_fmt=elem_fmt, row_head_str=row_head_str,
col_head_str=motor_names, col_head_width=motor_width,
**self.table_opts)
for line in table.genOutput():
self.output(line)
class wum(Macro):
"""Show the user position of the specified motors."""
param_def = [
['motor_list', [['motor', Type.Moveable, None,
'Motor to see where it is']],
None, 'List of motor to show'],
]
def prepare(self, motor_list, **opts):
self.table_opts = {}
def run(self, motor_list):
motor_width = 10
motor_names = []
motor_pos = []
for motor in motor_list:
name = motor.getName()
motor_names.append([name])
posObj = motor.getPositionObj()
upos = list(map(str, [posObj.getMaxRange().magnitude,
motor.getPosition(force=True),
posObj.getMinRange().magnitude]))
pos_data = [''] + upos
motor_pos.append(pos_data)
elem_fmt = (['%*s'] + ['%*s'] * 3) * 2
row_head_str = ['User', ' High', ' Current', ' Low', ]
table = Table(motor_pos, elem_fmt=elem_fmt, row_head_str=row_head_str,
col_head_str=motor_names, col_head_width=motor_width,
**self.table_opts)
for line in table.genOutput():
self.output(line)
class pwm(Macro):
"""Show the position of the specified motors in a pretty table"""
param_def = [
['motor_list', [['motor', Type.Moveable, None, 'Motor to move']],
None, 'List of motor to show'],
]
def run(self, motor_list):
self.execMacro('wm', motor_list, **Table.PrettyOpts)
class mv(Macro, Hookable):
"""Move motor(s) to the specified position(s)"""
hints = {'allowsHooks': ('pre-move', 'post-move')}
param_def = [
['motor_pos_list',
[['motor', Type.Moveable, None, 'Motor to move'],
['pos', Type.Float, None, 'Position to move to']],
None, 'List of motor/position pairs'],
]
def run(self, motor_pos_list):
self.motors, positions = [], []
for m, p in motor_pos_list:
self.motors.append(m)
positions.append(p)
enable_hooks = getattr(sardanacustomsettings,
'PRE_POST_MOVE_HOOK_IN_MV',
True)
if enable_hooks:
for preMoveHook in self.getHooks('pre-move'):
preMoveHook()
for m, p in zip(self.motors, positions):
self.debug("Starting %s movement to %s", m.getName(), p)
motion = self.getMotion(self.motors)
try:
state, pos = motion.move(positions)
except Exception as e:
self.warning("Motion failed due to: {}".format(e))
self._log_information()
raise e
if state != DevState.ON:
self.warning("Motion ended in %s", state.name)
self._log_information()
if enable_hooks:
for postMoveHook in self.getHooks('post-move'):
postMoveHook()
def _log_information(self):
msg = []
for motor in self.motors:
msg.append(motor.information())
self.info("\n".join(msg))
class mstate(Macro):
"""Prints the state of a motor"""
param_def = [['motor', Type.Moveable, None, 'Motor to check state']]
def run(self, motor):
self.info("Motor %s" % str(motor.stateObj.read().rvalue))
class umv(Macro, Hookable):
"""Move motor(s) to the specified position(s) and update"""
hints = {'allowsHooks': ('pre-move', 'post-move')}
param_def = mv.param_def
def prepare(self, motor_pos_list, **opts):
self.all_names = []
self.all_pos = []
self.motors = []
self.print_pos = False
for motor, pos in motor_pos_list:
self.all_names.append([motor.getName()])
self.motors.append(motor)
pos, posObj = motor.getPosition(force=True), motor.getPositionObj()
self.all_pos.append([pos])
posObj.subscribeEvent(self.positionChanged, motor)
def run(self, motor_pos_list):
self.print_pos = True
try:
mv, _ = self.createMacro('mv', motor_pos_list)
mv._setHooks(self.hooks)
self.runMacro(mv)
finally:
self.finish()
def finish(self):
self._clean()
def _clean(self):
for motor, pos in self.getParameters()[0]:
posObj = motor.getPositionObj()
try:
posObj.unsubscribeEvent(self.positionChanged, motor)
except Exception as e:
print(str(e))
raise e
def positionChanged(self, motor, position):
idx = self.all_names.index([motor.getName()])
self.all_pos[idx] = [position]
if self.print_pos:
self.printAllPos()
def printAllPos(self):
motor_width = 10
pos_format = self.getViewOption(ViewOption.PosFormat)
fmt = '%*.4f'
if pos_format > -1:
fmt = '%c*.%df' % ('%', int(pos_format))
table = Table(self.all_pos, elem_fmt=[fmt],
col_head_str=self.all_names, col_head_width=motor_width)
self.outputBlock(table.genOutput())
self.flushOutput()
class mvr(Macro, Hookable):
"""Move motor(s) relative to the current position(s)"""
hints = {'allowsHooks': ('pre-move', 'post-move')}
param_def = [
['motor_disp_list',
[['motor', Type.Moveable, None, 'Motor to move'],
['disp', Type.Float, None, 'Relative displacement']],
None, 'List of motor/displacement pairs'],
]
def run(self, motor_disp_list):
self.motors, motor_pos_list = [], []
for motor, disp in motor_disp_list:
pos = motor.getPosition(force=True)
self.motors.append(motor)
if pos is None:
self.error("Cannot get %s position" % motor.getName())
return
else:
pos += disp
motor_pos_list.append([motor, pos])
mv, _ = self.createMacro('mv', motor_pos_list)
mv._setHooks(self.hooks)
self.runMacro(mv)
class umvr(Macro, Hookable):
"""Move motor(s) relative to the current position(s) and update"""
hints = {'allowsHooks': ('pre-move', 'post-move')}
param_def = mvr.param_def
def run(self, motor_disp_list):
self.motors, motor_pos_list = [], []
for motor, disp in motor_disp_list:
pos = motor.getPosition(force=True)
self.motors.append(motor)
if pos is None:
self.error("Cannot get %s position" % motor.getName())
return
else:
pos += disp
motor_pos_list.append([motor, pos])
umv, _ = self.createMacro('umv', motor_pos_list)
umv._setHooks(self.hooks)
self.runMacro(umv)
# TODO: implement tw macro with param repeats in order to be able to pass
# multiple motors and multiple deltas. Also allow to pass the integration time
# in order to execute the measurement group acquisition after each move and
# print the results. Basically follow the SPEC's API:
# https://certif.com/spec_help/tw.html
class tw(iMacro):
"""Tweak motor by variable delta"""
param_def = [
['motor', Type.Moveable, "test", 'Motor to move'],
['delta', Type.Float, None, 'Amount to tweak']
]
def run(self, motor, delta):
self.output(
"Indicate direction with + (or p) or - (or n) or enter")
self.output(
"new step size. Type something else (or ctrl-C) to quit.")
self.output("")
        # derive the initial direction token from the sign of delta
        if np.sign(delta) == -1:
            a = "-"
        else:
            a = "+"
while a in ('+', '-', 'p', 'n'):
pos = motor.position
a = self.input("%s = %s, which way? " % (
motor, pos), default_value=a, data_type=Type.String)
try:
# check if the input is a new delta
delta = float(a)
# obtain the sign of the new delta
if np.sign(delta) == -1:
a = "-"
else:
a = "+"
            except ValueError:
# convert to the common sign
if a == "p":
a = "+"
# convert to the common sign
elif a == "n":
a = "-"
# the sign is already correct, just continue
elif a in ("+", "-"):
pass
else:
msg = "Typing '%s' caused 'tw' macro to stop." % a
self.info(msg)
raise StopException()
# invert the delta if necessary
if (a == "+" and np.sign(delta) < 0) or \
(a == "-" and np.sign(delta) > 0):
delta = -delta
pos += delta
self.mv(motor, pos)
##########################################################################
#
# Data acquisition related macros
#
##########################################################################
def _value_to_repr(data):
if data is None:
return "<nodata>"
elif np.ndim(data) > 0:
return list(np.shape(data))
else:
return data
class _ct:
def dump_information(self, elements):
msg = ["Elements ended acquisition with:"]
for element in elements:
msg.append(element.information())
self.info("\n".join(msg))
class ct(Macro, Hookable, _ct):
"""Count for the specified time on the measurement group
or experimental channel given as second argument
(if not given the active measurement group is used)"""
hints = {'allowsHooks': ('pre-acq', 'post-acq')}
param_def = [
['integ_time', Type.Float, 1.0, 'Integration time'],
['countable_elem', Type.Countable, Optional,
'Countable element e.g. MeasurementGroup or ExpChannel']
]
def prepare(self, integ_time, countable_elem, **opts):
if countable_elem is None:
try:
self.countable_elem_name = self.getEnv('ActiveMntGrp')
except UnknownEnv:
return
else:
self.countable_elem_name = countable_elem.name
self.countable_elem = self.getObj(self.countable_elem_name)
def run(self, integ_time, countable_elem):
if self.countable_elem is None:
            msg = ('Unknown countable {0} element. Use macro parameter or '
'ActiveMntGrp environment variable'.format(
self.countable_elem_name))
self.error(msg)
return
        # integration time has to be accessible from within the hooks,
        # so declare it also as an instance attribute
self.integ_time = integ_time
self.debug("Counting for %s sec", integ_time)
self.outputDate()
self.output('')
self.flushOutput()
for preAcqHook in self.getHooks('pre-acq'):
preAcqHook()
try:
state, data = self.countable_elem.count(integ_time)
except Exception:
if self.countable_elem.type == Type.MeasurementGroup:
names = self.countable_elem.ElementList
elements = [self.getObj(name) for name in names]
self.dump_information(elements)
raise
if state != DevState.ON:
if self.countable_elem.type == Type.MeasurementGroup:
names = self.countable_elem.ElementList
elements = [self.getObj(name) for name in names]
self.dump_information(elements)
raise ValueError("Acquisition ended with {}".format(
state.name.capitalize()))
for postAcqHook in self.getHooks('post-acq'):
postAcqHook()
names, counts = [], []
if self.countable_elem.type == Type.MeasurementGroup:
meas_grp = self.countable_elem
for ch_info in meas_grp.getChannelsEnabledInfo():
names.append(' %s' % ch_info.label)
ch_data = data.get(ch_info.full_name)
counts.append(_value_to_repr(ch_data))
else:
channel = self.countable_elem
names.append(" %s" % channel.name)
counts.append(_value_to_repr(data))
# to be compatible with measurement group count
data = {channel.full_name: data}
self.setData(Record(data))
table = Table([counts], row_head_str=names, row_head_fmt='%*s',
col_sep=' = ')
for line in table.genOutput():
self.output(line)
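# Typical spock usage of ct (the measurement group name is hypothetical):
# `ct 0.5` counts 0.5 s on the ActiveMntGrp, while `ct 0.5 mntgrp01` counts on
# an explicitly given countable element instead.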
class uct(Macro, _ct):
"""Count on the active measurement group and update"""
param_def = [
['integ_time', Type.Float, 1.0, 'Integration time'],
['countable_elem', Type.Countable, Optional,
'Countable element e.g. MeasurementGroup or ExpChannel']
]
def prepare(self, integ_time, countable_elem, **opts):
self.print_value = False
if countable_elem is None:
try:
self.countable_elem_name = self.getEnv('ActiveMntGrp')
except UnknownEnv:
return
self.countable_elem = self.getObj(self.countable_elem_name)
else:
self.countable_elem_name = countable_elem.name
self.countable_elem = countable_elem
if self.countable_elem is None:
return
self.channels = []
self.values = []
names = []
if self.countable_elem.type == Type.MeasurementGroup:
meas_grp = self.countable_elem
for channel_info in meas_grp.getChannelsEnabledInfo():
names.append(channel_info.label)
self.names = [[n] for n in names]
for channel_info in meas_grp.getChannelsEnabledInfo():
full_name = channel_info.full_name
channel = Device(full_name)
self.channels.append(channel)
value = channel.getValue(force=True)
self.values.append([value])
valueObj = channel.getValueObj_()
valueObj.subscribeEvent(self.counterChanged, channel)
else:
channel = self.countable_elem
self.names = [[channel.getName()]]
channel = Device(channel.full_name)
self.channels.append(channel)
value = channel.getValue(force=True)
self.values.append([value])
valueObj = channel.getValueObj_()
valueObj.subscribeEvent(self.counterChanged, channel)
def run(self, integ_time, countable_elem):
if self.countable_elem is None:
            msg = ('Unknown countable {0} element. Use macro parameter or '
'ActiveMntGrp environment variable'.format(
self.countable_elem_name))
self.error(msg)
return
self.print_value = True
try:
state, data = self.countable_elem.count(integ_time)
except Exception:
if self.countable_elem.type == Type.MeasurementGroup:
names = self.countable_elem.ElementList
elements = [self.getObj(name) for name in names]
self.dump_information(elements)
raise
finally:
self.finish()
if state != DevState.ON:
if self.countable_elem.type == Type.MeasurementGroup:
names = self.countable_elem.ElementList
elements = [self.getObj(name) for name in names]
self.dump_information(elements)
raise ValueError("Acquisition ended with {}".format(
state.name.capitalize()))
self.setData(Record(data))
self.printAllValues()
def finish(self):
self._clean()
def _clean(self):
for channel in self.channels:
valueObj = channel.getValueObj_()
valueObj.unsubscribeEvent(self.counterChanged, channel)
def counterChanged(self, channel, value):
idx = self.names.index([channel.getName()])
self.values[idx] = [value]
if self.print_value and not self.isStopped():
self.printAllValues()
def printAllValues(self):
ch_width = 10
table = Table(self.values, elem_fmt=['%*.4f'], col_head_str=self.names,
col_head_width=ch_width)
self.outputBlock(table.genOutput())
self.flushOutput()
class settimer(Macro):
"""Defines the timer channel for the active measurement group"""
env = ('ActiveMntGrp',)
param_def = [
['timer', Type.ExpChannel, None, 'Timer'],
]
def run(self, timer):
mnt_grp_name = self.getEnv('ActiveMntGrp')
mnt_grp = self.getObj(mnt_grp_name, type_class=Type.MeasurementGroup)
if mnt_grp is None:
self.error('ActiveMntGrp is not defined or has invalid value.\n'
'please define a valid active measurement group '
'before setting a timer')
return
try:
mnt_grp.getConfiguration().setTimer(timer.getName())
except Exception as e:
self.output(str(e))
self.output(
"%s is not a valid channel in the active measurement group"
% timer)
@macro([['message', [['message_item', Type.String, None,
'message item to be reported']], None,
'message to be reported']])
def report(self, message):
"""Logs a new record into the message report system (if active)"""
self.report(' '.join(message))
class logmacro(Macro):
""" Turn on/off logging of the spock output.
.. note::
The logmacro class has been included in Sardana
on a provisional basis. Backwards incompatible changes
(up to and including its removal) may occur if
deemed necessary by the core developers
"""
param_def = [
['offon', Type.Boolean, None, 'Unset/Set logging'],
['mode', Type.Integer, -1, 'Mode: 0 append, 1 new file'],
]
def run(self, offon, mode):
if offon:
if mode == 1:
self.setEnv('LogMacroMode', True)
elif mode == 0:
self.setEnv('LogMacroMode', False)
self.setEnv('LogMacro', True)
else:
self.setEnv('LogMacro', False)
class repeat(Hookable, Macro):
"""This macro executes as many repetitions of a set of macros as
specified by nr parameter. The macros to be repeated can be
given as parameters or as body hooks.
If both are given first will be executed the ones given as
parameters and then the ones given as body hooks.
If nr has negative value, repetitions will be executed until you
stop repeat macro.
.. note::
The repeat macro has been included in Sardana
on a provisional basis. Backwards incompatible changes
(up to and including removal of the macro) may occur if
deemed necessary by the core developers."""
hints = {'allowsHooks': ('body',)}
param_def = [
['nr', Type.Integer, None, 'Nr of iterations'],
['macro_name_params', [
['token', Type.String,
None, 'Macro name and parameters (if any)'],
{'min': 0}
],
None, "List with macro name and parameters (if any)"]
]
def prepare(self, nr, macro_name_params):
self.bodyHooks = self.getHooks("body")
self.macro_name_params = macro_name_params
def __loop(self):
self.checkPoint()
if len(self.macro_name_params) > 0:
for macro_cmd in self.macro_name_params:
self.execMacro(macro_cmd)
for bodyHook in self.bodyHooks:
bodyHook()
def run(self, nr, macro_name_params):
if nr < 0:
while True:
self.__loop()
else:
for i in range(nr):
self.__loop()
progress = ((i + 1) / nr) * 100
yield progress
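# Illustrative spock invocations of repeat (the quoted macros are
# hypothetical): `repeat 3 "ct 0.1"` runs the quoted macro three times, while a
# negative count such as `repeat -1 "ct 0.1"` loops until repeat is stopped.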
class newfile(Hookable, Macro):
""" Sets the ScanDir and ScanFile as well as ScanID in the environment.
If ScanFilePath is only a file name, the ScanDir must be set externally
via `senv ScanDir <PathToScanFile>` or using the %expconf. Otherwise,
the path in ScanFilePath must be absolute and existing on the
MacroServer host.
The ScanID should be set to the value before the upcoming scan number.
Default value is 0.
"""
    hints = {'allowsHooks': ('post-newfile',)}
param_def = [
['ScanFilePath_list',
[['ScanFilePath', Type.String, None, '(ScanDir/)ScanFile']],
None, 'List of (ScanDir/)ScanFile'],
['ScanID', Type.Integer, 0, 'Scan ID'],
]
def run(self, ScanFilePath_list, ScanID):
path_list = []
fileName_list = []
# traverse the repeat parameters for the ScanFilePath_list
for i, ScanFilePath in enumerate(ScanFilePath_list):
path = os.path.dirname(ScanFilePath)
fileName = os.path.basename(ScanFilePath)
if not path and i == 0:
# first entry and no given ScanDir: check if ScanDir exists
try:
ScanDir = self.getEnv('ScanDir')
except UnknownEnv:
ScanDir = ''
if not (isinstance(ScanDir, str) and len(ScanDir) > 0):
msg = ('Data is not stored until ScanDir is correctly '
'set! Provide ScanDir with newfile macro: '
'`newfile [<ScanDir>/<ScanFile>] <ScanID>` '
'or `senv ScanDir <ScanDir>` or with %expconf')
self.error(msg)
return
else:
path = ScanDir
elif not path and i > 0:
# not first entry and no given path: use path of last iteration
path = path_list[i-1]
elif not os.path.isabs(path):
# relative path
                self.error('Only absolute paths are allowed!')
return
else:
# absolute path
path = os.path.normpath(path)
if i > 0 and (path not in path_list):
# check if paths are equal
self.error('Multiple paths to the data files are not allowed')
return
elif not os.path.exists(path):
# check if folder exists
                self.error('Path %s does not exist on the host of the '
'MacroServer and has to be created in '
'advance.' % path)
return
else:
self.debug('Path %s appended.' % path)
path_list.append(path)
if not fileName:
self.error('No filename is given.')
return
elif fileName in fileName_list:
self.error('Duplicate filename %s is not allowed.' % fileName)
return
else:
self.debug('Filename is %s.' % fileName)
fileName_list.append(fileName)
if ScanID < 1:
ScanID = 0
self.setEnv('ScanFile', fileName_list)
self.setEnv('ScanDir', path_list[0])
self.setEnv('ScanID', ScanID)
self.output('ScanDir is\t: %s', path_list[0])
for i, ScanFile in enumerate(fileName_list):
if i == 0:
self.output('ScanFile set to\t: %s', ScanFile)
else:
self.output('\t\t %s', ScanFile)
self.output('Next scan is\t: #%d', ScanID+1)
for postNewfileHook in self.getHooks('post-newfile'):
postNewfileHook()
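# Illustrative spock usage of newfile (the path is hypothetical):
# `newfile /tmp/data/sample.h5 41` sets ScanDir=/tmp/data, ScanFile=sample.h5
# and ScanID=41, so the next scan would be reported as scan #42.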
class plotselect(Macro):
"""select channels for plotting in the active measurement group"""
env = ("ActiveMntGrp", )
param_def = [
['channel',
[['channel', Type.ExpChannel, 'None', ""], {'min': 0}],
None,
"List of channels to plot"],
]
def run(self, channel):
active_meas_grp = self.getEnv('ActiveMntGrp')
meas_grp = self.getMeasurementGroup(active_meas_grp)
self.output("Active measurement group: {}".format(meas_grp.name))
plot_channels_ok = []
enabled_channels = meas_grp.getEnabled()
# check channels first
for chan in channel:
enabled = enabled_channels.get(chan.name)
if enabled is None:
self.warning("{} not in {}".format(chan.name, meas_grp.name))
else:
plot_channels_ok.append(chan.name)
if not enabled:
self.warning("{} is disabled".format(chan.name))
else:
self.output("{} selected for plotting".format(chan.name))
# set the plot type and plot axis in the meas_group
meas_grp.setPlotType("No", apply=False)
meas_grp.setPlotType("Spectrum", *plot_channels_ok, apply=False)
meas_grp.setPlotAxes(["<mov>"], *plot_channels_ok)
class _movetostatspos(Macro):
"""This macro does the logic for pic and cen"""
env = ("ScanStats", )
param_def = [
['channel', Type.ExpChannel, Optional, 'name of channel'],
['caller', Type.String, None, 'caller (pic or cen)']
]
def run(self, channel, caller):
stats = self.getEnv('ScanStats', door_name=self.getDoorName())
if channel is None:
# use first channel in stats
channel = next(iter(stats['Stats']))
else:
if channel.name in stats['Stats']:
channel = channel.name
else:
raise Exception("channel {} not present in ScanStats".format(
channel.name))
if caller == 'pic':
stats_value = 'maxpos'
stats_str = 'PIC'
elif caller == 'cen':
stats_value = 'cen'
stats_str = 'CEN'
else:
raise Exception("caller {} is unknown".format(caller))
motor_name = stats['Motor']
motor = self.getMotion([motor_name])
current_pos = motor.readPosition()[0]
pos = stats['Stats'][channel][stats_value]
self.info("move motor {:s} from current position\nat {:.4f}\n"
"to {:s} of counter {:s}\nat {:.4f}".format(motor_name,
current_pos,
stats_str,
channel,
pos))
motor.move(pos)
class pic(Macro):
"""This macro moves the motor of the last scan to the PEAK position for a
given channel. If no channel is given, it selects the first channel from
the ScanStats env variable.
"""
param_def = [
['channel', Type.ExpChannel, Optional, 'name of channel']
]
def run(self, channel):
self.execMacro('_movetostatspos', channel, 'pic')
class cen(Macro):
"""This macro moves the motor of the last scan to the CEN position for a
given channel. If no channel is given, it selects the first channel from
the ScanStats env variable.
"""
param_def = [
['channel', Type.ExpChannel, Optional, 'name of channel']
]
def run(self, channel):
self.execMacro('_movetostatspos', channel, 'cen')
class where(Macro):
"""This macro shows the current position of the last scanned motor."""
env = ("ScanStats", )
def run(self):
motor_name = self.getEnv('ScanStats')['Motor']
motor = self.getMoveable(motor_name)
self.info("motor {:s} is\nat {:.4f}".format(motor_name,
motor.getPosition()))
| 34.618859 | 79 | 0.546316 |
fa00a4d46d3516557377aaa4c9e520aa4070d1f5 | 3,836 | py | Python | lib/file_strip/comments.py | bitst0rm-st3/ScopeHunter | 0a3742785b3f4ae0d947fd3d333e779f8a0473b1 | ["Unlicense", "MIT"] | 83 | 2015-01-05T03:16:38.000Z | 2022-02-11T13:25:28.000Z | lib/file_strip/comments.py | evandroforks/ScopeHunter | c33fcc7f1bf455bc02a1f9d00c91dcdda9aa784d | ["Unlicense", "MIT"] | 47 | 2015-01-29T16:53:03.000Z | 2022-02-28T14:09:41.000Z | lib/file_strip/comments.py | evandroforks/ScopeHunter | c33fcc7f1bf455bc02a1f9d00c91dcdda9aa784d | ["Unlicense", "MIT"] | 15 | 2015-01-29T16:36:37.000Z | 2021-11-26T13:28:56.000Z |
"""
File Strip.
Licensed under MIT
Copyright (c) 2012 - 2016 Isaac Muse <isaacmuse@gmail.com>
"""
import re
LINE_PRESERVE = re.compile(r"\r?\n", re.MULTILINE)
CSS_PATTERN = re.compile(
r'''(?x)
(?P<comments>
/\*[^*]*\*+(?:[^/*][^*]*\*+)*/ # multi-line comments
)
| (?P<code>
"(?:\\.|[^"\\])*" # double quotes
| '(?:\\.|[^'\\])*' # single quotes
| .[^/"']* # everything else
)
''',
re.DOTALL
)
CPP_PATTERN = re.compile(
r'''(?x)
(?P<comments>
/\*[^*]*\*+(?:[^/*][^*]*\*+)*/ # multi-line comments
| \s*//(?:[^\r\n])* # single line comments
)
| (?P<code>
"(?:\\.|[^"\\])*" # double quotes
| '(?:\\.|[^'\\])*' # single quotes
| .[^/"']* # everything else
)
''',
re.DOTALL
)
PY_PATTERN = re.compile(
r'''(?x)
(?P<comments>
\s*\#(?:[^\r\n])* # single line comments
)
| (?P<code>
"{3}(?:\\.|[^\\])*"{3} # triple double quotes
| '{3}(?:\\.|[^\\])*'{3} # triple single quotes
| "(?:\\.|[^"\\])*" # double quotes
| '(?:\\.|[^'])*' # single quotes
| .[^\#"']* # everything else
)
''',
re.DOTALL
)
def _strip_regex(pattern, text, preserve_lines):
"""Generic function that strips out comments pased on the given pattern."""
def remove_comments(group, preserve_lines=False):
"""Remove comments."""
return ''.join([x[0] for x in LINE_PRESERVE.findall(group)]) if preserve_lines else ''
def evaluate(m, preserve_lines):
"""Search for comments."""
g = m.groupdict()
return g["code"] if g["code"] is not None else remove_comments(g["comments"], preserve_lines)
return ''.join(map(lambda m: evaluate(m, preserve_lines), pattern.finditer(text)))
@staticmethod
def _cpp(text, preserve_lines=False):
"""C/C++ style comment stripper."""
return _strip_regex(
CPP_PATTERN,
text,
preserve_lines
)
@staticmethod
def _python(text, preserve_lines=False):
"""Python style comment stripper."""
return _strip_regex(
PY_PATTERN,
text,
preserve_lines
)
@staticmethod
def _css(text, preserve_lines=False):
"""CSS style comment stripper."""
return _strip_regex(
CSS_PATTERN,
text,
preserve_lines
)
class CommentException(Exception):
"""Comment exception."""
def __init__(self, value):
"""Setup exception."""
self.value = value
def __str__(self):
"""Return exception value repr on string convert."""
return repr(self.value)
class Comments(object):
"""Comment strip class."""
styles = []
def __init__(self, style=None, preserve_lines=False):
"""Initialize."""
self.preserve_lines = preserve_lines
self.call = self.__get_style(style)
@classmethod
def add_style(cls, style, fn):
"""Add comment style."""
if style not in cls.__dict__:
setattr(cls, style, fn)
cls.styles.append(style)
def __get_style(self, style):
"""Get the comment style."""
if style in self.styles:
return getattr(self, style)
else:
raise CommentException(style)
def strip(self, text):
"""Strip comments."""
return self.call(text, self.preserve_lines)
Comments.add_style("c", _cpp)
Comments.add_style("json", _cpp)
Comments.add_style("cpp", _cpp)
Comments.add_style("python", _python)
Comments.add_style("css", _css)
| 24.589744 | 101 | 0.50756 |
d677191174db6c4cf681ea4f9a6d1fe9e623815d | 11,966 | py | Python | counter_attack/utils.py | samuelemarro/anti-attacks | f63829ee26e24d40aecdd2d6cc6bd7026d11a016 | ["MIT"] | null | null | null | counter_attack/utils.py | samuelemarro/anti-attacks | f63829ee26e24d40aecdd2d6cc6bd7026d11a016 | ["MIT"] | null | null | null | counter_attack/utils.py | samuelemarro/anti-attacks | f63829ee26e24d40aecdd2d6cc6bd7026d11a016 | ["MIT"] | null | null | null |
import collections
import configparser
import csv
import datetime
import gzip
import itertools
import json
import logging
import pathlib
import pickle
import urllib.request
from typing import Tuple
import foolbox
import numpy as np
import sklearn.metrics as metrics
logger = logging.getLogger(__name__)
class AverageMeter(object):
"""
Computes and stores the average and current value.
"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
if self.count > 0:
self.avg = self.sum / self.count
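# Example of the running-average bookkeeping above (values are made up):
# three updates of 2.0 with n=1 followed by one update of 4.0 give sum=10.0,
# count=4 and therefore avg=2.5.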
def save_zip(object, path, protocol=0):
"""
Saves a compressed object to disk
"""
# Create the folder, if necessary
pathlib.Path(path).parent.mkdir(parents=True, exist_ok=True)
file = gzip.GzipFile(path, 'wb')
pickled = pickle.dumps(object, protocol)
file.write(pickled)
file.close()
def load_zip(path):
"""
Loads a compressed object from disk
"""
file = gzip.GzipFile(path, 'rb')
buffer = b""
while True:
data = file.read()
if data == b"":
break
buffer += data
object = pickle.loads(buffer)
file.close()
return object
def lp_norm(array, p):
# L_infinity: Maximum difference
if np.isinf(p):
value = np.max(np.abs(array))
# No normalisation for L-inf
# L_0: Count of different values
elif p == 0:
value = np.count_nonzero(np.reshape(array, -1))
# L_p: p-root of the sum of diff^p
else:
value = np.power(np.sum(np.power(np.abs(array), p)), 1 / p)
return value
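# Worked example for lp_norm (array values are made up): for array = [3, -4],
# p=2 gives sqrt(3**2 + 4**2) = 5.0, p=0 counts the 2 non-zero entries, and
# p=inf returns the largest absolute value, 4.0.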
def lp_distance(x, y, p, batch):
# If x or y are empty, we return an empty array
empty_x = hasattr(x, '__len__') and len(x) == 0
empty_y = hasattr(y, '__len__') and len(y) == 0
if batch and (empty_x or empty_y):
return np.array([], dtype=np.float)
if batch:
return np.array([lp_norm(_x - _y, p) for _x, _y in zip(x, y)])
else:
return lp_norm(x - y, p)
def random_unit_vector(shape):
dimensions = np.prod(shape)
vector = np.random.normal(size=(dimensions))
magnitude = np.linalg.norm(vector)
unit_vector = vector / magnitude
return unit_vector.reshape(shape).astype(np.float32)
def roc_curve(positive_values, negative_values):
ground_truths = np.concatenate(
[np.zeros_like(negative_values), np.ones_like(positive_values)], 0)
predictions = np.concatenate([negative_values, positive_values], 0)
# Since sklearn.metrics.roc_curve cannot handle infinity, we
# use the maximum float value
max_value = np.finfo(np.float).max
predictions = [np.sign(x) * max_value if np.isinf(x)
else x for x in predictions]
predictions = np.array(predictions)
# Create the ROC curve
false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(
ground_truths, predictions, pos_label=1)
# Replace max_value with np.Infinity
thresholds = [np.sign(x) * np.Infinity if np.abs(x) ==
max_value else x for x in thresholds]
thresholds = np.array(thresholds)
return false_positive_rate, true_positive_rate, thresholds
def get_best_threshold(true_positive_rates, false_positive_rates, thresholds):
"""
Computes the best threshold, corresponding to the threshold with the maximum value of Youden's Index
"""
# Standard way to compute Youden's Index: sensitivity + specificity - 1
# Since sensitivity = tpr and specificity = 1 - fpr, we use:
# youden = tpr + (1 - fpr) - 1 = tpr - fpr
youden_indices = true_positive_rates - false_positive_rates
# Find the index of the best youden index
best_threshold_index = np.argmax(youden_indices, 0)
return thresholds[best_threshold_index].item(), true_positive_rates[best_threshold_index].item(), false_positive_rates[best_threshold_index].item()
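# Small numeric check for get_best_threshold (rates are made up): with
# true_positive_rates=[0.0, 0.8, 1.0] and false_positive_rates=[0.0, 0.2, 1.0]
# the Youden indices are [0.0, 0.6, 0.0], so the middle threshold is returned
# together with tpr=0.8 and fpr=0.2.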
def accuracy_distortion_curve(base_accuracy, base_attack_success_rate, distances):
ascending_distances = np.sort(distances)
x = [0]
y = [base_accuracy]
for i, distance in enumerate(ascending_distances):
# If we're considering the nth distance, it means that n attacks are successful,
# so the success rate for the nth distance is n / number of distances. Since the
# distances are only computed for the successful attacks, we have to multiply by
# the attack base success rate
attack_success_rate = base_attack_success_rate * \
(i + 1) / len(distances)
accuracy = base_accuracy - attack_success_rate
# If distance is already in x, it means that two or more distances are equal.
        # In that case, update the accuracy for the corresponding distance
if distance in x:
assert x[-1] == distance
y[-1] = accuracy
else:
x.append(distance)
y.append(accuracy)
x = np.array(x)
y = np.array(y)
return x, y
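# Illustrative sketch, not part of the original module: with a base accuracy of
# 0.9, a base attack success rate of 0.5 and successful-attack distances
# [0.1, 0.2], the curve starts at (0, 0.9), drops to 0.9 - 0.5 * 1/2 = 0.65 at
# distance 0.1 and to 0.9 - 0.5 * 2/2 = 0.4 at distance 0.2. The numbers and
# the helper name are made up for illustration.
def _accuracy_distortion_example():
    x, y = accuracy_distortion_curve(0.9, 0.5, np.array([0.1, 0.2]))
    assert np.allclose(x, [0.0, 0.1, 0.2])
    assert np.allclose(y, [0.9, 0.65, 0.4])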
def filter_lists(condition, *lists):
tuple_size = len(lists)
length = len(lists[0])
for _list in lists:
if len(_list) != length:
raise ValueError('All lists must have the same length.')
is_numpy = [isinstance(_list, np.ndarray) for _list in lists]
# Convert a tuple of lists into a list of tuples
list_of_tuples = list(zip(*lists))
final_lists = [list() for i in range(tuple_size)]
for _tuple in list_of_tuples:
if(condition(*_tuple)):
for i, value in enumerate(_tuple):
final_lists[i].append(value)
    # Convert back to numpy arrays the collections that were numpy arrays
    for i in range(tuple_size):
        if is_numpy[i]:
            final_lists[i] = np.array(final_lists[i])
return final_lists
def is_top_k(predictions, label, k=1):
sorted_args = np.argsort(predictions)
top_k = sorted_args[-k:][::-1]
return label in top_k
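# Illustrative sketch, not part of the original module: is_top_k checks whether
# the true label is among the k highest-scoring predictions. With scores
# [0.1, 0.7, 0.2], label 1 is top-1, while label 2 is top-2 but not top-1. The
# helper name is hypothetical.
def _is_top_k_example():
    scores = np.array([0.1, 0.7, 0.2])
    assert is_top_k(scores, 1, k=1)
    assert is_top_k(scores, 2, k=2)
    assert not is_top_k(scores, 2, k=1)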
def top_k_count(batch_predictions, labels, k=1):
correct_samples = [is_top_k(predictions, label, k)
for predictions, label in zip(batch_predictions, labels)]
return len(np.nonzero(correct_samples)[0])
def distance_statistics(distances: np.ndarray, failure_count: int) -> Tuple[float, float, float]:
    """Computes the distance statistics, treating failures as Infinity.
    Parameters
    ----------
    distances : numpy.ndarray
        A 1D array with the computed distances (without the failed ones).
    failure_count : int
        The number of failed distance computations.
    Returns
    -------
    Tuple[float, float, float]
        A tuple composed of:
        The average distance
        The median distance
        The adjusted median distance, which treats failures as Infinity.
    """
if len(distances) == 0:
average_distance = np.nan
median_distance = np.nan
adjusted_median_distance = np.nan
else:
average_distance = np.average(distances)
median_distance = np.median(distances)
        adjusted_median_distance = np.median(
            np.concatenate([np.asarray(distances, dtype=float),
                            np.full(failure_count, np.inf)]))
return average_distance, median_distance, adjusted_median_distance
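# Illustrative sketch, not part of the original module: with successful-attack
# distances [1.0, 2.0, 3.0] and one failure, the plain median is 2.0 while the
# adjusted median pads the sample with a single infinity and becomes 2.5. The
# helper name is hypothetical.
def _distance_statistics_example():
    avg, median, adjusted_median = distance_statistics(
        np.array([1.0, 2.0, 3.0]), failure_count=1)
    assert np.isclose(avg, 2.0)
    assert np.isclose(median, 2.0)
    assert np.isclose(adjusted_median, 2.5)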
def attack_statistics_info(samples_count, correct_count, successful_attack_count, distances):
accuracy = correct_count / samples_count
success_rate = successful_attack_count / correct_count
failure_count = correct_count - successful_attack_count
average_distance, median_distance, adjusted_median_distance = distance_statistics(
distances, failure_count)
info = [['Base Accuracy', '{:2.2f}%'.format(accuracy * 100.0)],
['Success Rate', '{:2.2f}%'.format(success_rate * 100.0)],
['Average Distance', '{:2.2e}'.format(average_distance)],
['Median Distance', '{:2.2e}'.format(median_distance)],
['Adjusted Median Distance', '{:2.2e}'.format(
adjusted_median_distance)],
['Samples Count', str(samples_count)],
['Correct Count', str(correct_count)],
['Successful Attack Count', str(successful_attack_count)]]
return info
def save_results(path, table=None, command=None, info=None, header=None, delimiter='\t'):
# Add the command used to the info
if command is not None:
if info is None:
info = [['Command:'] + [command]]
else:
info = [['Command:'] + [command]] + info
    # Add an empty separator row
    if info is not None:
        info += [[]]
# Transpose the table
if table is not None:
table = itertools.zip_longest(*table, fillvalue='')
save_csv(path, table=table, info=info, header=header, delimiter=delimiter)
def save_csv(path, table=None, info=None, header=None, delimiter='\t'):
pathlib.Path(path).parent.mkdir(parents=True, exist_ok=True)
with open(path, 'w', newline='') as file:
wr = csv.writer(file, delimiter=delimiter, quoting=csv.QUOTE_MINIMAL)
if info is not None:
for row in info:
wr.writerow(row)
if header is not None:
wr.writerow(header)
if table is not None:
for row in table:
                if not isinstance(row, collections.abc.Iterable):
row = [row]
wr.writerow(row)
def download(url, path):
    # Use context managers so both the response and the file are closed
    with urllib.request.urlopen(url) as response:
        content = response.read()
    with open(path, 'wb') as f:
        f.write(content)
def download_from_config(config_path, download_path, link_section, link_name):
try:
config = configparser.ConfigParser()
config.read(config_path)
download_link = config.get(link_section, link_name)
    except (KeyError, configparser.NoSectionError, configparser.NoOptionError):
raise IOError(
'The configuration file does not contain the link for \'{}\''.format(link_name))
except IOError:
raise IOError('Could not read the configuration file.')
try:
logger.info('Downloading from {}'.format(download_link))
download(download_link, download_path)
except IOError as e:
raise IOError(
'Could not download from \'{}\': {}. '
'Please check that your internet connection is working, '
'or download the model manually and store it in {}.'.format(download_link, e, download_path)) from e
def load_json(path):
    with open(path) as f:
        data = json.load(f)
    return data
def p_to_foolbox(p):
if p == 2:
return foolbox.distances.MeanSquaredDistance
elif np.isinf(p):
return foolbox.distances.Linfinity
else:
raise NotImplementedError('Unsupported Lp.')
class Filter(dict):
def __init__(self, o=None, **kwArgs):
if o is None:
o = {}
super().__init__(o, **kwArgs)
def __setitem__(self, key, value):
value_length = len(value)
for existing_key, existing_value in self.items():
if value_length != len(existing_value):
                raise ValueError('The inserted value must have the same length (along the first dimension) '
                                 'as the other values (inserted value of length {}, mismatched with "{}": {})'.format(value_length, existing_key, len(existing_value)))
super().__setitem__(key, value)
def empty(self):
for key in self.keys():
super().__setitem__(key, [])
def filter(self, indices):
if len(indices) == 0:
self.empty()
return
for key in self.keys():
if isinstance(self[key], np.ndarray):
super().__setitem__(key, self[key][indices])
elif isinstance(self[key], list):
super().__setitem__(key, [self[key][i] for i in indices])
else:
                raise NotImplementedError('The filter for collection "{}" of type {} is not implemented.'.format(key, type(self[key])))
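# Illustrative sketch, not part of the original module: Filter keeps several
# parallel collections aligned, and filter(indices) keeps the same positions in
# every stored collection so the entries stay matched up. The key names and the
# helper name are made up for illustration.
def _filter_example():
    f = Filter()
    f['images'] = np.zeros((4, 2, 2))
    f['labels'] = [0, 1, 0, 1]
    f.filter([1, 3])
    assert f['images'].shape == (2, 2, 2)
    assert f['labels'] == [1, 1]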
| 31.572559
| 169
| 0.635801
|
06e53578d777f55a0bb0bbaba22d4f6a65960e65
| 938
|
py
|
Python
|
clients/client/python/test/test_stripe_customer_response.py
|
russelg/sdk
|
2515b35981784319bd7d58fcf0b5ab85b501b62f
|
[
"Apache-2.0"
] | null | null | null |
clients/client/python/test/test_stripe_customer_response.py
|
russelg/sdk
|
2515b35981784319bd7d58fcf0b5ab85b501b62f
|
[
"Apache-2.0"
] | null | null | null |
clients/client/python/test/test_stripe_customer_response.py
|
russelg/sdk
|
2515b35981784319bd7d58fcf0b5ab85b501b62f
|
[
"Apache-2.0"
] | null | null | null |
"""
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.42
Contact: support@ory.sh
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_client
from ory_client.model.stripe_customer_response import StripeCustomerResponse
class TestStripeCustomerResponse(unittest.TestCase):
"""StripeCustomerResponse unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testStripeCustomerResponse(self):
"""Test StripeCustomerResponse"""
# FIXME: construct object with mandatory attributes with example values
# model = StripeCustomerResponse() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 25.351351
| 194
| 0.716418
|
9b68136bdd5c8c4ce0cbc2fbb7b51cb02d349a7b
| 115,712
|
py
|
Python
|
test/orm/test_froms.py
|
lelit/sqlalchemy
|
55f930ef3d4e60bed02a2dad16e331fe42cfd12b
|
[
"MIT"
] | null | null | null |
test/orm/test_froms.py
|
lelit/sqlalchemy
|
55f930ef3d4e60bed02a2dad16e331fe42cfd12b
|
[
"MIT"
] | null | null | null |
test/orm/test_froms.py
|
lelit/sqlalchemy
|
55f930ef3d4e60bed02a2dad16e331fe42cfd12b
|
[
"MIT"
] | null | null | null |
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import cast
from sqlalchemy import desc
from sqlalchemy import exc as sa_exc
from sqlalchemy import exists
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import literal_column
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import util
from sqlalchemy.engine import default
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import column_property
from sqlalchemy.orm import configure_mappers
from sqlalchemy.orm import contains_alias
from sqlalchemy.orm import contains_eager
from sqlalchemy.orm import create_session
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import joinedload_all
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relation
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm.util import join
from sqlalchemy.sql import column
from sqlalchemy.sql import table
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
@classmethod
def setup_mappers(cls):
(
Node,
composite_pk_table,
users,
Keyword,
items,
Dingaling,
order_items,
item_keywords,
Item,
User,
dingalings,
Address,
keywords,
CompositePk,
nodes,
Order,
orders,
addresses,
) = (
cls.classes.Node,
cls.tables.composite_pk_table,
cls.tables.users,
cls.classes.Keyword,
cls.tables.items,
cls.classes.Dingaling,
cls.tables.order_items,
cls.tables.item_keywords,
cls.classes.Item,
cls.classes.User,
cls.tables.dingalings,
cls.classes.Address,
cls.tables.keywords,
cls.classes.CompositePk,
cls.tables.nodes,
cls.classes.Order,
cls.tables.orders,
cls.tables.addresses,
)
mapper(
User,
users,
properties={
"addresses": relationship(
Address, backref="user", order_by=addresses.c.id
),
"orders": relationship(
Order, backref="user", order_by=orders.c.id
), # o2m, m2o
},
)
mapper(
Address,
addresses,
properties={
"dingaling": relationship(
Dingaling, uselist=False, backref="address"
) # o2o
},
)
mapper(Dingaling, dingalings)
mapper(
Order,
orders,
properties={
"items": relationship(
Item, secondary=order_items, order_by=items.c.id
), # m2m
"address": relationship(Address), # m2o
},
)
mapper(
Item,
items,
properties={
"keywords": relationship(Keyword, secondary=item_keywords)
},
) # m2m
mapper(Keyword, keywords)
mapper(
Node,
nodes,
properties={
"children": relationship(
Node, backref=backref("parent", remote_side=[nodes.c.id])
)
},
)
mapper(CompositePk, composite_pk_table)
configure_mappers()
class QueryCorrelatesLikeSelect(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
query_correlated = (
"SELECT users.name AS users_name, "
"(SELECT count(addresses.id) AS count_1 FROM addresses "
"WHERE addresses.user_id = users.id) AS anon_1 FROM users"
)
query_not_correlated = (
"SELECT users.name AS users_name, "
"(SELECT count(addresses.id) AS count_1 FROM addresses, users "
"WHERE addresses.user_id = users.id) AS anon_1 FROM users"
)
def test_as_scalar_select_auto_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
query = select(
[func.count(addresses.c.id)], addresses.c.user_id == users.c.id
).as_scalar()
query = select([users.c.name.label("users_name"), query])
self.assert_compile(
query, self.query_correlated, dialect=default.DefaultDialect()
)
def test_as_scalar_select_explicit_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
query = (
select(
[func.count(addresses.c.id)], addresses.c.user_id == users.c.id
)
.correlate(users)
.as_scalar()
)
query = select([users.c.name.label("users_name"), query])
self.assert_compile(
query, self.query_correlated, dialect=default.DefaultDialect()
)
def test_as_scalar_select_correlate_off(self):
addresses, users = self.tables.addresses, self.tables.users
query = (
select(
[func.count(addresses.c.id)], addresses.c.user_id == users.c.id
)
.correlate(None)
.as_scalar()
)
query = select([users.c.name.label("users_name"), query])
self.assert_compile(
query, self.query_not_correlated, dialect=default.DefaultDialect()
)
def test_as_scalar_query_auto_correlate(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = (
sess.query(func.count(Address.id))
.filter(Address.user_id == User.id)
.as_scalar()
)
query = sess.query(User.name, query)
self.assert_compile(
query, self.query_correlated, dialect=default.DefaultDialect()
)
def test_as_scalar_query_explicit_correlate(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = (
sess.query(func.count(Address.id))
.filter(Address.user_id == User.id)
.correlate(self.tables.users)
.as_scalar()
)
query = sess.query(User.name, query)
self.assert_compile(
query, self.query_correlated, dialect=default.DefaultDialect()
)
def test_as_scalar_query_correlate_off(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = (
sess.query(func.count(Address.id))
.filter(Address.user_id == User.id)
.correlate(None)
.as_scalar()
)
query = sess.query(User.name, query)
self.assert_compile(
query, self.query_not_correlated, dialect=default.DefaultDialect()
)
def test_correlate_to_union(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
q = sess.query(User).union(q)
u_alias = aliased(User)
raw_subq = exists().where(u_alias.id > User.id)
orm_subq = sess.query(u_alias).filter(u_alias.id > User.id).exists()
self.assert_compile(
q.add_column(raw_subq),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name, "
"EXISTS (SELECT * FROM users AS users_1 "
"WHERE users_1.id > anon_1.users_id) AS anon_2 "
"FROM ("
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"UNION SELECT users.id AS users_id, users.name AS users_name "
"FROM users) AS anon_1",
)
# only difference is "1" vs. "*" (not sure why that is)
self.assert_compile(
q.add_column(orm_subq),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name, "
"EXISTS (SELECT 1 FROM users AS users_1 "
"WHERE users_1.id > anon_1.users_id) AS anon_2 "
"FROM ("
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"UNION SELECT users.id AS users_id, users.name AS users_name "
"FROM users) AS anon_1",
)
class RawSelectTest(QueryTest, AssertsCompiledSQL):
"""compare a bunch of select() tests with the equivalent Query using
straight table/columns.
Results should be the same as Query should act as a select() pass-
thru for ClauseElement entities.
"""
__dialect__ = "default"
def test_select(self):
addresses, users = self.tables.addresses, self.tables.users
sess = create_session()
self.assert_compile(
sess.query(users)
.select_entity_from(users.select())
.with_labels()
.statement,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users, "
"(SELECT users.id AS id, users.name AS name FROM users) AS anon_1",
)
self.assert_compile(
sess.query(users, exists([1], from_obj=addresses))
.with_labels()
.statement,
"SELECT users.id AS users_id, users.name AS users_name, EXISTS "
"(SELECT 1 FROM addresses) AS anon_1 FROM users",
)
# a little tedious here, adding labels to work around Query's
# auto-labelling.
s = (
sess.query(
addresses.c.id.label("id"),
addresses.c.email_address.label("email"),
)
.filter(addresses.c.user_id == users.c.id)
.correlate(users)
.statement.alias()
)
self.assert_compile(
sess.query(users, s.c.email)
.select_entity_from(users.join(s, s.c.id == users.c.id))
.with_labels()
.statement,
"SELECT users.id AS users_id, users.name AS users_name, "
"anon_1.email AS anon_1_email "
"FROM users JOIN (SELECT addresses.id AS id, "
"addresses.email_address AS email FROM addresses, users "
"WHERE addresses.user_id = users.id) AS anon_1 "
"ON anon_1.id = users.id",
)
x = func.lala(users.c.id).label("foo")
self.assert_compile(
sess.query(x).filter(x == 5).statement,
"SELECT lala(users.id) AS foo FROM users WHERE "
"lala(users.id) = :param_1",
)
self.assert_compile(
sess.query(func.sum(x).label("bar")).statement,
"SELECT sum(lala(users.id)) AS bar FROM users",
)
class FromSelfTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
def test_filter(self):
User = self.classes.User
eq_(
[User(id=8), User(id=9)],
create_session()
.query(User)
.filter(User.id.in_([8, 9]))
.from_self()
.all(),
)
eq_(
[User(id=8), User(id=9)],
create_session()
.query(User)
.order_by(User.id)
.slice(1, 3)
.from_self()
.all(),
)
eq_(
[User(id=8)],
list(
create_session()
.query(User)
.filter(User.id.in_([8, 9]))
.from_self()
.order_by(User.id)[0:1]
),
)
def test_join(self):
User, Address = self.classes.User, self.classes.Address
eq_(
[
(User(id=8), Address(id=2)),
(User(id=8), Address(id=3)),
(User(id=8), Address(id=4)),
(User(id=9), Address(id=5)),
],
create_session()
.query(User)
.filter(User.id.in_([8, 9]))
.from_self()
.join("addresses")
.add_entity(Address)
.order_by(User.id, Address.id)
.all(),
)
def test_group_by(self):
Address = self.classes.Address
eq_(
create_session()
.query(Address.user_id, func.count(Address.id).label("count"))
.group_by(Address.user_id)
.order_by(Address.user_id)
.all(),
[(7, 1), (8, 3), (9, 1)],
)
eq_(
create_session()
.query(Address.user_id, Address.id)
.from_self(Address.user_id, func.count(Address.id))
.group_by(Address.user_id)
.order_by(Address.user_id)
.all(),
[(7, 1), (8, 3), (9, 1)],
)
def test_having(self):
User = self.classes.User
s = create_session()
self.assert_compile(
s.query(User.id).group_by(User.id).having(User.id > 5).from_self(),
"SELECT anon_1.users_id AS anon_1_users_id FROM "
"(SELECT users.id AS users_id FROM users GROUP "
"BY users.id HAVING users.id > :id_1) AS anon_1",
)
def test_no_joinedload(self):
"""test that joinedloads are pushed outwards and not rendered in
subqueries."""
User = self.classes.User
s = create_session()
self.assert_compile(
s.query(User)
.options(joinedload(User.addresses))
.from_self()
.statement,
"SELECT anon_1.users_id, anon_1.users_name, addresses_1.id, "
"addresses_1.user_id, addresses_1.email_address FROM "
"(SELECT users.id AS users_id, users.name AS "
"users_name FROM users) AS anon_1 LEFT OUTER JOIN "
"addresses AS addresses_1 ON anon_1.users_id = "
"addresses_1.user_id ORDER BY addresses_1.id",
)
def test_aliases(self):
"""test that aliased objects are accessible externally to a from_self()
call."""
User, Address = self.classes.User, self.classes.Address
s = create_session()
ualias = aliased(User)
eq_(
s.query(User, ualias)
.filter(User.id > ualias.id)
.from_self(User.name, ualias.name)
.order_by(User.name, ualias.name)
.all(),
[
("chuck", "ed"),
("chuck", "fred"),
("chuck", "jack"),
("ed", "jack"),
("fred", "ed"),
("fred", "jack"),
],
)
eq_(
s.query(User, ualias)
.filter(User.id > ualias.id)
.from_self(User.name, ualias.name)
.filter(ualias.name == "ed")
.order_by(User.name, ualias.name)
.all(),
[("chuck", "ed"), ("fred", "ed")],
)
eq_(
s.query(User, ualias)
.filter(User.id > ualias.id)
.from_self(ualias.name, Address.email_address)
.join(ualias.addresses)
.order_by(ualias.name, Address.email_address)
.all(),
[
("ed", "fred@fred.com"),
("jack", "ed@bettyboop.com"),
("jack", "ed@lala.com"),
("jack", "ed@wood.com"),
("jack", "fred@fred.com"),
],
)
def test_multiple_entities(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
eq_(
sess.query(User, Address)
.filter(User.id == Address.user_id)
.filter(Address.id.in_([2, 5]))
.from_self()
.all(),
[(User(id=8), Address(id=2)), (User(id=9), Address(id=5))],
)
eq_(
sess.query(User, Address)
.filter(User.id == Address.user_id)
.filter(Address.id.in_([2, 5]))
.from_self()
.options(joinedload("addresses"))
.first(),
(
User(id=8, addresses=[Address(), Address(), Address()]),
Address(id=2),
),
)
def test_multiple_with_column_entities(self):
User = self.classes.User
sess = create_session()
eq_(
sess.query(User.id)
.from_self()
.add_column(func.count().label("foo"))
.group_by(User.id)
.order_by(User.id)
.from_self()
.all(),
[(7, 1), (8, 1), (9, 1), (10, 1)],
)
class ColumnAccessTest(QueryTest, AssertsCompiledSQL):
"""test access of columns after _from_selectable has been applied"""
__dialect__ = "default"
def test_from_self(self):
User = self.classes.User
sess = create_session()
q = sess.query(User).from_self()
self.assert_compile(
q.filter(User.name == "ed"),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id AS users_id, users.name "
"AS users_name FROM users) AS anon_1 WHERE anon_1.users_name = "
":name_1",
)
def test_from_self_twice(self):
User = self.classes.User
sess = create_session()
q = sess.query(User).from_self(User.id, User.name).from_self()
self.assert_compile(
q.filter(User.name == "ed"),
"SELECT anon_1.anon_2_users_id AS anon_1_anon_2_users_id, "
"anon_1.anon_2_users_name AS anon_1_anon_2_users_name FROM "
"(SELECT anon_2.users_id AS anon_2_users_id, anon_2.users_name "
"AS anon_2_users_name FROM (SELECT users.id AS users_id, "
"users.name AS users_name FROM users) AS anon_2) AS anon_1 "
"WHERE anon_1.anon_2_users_name = :name_1",
)
def test_select_entity_from(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
q = sess.query(User).select_entity_from(q.statement)
self.assert_compile(
q.filter(User.name == "ed"),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users) AS anon_1 WHERE anon_1.name = :name_1",
)
def test_select_entity_from_no_entities(self):
User = self.classes.User
sess = create_session()
assert_raises_message(
sa.exc.ArgumentError,
r"A selectable \(FromClause\) instance is "
"expected when the base alias is being set",
sess.query(User).select_entity_from,
User,
)
def test_select_from_no_aliasing(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
q = sess.query(User).select_from(q.statement)
self.assert_compile(
q.filter(User.name == "ed"),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users, (SELECT users.id AS id, users.name AS name FROM "
"users) AS anon_1 WHERE users.name = :name_1",
)
def test_anonymous_expression(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column("c1"), column("c2")
q1 = sess.query(c1, c2).filter(c1 == "dog")
q2 = sess.query(c1, c2).filter(c1 == "cat")
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.c1 AS anon_1_c1, anon_1.c2 "
"AS anon_1_c2 FROM (SELECT c1, c2 WHERE "
"c1 = :c1_1 UNION SELECT c1, c2 "
"WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.c1",
)
def test_anonymous_expression_from_self_twice(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column("c1"), column("c2")
q1 = sess.query(c1, c2).filter(c1 == "dog")
q1 = q1.from_self().from_self()
self.assert_compile(
q1.order_by(c1),
"SELECT anon_1.anon_2_c1 AS anon_1_anon_2_c1, anon_1.anon_2_c2 AS "
"anon_1_anon_2_c2 FROM (SELECT anon_2.c1 AS anon_2_c1, anon_2.c2 "
"AS anon_2_c2 "
"FROM (SELECT c1, c2 WHERE c1 = :c1_1) AS "
"anon_2) AS anon_1 ORDER BY anon_1.anon_2_c1",
)
def test_anonymous_expression_union(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column("c1"), column("c2")
q1 = sess.query(c1, c2).filter(c1 == "dog")
q2 = sess.query(c1, c2).filter(c1 == "cat")
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.c1 AS anon_1_c1, anon_1.c2 "
"AS anon_1_c2 FROM (SELECT c1, c2 WHERE "
"c1 = :c1_1 UNION SELECT c1, c2 "
"WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.c1",
)
def test_table_anonymous_expression_from_self_twice(self):
from sqlalchemy.sql import column
sess = create_session()
t1 = table("t1", column("c1"), column("c2"))
q1 = sess.query(t1.c.c1, t1.c.c2).filter(t1.c.c1 == "dog")
q1 = q1.from_self().from_self()
self.assert_compile(
q1.order_by(t1.c.c1),
"SELECT anon_1.anon_2_t1_c1 "
"AS anon_1_anon_2_t1_c1, anon_1.anon_2_t1_c2 "
"AS anon_1_anon_2_t1_c2 "
"FROM (SELECT anon_2.t1_c1 AS anon_2_t1_c1, "
"anon_2.t1_c2 AS anon_2_t1_c2 FROM (SELECT t1.c1 AS t1_c1, t1.c2 "
"AS t1_c2 FROM t1 WHERE t1.c1 = :c1_1) AS anon_2) AS anon_1 "
"ORDER BY anon_1.anon_2_t1_c1",
)
def test_anonymous_labeled_expression(self):
sess = create_session()
c1, c2 = column("c1"), column("c2")
q1 = sess.query(c1.label("foo"), c2.label("bar")).filter(c1 == "dog")
q2 = sess.query(c1.label("foo"), c2.label("bar")).filter(c1 == "cat")
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.foo AS anon_1_foo, anon_1.bar AS anon_1_bar FROM "
"(SELECT c1 AS foo, c2 AS bar WHERE c1 = :c1_1 UNION SELECT "
"c1 AS foo, c2 AS bar "
"WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.foo",
)
def test_anonymous_expression_plus_aliased_join(self):
"""test that the 'dont alias non-ORM' rule remains for other
kinds of aliasing when _from_selectable() is used."""
User = self.classes.User
Address = self.classes.Address
addresses = self.tables.addresses
sess = create_session()
q1 = sess.query(User.id).filter(User.id > 5)
q1 = q1.from_self()
q1 = q1.join(User.addresses, aliased=True).order_by(
User.id, Address.id, addresses.c.id
)
self.assert_compile(
q1,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM users "
"WHERE users.id > :id_1) AS anon_1 JOIN addresses AS addresses_1 "
"ON anon_1.users_id = addresses_1.user_id "
"ORDER BY anon_1.users_id, addresses_1.id, addresses.id",
)
class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = "once"
@classmethod
def define_tables(cls, metadata):
Table(
"a",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("name", String(50)),
Column("type", String(20)),
Column("bid", Integer, ForeignKey("b.id")),
)
Table(
"b",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("name", String(50)),
Column("type", String(20)),
)
Table(
"c",
metadata,
Column("id", Integer, ForeignKey("b.id"), primary_key=True),
Column("age", Integer),
)
Table(
"d",
metadata,
Column("id", Integer, ForeignKey("a.id"), primary_key=True),
Column("dede", Integer),
)
@classmethod
def setup_classes(cls):
a, c, b, d = (cls.tables.a, cls.tables.c, cls.tables.b, cls.tables.d)
class A(cls.Comparable):
pass
class B(cls.Comparable):
pass
class C(B):
pass
class D(A):
pass
mapper(
A,
a,
polymorphic_identity="a",
polymorphic_on=a.c.type,
with_polymorphic=("*", None),
properties={"link": relation(B, uselist=False, backref="back")},
)
mapper(
B,
b,
polymorphic_identity="b",
polymorphic_on=b.c.type,
with_polymorphic=("*", None),
)
mapper(C, c, inherits=B, polymorphic_identity="c")
mapper(D, d, inherits=A, polymorphic_identity="d")
@classmethod
def insert_data(cls):
A, C, B = (cls.classes.A, cls.classes.C, cls.classes.B)
sess = create_session()
sess.add_all(
[
B(name="b1"),
A(name="a1", link=C(name="c1", age=3)),
C(name="c2", age=6),
A(name="a2"),
]
)
sess.flush()
def test_add_entity_equivalence(self):
A, C, B = (self.classes.A, self.classes.C, self.classes.B)
sess = create_session()
for q in [
sess.query(A, B).join(A.link),
sess.query(A).join(A.link).add_entity(B),
]:
eq_(
q.all(),
[
(
A(bid=2, id=1, name="a1", type="a"),
C(age=3, id=2, name="c1", type="c"),
)
],
)
for q in [
sess.query(B, A).join(B.back),
sess.query(B).join(B.back).add_entity(A),
sess.query(B).add_entity(A).join(B.back),
]:
eq_(
q.all(),
[
(
C(age=3, id=2, name="c1", type="c"),
A(bid=2, id=1, name="a1", type="a"),
)
],
)
class InstancesTest(QueryTest, AssertsCompiledSQL):
def test_from_alias_one(self):
User, addresses, users = (
self.classes.User,
self.tables.addresses,
self.tables.users,
)
query = (
users.select(users.c.id == 7)
.union(users.select(users.c.id > 7))
.alias("ulist")
.outerjoin(addresses)
.select(
use_labels=True, order_by=[text("ulist.id"), addresses.c.id]
)
)
sess = create_session()
q = sess.query(User)
def go():
result = list(
q.options(
contains_alias("ulist"), contains_eager("addresses")
).instances(query.execute())
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_two(self):
User, addresses, users = (
self.classes.User,
self.tables.addresses,
self.tables.users,
)
query = (
users.select(users.c.id == 7)
.union(users.select(users.c.id > 7))
.alias("ulist")
.outerjoin(addresses)
.select(
use_labels=True, order_by=[text("ulist.id"), addresses.c.id]
)
)
sess = create_session()
q = sess.query(User)
def go():
result = (
q.options(contains_alias("ulist"), contains_eager("addresses"))
.from_statement(query)
.all()
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_three(self):
User, addresses, users = (
self.classes.User,
self.tables.addresses,
self.tables.users,
)
query = (
users.select(users.c.id == 7)
.union(users.select(users.c.id > 7))
.alias("ulist")
.outerjoin(addresses)
.select(
use_labels=True, order_by=[text("ulist.id"), addresses.c.id]
)
)
sess = create_session()
# better way. use select_entity_from()
def go():
result = (
sess.query(User)
.select_entity_from(query)
.options(contains_eager("addresses"))
.all()
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_from_alias_four(self):
User, addresses, users = (
self.classes.User,
self.tables.addresses,
self.tables.users,
)
sess = create_session()
# same thing, but alias addresses, so that the adapter
# generated by select_entity_from() is wrapped within
# the adapter created by contains_eager()
adalias = addresses.alias()
query = (
users.select(users.c.id == 7)
.union(users.select(users.c.id > 7))
.alias("ulist")
.outerjoin(adalias)
.select(use_labels=True, order_by=[text("ulist.id"), adalias.c.id])
)
def go():
result = (
sess.query(User)
.select_entity_from(query)
.options(contains_eager("addresses", alias=adalias))
.all()
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager(self):
users, addresses, User = (
self.tables.users,
self.tables.addresses,
self.classes.User,
)
sess = create_session()
# test that contains_eager suppresses the normal outer join rendering
q = (
sess.query(User)
.outerjoin(User.addresses)
.options(contains_eager(User.addresses))
.order_by(User.id, addresses.c.id)
)
self.assert_compile(
q.with_labels().statement,
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS "
"addresses_email_address, users.id AS "
"users_id, users.name AS users_name FROM "
"users LEFT OUTER JOIN addresses ON "
"users.id = addresses.user_id ORDER BY "
"users.id, addresses.id",
dialect=default.DefaultDialect(),
)
def go():
assert self.static.user_address_result == q.all()
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
adalias = addresses.alias()
q = (
sess.query(User)
.select_entity_from(users.outerjoin(adalias))
.options(contains_eager(User.addresses, alias=adalias))
.order_by(User.id, adalias.c.id)
)
def go():
eq_(self.static.user_address_result, q.all())
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
selectquery = users.outerjoin(addresses).select(
users.c.id < 10,
use_labels=True,
order_by=[users.c.id, addresses.c.id],
)
q = sess.query(User)
def go():
result = list(
q.options(contains_eager("addresses")).instances(
selectquery.execute()
)
)
assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
result = list(
q.options(contains_eager(User.addresses)).instances(
selectquery.execute()
)
)
assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
result = (
q.options(contains_eager("addresses"))
.from_statement(selectquery)
.all()
)
assert self.static.user_address_result[0:3] == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_string_alias(self):
addresses, users, User = (
self.tables.addresses,
self.tables.users,
self.classes.User,
)
sess = create_session()
q = sess.query(User)
adalias = addresses.alias("adalias")
selectquery = users.outerjoin(adalias).select(
use_labels=True, order_by=[users.c.id, adalias.c.id]
)
# string alias name
def go():
result = list(
q.options(
contains_eager("addresses", alias="adalias")
).instances(selectquery.execute())
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased_instances(self):
addresses, users, User = (
self.tables.addresses,
self.tables.users,
self.classes.User,
)
sess = create_session()
q = sess.query(User)
adalias = addresses.alias("adalias")
selectquery = users.outerjoin(adalias).select(
use_labels=True, order_by=[users.c.id, adalias.c.id]
)
# expression.Alias object
def go():
result = list(
q.options(
contains_eager("addresses", alias=adalias)
).instances(selectquery.execute())
)
assert self.static.user_address_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User)
# Aliased object
adalias = aliased(Address)
def go():
result = (
q.options(contains_eager("addresses", alias=adalias))
.outerjoin(adalias, User.addresses)
.order_by(User.id, adalias.id)
)
assert self.static.user_address_result == result.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_string_alias(self):
orders, items, users, order_items, User = (
self.tables.orders,
self.tables.items,
self.tables.users,
self.tables.order_items,
self.classes.User,
)
sess = create_session()
q = sess.query(User)
oalias = orders.alias("o1")
ialias = items.alias("i1")
query = (
users.outerjoin(oalias)
.outerjoin(order_items)
.outerjoin(ialias)
.select(use_labels=True)
.order_by(users.c.id, oalias.c.id, ialias.c.id)
)
# test using string alias with more than one level deep
def go():
result = list(
q.options(
contains_eager("orders", alias="o1"),
contains_eager("orders.items", alias="i1"),
).instances(query.execute())
)
assert self.static.user_order_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_alias(self):
orders, items, users, order_items, User = (
self.tables.orders,
self.tables.items,
self.tables.users,
self.tables.order_items,
self.classes.User,
)
sess = create_session()
q = sess.query(User)
oalias = orders.alias("o1")
ialias = items.alias("i1")
query = (
users.outerjoin(oalias)
.outerjoin(order_items)
.outerjoin(ialias)
.select(use_labels=True)
.order_by(users.c.id, oalias.c.id, ialias.c.id)
)
# test using Alias with more than one level deep
# new way:
# from sqlalchemy.orm.strategy_options import Load
# opt = Load(User).contains_eager('orders', alias=oalias).
# contains_eager('items', alias=ialias)
def go():
result = list(
q.options(
contains_eager("orders", alias=oalias),
contains_eager("orders.items", alias=ialias),
).instances(query.execute())
)
assert self.static.user_order_result == result
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_aliased(self):
Item, User, Order = (
self.classes.Item,
self.classes.User,
self.classes.Order,
)
sess = create_session()
q = sess.query(User)
# test using Aliased with more than one level deep
oalias = aliased(Order)
ialias = aliased(Item)
def go():
result = (
q.options(
contains_eager(User.orders, alias=oalias),
contains_eager(User.orders, Order.items, alias=ialias),
)
.outerjoin(oalias, User.orders)
.outerjoin(ialias, oalias.items)
.order_by(User.id, oalias.id, ialias.id)
)
assert self.static.user_order_result == result.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_chaining(self):
"""test that contains_eager() 'chains' by default."""
Dingaling, User, Address = (
self.classes.Dingaling,
self.classes.User,
self.classes.Address,
)
sess = create_session()
q = (
sess.query(User)
.join(User.addresses)
.join(Address.dingaling)
.options(contains_eager(User.addresses, Address.dingaling))
)
def go():
eq_(
q.all(),
# note we only load the Address records that
# have a Dingaling here due to using the inner
# join for the eager load
[
User(
name="ed",
addresses=[
Address(
email_address="ed@wood.com",
dingaling=Dingaling(data="ding 1/2"),
)
],
),
User(
name="fred",
addresses=[
Address(
email_address="fred@fred.com",
dingaling=Dingaling(data="ding 2/5"),
)
],
),
],
)
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_chaining_aliased_endpoint(self):
"""test that contains_eager() 'chains' by default and supports
an alias at the end."""
Dingaling, User, Address = (
self.classes.Dingaling,
self.classes.User,
self.classes.Address,
)
sess = create_session()
da = aliased(Dingaling, name="foob")
q = (
sess.query(User)
.join(User.addresses)
.join(da, Address.dingaling)
.options(
contains_eager(User.addresses, Address.dingaling, alias=da)
)
)
def go():
eq_(
q.all(),
# note we only load the Address records that
# have a Dingaling here due to using the inner
# join for the eager load
[
User(
name="ed",
addresses=[
Address(
email_address="ed@wood.com",
dingaling=Dingaling(data="ding 1/2"),
)
],
),
User(
name="fred",
addresses=[
Address(
email_address="fred@fred.com",
dingaling=Dingaling(data="ding 2/5"),
)
],
),
],
)
self.assert_sql_count(testing.db, go, 1)
def test_mixed_eager_contains_with_limit(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = create_session()
q = sess.query(User)
def go():
# outerjoin to User.orders, offset 1/limit 2 so we get user
# 7 + second two orders. then joinedload the addresses.
# User + Order columns go into the subquery, address left
# outer joins to the subquery, joinedloader for User.orders
# applies context.adapter to result rows. This was
# [ticket:1180].
result = (
q.outerjoin(User.orders)
.options(
joinedload(User.addresses), contains_eager(User.orders)
)
.order_by(User.id, Order.id)
.offset(1)
.limit(2)
.all()
)
eq_(
result,
[
User(
id=7,
addresses=[
Address(
email_address="jack@bean.com", user_id=7, id=1
)
],
name="jack",
orders=[
Order(
address_id=1,
user_id=7,
description="order 3",
isopen=1,
id=3,
),
Order(
address_id=None,
user_id=7,
description="order 5",
isopen=0,
id=5,
),
],
)
],
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
# same as above, except Order is aliased, so two adapters
# are applied by the eager loader
oalias = aliased(Order)
result = (
q.outerjoin(oalias, User.orders)
.options(
joinedload(User.addresses),
contains_eager(User.orders, alias=oalias),
)
.order_by(User.id, oalias.id)
.offset(1)
.limit(2)
.all()
)
eq_(
result,
[
User(
id=7,
addresses=[
Address(
email_address="jack@bean.com", user_id=7, id=1
)
],
name="jack",
orders=[
Order(
address_id=1,
user_id=7,
description="order 3",
isopen=1,
id=3,
),
Order(
address_id=None,
user_id=7,
description="order 5",
isopen=0,
id=5,
),
],
)
],
)
self.assert_sql_count(testing.db, go, 1)
class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
def test_values(self):
Address, users, User = (
self.classes.Address,
self.tables.users,
self.classes.User,
)
sess = create_session()
assert list(sess.query(User).values()) == list()
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
q2 = q.select_entity_from(sel).values(User.name)
eq_(list(q2), [("jack",), ("ed",)])
q = sess.query(User)
q2 = q.order_by(User.id).values(
User.name, User.name + " " + cast(User.id, String(50))
)
eq_(
list(q2),
[
("jack", "jack 7"),
("ed", "ed 8"),
("fred", "fred 9"),
("chuck", "chuck 10"),
],
)
q2 = (
q.join("addresses")
.filter(User.name.like("%e%"))
.order_by(User.id, Address.id)
.values(User.name, Address.email_address)
)
eq_(
list(q2),
[
("ed", "ed@wood.com"),
("ed", "ed@bettyboop.com"),
("ed", "ed@lala.com"),
("fred", "fred@fred.com"),
],
)
q2 = (
q.join("addresses")
.filter(User.name.like("%e%"))
.order_by(desc(Address.email_address))
.slice(1, 3)
.values(User.name, Address.email_address)
)
eq_(list(q2), [("ed", "ed@wood.com"), ("ed", "ed@lala.com")])
adalias = aliased(Address)
q2 = (
q.join(adalias, "addresses")
.filter(User.name.like("%e%"))
.order_by(adalias.email_address)
.values(User.name, adalias.email_address)
)
eq_(
list(q2),
[
("ed", "ed@bettyboop.com"),
("ed", "ed@lala.com"),
("ed", "ed@wood.com"),
("fred", "fred@fred.com"),
],
)
q2 = q.values(func.count(User.name))
assert next(q2) == (4,)
q2 = (
q.select_entity_from(sel)
.filter(User.id == 8)
.values(User.name, sel.c.name, User.name)
)
eq_(list(q2), [("ed", "ed", "ed")])
# using User.xxx is alised against "sel", so this query returns nothing
q2 = (
q.select_entity_from(sel)
.filter(User.id == 8)
.filter(User.id > sel.c.id)
.values(User.name, sel.c.name, User.name)
)
eq_(list(q2), [])
# whereas this uses users.c.xxx, is not aliased and creates a new join
q2 = (
q.select_entity_from(sel)
.filter(users.c.id == 8)
.filter(users.c.id > sel.c.id)
.values(users.c.name, sel.c.name, User.name)
)
eq_(list(q2), [("ed", "jack", "jack")])
def test_alias_naming(self):
User = self.classes.User
sess = create_session()
ua = aliased(User, name="foobar")
q = sess.query(ua)
self.assert_compile(
q,
"SELECT foobar.id AS foobar_id, "
"foobar.name AS foobar_name FROM users AS foobar",
)
@testing.fails_on("mssql", "FIXME: unknown")
def test_values_specific_order_by(self):
users, User = self.tables.users, self.classes.User
sess = create_session()
assert list(sess.query(User).values()) == list()
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
u2 = aliased(User)
q2 = (
q.select_entity_from(sel)
.filter(u2.id > 1)
.order_by(User.id, sel.c.id, u2.id)
.values(User.name, sel.c.name, u2.name)
)
eq_(
list(q2),
[
("jack", "jack", "jack"),
("jack", "jack", "ed"),
("jack", "jack", "fred"),
("jack", "jack", "chuck"),
("ed", "ed", "jack"),
("ed", "ed", "ed"),
("ed", "ed", "fred"),
("ed", "ed", "chuck"),
],
)
@testing.fails_on("mssql", "FIXME: unknown")
@testing.fails_on(
"oracle", "Oracle doesn't support boolean expressions as " "columns"
)
@testing.fails_on(
"postgresql+pg8000",
"pg8000 parses the SQL itself before passing on "
"to PG, doesn't parse this",
)
@testing.fails_on(
"postgresql+zxjdbc",
"zxjdbc parses the SQL itself before passing on "
"to PG, doesn't parse this",
)
@testing.fails_on("firebird", "unknown")
def test_values_with_boolean_selects(self):
"""Tests a values clause that works with select boolean
evaluations"""
User = self.classes.User
sess = create_session()
q = sess.query(User)
q2 = (
q.group_by(User.name.like("%j%"))
.order_by(desc(User.name.like("%j%")))
.values(User.name.like("%j%"), func.count(User.name.like("%j%")))
)
eq_(list(q2), [(True, 1), (False, 3)])
q2 = q.order_by(desc(User.name.like("%j%"))).values(
User.name.like("%j%")
)
eq_(list(q2), [(True,), (False,), (False,), (False,)])
def test_correlated_subquery(self):
"""test that a subquery constructed from ORM attributes doesn't leak
out those entities to the outermost query."""
Address, users, User = (
self.classes.Address,
self.tables.users,
self.classes.User,
)
sess = create_session()
subq = (
select([func.count()])
.where(User.id == Address.user_id)
.correlate(users)
.label("count")
)
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
[
(User(id=7, name="jack"), 1),
(User(id=8, name="ed"), 3),
(User(id=9, name="fred"), 1),
],
)
# same thing without the correlate, as it should
# not be needed
subq = (
select([func.count()])
.where(User.id == Address.user_id)
.label("count")
)
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
[
(User(id=7, name="jack"), 1),
(User(id=8, name="ed"), 3),
(User(id=9, name="fred"), 1),
],
)
def test_column_queries(self):
Address, users, User = (
self.classes.Address,
self.tables.users,
self.classes.User,
)
sess = create_session()
eq_(
sess.query(User.name).all(),
[("jack",), ("ed",), ("fred",), ("chuck",)],
)
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User.name)
q2 = q.select_entity_from(sel).all()
eq_(list(q2), [("jack",), ("ed",)])
eq_(
sess.query(User.name, Address.email_address)
.filter(User.id == Address.user_id)
.all(),
[
("jack", "jack@bean.com"),
("ed", "ed@wood.com"),
("ed", "ed@bettyboop.com"),
("ed", "ed@lala.com"),
("fred", "fred@fred.com"),
],
)
eq_(
sess.query(User.name, func.count(Address.email_address))
.outerjoin(User.addresses)
.group_by(User.id, User.name)
.order_by(User.id)
.all(),
[("jack", 1), ("ed", 3), ("fred", 1), ("chuck", 0)],
)
eq_(
sess.query(User, func.count(Address.email_address))
.outerjoin(User.addresses)
.group_by(User)
.order_by(User.id)
.all(),
[
(User(name="jack", id=7), 1),
(User(name="ed", id=8), 3),
(User(name="fred", id=9), 1),
(User(name="chuck", id=10), 0),
],
)
eq_(
sess.query(func.count(Address.email_address), User)
.outerjoin(User.addresses)
.group_by(User)
.order_by(User.id)
.all(),
[
(1, User(name="jack", id=7)),
(3, User(name="ed", id=8)),
(1, User(name="fred", id=9)),
(0, User(name="chuck", id=10)),
],
)
adalias = aliased(Address)
eq_(
sess.query(User, func.count(adalias.email_address))
.outerjoin(adalias, "addresses")
.group_by(User)
.order_by(User.id)
.all(),
[
(User(name="jack", id=7), 1),
(User(name="ed", id=8), 3),
(User(name="fred", id=9), 1),
(User(name="chuck", id=10), 0),
],
)
eq_(
sess.query(func.count(adalias.email_address), User)
.outerjoin(adalias, User.addresses)
.group_by(User)
.order_by(User.id)
.all(),
[
(1, User(name="jack", id=7)),
(3, User(name="ed", id=8)),
(1, User(name="fred", id=9)),
(0, User(name="chuck", id=10)),
],
)
# select from aliasing + explicit aliasing
eq_(
sess.query(User, adalias.email_address, adalias.id)
.outerjoin(adalias, User.addresses)
.from_self(User, adalias.email_address)
.order_by(User.id, adalias.id)
.all(),
[
(User(name="jack", id=7), "jack@bean.com"),
(User(name="ed", id=8), "ed@wood.com"),
(User(name="ed", id=8), "ed@bettyboop.com"),
(User(name="ed", id=8), "ed@lala.com"),
(User(name="fred", id=9), "fred@fred.com"),
(User(name="chuck", id=10), None),
],
)
# anon + select from aliasing
eq_(
sess.query(User)
.join(User.addresses, aliased=True)
.filter(Address.email_address.like("%ed%"))
.from_self()
.all(),
[User(name="ed", id=8), User(name="fred", id=9)],
)
# test eager aliasing, with/without select_entity_from aliasing
for q in [
sess.query(User, adalias.email_address)
.outerjoin(adalias, User.addresses)
.options(joinedload(User.addresses))
.order_by(User.id, adalias.id)
.limit(10),
sess.query(User, adalias.email_address, adalias.id)
.outerjoin(adalias, User.addresses)
.from_self(User, adalias.email_address)
.options(joinedload(User.addresses))
.order_by(User.id, adalias.id)
.limit(10),
]:
eq_(
q.all(),
[
(
User(
addresses=[
Address(
user_id=7,
email_address="jack@bean.com",
id=1,
)
],
name="jack",
id=7,
),
"jack@bean.com",
),
(
User(
addresses=[
Address(
user_id=8,
email_address="ed@wood.com",
id=2,
),
Address(
user_id=8,
email_address="ed@bettyboop.com",
id=3,
),
Address(
user_id=8,
email_address="ed@lala.com",
id=4,
),
],
name="ed",
id=8,
),
"ed@wood.com",
),
(
User(
addresses=[
Address(
user_id=8,
email_address="ed@wood.com",
id=2,
),
Address(
user_id=8,
email_address="ed@bettyboop.com",
id=3,
),
Address(
user_id=8,
email_address="ed@lala.com",
id=4,
),
],
name="ed",
id=8,
),
"ed@bettyboop.com",
),
(
User(
addresses=[
Address(
user_id=8,
email_address="ed@wood.com",
id=2,
),
Address(
user_id=8,
email_address="ed@bettyboop.com",
id=3,
),
Address(
user_id=8,
email_address="ed@lala.com",
id=4,
),
],
name="ed",
id=8,
),
"ed@lala.com",
),
(
User(
addresses=[
Address(
user_id=9,
email_address="fred@fred.com",
id=5,
)
],
name="fred",
id=9,
),
"fred@fred.com",
),
(User(addresses=[], name="chuck", id=10), None),
],
)
def test_column_from_limited_joinedload(self):
User = self.classes.User
sess = create_session()
def go():
results = (
sess.query(User)
.limit(1)
.options(joinedload("addresses"))
.add_column(User.name)
.all()
)
eq_(results, [(User(name="jack"), "jack")])
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on("firebird", "unknown")
def test_self_referential(self):
Order = self.classes.Order
sess = create_session()
oalias = aliased(Order)
for q in [
sess.query(Order, oalias)
.filter(Order.user_id == oalias.user_id)
.filter(Order.user_id == 7)
.filter(Order.id > oalias.id)
.order_by(Order.id, oalias.id),
sess.query(Order, oalias)
.from_self()
.filter(Order.user_id == oalias.user_id)
.filter(Order.user_id == 7)
.filter(Order.id > oalias.id)
.order_by(Order.id, oalias.id),
# same thing, but reversed.
sess.query(oalias, Order)
.from_self()
.filter(oalias.user_id == Order.user_id)
.filter(oalias.user_id == 7)
.filter(Order.id < oalias.id)
.order_by(oalias.id, Order.id),
# here we go....two layers of aliasing
sess.query(Order, oalias)
.filter(Order.user_id == oalias.user_id)
.filter(Order.user_id == 7)
.filter(Order.id > oalias.id)
.from_self()
.order_by(Order.id, oalias.id)
.limit(10)
.options(joinedload(Order.items)),
# gratuitous four layers
sess.query(Order, oalias)
.filter(Order.user_id == oalias.user_id)
.filter(Order.user_id == 7)
.filter(Order.id > oalias.id)
.from_self()
.from_self()
.from_self()
.order_by(Order.id, oalias.id)
.limit(10)
.options(joinedload(Order.items)),
]:
eq_(
q.all(),
[
(
Order(
address_id=1,
description="order 3",
isopen=1,
user_id=7,
id=3,
),
Order(
address_id=1,
description="order 1",
isopen=0,
user_id=7,
id=1,
),
),
(
Order(
address_id=None,
description="order 5",
isopen=0,
user_id=7,
id=5,
),
Order(
address_id=1,
description="order 1",
isopen=0,
user_id=7,
id=1,
),
),
(
Order(
address_id=None,
description="order 5",
isopen=0,
user_id=7,
id=5,
),
Order(
address_id=1,
description="order 3",
isopen=1,
user_id=7,
id=3,
),
),
],
)
# ensure column expressions are taken from inside the subquery, not
# restated at the top
q = (
sess.query(
Order.id, Order.description, literal_column("'q'").label("foo")
)
.filter(Order.description == "order 3")
.from_self()
)
self.assert_compile(
q,
"SELECT anon_1.orders_id AS "
"anon_1_orders_id, anon_1.orders_descriptio"
"n AS anon_1_orders_description, "
"anon_1.foo AS anon_1_foo FROM (SELECT "
"orders.id AS orders_id, "
"orders.description AS orders_description, "
"'q' AS foo FROM orders WHERE "
"orders.description = :description_1) AS "
"anon_1",
)
eq_(q.all(), [(3, "order 3", "q")])
def test_multi_mappers(self):
Address, addresses, users, User = (
self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User,
)
test_session = create_session()
(user7, user8, user9, user10) = test_session.query(User).all()
(
address1,
address2,
address3,
address4,
address5,
) = test_session.query(Address).all()
expected = [
(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None),
]
sess = create_session()
selectquery = users.outerjoin(addresses).select(
use_labels=True, order_by=[users.c.id, addresses.c.id]
)
eq_(
list(sess.query(User, Address).instances(selectquery.execute())),
expected,
)
sess.expunge_all()
for address_entity in (Address, aliased(Address)):
q = (
sess.query(User)
.add_entity(address_entity)
.outerjoin(address_entity, "addresses")
.order_by(User.id, address_entity.id)
)
eq_(q.all(), expected)
sess.expunge_all()
q = sess.query(User).add_entity(address_entity)
q = q.join(address_entity, "addresses")
q = q.filter_by(email_address="ed@bettyboop.com")
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
q = (
sess.query(User, address_entity)
.join(address_entity, "addresses")
.filter_by(email_address="ed@bettyboop.com")
)
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
q = (
sess.query(User, address_entity)
.join(address_entity, "addresses")
.options(joinedload("addresses"))
.filter_by(email_address="ed@bettyboop.com")
)
eq_(list(util.OrderedSet(q.all())), [(user8, address3)])
sess.expunge_all()
def test_aliased_multi_mappers(self):
User, addresses, users, Address = (
self.classes.User,
self.tables.addresses,
self.tables.users,
self.classes.Address,
)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
(address1, address2, address3, address4, address5) = sess.query(
Address
).all()
expected = [
(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None),
]
q = sess.query(User)
adalias = addresses.alias("adalias")
q = q.add_entity(Address, alias=adalias).select_entity_from(
users.outerjoin(adalias)
)
result = q.order_by(User.id, adalias.c.id).all()
assert result == expected
sess.expunge_all()
q = sess.query(User).add_entity(Address, alias=adalias)
result = (
q.select_entity_from(users.outerjoin(adalias))
.filter(adalias.c.email_address == "ed@bettyboop.com")
.all()
)
assert result == [(user8, address3)]
def test_with_entities(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User).filter(User.id == 7).order_by(User.name)
self.assert_compile(
q.with_entities(User.id, Address).filter(
Address.user_id == User.id
),
"SELECT users.id AS users_id, addresses.id "
"AS addresses_id, addresses.user_id AS "
"addresses_user_id, addresses.email_address"
" AS addresses_email_address FROM users, "
"addresses WHERE users.id = :id_1 AND "
"addresses.user_id = users.id ORDER BY "
"users.name",
)
def test_multi_columns(self):
users, User = self.tables.users, self.classes.User
sess = create_session()
expected = [(u, u.name) for u in sess.query(User).all()]
for add_col in (User.name, users.c.name):
assert sess.query(User).add_column(add_col).all() == expected
sess.expunge_all()
assert_raises(
sa_exc.InvalidRequestError, sess.query(User).add_column, object()
)
def test_add_multi_columns(self):
"""test that add_column accepts a FROM clause."""
users, User = self.tables.users, self.classes.User
sess = create_session()
eq_(
sess.query(User.id).add_column(users).all(),
[(7, 7, "jack"), (8, 8, "ed"), (9, 9, "fred"), (10, 10, "chuck")],
)
def test_multi_columns_2(self):
"""test aliased/nonalised joins with the usage of add_column()"""
User, Address, addresses, users = (
self.classes.User,
self.classes.Address,
self.tables.addresses,
self.tables.users,
)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
expected = [(user7, 1), (user8, 3), (user9, 1), (user10, 0)]
q = sess.query(User)
q = (
q.group_by(users)
.order_by(User.id)
.outerjoin("addresses")
.add_column(func.count(Address.id).label("count"))
)
eq_(q.all(), expected)
sess.expunge_all()
adalias = aliased(Address)
q = sess.query(User)
q = (
q.group_by(users)
.order_by(User.id)
.outerjoin(adalias, "addresses")
.add_column(func.count(adalias.id).label("count"))
)
eq_(q.all(), expected)
sess.expunge_all()
# TODO: figure out why group_by(users) doesn't work here
s = (
select([users, func.count(addresses.c.id).label("count")])
.select_from(users.outerjoin(addresses))
.group_by(*[c for c in users.c])
.order_by(User.id)
)
q = sess.query(User)
result = q.add_column("count").from_statement(s).all()
assert result == expected
def test_raw_columns(self):
addresses, users, User = (
self.tables.addresses,
self.tables.users,
self.classes.User,
)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
expected = [
(user7, 1, "Name:jack"),
(user8, 3, "Name:ed"),
(user9, 1, "Name:fred"),
(user10, 0, "Name:chuck"),
]
adalias = addresses.alias()
q = (
create_session()
.query(User)
.add_column(func.count(adalias.c.id))
.add_column(("Name:" + users.c.name))
.outerjoin(adalias, "addresses")
.group_by(users)
.order_by(users.c.id)
)
assert q.all() == expected
# test with a straight statement
s = select(
[
users,
func.count(addresses.c.id).label("count"),
("Name:" + users.c.name).label("concat"),
],
from_obj=[users.outerjoin(addresses)],
group_by=[c for c in users.c],
order_by=[users.c.id],
)
q = create_session().query(User)
result = (
q.add_column("count").add_column("concat").from_statement(s).all()
)
assert result == expected
sess.expunge_all()
# test with select_entity_from()
q = (
create_session()
.query(User)
.add_column(func.count(addresses.c.id))
.add_column(("Name:" + users.c.name))
.select_entity_from(users.outerjoin(addresses))
.group_by(users)
.order_by(users.c.id)
)
assert q.all() == expected
sess.expunge_all()
q = (
create_session()
.query(User)
.add_column(func.count(addresses.c.id))
.add_column(("Name:" + users.c.name))
.outerjoin("addresses")
.group_by(users)
.order_by(users.c.id)
)
assert q.all() == expected
sess.expunge_all()
q = (
create_session()
.query(User)
.add_column(func.count(adalias.c.id))
.add_column(("Name:" + users.c.name))
.outerjoin(adalias, "addresses")
.group_by(users)
.order_by(users.c.id)
)
assert q.all() == expected
sess.expunge_all()
def test_expression_selectable_matches_mzero(self):
User, Address = self.classes.User, self.classes.Address
ua = aliased(User)
aa = aliased(Address)
s = create_session()
for crit, j, exp in [
(
User.id + Address.id,
User.addresses,
"SELECT users.id + addresses.id AS anon_1 "
"FROM users JOIN addresses ON users.id = "
"addresses.user_id",
),
(
User.id + Address.id,
Address.user,
"SELECT users.id + addresses.id AS anon_1 "
"FROM addresses JOIN users ON users.id = "
"addresses.user_id",
),
(
Address.id + User.id,
User.addresses,
"SELECT addresses.id + users.id AS anon_1 "
"FROM users JOIN addresses ON users.id = "
"addresses.user_id",
),
(
User.id + aa.id,
(aa, User.addresses),
"SELECT users.id + addresses_1.id AS anon_1 "
"FROM users JOIN addresses AS addresses_1 "
"ON users.id = addresses_1.user_id",
),
]:
q = s.query(crit)
mzero = q._entity_zero()
is_(mzero.mapped_table, q._query_entity_zero().selectable)
q = q.join(j)
self.assert_compile(q, exp)
for crit, j, exp in [
(
ua.id + Address.id,
ua.addresses,
"SELECT users_1.id + addresses.id AS anon_1 "
"FROM users AS users_1 JOIN addresses "
"ON users_1.id = addresses.user_id",
),
(
ua.id + aa.id,
(aa, ua.addresses),
"SELECT users_1.id + addresses_1.id AS anon_1 "
"FROM users AS users_1 JOIN addresses AS "
"addresses_1 ON users_1.id = addresses_1.user_id",
),
(
ua.id + aa.id,
(ua, aa.user),
"SELECT users_1.id + addresses_1.id AS anon_1 "
"FROM addresses AS addresses_1 JOIN "
"users AS users_1 "
"ON users_1.id = addresses_1.user_id",
),
]:
q = s.query(crit)
mzero = q._entity_zero()
is_(inspect(mzero).selectable, q._query_entity_zero().selectable)
q = q.join(j)
self.assert_compile(q, exp)
def test_aliased_adapt_on_names(self):
User, Address = self.classes.User, self.classes.Address
sess = Session()
agg_address = sess.query(
Address.id,
func.sum(func.length(Address.email_address)).label(
"email_address"
),
).group_by(Address.user_id)
ag1 = aliased(Address, agg_address.subquery())
ag2 = aliased(Address, agg_address.subquery(), adapt_on_names=True)
# first, without adapt on names, 'email_address' isn't matched up - we
# get the raw "address" element in the SELECT
self.assert_compile(
sess.query(User, ag1.email_address)
.join(ag1, User.addresses)
.filter(ag1.email_address > 5),
"SELECT users.id "
"AS users_id, users.name AS users_name, addresses.email_address "
"AS addresses_email_address FROM addresses, users JOIN "
"(SELECT addresses.id AS id, sum(length(addresses.email_address)) "
"AS email_address FROM addresses GROUP BY addresses.user_id) AS "
"anon_1 ON users.id = addresses.user_id "
"WHERE addresses.email_address > :email_address_1",
)
# second, 'email_address' matches up to the aggregate, and we get a
# smooth JOIN from users->subquery and that's it
self.assert_compile(
sess.query(User, ag2.email_address)
.join(ag2, User.addresses)
.filter(ag2.email_address > 5),
"SELECT users.id AS users_id, users.name AS users_name, "
"anon_1.email_address AS anon_1_email_address FROM users "
"JOIN ("
"SELECT addresses.id AS id, sum(length(addresses.email_address)) "
"AS email_address FROM addresses GROUP BY addresses.user_id) AS "
"anon_1 ON users.id = addresses.user_id "
"WHERE anon_1.email_address > :email_address_1",
)
class SelectFromTest(QueryTest, AssertsCompiledSQL):
run_setup_mappers = None
__dialect__ = "default"
def test_replace_with_select(self):
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(User, users, properties={"addresses": relationship(Address)})
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8])).alias()
sess = create_session()
eq_(
sess.query(User).select_entity_from(sel).all(),
[User(id=7), User(id=8)],
)
eq_(
sess.query(User)
.select_entity_from(sel)
.filter(User.id == 8)
.all(),
[User(id=8)],
)
eq_(
sess.query(User)
.select_entity_from(sel)
.order_by(desc(User.name))
.all(),
[User(name="jack", id=7), User(name="ed", id=8)],
)
eq_(
sess.query(User)
.select_entity_from(sel)
.order_by(asc(User.name))
.all(),
[User(name="ed", id=8), User(name="jack", id=7)],
)
eq_(
sess.query(User)
.select_entity_from(sel)
.options(joinedload("addresses"))
.first(),
User(name="jack", addresses=[Address(id=1)]),
)
def test_select_from_aliased(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
sess = create_session()
not_users = table("users", column("id"), column("name"))
ua = aliased(User, select([not_users]).alias(), adapt_on_names=True)
q = sess.query(User.name).select_entity_from(ua).order_by(User.name)
self.assert_compile(
q,
"SELECT anon_1.name AS anon_1_name FROM (SELECT users.id AS id, "
"users.name AS name FROM users) AS anon_1 ORDER BY anon_1.name",
)
eq_(q.all(), [("chuck",), ("ed",), ("fred",), ("jack",)])
@testing.uses_deprecated("Mapper.order_by")
def test_join_mapper_order_by(self):
"""test that mapper-level order_by is adapted to a selectable."""
User, users = self.classes.User, self.tables.users
mapper(User, users, order_by=users.c.id)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(
sess.query(User).select_entity_from(sel).all(),
[User(name="jack", id=7), User(name="ed", id=8)],
)
def test_differentiate_self_external(self):
"""test some different combinations of joining a table to a subquery of
itself."""
users, User = self.tables.users, self.classes.User
mapper(User, users)
sess = create_session()
sel = sess.query(User).filter(User.id.in_([7, 8])).subquery()
ualias = aliased(User)
self.assert_compile(
sess.query(User).join(sel, User.id > sel.c.id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT users.id AS id, users.name AS name FROM users "
"WHERE users.id IN (:id_1, :id_2)) "
"AS anon_1 ON users.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias)
.select_entity_from(sel)
.filter(ualias.id > sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1, ("
"SELECT users.id AS id, users.name AS name FROM users "
"WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"WHERE users_1.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias)
.select_entity_from(sel)
.join(ualias, ualias.id > sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON users_1.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias)
.select_entity_from(sel)
.join(ualias, ualias.id > User.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON users_1.id > anon_1.id",
)
salias = aliased(User, sel)
self.assert_compile(
sess.query(salias).join(ualias, ualias.id > salias.id),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name FROM "
"(SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON users_1.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias).select_entity_from(
join(sel, ualias, ualias.id > sel.c.id)
),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM "
"(SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id "
"IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON users_1.id > anon_1.id",
)
def test_aliased_class_vs_nonaliased(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
ua = aliased(User)
sess = create_session()
self.assert_compile(
sess.query(User).select_from(ua).join(User, ua.name > User.name),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users AS users_1 JOIN users ON users_1.name > users.name",
)
self.assert_compile(
sess.query(User.name)
.select_from(ua)
.join(User, ua.name > User.name),
"SELECT users.name AS users_name FROM users AS users_1 "
"JOIN users ON users_1.name > users.name",
)
self.assert_compile(
sess.query(ua.name)
.select_from(ua)
.join(User, ua.name > User.name),
"SELECT users_1.name AS users_1_name FROM users AS users_1 "
"JOIN users ON users_1.name > users.name",
)
self.assert_compile(
sess.query(ua).select_from(User).join(ua, ua.name > User.name),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users JOIN users AS users_1 ON users_1.name > users.name",
)
self.assert_compile(
sess.query(ua).select_from(User).join(ua, User.name > ua.name),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users JOIN users AS users_1 ON users.name > users_1.name",
)
# this is tested in many other places here, just adding it
# here for comparison
self.assert_compile(
sess.query(User.name).select_entity_from(
users.select().where(users.c.id > 5)
),
"SELECT anon_1.name AS anon_1_name FROM (SELECT users.id AS id, "
"users.name AS name FROM users WHERE users.id > :id_1) AS anon_1",
)
def test_join_no_order_by(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(
sess.query(User).select_entity_from(sel).all(),
[User(name="jack", id=7), User(name="ed", id=8)],
)
def test_join_relname_from_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
mapper(
User,
users,
properties={
"addresses": relationship(
mapper(Address, addresses), backref="user"
)
},
)
sess = create_session()
self.assert_compile(
sess.query(User).select_from(Address).join("user"),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN users ON users.id = addresses.user_id",
)
def test_filter_by_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
mapper(
User,
users,
properties={"addresses": relationship(mapper(Address, addresses))},
)
sess = create_session()
self.assert_compile(
sess.query(User)
.select_from(Address)
.filter_by(email_address="ed")
.join(User),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN users ON users.id = addresses.user_id "
"WHERE addresses.email_address = :email_address_1",
)
def test_join_ent_selected_from(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
mapper(
User,
users,
properties={"addresses": relationship(mapper(Address, addresses))},
)
sess = create_session()
self.assert_compile(
sess.query(User).select_from(Address).join(User),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN users ON users.id = addresses.user_id",
)
def test_join(self):
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(User, users, properties={"addresses": relationship(Address)})
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(
sess.query(User)
.select_entity_from(sel)
.join("addresses")
.add_entity(Address)
.order_by(User.id)
.order_by(Address.id)
.all(),
[
(
User(name="jack", id=7),
Address(user_id=7, email_address="jack@bean.com", id=1),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@wood.com", id=2),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@bettyboop.com", id=3),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@lala.com", id=4),
),
],
)
adalias = aliased(Address)
eq_(
sess.query(User)
.select_entity_from(sel)
.join(adalias, "addresses")
.add_entity(adalias)
.order_by(User.id)
.order_by(adalias.id)
.all(),
[
(
User(name="jack", id=7),
Address(user_id=7, email_address="jack@bean.com", id=1),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@wood.com", id=2),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@bettyboop.com", id=3),
),
(
User(name="ed", id=8),
Address(user_id=8, email_address="ed@lala.com", id=4),
),
],
)
def test_more_joins(self):
(
users,
Keyword,
orders,
items,
order_items,
Order,
Item,
User,
keywords,
item_keywords,
) = (
self.tables.users,
self.classes.Keyword,
self.tables.orders,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.keywords,
self.tables.item_keywords,
)
mapper(
User,
users,
properties={"orders": relationship(Order, backref="user")},
) # o2m, m2o
mapper(
Order,
orders,
properties={
"items": relationship(
Item, secondary=order_items, order_by=items.c.id
)
},
) # m2m
mapper(
Item,
items,
properties={
"keywords": relationship(
Keyword, secondary=item_keywords, order_by=keywords.c.id
)
},
) # m2m
mapper(Keyword, keywords)
sess = create_session()
sel = users.select(users.c.id.in_([7, 8]))
eq_(
sess.query(User)
.select_entity_from(sel)
.join("orders", "items", "keywords")
.filter(Keyword.name.in_(["red", "big", "round"]))
.all(),
[User(name="jack", id=7)],
)
eq_(
sess.query(User)
.select_entity_from(sel)
.join("orders", "items", "keywords", aliased=True)
.filter(Keyword.name.in_(["red", "big", "round"]))
.all(),
[User(name="jack", id=7)],
)
def test_very_nested_joins_with_joinedload(self):
(
users,
Keyword,
orders,
items,
order_items,
Order,
Item,
User,
keywords,
item_keywords,
) = (
self.tables.users,
self.classes.Keyword,
self.tables.orders,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.keywords,
self.tables.item_keywords,
)
mapper(
User,
users,
properties={"orders": relationship(Order, backref="user")},
) # o2m, m2o
mapper(
Order,
orders,
properties={
"items": relationship(
Item, secondary=order_items, order_by=items.c.id
)
},
) # m2m
mapper(
Item,
items,
properties={
"keywords": relationship(
Keyword, secondary=item_keywords, order_by=keywords.c.id
)
},
) # m2m
mapper(Keyword, keywords)
sess = create_session()
sel = users.select(users.c.id.in_([7, 8]))
def go():
eq_(
sess.query(User)
.select_entity_from(sel)
.options(joinedload_all("orders.items.keywords"))
.join("orders", "items", "keywords", aliased=True)
.filter(Keyword.name.in_(["red", "big", "round"]))
.all(),
[
User(
name="jack",
orders=[
Order(
description="order 1",
items=[
Item(
description="item 1",
keywords=[
Keyword(name="red"),
Keyword(name="big"),
Keyword(name="round"),
],
),
Item(
description="item 2",
keywords=[
Keyword(name="red", id=2),
Keyword(name="small", id=5),
Keyword(name="square"),
],
),
Item(
description="item 3",
keywords=[
Keyword(name="green", id=3),
Keyword(name="big", id=4),
Keyword(name="round", id=6),
],
),
],
),
Order(
description="order 3",
items=[
Item(
description="item 3",
keywords=[
Keyword(name="green", id=3),
Keyword(name="big", id=4),
Keyword(name="round", id=6),
],
),
Item(
description="item 4", keywords=[], id=4
),
Item(
description="item 5", keywords=[], id=5
),
],
),
Order(
description="order 5",
items=[
Item(description="item 5", keywords=[])
],
),
],
)
],
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
sel2 = orders.select(orders.c.id.in_([1, 2, 3]))
eq_(
sess.query(Order)
.select_entity_from(sel2)
.join("items", "keywords")
.filter(Keyword.name == "red")
.order_by(Order.id)
.all(),
[
Order(description="order 1", id=1),
Order(description="order 2", id=2),
],
)
eq_(
sess.query(Order)
.select_entity_from(sel2)
.join("items", "keywords", aliased=True)
.filter(Keyword.name == "red")
.order_by(Order.id)
.all(),
[
Order(description="order 1", id=1),
Order(description="order 2", id=2),
],
)
def test_replace_with_eager(self):
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(
User,
users,
properties={
"addresses": relationship(Address, order_by=addresses.c.id)
},
)
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
def go():
eq_(
sess.query(User)
.options(joinedload("addresses"))
.select_entity_from(sel)
.order_by(User.id)
.all(),
[
User(id=7, addresses=[Address(id=1)]),
User(
id=8,
addresses=[
Address(id=2),
Address(id=3),
Address(id=4),
],
),
],
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
eq_(
sess.query(User)
.options(joinedload("addresses"))
.select_entity_from(sel)
.filter(User.id == 8)
.order_by(User.id)
.all(),
[
User(
id=8,
addresses=[
Address(id=2),
Address(id=3),
Address(id=4),
],
)
],
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
eq_(
sess.query(User)
.options(joinedload("addresses"))
.select_entity_from(sel)
.order_by(User.id)[1],
User(
id=8,
addresses=[Address(id=2), Address(id=3), Address(id=4)],
),
)
self.assert_sql_count(testing.db, go, 1)
class CustomJoinTest(QueryTest):
run_setup_mappers = None
def test_double_same_mappers(self):
"""test aliasing of joins with a custom join condition"""
(
addresses,
items,
order_items,
orders,
Item,
User,
Address,
Order,
users,
) = (
self.tables.addresses,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.users,
)
mapper(Address, addresses)
mapper(
Order,
orders,
properties={
"items": relationship(
Item,
secondary=order_items,
lazy="select",
order_by=items.c.id,
)
},
)
mapper(Item, items)
mapper(
User,
users,
properties=dict(
addresses=relationship(Address, lazy="select"),
open_orders=relationship(
Order,
primaryjoin=and_(
orders.c.isopen == 1, users.c.id == orders.c.user_id
),
lazy="select",
),
closed_orders=relationship(
Order,
primaryjoin=and_(
orders.c.isopen == 0, users.c.id == orders.c.user_id
),
lazy="select",
),
),
)
q = create_session().query(User)
eq_(
q.join("open_orders", "items", aliased=True)
.filter(Item.id == 4)
.join("closed_orders", "items", aliased=True)
.filter(Item.id == 3)
.all(),
[User(id=7)],
)
class ExternalColumnsTest(QueryTest):
"""test mappers with SQL-expressions added as column properties."""
run_setup_mappers = None
def test_external_columns_bad(self):
users, User = self.tables.users, self.classes.User
assert_raises_message(
sa_exc.ArgumentError,
"not represented in the mapper's table",
mapper,
User,
users,
properties={"concat": (users.c.id * 2)},
)
clear_mappers()
def test_external_columns(self):
"""test querying mappings that reference external columns or
selectables."""
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(
User,
users,
properties={
"concat": column_property((users.c.id * 2)),
"count": column_property(
select(
[func.count(addresses.c.id)],
users.c.id == addresses.c.user_id,
)
.correlate(users)
.as_scalar()
),
},
)
mapper(Address, addresses, properties={"user": relationship(User)})
sess = create_session()
sess.query(Address).options(joinedload("user")).all()
eq_(
sess.query(User).all(),
[
User(id=7, concat=14, count=1),
User(id=8, concat=16, count=3),
User(id=9, concat=18, count=1),
User(id=10, concat=20, count=0),
],
)
address_result = [
Address(id=1, user=User(id=7, concat=14, count=1)),
Address(id=2, user=User(id=8, concat=16, count=3)),
Address(id=3, user=User(id=8, concat=16, count=3)),
Address(id=4, user=User(id=8, concat=16, count=3)),
Address(id=5, user=User(id=9, concat=18, count=1)),
]
eq_(sess.query(Address).all(), address_result)
# run the eager version twice to test caching of aliased clauses
for x in range(2):
sess.expunge_all()
def go():
eq_(
sess.query(Address)
.options(joinedload("user"))
.order_by(Address.id)
.all(),
address_result,
)
self.assert_sql_count(testing.db, go, 1)
ualias = aliased(User)
eq_(
sess.query(Address, ualias).join(ualias, "user").all(),
[(address, address.user) for address in address_result],
)
eq_(
sess.query(Address, ualias.count)
.join(ualias, "user")
.join("user", aliased=True)
.order_by(Address.id)
.all(),
[
(Address(id=1), 1),
(Address(id=2), 3),
(Address(id=3), 3),
(Address(id=4), 3),
(Address(id=5), 1),
],
)
eq_(
sess.query(Address, ualias.concat, ualias.count)
.join(ualias, "user")
.join("user", aliased=True)
.order_by(Address.id)
.all(),
[
(Address(id=1), 14, 1),
(Address(id=2), 16, 3),
(Address(id=3), 16, 3),
(Address(id=4), 16, 3),
(Address(id=5), 18, 1),
],
)
ua = aliased(User)
eq_(
sess.query(Address, ua.concat, ua.count)
.select_entity_from(join(Address, ua, "user"))
.options(joinedload(Address.user))
.order_by(Address.id)
.all(),
[
(Address(id=1, user=User(id=7, concat=14, count=1)), 14, 1),
(Address(id=2, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=3, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=4, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=5, user=User(id=9, concat=18, count=1)), 18, 1),
],
)
eq_(
list(
sess.query(Address)
.join("user")
.values(Address.id, User.id, User.concat, User.count)
),
[
(1, 7, 14, 1),
(2, 8, 16, 3),
(3, 8, 16, 3),
(4, 8, 16, 3),
(5, 9, 18, 1),
],
)
eq_(
list(
sess.query(Address, ua)
.select_entity_from(join(Address, ua, "user"))
.values(Address.id, ua.id, ua.concat, ua.count)
),
[
(1, 7, 14, 1),
(2, 8, 16, 3),
(3, 8, 16, 3),
(4, 8, 16, 3),
(5, 9, 18, 1),
],
)
def test_external_columns_joinedload(self):
users, orders, User, Address, Order, addresses = (
self.tables.users,
self.tables.orders,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.addresses,
)
# in this test, we have a subquery on User that accesses "addresses",
# underneath a joinedload for "addresses". So the "addresses" alias
# adapter needs to *not* hit the "addresses" table within the "user"
# subquery, but "user" still needs to be adapted. Therefore the
# long-standing practice of eager adapters being "chained" has been
# removed, since it's unnecessary and breaks this exact condition.
mapper(
User,
users,
properties={
"addresses": relationship(
Address, backref="user", order_by=addresses.c.id
),
"concat": column_property((users.c.id * 2)),
"count": column_property(
select(
[func.count(addresses.c.id)],
users.c.id == addresses.c.user_id,
).correlate(users)
),
},
)
mapper(Address, addresses)
mapper(
Order, orders, properties={"address": relationship(Address)}
) # m2o
sess = create_session()
def go():
o1 = (
sess.query(Order)
.options(joinedload_all("address.user"))
.get(1)
)
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
sess = create_session()
def go():
o1 = (
sess.query(Order)
.options(joinedload_all("address.user"))
.first()
)
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
def test_external_columns_compound(self):
# see [ticket:2167] for background
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(
User,
users,
properties={"fullname": column_property(users.c.name.label("x"))},
)
mapper(
Address,
addresses,
properties={
"username": column_property(
select([User.fullname])
.where(User.id == addresses.c.user_id)
.label("y")
)
},
)
sess = create_session()
a1 = sess.query(Address).first()
eq_(a1.username, "jack")
sess = create_session()
a1 = sess.query(Address).from_self().first()
eq_(a1.username, "jack")
class TestOverlyEagerEquivalentCols(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table(
"base",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("data", String(50)),
)
Table(
"sub1",
metadata,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("data", String(50)),
)
Table(
"sub2",
metadata,
Column(
"id",
Integer,
ForeignKey("base.id"),
ForeignKey("sub1.id"),
primary_key=True,
),
Column("data", String(50)),
)
def test_equivs(self):
base, sub2, sub1 = (
self.tables.base,
self.tables.sub2,
self.tables.sub1,
)
class Base(fixtures.ComparableEntity):
pass
class Sub1(fixtures.ComparableEntity):
pass
class Sub2(fixtures.ComparableEntity):
pass
mapper(
Base,
base,
properties={
"sub1": relationship(Sub1),
"sub2": relationship(Sub2),
},
)
mapper(Sub1, sub1)
mapper(Sub2, sub2)
sess = create_session()
s11 = Sub1(data="s11")
s12 = Sub1(data="s12")
s2 = Sub2(data="s2")
b1 = Base(data="b1", sub1=[s11], sub2=[])
b2 = Base(data="b1", sub1=[s12], sub2=[])
sess.add(b1)
sess.add(b2)
sess.flush()
# there's an overlapping ForeignKey here, so not much option except
# to artificially control the flush order
b2.sub2 = [s2]
sess.flush()
q = sess.query(Base).outerjoin("sub2", aliased=True)
assert sub1.c.id not in q._filter_aliases.equivalents
eq_(
sess.query(Base)
.join("sub1")
.outerjoin("sub2", aliased=True)
.filter(Sub1.id == 1)
.one(),
b1,
)
class LabelCollideTest(fixtures.MappedTest):
"""Test handling for a label collision. This collision
is handled by core, see ticket:2702 as well as
test/sql/test_selectable->WithLabelsTest. Here we want
to make sure the end result is as we expect.
"""
@classmethod
def define_tables(cls, metadata):
Table(
"foo",
metadata,
Column("id", Integer, primary_key=True),
Column("bar_id", Integer),
)
Table("foo_bar", metadata, Column("id", Integer, primary_key=True))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
class Bar(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
mapper(cls.classes.Foo, cls.tables.foo)
mapper(cls.classes.Bar, cls.tables.foo_bar)
@classmethod
def insert_data(cls):
s = Session()
s.add_all([cls.classes.Foo(id=1, bar_id=2), cls.classes.Bar(id=3)])
s.commit()
def test_overlap_plain(self):
s = Session()
row = s.query(self.classes.Foo, self.classes.Bar).all()[0]
def go():
eq_(row.Foo.id, 1)
eq_(row.Foo.bar_id, 2)
eq_(row.Bar.id, 3)
# all three columns are loaded independently without
# overlap, no additional SQL to load all attributes
self.assert_sql_count(testing.db, go, 0)
def test_overlap_subquery(self):
s = Session()
row = s.query(self.classes.Foo, self.classes.Bar).from_self().all()[0]
def go():
eq_(row.Foo.id, 1)
eq_(row.Foo.bar_id, 2)
eq_(row.Bar.id, 3)
# all three columns are loaded independently without
# overlap, no additional SQL to load all attributes
self.assert_sql_count(testing.db, go, 0)
| 31.859031
| 79
| 0.469027
|
79235df95643c3f2ffa5e402003bfa2fd04f10b1
| 424
|
py
|
Python
|
io_scene_xray/ui/__init__.py
|
clayne/blender-xray
|
84d5d52049ec9e22c85ba8544995bd39c3a83e55
|
[
"BSD-2-Clause"
] | 93
|
2016-12-02T14:42:18.000Z
|
2022-03-23T08:15:41.000Z
|
io_scene_xray/ui/__init__.py
|
clayne/blender-xray
|
84d5d52049ec9e22c85ba8544995bd39c3a83e55
|
[
"BSD-2-Clause"
] | 276
|
2018-07-04T20:13:22.000Z
|
2022-03-31T09:13:37.000Z
|
io_scene_xray/ui/__init__.py
|
clayne/blender-xray
|
84d5d52049ec9e22c85ba8544995bd39c3a83e55
|
[
"BSD-2-Clause"
] | 31
|
2018-07-04T20:03:17.000Z
|
2022-01-27T18:37:36.000Z
|
# addon modules
from . import base
from . import collapsible
from . import dynamic_menu
from . import edit_helper
from . import list_helper
from . import motion_list
modules = (
collapsible,
dynamic_menu,
edit_helper,
list_helper,
motion_list,
)
def register():
for module in modules:
module.register()
def unregister():
for module in reversed(modules):
module.unregister()
| 15.703704
| 36
| 0.691038
|
3cedda0174e7139d5e6ff7eaf9a748f7ebf0590e
| 75,354
|
py
|
Python
|
venv/lib/python3.9/site-packages/keri-0.1.4-py3.9.egg/keri/core/coring.py
|
ryanwwest/kademlia
|
e1e5b84db0a7710cf372663325041850802d55f1
|
[
"MIT"
] | 2
|
2020-12-09T17:26:25.000Z
|
2021-05-07T02:21:57.000Z
|
venv/lib/python3.9/site-packages/keri-0.1.4-py3.9.egg/keri/core/coring.py
|
ryanwwest/kademlia
|
e1e5b84db0a7710cf372663325041850802d55f1
|
[
"MIT"
] | null | null | null |
venv/lib/python3.9/site-packages/keri-0.1.4-py3.9.egg/keri/core/coring.py
|
ryanwwest/kademlia
|
e1e5b84db0a7710cf372663325041850802d55f1
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
"""
keri.core.coring module
"""
import re
import json
import copy
from dataclasses import dataclass, astuple
from collections import namedtuple, deque
from base64 import urlsafe_b64encode as encodeB64
from base64 import urlsafe_b64decode as decodeB64
from math import ceil
import cbor2 as cbor
import msgpack
import pysodium
import blake3
from ..kering import (ValidationError, VersionError, EmptyMaterialError,
DerivationError, ShortageError)
from ..kering import Versionage, Version
from ..help.helping import extractValues
Serialage = namedtuple("Serialage", 'json mgpk cbor')
Serials = Serialage(json='JSON', mgpk='MGPK', cbor='CBOR')
Mimes = Serialage(json='application/keri+json',
mgpk='application/keri+msgpack',
cbor='application/keri+cbor',)
VERRAWSIZE = 6 # hex characters in raw serialization size in version string
# "{:0{}x}".format(300, 6) # make num char in hex a variable
# '00012c'
VERFMT = "KERI{:x}{:x}{}{:0{}x}_" # version format string
VERFULLSIZE = 17 # number of characters in full versions string
def Versify(version=None, kind=Serials.json, size=0):
"""
Return version string
"""
if kind not in Serials:
raise ValueError("Invalid serialization kind = {}".format(kind))
version = version if version else Version
return VERFMT.format(version[0], version[1], kind, size, VERRAWSIZE)
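# Example (a sketch, assuming the package-level Version is (1, 0)):
#   Versify(kind=Serials.json, size=65) -> 'KERI10JSON000041_'
# i.e. 'KERI' + major/minor hex digits + 4-char kind + 6 hex chars of size + '_'.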
Vstrings = Serialage(json=Versify(kind=Serials.json, size=0),
mgpk=Versify(kind=Serials.mgpk, size=0),
cbor=Versify(kind=Serials.cbor, size=0))
VEREX = b'KERI(?P<major>[0-9a-f])(?P<minor>[0-9a-f])(?P<kind>[A-Z]{4})(?P<size>[0-9a-f]{6})_'
Rever = re.compile(VEREX) #compile is faster
MINSNIFFSIZE = 12 + VERFULLSIZE # min bytes in buffer to sniff else need more
def Deversify(vs):
"""
Returns tuple(kind, version, size)
Where:
kind is serialization kind, one of Serials
json='JSON', mgpk='MGPK', cbor='CBOR'
version is version tuple of type Version
size is int of raw size
Parameters:
vs is version string str
Uses regex match to extract:
serialization kind
keri version
serialization size
"""
match = Rever.match(vs.encode("utf-8")) # match takes bytes
if match:
major, minor, kind, size = match.group("major", "minor", "kind", "size")
version = Versionage(major=int(major, 16), minor=int(minor, 16))
kind = kind.decode("utf-8")
if kind not in Serials:
raise ValueError("Invalid serialization kind = {}".format(kind))
size = int(size, 16)
return(kind, version, size)
raise ValueError("Invalid version string = {}".format(vs))
Ilkage = namedtuple("Ilkage", 'icp rot ixn dip drt rct vrc') # Event ilk (type of event)
Ilks = Ilkage(icp='icp', rot='rot', ixn='ixn', dip='dip', drt='drt', rct='rct',
vrc='vrc')
@dataclass(frozen=True)
class CrySelectCodex:
"""
Select codex of selector characters for cyptographic material
Only provide defined characters.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
"""
two: str = '0' # use two character table.
four: str = '1' # use four character table.
dash: str = '-' # use four character count table.
def __iter__(self):
return iter(astuple(self))
CrySelDex = CrySelectCodex() # Make instance
@dataclass(frozen=True)
class CryCntCodex:
"""
CryCntCodex codex of four character length derivation codes that indicate
count (number) of attached receipt couplets following a receipt statement.
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
.raw is empty
Note binary length of everything in CryCntCodex results in 0 Base64 pad bytes.
First two code characters select format of attached signatures
Next two code characters select count total of attached signatures to an event
Only provide first two characters here
"""
Base64: str = '-A' # Fully Qualified Base64 Format Receipt Couplets.
Base2: str = '-B' # Fully Qualified Base2 Format Receipt Couplets.
def __iter__(self):
return iter(astuple(self))
CryCntDex = CryCntCodex() # Make instance
# Mapping of Code to Size
# Total size qb64
CryCntSizes = {
"-A": 4,
"-B": 4,
}
# size of index portion of code qb64
CryCntIdxSizes = {
"-A": 2,
"-B": 2,
}
# total size of raw unqualified
CryCntRawSizes = {
"-A": 0,
"-B": 0,
}
CRYCNTMAX = 4095 # maximum count value given two base 64 digits
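# Sketch of how a count code is composed: the two code chars are followed by a
# two-char Base64 count, so a Base64 receipt-couplet count of 2 is qualified as
# '-AAC' ('A' -> 0, 'C' -> 2 in the URL-safe Base64 alphabet).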
@dataclass(frozen=True)
class CryOneCodex:
"""
CryOneCodex is codex of one character length derivation codes
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
Note binary length of everything in CryOneCodex results in 1 Base64 pad byte.
"""
Ed25519_Seed: str = 'A' # Ed25519 256 bit random seed for private key
Ed25519N: str = 'B' # Ed25519 verification key non-transferable, basic derivation.
X25519: str = 'C' # X25519 public encryption key, converted from Ed25519.
Ed25519: str = 'D' # Ed25519 verification key basic derivation
Blake3_256: str = 'E' # Blake3 256 bit digest self-addressing derivation.
Blake2b_256: str = 'F' # Blake2b 256 bit digest self-addressing derivation.
Blake2s_256: str = 'G' # Blake2s 256 bit digest self-addressing derivation.
SHA3_256: str = 'H' # SHA3 256 bit digest self-addressing derivation.
SHA2_256: str = 'I' # SHA2 256 bit digest self-addressing derivation.
ECDSA_secp256k1_Seed: str = 'J' # ECDSA secp256k1 256 bit random Seed for private key
Ed448_Seed: str = 'K' # Ed448 448 bit random Seed for private key
X448: str = 'L' # X448 public encryption key, converted from Ed448
def __iter__(self):
return iter(astuple(self))
CryOneDex = CryOneCodex() # Make instance
# Mapping of Code to Size
CryOneSizes = {
"A": 44, "B": 44, "C": 44, "D": 44, "E": 44, "F": 44,
"G": 44, "H": 44, "I": 44, "J": 44, "K": 76, "L": 76,
}
# Mapping of Code to Size
CryOneRawSizes = {
"A": 32, "B": 32, "C": 32, "D": 32, "E": 32, "F": 32,
"G": 32, "H": 32, "I": 32, "J": 32, "K": 56, "L": 56,
}
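# Size relationship (sketch): a 32-byte raw value encodes to 43 Base64 chars
# plus 1 pad char; the 1-char derivation code replaces that pad, giving the
# 44-char qb64 sizes above (similarly 56 raw bytes -> the 76-char 'K'/'L' sizes).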
@dataclass(frozen=True)
class CryTwoCodex:
"""
CryTwoCodex is codex of two character length derivation codes
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
Note binary length of everything in CryTwoCodex results in 2 Base64 pad bytes.
"""
Seed_128: str = '0A' # 128 bit random seed.
Ed25519: str = '0B' # Ed25519 signature.
ECDSA_256k1: str = '0C' # ECDSA secp256k1 signature.
def __iter__(self):
return iter(astuple(self))
CryTwoDex = CryTwoCodex() # Make instance
# Mapping of Code to Size
CryTwoSizes = {
"0A": 24,
"0B": 88,
"0B": 88,
}
CryTwoRawSizes = {
"0A": 16,
"0B": 64,
"0B": 64,
}
@dataclass(frozen=True)
class CryFourCodex:
"""
CryFourCodex codex of four character length derivation codes
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
Note binary length of everything in CryFourCodex results in 0 Base64 pad bytes.
"""
ECDSA_256k1N: str = "1AAA" # ECDSA secp256k1 verification key non-transferable, basic derivation.
ECDSA_256k1: str = "1AAB" # Ed25519 public verification or encryption key, basic derivation
Ed448N: str = "1AAC" # Ed448 non-transferable prefix public signing verification key. Basic derivation.
Ed448: str = "1AAD" # Ed448 public signing verification key. Basic derivation.
Ed448_Sig: str = "1AAE" # Ed448 signature. Self-signing derivation.
def __iter__(self):
return iter(astuple(self))
CryFourDex = CryFourCodex() # Make instance
# Mapping of Code to Size
CryFourSizes = {
"1AAA": 48,
"1AAB": 48,
"1AAC": 80,
"1AAD": 80,
"1AAE": 156,
}
CryFourRawSizes = {
"1AAA": 33,
"1AAB": 33,
"1AAC": 57,
"1AAD": 57,
"1AAE": 114,
}
# all sizes in one dict
CrySizes = dict(CryCntSizes)
CrySizes.update(CryOneSizes)
CrySizes.update(CryTwoSizes)
CrySizes.update(CryFourSizes)
MINCRYSIZE = min(CrySizes.values())
# all sizes in one dict
CryRawSizes = dict(CryCntRawSizes)
CryRawSizes.update(CryOneRawSizes)
CryRawSizes.update(CryTwoRawSizes)
CryRawSizes.update(CryFourRawSizes)
# all sizes in one dict
CryIdxSizes = dict(CryCntIdxSizes)
@dataclass(frozen=True)
class CryNonTransCodex:
"""
CryNonTransCodex is codex all non-transferable derivation codes
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
"""
Ed25519N: str = 'B' # Ed25519 verification key non-transferable, basic derivation.
ECDSA_256k1N: str = "1AAA" # ECDSA secp256k1 verification key non-transferable, basic derivation.
Ed448N: str = "1AAC" # Ed448 non-transferable prefix public signing verification key. Basic derivation.
def __iter__(self):
return iter(astuple(self))
CryNonTransDex = CryNonTransCodex() # Make instance
class CryMat:
"""
CryMat is fully qualified cryptographic material base class
Sub classes are derivation code and key event element context specific.
Includes the following attributes and properties:
Attributes:
Properties:
.code str derivation code to indicate cypher suite
.raw bytes crypto material only without code
.pad int number of pad chars given raw
.qb64 str in Base64 fully qualified with derivation code + crypto mat
.qb64b bytes in Base64 fully qualified with derivation code + crypto mat
.qb2 bytes in binary with derivation code + crypto material
.nontrans True when non-transferable derivation code False otherwise
"""
def __init__(self, raw=None, qb64b=None, qb64=None, qb2=None,
code=CryOneDex.Ed25519N, index=0):
"""
Validate as fully qualified
Parameters:
raw is bytes of unqualified crypto material usable for crypto operations
qb64b is bytes of fully qualified crypto material
qb64 is str or bytes of fully qualified crypto material
qb2 is bytes of fully qualified crypto material
code is str of derivation code
index is int of count of attached receipts for CryCntDex codes
When raw provided then validate that code is correct for length of raw
and assign .raw
Else when qb64 or qb2 provided extract and assign .raw and .code
"""
if raw is not None: # raw provided so infil with code
if not isinstance(raw, (bytes, bytearray)):
raise TypeError("Not a bytes or bytearray, raw={}.".format(raw))
pad = self._pad(raw)
if (not ( (pad == 1 and (code in CryOneDex)) or # One or Five or Nine
(pad == 2 and (code in CryTwoDex)) or # Two or Six or Ten
(pad == 0 and (code in CryFourDex)) or # Four or Eight
(pad == 0 and (code in CryCntDex)) )): # Cnt Four
raise ValidationError("Wrong code={} for raw={}.".format(code, raw))
if (code in CryCntDex and ((index < 0) or (index > CRYCNTMAX))):
raise ValidationError("Invalid index={} for code={}.".format(index, code))
raw = raw[:CryRawSizes[code]] # allows longer by truncating if stream
if len(raw) != CryRawSizes[code]: # forbids shorter
raise ValidationError("Unexpected raw size={} for code={}"
" not size={}.".format(len(raw),
code,
CryRawSizes[code]))
self._code = code
self._index = index
self._raw = bytes(raw) # crypto ops require bytes not bytearray
elif qb64b is not None:
self._exfil(qb64b)
elif qb64 is not None:
if hasattr(qb64, "encode"): # ._exfil expects bytes not str
qb64 = qb64.encode("utf-8") # greedy so do not use on stream
self._exfil(qb64)
elif qb2 is not None: # rewrite to use direct binary exfiltration
self._exfil(encodeB64(qb2))
else:
raise EmptyMaterialError("Improper initialization need raw or b64 or b2.")
@staticmethod
def _pad(raw):
"""
Returns number of pad characters that would result from converting raw
to Base64 encoding
raw is bytes or bytearray
"""
m = len(raw) % 3
return (3 - m if m else 0)
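# e.g. a 32-byte raw: 32 % 3 == 2, so _pad returns 1; a 64-byte raw gives 2;
# a 33-byte raw (a multiple of 3) gives 0.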
@property
def pad(self):
"""
Returns number of pad characters that would result from converting
self.raw to Base64 encoding
self.raw is raw is bytes or bytearray
"""
return self._pad(self._raw)
@property
def code(self):
"""
Returns ._code
Makes .code read only
"""
return self._code
@property
def index(self):
"""
Returns ._index
Makes .index read only
"""
return self._index
@property
def raw(self):
"""
Returns ._raw
Makes .raw read only
"""
return self._raw
def _infil(self):
"""
Returns fully qualified base64 bytes given self.pad, self.code, self.count
and self.raw
code is Codex value
count is attached receipt couplet count when applicable for CryCntDex codes
raw is bytes or bytearray
"""
if self._code in CryCntDex:
l = CryIdxSizes[self._code] # count length b64 characters
# full is pre code + index
full = "{}{}".format(self._code, IntToB64(self._index, l=l))
else:
full = self._code
pad = self.pad
# valid pad for code length
if len(full) % 4 != pad: # pad is not remainder of len(code) % 4
raise ValidationError("Invalid code = {} for converted raw pad = {}."
.format(full, self.pad))
# prepending derivation code and strip off trailing pad characters
return (full.encode("utf-8") + encodeB64(self._raw)[:-pad])
def _exfil(self, qb64b):
"""
Extracts self.code and self.raw from qualified base64 bytes qb64b
"""
if len(qb64b) < MINCRYSIZE: # Need more bytes
raise ShortageError("Need more bytes.")
cs = 1 # code size initially 1 to extract selector
code = qb64b[:cs].decode("utf-8") # convert to str
index = 0
# need to map code to length so can only consume proper number of chars
# from front of qb64 so can use with full identifiers not just prefixes
if code in CryOneDex: # One Char code
qb64b = qb64b[:CryOneSizes[code]] # truncate to length of full crymat
elif code == CrySelDex.two: # first char of two char code
cs += 1 # increase code size
code = qb64b[0:cs].decode("utf-8") # get full code, convert to str
if code not in CryTwoDex:
raise ValidationError("Invalid derivation code = {} in {}.".format(code, qb64b))
qb64b = qb64b[:CryTwoSizes[code]] # truncate to length of full crymat
elif code == CrySelDex.four: # first char of four char cnt code
cs += 3 # increase code size
code = qb64b[0:cs].decode("utf-8") # get full code, convert to str
if code not in CryFourDex:
raise ValidationError("Invalid derivation code = {} in {}.".format(code, qb64b))
qb64b = qb64b[:CryFourSizes[code]] # truncate to length of full crymat
elif code == CrySelDex.dash: # '-' 2 char code + 2 char index count
cs += 1 # increase code size
code = qb64b[0:cs].decode("utf-8") # get full code, convert to str
if code not in CryCntDex: # 4 char = 2 code + 2 index
raise ValidationError("Invalid derivation code = {} in {}.".format(code, qb64b))
qb64b = qb64b[:CryCntSizes[code]] # truncate to length of full crymat
cs += 2 # increase code size
index = B64ToInt(qb64b[cs-2:cs].decode("utf-8")) # last two characters for index
else:
raise ValueError("Improperly coded material = {}".format(qb64b))
if len(qb64b) != CrySizes[code]: # must be correct length
if len(qb64b) < CrySizes[code]: # need more bytes
raise ShortageError("Need more bytes.")
else:
raise ValidationError("Bad qb64b size expected {}, got {} "
"bytes.".format(CrySizes[code],
len(qb64b)))
pad = cs % 4 # pad is remainder pre mod 4
# strip off prepended code and append pad characters
base = qb64b[cs:] + pad * BASE64_PAD
raw = decodeB64(base)
if len(raw) != (len(qb64b) - cs) * 3 // 4: # exact lengths
raise ValueError("Improperly qualified material = {}".format(qb64b))
self._code = code
self._index = index
self._raw = raw
@property
def qb64(self):
"""
Property qb64:
Returns Fully Qualified Base64 Version
Assumes self.raw and self.code are correctly populated
"""
return self.qb64b.decode("utf-8")
@property
def qb64b(self):
"""
Property qb64b:
Returns Fully Qualified Base64 Version encoded as bytes
Assumes self.raw and self.code are correctly populated
"""
return self._infil()
@property
def qb2(self):
"""
Property qb2:
Returns Fully Qualified Binary Version Bytes
redo to use b64 to binary decode table since faster
"""
# rewrite to do direct binary infiltration by
# decode self.code as bits and prepend to self.raw
return decodeB64(self._infil())
@property
def nontrans(self):
"""
Property transferable:
Returns True if identifier has non-transferable derivation code,
False otherwise
"""
return(self.code in CryNonTransDex)
class CryCounter(CryMat):
"""
CryCounter is subclass of CryMat, cryptographic material,
CryCounter provides the count of following attached cryptographic
material items in its .count property.
Useful when parsing attached receipt couplets from stream where CryCounter
instance qb64 is inserted after Serder of receipt statement and
before attached receipt couplets.
Changes default initialization code = CryCntDex.Base64
Raises error on init if code not in CryCntDex
See CryMat for inherited attributes and properties:
Attributes:
Properties:
.count is int count of attached signatures (same as .index)
Methods:
"""
def __init__(self, raw=None, qb64b=None, qb64=None, qb2=None,
code=CryCntDex.Base64, index=None, count=None, **kwa):
"""
Parameters: See CryMat for inherited parameters
count is int number of attached signatures, same as index
"""
raw = b'' if raw is not None else raw # force raw to be empty if provided
if raw is None and qb64b is None and qb64 is None and qb2 is None:
raw = b''
# accept either index or count to init index
if count is not None:
index = count
if index is None:
index = 1 # most common case
# force raw empty
super(CryCounter, self).__init__(raw=raw, qb64b=qb64b, qb64=qb64, qb2=qb2,
code=code, index=index, **kwa)
if self.code not in CryCntDex:
raise ValidationError("Invalid code = {} for CryCounter."
"".format(self.code))
@property
def count(self):
"""
Property counter:
Returns .index as count
Assumes ._index is correctly assigned
"""
return self.index
class Verfer(CryMat):
"""
Verfer is CryMat subclass with method to verify signature of serialization
using the .raw as verifier key and .code for signature cipher suite.
See CryMat for inherited attributes and properties:
Attributes:
Properties:
Methods:
verify: verifies signature
"""
def __init__(self, **kwa):
"""
Assign verification cipher suite function to ._verify
"""
super(Verfer, self).__init__(**kwa)
if self.code in [CryOneDex.Ed25519N, CryOneDex.Ed25519]:
self._verify = self._ed25519
else:
raise ValueError("Unsupported code = {} for verifier.".format(self.code))
def verify(self, sig, ser):
"""
Returns True if bytes signature sig verifies on bytes serialization ser
using .raw as verifier public key for ._verify cipher suite determined
by .code
Parameters:
sig is bytes signature
ser is bytes serialization
"""
return (self._verify(sig=sig, ser=ser, key=self.raw))
@staticmethod
def _ed25519(sig, ser, key):
"""
Returns True if verified False otherwise
Verify ed25519 sig on ser using key
Parameters:
sig is bytes signature
ser is bytes serialization
key is bytes public key
"""
try: # verify returns None if valid else raises ValueError
pysodium.crypto_sign_verify_detached(sig, ser, key)
except Exception as ex:
return False
return True
class Cigar(CryMat):
"""
Cigar is CryMat subclass holding a nonindexed signature with verfer property.
From CryMat .raw is signature and .code is signature cipher suite
Adds .verfer property to hold Verfer instance of associated verifier public key
Verfer's .raw as verifier key and .code is verifier cipher suite.
See CryMat for inherited attributes and properties:
Attributes:
Properties:
Methods:
"""
def __init__(self, verfer=None, **kwa):
"""
Assign verfer to ._verfer attribute
"""
super(Cigar, self).__init__(**kwa)
self._verfer = verfer
@property
def verfer(self):
"""
Property verfer:
Returns Verfer instance
Assumes ._verfer is correctly assigned
"""
return self._verfer
@verfer.setter
def verfer(self, verfer):
""" verfer property setter """
self._verfer = verfer
class Signer(CryMat):
"""
Signer is CryMat subclass with method to create signature of serialization
using the .raw as signing (private) key seed, .code as cipher suite for
signing and new property .verfer whose property .raw is public key for signing.
If not provided .verfer is generated from private key seed using .code
as cipher suite for creating key-pair.
See CryMat for inherited attributes and properties:
Attributes:
Properties:
.verfer is Verfer object instance
Methods:
sign: create signature
"""
def __init__(self,raw=None, code=CryOneDex.Ed25519_Seed, transferable=True, **kwa):
"""
Assign signing cipher suite function to ._sign
Parameters: See CryMat for inherited parameters
raw is bytes crypto material seed or private key
code is derivation code
transferable is Boolean True means verifier code is transferable
False otherwise non-transferable
"""
try:
super(Signer, self).__init__(raw=raw, code=code, **kwa)
except EmptyMaterialError as ex:
if code == CryOneDex.Ed25519_Seed:
raw = pysodium.randombytes(pysodium.crypto_sign_SEEDBYTES)
super(Signer, self).__init__(raw=raw, code=code, **kwa)
else:
raise ValueError("Unsupported signer code = {}.".format(code))
if self.code == CryOneDex.Ed25519_Seed:
self._sign = self._ed25519
verkey, sigkey = pysodium.crypto_sign_seed_keypair(self.raw)
verfer = Verfer(raw=verkey,
code=CryOneDex.Ed25519 if transferable
else CryOneDex.Ed25519N )
else:
raise ValueError("Unsupported signer code = {}.".format(self.code))
self._verfer = verfer
@property
def verfer(self):
"""
Property verfer:
Returns Verfer instance
Assumes ._verfer is correctly assigned
"""
return self._verfer
def sign(self, ser, index=None):
"""
Returns either Cigar or Siger (indexed) instance of cryptographic
signature material on bytes serialization ser
If index is None
return Cigar instance
Else
return Siger instance
Parameters:
ser is bytes serialization
index is int index of associated verifier key in event keys
"""
return (self._sign(ser=ser,
seed=self.raw,
verfer=self.verfer,
index=index))
@staticmethod
def _ed25519(ser, seed, verfer, index):
"""
Returns signature
Parameters:
ser is bytes serialization
seed is bytes seed (private key)
verfer is Verfer instance. verfer.raw is public key
index is index of offset into signers list or None
"""
sig = pysodium.crypto_sign_detached(ser, seed + verfer.raw)
if index is None:
return Cigar(raw=sig, code=CryTwoDex.Ed25519, verfer=verfer)
else:
return Siger(raw=sig,
code=SigTwoDex.Ed25519,
index=index,
verfer=verfer)
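# Usage sketch (assumes the defaults above): Signer() generates a random
# Ed25519 seed and key pair;
#   signer.sign(ser)          -> Cigar (non-indexed signature)
#   signer.sign(ser, index=0) -> Siger (indexed signature, defined elsewhere
#                                in this module)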
def generateSigners(root=None, count=8, transferable=True):
"""
Returns list of Signers for Ed25519
Parameters:
root is bytes 16 byte long root key (salt/seed) from which seeds for Signers
in list are derived
random root created if not provided
count is number of signers in list
transferable is boolean true means signer.verfer code is transferable
non-transferable otherwise
"""
if not root:
root = pysodium.randombytes(pysodium.crypto_pwhash_SALTBYTES)
signers = []
for i in range(count):
path = "{:x}".format(i)
# algorithm default is argon2id
seed = pysodium.crypto_pwhash(outlen=32,
passwd=path,
salt=root,
opslimit=pysodium.crypto_pwhash_OPSLIMIT_INTERACTIVE,
memlimit=pysodium.crypto_pwhash_MEMLIMIT_INTERACTIVE,
alg=pysodium.crypto_pwhash_ALG_DEFAULT)
signers.append(Signer(raw=seed, transferable=transferable))
return signers
def generateSecrets(root=None, count=8):
"""
Returns list of fully qualified Base64 secret seeds for Ed25519 private keys
Parameters:
root is bytes 16 byte long root key (salt/seed) from which seeds for Signers
in list are derived
random root created if not provided
count is number of signers in list
"""
signers = generateSigners(root=root, count=count)
return [signer.qb64 for signer in signers] # fetch the qb64
class Diger(CryMat):
"""
Diger is CryMat subclass with method to verify digest of serialization
using .raw as digest and .code for digest algorithm.
See CryMat for inherited attributes and properties:
Attributes:
Properties:
Methods:
verify: verifies signature
"""
def __init__(self, raw=None, ser=None, code=CryOneDex.Blake3_256, **kwa):
"""
Assign digest verification function to ._verify
See CryMat for inherited parameters
Parameters:
ser is bytes serialization from which raw is computed if not raw
"""
try:
super(Diger, self).__init__(raw=raw, code=code, **kwa)
except EmptyMaterialError as ex:
if not ser:
raise ex
if code == CryOneDex.Blake3_256:
dig = blake3.blake3(ser).digest()
super(Diger, self).__init__(raw=dig, code=code, **kwa)
else:
raise ValueError("Unsupported code = {} for digester.".format(code))
if self.code == CryOneDex.Blake3_256:
self._verify = self._blake3_256
else:
raise ValueError("Unsupported code = {} for digester.".format(self.code))
def verify(self, ser):
"""
Returns True if digest of bytes serialization ser matches .raw
using .raw as reference digest for ._verify digest algorithm determined
by .code
Parameters:
ser is bytes serialization
"""
return (self._verify(ser=ser, dig=self.raw))
@staticmethod
def _blake3_256(ser, dig):
"""
Returns True if verified False otherwise
Verify blake3_256 digest of ser matches dig
Parameters:
ser is bytes serialization
dig is bytes reference digest
"""
return(blake3.blake3(ser).digest() == dig)
class Nexter(Diger):
"""
Nexter is Diger subclass with support to create itself from
next sith and next keys
See Diger for inherited attributes and properties:
Attributes:
Properties:
.sith return copy of sith used to create digest. None otherwise.
.keys returns copy of keys used to create digest. None otherwise.
Methods:
"""
def __init__(self, ser=None, sith=None, keys=None, ked=None, **kwa):
"""
Assign digest verification function to ._verify
See CryMat for inherited parameters
Parameters:
ser is bytes serialization from which raw is computed if not raw
sith is int threshold or lowercase hex str no leading zeros
keys is list of keys each is qb64 public key str
Raises error if not any of raw, ser, keys, ked
if not raw and not ser
If keys not provided
get keys from ked
If sith not provided
get sith from ked
but if not ked then compute sith as simple majority of keys
"""
try:
super(Nexter, self).__init__(ser=ser, **kwa)
except EmptyMaterialError as ex:
if not keys and not ked:
raise ex
ser, sith, keys = self._derive(sith=sith, keys=keys, ked=ked)
super(Nexter, self).__init__(ser=ser, **kwa)
self._sith = copy.deepcopy(sith) if sith is not None else None
self._keys = copy.deepcopy(keys) if keys is not None else None
@property
def sith(self):
""" Property ._sith getter """
return self._sith
@property
def keys(self):
""" Property ._keys getter """
return self._keys
@staticmethod
def _derive(sith=None, keys=None, ked=None):
"""
Returns serialization derived from sith, keys, or ked
"""
if not keys:
try:
keys = ked["keys"]
except KeyError as ex:
raise DerivationError("Error extracting keys from"
" ked = {}".format(ex))
if not keys: # empty keys
raise DerivationError("Empty keys.")
if sith is None:
try:
sith = ked["sith"]
except Exception as ex:
sith = max(1, ceil(len(keys) / 2)) # default simple majority
if isinstance(sith, list):
# verify list expression against keys
# serialize list here
raise DerivationError("List form of sith = {} not yet supported".format(sith))
else:
try:
sith = int(sith, 16) # convert to int
except TypeError as ex: # already int
pass
sith = max(1, sith) # ensure sith at least 1
sith = "{:x}".format(sith) # convert back to lowercase hex no leading zeros
nxts = [sith.encode("utf-8")] # create list to concatenate for hashing
for key in keys:
nxts.append(key.encode("utf-8"))
ser = b''.join(nxts)
return (ser, sith, keys)
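# Derivation sketch: with keys = [k0, k1, k2] and no sith given,
# sith = max(1, ceil(3 / 2)) = 2 -> '2', and the digested serialization is
# b'2' + k0 + k1 + k2 (each key utf-8 encoded, concatenated in order).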
def verify(self, ser=b'', sith=None, keys=None, ked=None):
"""
Returns True if digest of bytes serialization ser matches .raw
using .raw as reference digest for ._verify digest algorithm determined
by .code
If ser not provided then extract ser from either (sith, keys) or ked
Parameters:
ser is bytes serialization
sith is str lowercase hex
"""
if not ser:
ser, sith, keys = self._derive(sith=sith, keys=keys, ked=ked)
return (self._verify(ser=ser, dig=self.raw))
class Prefixer(CryMat):
"""
Prefixer is CryMat subclass for autonomic identifier prefix using
derivation as determined by code from ked
See CryMat for other inherited attributes and properties:
Attributes:
Properties:
Methods:
verify(): Verifies derivation of aid prefix
"""
# element labels to exclude in digest or signature derivation from inception icp
IcpExcludes = ["pre"]
# element labels to exclude in digest or signature derivation from delegated inception dip
DipExcludes = ["pre"]
def __init__(self, raw=None, code=CryOneDex.Ed25519N, ked=None,
seed=None, secret=None, **kwa):
"""
assign ._derive to derive derivation of aid prefix from ked
assign ._verify to verify derivation of aid prefix from ked
Parameters:
seed is bytes seed when signature derivation
secret is qb64 when signature derivation when applicable
one of seed or secret must be provided when signature derivation
"""
try:
super(Prefixer, self).__init__(raw=raw, code=code, **kwa)
except EmptyMaterialError as ex:
if not ked or not code:
raise ex
if code == CryOneDex.Ed25519N:
self._derive = self._DeriveBasicEd25519N
elif code == CryOneDex.Ed25519:
self._derive = self._DeriveBasicEd25519
elif code == CryOneDex.Blake3_256:
self._derive = self._DeriveDigBlake3_256
elif code == CryTwoDex.Ed25519:
self._derive = self._DeriveSigEd25519
else:
raise ValueError("Unsupported code = {} for prefixer.".format(code))
# use ked to derive aid prefix
raw, code = self._derive(ked=ked, seed=seed, secret=secret)
super(Prefixer, self).__init__(raw=raw, code=code, **kwa)
if self.code == CryOneDex.Ed25519N:
self._verify = self._VerifyBasicEd25519N
elif self.code == CryOneDex.Ed25519:
self._verify = self._VerifyBasicEd25519
elif self.code == CryOneDex.Blake3_256:
self._verify = self._VerifyDigBlake3_256
elif code == CryTwoDex.Ed25519:
self._verify = self._VerifySigEd25519
else:
raise ValueError("Unsupported code = {} for prefixer.".format(self.code))
#if ked and not self.verify(ked):
#raise DerivationError("Error verifying derived prefix = {} with code "
#"= {} from ked = {}.".format(self.qb64,
#self.code,
#ked))
def derive(self, ked, seed=None, secret=None):
"""
Returns tuple (raw, code) of aid prefix as derived from key event dict ked.
uses a derivation code specific _derive method
Parameters:
ked is inception key event dict
            seed is only used for signature derivation; it is the secret signing key seed
"""
return (self._derive(ked=ked, seed=seed, secret=secret))
def verify(self, ked):
"""
Returns True if derivation from iked for .code matches .qb64,
False otherwise
Parameters:
ked is inception key event dict
"""
return (self._verify(ked=ked, pre=self.qb64))
def _DeriveBasicEd25519N(self, ked, seed=None, secret=None):
"""
Returns tuple (raw, code) of basic nontransferable Ed25519 prefix (qb64)
as derived from key event dict ked
"""
try:
keys = ked["keys"]
if len(keys) != 1:
raise DerivationError("Basic derivation needs at most 1 key "
" got {} keys instead".format(len(keys)))
verfer = Verfer(qb64=keys[0])
except Exception as ex:
raise DerivationError("Error extracting public key ="
" = {}".format(ex))
if verfer.code not in [CryOneDex.Ed25519N]:
raise DerivationError("Invalid derivation code = {}."
"".format(verfer.code))
try:
if verfer.code == CryOneDex.Ed25519N and ked["nxt"]:
raise DerivationError("Non-empty nxt = {} for non-transferable"
" code = {}".format(ked["nxt"],
verfer.code))
except Exception as ex:
raise DerivationError("Error checking nxt = {}".format(ex))
return (verfer.raw, verfer.code)
def _VerifyBasicEd25519N(self, ked, pre):
"""
        Returns True if derivation verifies, False otherwise
        Verify derivation of fully qualified Base64 pre from inception key event dict ked
Parameters:
ked is inception key event dict
pre is Base64 fully qualified prefix
"""
try:
keys = ked["keys"]
if len(keys) != 1:
return False
if keys[0] != pre:
return False
if ked["nxt"]: # must be empty
return False
except Exception as ex:
return False
return True
def _DeriveBasicEd25519(self, ked, seed=None, secret=None):
"""
Returns tuple (raw, code) of basic Ed25519 prefix (qb64)
as derived from key event dict ked
"""
try:
keys = ked["keys"]
if len(keys) != 1:
raise DerivationError("Basic derivation needs at most 1 key "
" got {} keys instead".format(len(keys)))
verfer = Verfer(qb64=keys[0])
except Exception as ex:
raise DerivationError("Error extracting public key ="
" = {}".format(ex))
if verfer.code not in [CryOneDex.Ed25519]:
raise DerivationError("Invalid derivation code = {}"
"".format(verfer.code))
return (verfer.raw, verfer.code)
def _VerifyBasicEd25519(self, ked, pre):
"""
        Returns True if derivation verifies, False otherwise
Verify derivation of fully qualified Base64 prefix from
inception key event dict (ked)
Parameters:
ked is inception key event dict
pre is Base64 fully qualified prefix
"""
try:
keys = ked["keys"]
if len(keys) != 1:
return False
if keys[0] != pre:
return False
except Exception as ex:
return False
return True
def _DeriveDigBlake3_256(self, ked, seed=None, secret=None):
"""
        Returns tuple (raw, code) of Blake3-256 digest prefix (qb64)
as derived from key event dict ked
"""
ilk = ked["ilk"]
if ilk == Ilks.icp:
labels = [key for key in ked if key not in self.IcpExcludes]
elif ilk == Ilks.dip:
labels = [key for key in ked if key not in self.DipExcludes]
else:
raise DerivationError("Invalid ilk = {} to derive pre.".format(ilk))
# put in dummy pre to get size correct
ked["pre"] = "{}".format("a"*CryOneSizes[CryOneDex.Blake3_256])
serder = Serder(ked=ked)
ked = serder.ked # use updated ked with valid vs element
for l in labels:
if l not in ked:
raise DerivationError("Missing element = {} from ked.".format(l))
values = extractValues(ked=ked, labels=labels)
ser = "".join(values).encode("utf-8")
dig = blake3.blake3(ser).digest()
return (dig, CryOneDex.Blake3_256)
def _VerifyDigBlake3_256(self, ked, pre):
"""
        Returns True if derivation verifies, False otherwise
Verify derivation of fully qualified Base64 prefix from
inception key event dict (ked)
Parameters:
ked is inception key event dict
pre is Base64 fully qualified
"""
try:
raw, code = self._DeriveDigBlake3_256(ked=ked)
crymat = CryMat(raw=raw, code=CryOneDex.Blake3_256)
if crymat.qb64 != pre:
return False
except Exception as ex:
return False
return True
def _DeriveSigEd25519(self, ked, seed=None, secret=None):
"""
        Returns tuple (raw, code) of Ed25519 signature derived prefix (qb64)
as derived from key event dict ked
"""
ilk = ked["ilk"]
if ilk == Ilks.icp:
labels = [key for key in ked if key not in self.IcpExcludes]
elif ilk == Ilks.dip:
labels = [key for key in ked if key not in self.DipExcludes]
else:
raise DerivationError("Invalid ilk = {} to derive pre.".format(ilk))
# put in dummy pre to get size correct
ked["pre"] = "{}".format("a"*CryTwoSizes[CryTwoDex.Ed25519])
serder = Serder(ked=ked)
ked = serder.ked # use updated ked with valid vs element
for l in labels:
if l not in ked:
raise DerivationError("Missing element = {} from ked.".format(l))
values = extractValues(ked=ked, labels=labels)
ser = "".join(values).encode("utf-8")
try:
keys = ked["keys"]
if len(keys) != 1:
raise DerivationError("Basic derivation needs at most 1 key "
" got {} keys instead".format(len(keys)))
verfer = Verfer(qb64=keys[0])
except Exception as ex:
raise DerivationError("Error extracting public key ="
" = {}".format(ex))
if verfer.code not in [CryOneDex.Ed25519]:
raise DerivationError("Invalid derivation code = {}"
"".format(verfer.code))
if not (seed or secret):
raise DerivationError("Missing seed or secret.")
signer = Signer(raw=seed, qb64=secret)
if verfer.raw != signer.verfer.raw:
raise DerivationError("Key in ked not match seed.")
cigar = signer.sign(ser=ser)
# sig = pysodium.crypto_sign_detached(ser, signer.raw + verfer.raw)
return (cigar.raw, CryTwoDex.Ed25519)
def _VerifySigEd25519(self, ked, pre):
"""
        Returns True if derivation verifies, False otherwise
Verify derivation of fully qualified Base64 prefix from
inception key event dict (ked)
Parameters:
ked is inception key event dict
pre is Base64 fully qualified prefix
"""
try:
ilk = ked["ilk"]
if ilk == Ilks.icp:
labels = [key for key in ked if key not in self.IcpExcludes]
elif ilk == Ilks.dip:
labels = [key for key in ked if key not in self.DipExcludes]
else:
raise DerivationError("Invalid ilk = {} to derive prefix.".format(ilk))
# put in dummy pre to get size correct
ked["pre"] = "{}".format("a"*CryTwoSizes[CryTwoDex.Ed25519])
serder = Serder(ked=ked)
ked = serder.ked # use updated ked with valid vs element
for l in labels:
if l not in ked:
raise DerivationError("Missing element = {} from ked.".format(l))
values = extractValues(ked=ked, labels=labels)
ser = "".join(values).encode("utf-8")
try:
keys = ked["keys"]
if len(keys) != 1:
raise DerivationError("Basic derivation needs at most 1 key "
" got {} keys instead".format(len(keys)))
verfer = Verfer(qb64=keys[0])
except Exception as ex:
raise DerivationError("Error extracting public key ="
" = {}".format(ex))
if verfer.code not in [CryOneDex.Ed25519]:
raise DerivationError("Invalid derivation code = {}"
"".format(verfer.code))
cigar = Cigar(qb64=pre, verfer=verfer)
result = cigar.verfer.verify(sig=cigar.raw, ser=ser)
return result
#try: # verify returns None if valid else raises ValueError
#result = pysodium.crypto_sign_verify_detached(sig, ser, verfer.raw)
#except Exception as ex:
#return False
except Exception as ex:
return False
return True
BASE64_PAD = b'='
# Mappings between Base64 Encode Index and Decode Characters
# B64ChrByIdx is dict where each key is a B64 index and each value is the B64 char
# B64IdxByChr is dict where each key is a B64 chars and each value is the B64 index
# Map Base64 index to char
B64ChrByIdx = dict((index, char) for index, char in enumerate([chr(x) for x in range(65, 91)]))
B64ChrByIdx.update([(index + 26, char) for index, char in enumerate([chr(x) for x in range(97, 123)])])
B64ChrByIdx.update([(index + 52, char) for index, char in enumerate([chr(x) for x in range(48, 58)])])
B64ChrByIdx[62] = '-'
B64ChrByIdx[63] = '_'
B64IdxByChr = {char: index for index, char in B64ChrByIdx.items()} # map char to Base64 index
def IntToB64(i, l=1):
"""
Returns conversion of int i to Base64 str
    l is min number of b64 digits padded with Base64 zeros "A"
"""
d = deque() # deque of characters base64
d.appendleft(B64ChrByIdx[i % 64])
i = i // 64
while i:
d.appendleft(B64ChrByIdx[i % 64])
i = i // 64
for j in range(l - len(d)): # range(x) x <= 0 means do not iterate
d.appendleft("A")
return ( "".join(d))
def B64ToInt(cs):
"""
Returns conversion of Base64 str cs to int
"""
i = 0
for e, c in enumerate(reversed(cs)):
i += B64IdxByChr[c] * 64 ** e
return i
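# Illustrative examples of the Base64 index helpers above (added for clarity, not part
# of the original module); the values follow directly from the mapping tables:
#   IntToB64(0)        -> 'A'
#   IntToB64(3, l=2)   -> 'AD'    # left padded with the Base64 zero character 'A'
#   IntToB64(4095, 2)  -> '__'    # 4095 == 63*64 + 63, the two-digit maximum
#   B64ToInt('AD')     -> 3
#   B64ToInt('__')     -> 4095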
@dataclass(frozen=True)
class SigSelectCodex:
"""
    SigSelectCodex codex of selector characters for attached signature cryptographic material
Only provide defined characters.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
"""
four: str = '0' # use four character table.
five: str = '1' # use five character table.
six: str = '2' # use six character table.
dash: str = '-' # use signature count table
def __iter__(self):
return iter(astuple(self))
SigSelDex = SigSelectCodex() # Make instance
@dataclass(frozen=True)
class SigCntCodex:
"""
SigCntCodex codex of four character length derivation codes that indicate
count (number) of attached signatures following an event .
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
.raw is empty
Note binary length of everything in SigCntCodex results in 0 Base64 pad bytes.
First two code characters select format of attached signatures
    Next two code characters select count total of attached signatures to an event
Only provide first two characters here
"""
Base64: str = '-A' # Fully Qualified Base64 Format Signatures.
Base2: str = '-B' # Fully Qualified Base2 Format Signatures.
def __iter__(self):
return iter(astuple(self))
SigCntDex = SigCntCodex() # Make instance
# Mapping of Code to Size
# Total size qb64
SigCntSizes = {
"-A": 4,
"-B": 4,
}
# size of index portion of code qb64
SigCntIdxSizes = {
"-A": 2,
"-B": 2,
}
# total size of raw unqualified
SigCntRawSizes = {
"-A": 0,
"-B": 0,
}
SIGCNTMAX = 4095 # maximum count value given two base 64 digits
@dataclass(frozen=True)
class SigTwoCodex:
"""
SigTwoCodex codex of two character length derivation codes for attached signatures
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
Note binary length of everything in SigTwoCodex results in 2 Base64 pad bytes.
First code character selects signature cipher suite
    Second code character selects index into current signing key list
Only provide first character here
"""
Ed25519: str = 'A' # Ed25519 signature.
ECDSA_256k1: str = 'B' # ECDSA secp256k1 signature.
def __iter__(self):
return iter(astuple(self))
SigTwoDex = SigTwoCodex() # Make instance
# Mapping of Code to Size
SigTwoSizes = {
"A": 88,
"B": 88,
}
# size of index portion of code qb64
SigTwoIdxSizes = {
"A": 1,
"B": 1,
}
SigTwoRawSizes = {
"A": 64,
"B": 64,
}
SIGTWOMAX = 63 # maximum index value given one base64 digit
@dataclass(frozen=True)
class SigFourCodex:
"""
SigFourCodex codex of four character length derivation codes
Only provide defined codes.
Undefined are left out so that inclusion(exclusion) via 'in' operator works.
Note binary length of everything in SigFourCodex results in 0 Base64 pad bytes.
First two code characters select signature cipher suite
    Next two code characters select index into current signing key list
Only provide first two characters here
"""
Ed448: str = '0A' # Ed448 signature.
def __iter__(self):
return iter(astuple(self))
SigFourDex = SigFourCodex() # Make instance
# Mapping of Code to Size
SigFourSizes = {
"0A": 156,
}
# size of index portion of code qb64
SigFourIdxSizes = {
"0A": 2,
}
SigFourRawSizes = {
"0A": 114,
}
SIGFOURMAX = 4095 # maximum index value given two base 64 digits
@dataclass(frozen=True)
class SigFiveCodex:
"""
Five codex of five character length derivation codes
Only provide defined codes. Undefined are left out so that inclusion
exclusion via 'in' operator works.
    Note binary length of everything in SigFiveCodex results in 1 Base64 pad byte.
    First three code characters select signature cipher suite
    Next two code characters select index into current signing key list
Only provide first three characters here
"""
def __iter__(self):
return iter(astuple(self))
SigFiveDex = SigFiveCodex() # Make instance
# Mapping of Code to Size
SigFiveSizes = {}
SigFiveIdxSizes = {}
SigFiveRawSizes = {}
SIGFIVEMAX = 4095 # maximum index value given two base 64 digits
# all sizes in one dict
SigSizes = dict(SigCntSizes)
SigSizes.update(SigTwoSizes)
SigSizes.update(SigFourSizes)
SigSizes.update(SigFiveSizes)
MINSIGSIZE = min(SigSizes.values())
SigIdxSizes = dict(SigCntIdxSizes)
SigIdxSizes.update(SigTwoIdxSizes)
SigIdxSizes.update(SigFourIdxSizes)
SigIdxSizes.update(SigFiveIdxSizes)
SigRawSizes = dict(SigCntRawSizes)
SigRawSizes.update(SigTwoRawSizes)
SigRawSizes.update(SigFourRawSizes)
SigRawSizes.update(SigFiveRawSizes)
class SigMat:
"""
SigMat is fully qualified attached signature crypto material base class
Sub classes are derivation code specific.
Includes the following attributes and properites.
Attributes:
Properties:
.code str derivation code of cipher suite for signature
.index int zero based offset into signing key list
or if from SigCntDex then its count of attached signatures
.raw bytes crypto material only without code
.pad int number of pad chars given .raw
.qb64 str in Base64 fully qualified with derivation code and signature crypto material
.qb64b bytes in Base64 fully qualified with derivation code and signature crypto material
.qb2 bytes in binary fully qualified with derivation code and signature crypto material
"""
def __init__(self, raw=None, qb64b=None, qb64=None, qb2=None,
code=SigTwoDex.Ed25519, index=0):
"""
Validate as fully qualified
Parameters:
raw is bytes of unqualified crypto material usable for crypto operations
qb64b is bytes of fully qualified crypto material
qb64 is str or bytes of fully qualified crypto material
qb2 is bytes of fully qualified crypto material
code is str of derivation code cipher suite
index is int of offset index into current signing key list
or if from SigCntDex then its count of attached signatures
When raw provided then validate that code is correct for length of raw
and assign .raw .code and .index
Else when either qb64 or qb2 provided then extract and assign .raw and .code
"""
if raw is not None: # raw provided
if not isinstance(raw, (bytes, bytearray)):
raise TypeError("Not a bytes or bytearray, raw={}.".format(raw))
pad = self._pad(raw)
if (not ( (pad == 2 and (code in SigTwoDex)) or # Two or Six or Ten
(pad == 0 and (code in SigCntDex)) or # Cnt (Count)
(pad == 0 and (code in SigFourDex)) or # Four or Eight
(pad == 1 and (code in SigFiveDex)) )): # Five or Nine
raise ValidationError("Wrong code={} for raw={}.".format(code, raw))
if ( (code in SigTwoDex and ((index < 0) or (index > SIGTWOMAX)) ) or
(code in SigCntDex and ((index < 0) or (index > SIGFOURMAX)) ) or
(code in SigFourDex and ((index < 0) or (index > SIGFOURMAX)) ) or
(code in SigFiveDex and ((index < 0) or (index > SIGFIVEMAX)) ) ):
raise ValidationError("Invalid index={} for code={}.".format(index, code))
raw = raw[:SigRawSizes[code]] # allows longer by truncating stream
if len(raw) != SigRawSizes[code]: # forbids shorter
raise ValidationError("Unexpected raw size={} for code={}"
" not size={}.".format(len(raw),
code,
SigRawSizes[code]))
self._code = code # front part without index
self._index = index
self._raw = bytes(raw) # crypto ops require bytes not bytearray
elif qb64b is not None:
self._exfil(qb64b)
elif qb64 is not None:
if hasattr(qb64, "encode"): # ._exfil expects bytes not str
qb64 = qb64.encode("utf-8") # greedy so do not use on stream
self._exfil(qb64)
elif qb2 is not None: # rewrite to use direct binary exfiltration
self._exfil(encodeB64(qb2))
else:
raise EmptyMaterialError("Improper initialization need raw or b64 or b2.")
@staticmethod
def _pad(raw):
"""
Returns number of pad characters that would result from converting raw
to Base64 encoding
raw is bytes or bytearray
"""
m = len(raw) % 3
return (3 - m if m else 0)
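    # Illustrative pad arithmetic (added for clarity, not part of the original module):
    # a 64-byte Ed25519 signature gives 64 % 3 == 1 so pad == 2 (SigTwoDex codes), a
    # 114-byte Ed448 signature gives 114 % 3 == 0 so pad == 0 (SigFourDex codes), and
    # the empty raw of a counter gives pad == 0 (SigCntDex codes), matching the checks
    # in __init__ above.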
@property
def pad(self):
"""
Returns number of pad characters that would result from converting
self.raw to Base64 encoding
self.raw is raw is bytes or bytearray
"""
return self._pad(self._raw)
@property
def code(self):
"""
Returns ._code
Makes .code read only
"""
return self._code
@property
def index(self):
"""
Returns ._index
Makes .index read only
"""
return self._index
@property
def raw(self):
"""
Returns ._raw
Makes .raw read only
"""
return self._raw
def _infil(self):
"""
Returns fully qualified attached sig base64 bytes computed from
self.raw, self.code and self.index.
"""
l = SigIdxSizes[self._code] # index length b64 characters
# full is pre code + index
full = "{}{}".format(self._code, IntToB64(self._index, l=l))
pad = self.pad
# valid pad for code length
if len(full) % 4 != pad: # pad is not remainder of len(code) % 4
raise ValidationError("Invalid code + index = {} for converted raw pad = {}."
.format(full, self.pad))
        # prepend full derivation code (with index) and strip off trailing pad characters;
        # when pad is zero there is nothing to strip, so keep the whole encoding
        b64 = encodeB64(self._raw)
        return (full.encode("utf-8") + (b64[:-pad] if pad else b64))
def _exfil(self, qb64b):
"""
Extracts self.code,self.index, and self.raw from qualified base64 qb64
"""
if len(qb64b) < MINSIGSIZE: # Need more bytes
raise ShortageError("Need more bytes.")
cs = 1 # code size initially 1 to extract selector or one char code
code = qb64b[:cs].decode("utf-8") # get front code, convert to str
if hasattr(code, "decode"): # converts bytes like to str
code = code.decode("utf-8")
index = 0
# need to map code to length so can only consume proper number of chars
# from front of qb64 so can use with full identifiers not just prefixes
if code in SigTwoDex: # 2 char = 1 code + 1 index
            qb64b = qb64b[:SigTwoSizes[code]]  # strip off full sigmat
cs += 1
index = B64IdxByChr[qb64b[cs-1:cs].decode("utf-8")] # one character for index
elif code == SigSelDex.four: # '0'
cs += 1
code = qb64b[0:cs].decode("utf-8") # get front code, convert to str
if code not in SigFourDex: # 4 char = 2 code + 2 index
raise ValidationError("Invalid derivation code = {} in {}.".format(code, qb64b))
            qb64b = qb64b[:SigFourSizes[code]]  # strip off full sigmat
cs += 2
index = B64ToInt(qb64b[cs-2:cs].decode("utf-8")) # two characters for index
elif code == SigSelDex.dash: # '-'
cs += 1
code = qb64b[0:cs].decode("utf-8") # get front code, convert to str
if code not in SigCntDex: # 4 char = 2 code + 2 index
raise ValidationError("Invalid derivation code = {} in {}.".format(code, qb64b))
            qb64b = qb64b[:SigCntSizes[code]]  # strip off full sigmat
cs += 2
index = B64ToInt(qb64b[cs-2:cs].decode("utf-8")) # two characters for index
else:
raise ValueError("Improperly coded material = {}".format(qb64b))
if len(qb64b) != SigSizes[code]: # not correct length
if len(qb64b) < SigSizes[code]: # need more bytes
raise ShortageError("Need more bytes.")
else:
raise ValidationError("Bad qb64b size expected {}, got {} "
"bytes.".format(SigSizes[code],
len(qb64b)))
pad = cs % 4 # pad is remainder pre mod 4
# strip off prepended code and append pad characters
base = qb64b[cs:] + pad * BASE64_PAD
raw = decodeB64(base)
if len(raw) != (len(qb64b) - cs) * 3 // 4: # exact lengths
raise ValueError("Improperly qualified material = {}".format(qb64b))
self._code = code
self._index = index
self._raw = raw
@property
def qb64(self):
"""
Property qb64:
Returns Fully Qualified Base64 Version
Assumes self.raw and self.code are correctly populated
"""
return self.qb64b.decode("utf-8")
@property
def qb64b(self):
"""
Property qb64b:
Returns Fully Qualified Base64 Version encoded as bytes
Assumes self.raw and self.code are correctly populated
"""
return self._infil()
@property
def qb2(self):
"""
Property qb2:
Returns Fully Qualified Binary Version
redo to use b64 to binary decode table since faster
"""
# rewrite to do direct binary infiltration by
# decode self.code as bits and prepend to self.raw
return decodeB64(self._infil())
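# Illustrative usage sketch (added for clarity; `sig_raw` is a hypothetical 64-byte
# Ed25519 signature, not defined in this module):
#   sigmat = SigMat(raw=sig_raw, code=SigTwoDex.Ed25519, index=2)
#   sigmat.qb64                               # 88-character qualified Base64 starting with 'AC'
#   SigMat(qb64=sigmat.qb64).raw == sig_raw   # round trip recovers the raw signature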
class SigCounter(SigMat):
"""
SigCounter is subclass of SigMat, indexed signature material,
That provides count of following number of attached signatures.
Useful when parsing attached signatures from stream where SigCounter
instance qb64 is inserted after Serder of event and before attached signatures.
Changes default initialization code = SigCntDex.Base64
Raises error on init if code not in SigCntDex
See SigMat for inherited attributes and properties:
Attributes:
Properties:
.count is int count of attached signatures (same as .index)
Methods:
"""
def __init__(self, raw=None, qb64b =None, qb64=None, qb2=None,
code=SigCntDex.Base64, index=None, count=None, **kwa):
"""
        Parameters:  See CryMat for inherited parameters
        count is int number of attached signatures, same as index
"""
        raw = b'' if raw is not None else raw  # force raw to be empty if provided
if raw is None and qb64b is None and qb64 is None and qb2 is None:
raw = b''
# accept either index or count to init index
if count is not None:
index = count
if index is None:
index = 1 # most common case
# force raw empty
super(SigCounter, self).__init__(raw=raw, qb64b=qb64b, qb64=qb64, qb2=qb2,
code=code, index=index, **kwa)
if self.code not in SigCntDex:
raise ValidationError("Invalid code = {} for SigCounter."
"".format(self.code))
@property
def count(self):
"""
Property counter:
Returns .index as count
Assumes ._index is correctly assigned
"""
return self.index
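# Illustrative usage sketch (added for clarity, not part of the original module):
# a counter announcing three attached Base64 format signatures.
#   counter = SigCounter(count=3)
#   counter.qb64    # '-AAD'  ('-A' count code plus two-character Base64 index for 3)
#   counter.count   # 3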
class Siger(SigMat):
"""
Siger is subclass of SigMat, indexed signature material,
Adds .verfer property which is instance of Verfer that provides
associated signature verifier.
See SigMat for inherited attributes and properties:
Attributes:
Properties:
.verfer is Verfer object instance
Methods:
"""
def __init__(self, verfer=None, **kwa):
"""
Assign verfer to ._verfer
        Parameters:  See CryMat for inherited parameters
        verfer is Verfer instance, if any
"""
super(Siger, self).__init__(**kwa)
self._verfer = verfer
@property
def verfer(self):
"""
Property verfer:
Returns Verfer instance
Assumes ._verfer is correctly assigned
"""
return self._verfer
@verfer.setter
def verfer(self, verfer):
""" verfer property setter """
self._verfer = verfer
"""
Need to add Serdery as Serder factory that figures out what type of
serialization and creates appropriate subclass
"""
class Serder:
"""
Serder is KERI key event serializer-deserializer class
Only supports current version VERSION
Has the following public properties:
Properties:
.raw is bytes of serialized event only
.ked is key event dict
.kind is serialization kind string value (see namedtuple coring.Serials)
.version is Versionage instance of event version
        .size is int of number of bytes in serialized event only
"""
def __init__(self, raw=b'', ked=None, kind=None):
"""
Deserialize if raw provided
Serialize if ked provided but not raw
        When serializing, if kind is provided then use kind instead of the field in ked
Parameters:
raw is bytes of serialized event plus any attached signatures
ked is key event dict or None
if None its deserialized from raw
kind is serialization kind string value or None (see namedtuple coring.Serials)
supported kinds are 'json', 'cbor', 'msgpack', 'binary'
if kind is None then its extracted from ked or raw
size is int number of bytes in raw if any
Attributes:
._raw is bytes of serialized event only
._ked is key event dict
._kind is serialization kind string value (see namedtuple coring.Serials)
supported kinds are 'json', 'cbor', 'msgpack', 'binary'
._version is Versionage instance of event version
            ._size is int of number of bytes in serialized event only
._diger is Diger instance of digest of .raw
Properties:
.raw is bytes of serialized event only
.ked is key event dict
.kind is serialization kind string value (see namedtuple coring.Serials)
.version is Versionage instance of event version
            .size is int of number of bytes in serialized event only
.diger is Diger instance of digest of .raw
.dig is qb64 digest from .diger
.digb is qb64b digest from .diger
Note:
loads and jumps of json use str whereas cbor and msgpack use bytes
"""
if raw: # deserialize raw using property
self.raw = raw # raw property setter does the deserialization
elif ked: # serialize ked
self._kind = kind
self.ked = ked # ked property setter does the serialization
else:
raise ValueError("Improper initialization need raw or ked.")
@staticmethod
def _sniff(raw):
"""
Returns serialization kind, version and size from serialized event raw
by investigating leading bytes that contain version string
Parameters:
raw is bytes of serialized event
"""
if len(raw) < MINSNIFFSIZE:
raise ShortageError("Need more bytes.")
match = Rever.search(raw) # Rever's regex takes bytes
if not match or match.start() > 12:
raise ValueError("Invalid version string in raw = {}".format(raw))
major, minor, kind, size = match.group("major", "minor", "kind", "size")
version = Versionage(major=int(major, 16), minor=int(minor, 16))
kind = kind.decode("utf-8")
if kind not in Serials:
raise ValueError("Invalid serialization kind = {}".format(kind))
size = int(size, 16)
return(kind, version, size)
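    # Illustrative sketch (added for clarity, assuming the version string format
    # produced by Versify elsewhere in this module): a raw JSON event whose first
    # bytes are
    #   b'{"vs":"KERI10JSON0000fb_",...'
    # sniffs to kind 'JSON', version (1, 0), and size 0xfb == 251 bytes.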
def _inhale(self, raw):
"""
        Parses serialized event raw of serialization kind and assigns to
instance attributes.
Parameters:
raw is bytes of serialized event
kind is str of raw serialization kind (see namedtuple Serials)
size is int size of raw to be deserialized
Note:
loads and jumps of json use str whereas cbor and msgpack use bytes
"""
kind, version, size = self._sniff(raw)
if version != Version:
raise VersionError("Unsupported version = {}.{}".format(version.major,
version.minor))
if len(raw) < size:
raise ShortageError("Need more bytes.")
if kind == Serials.json:
try:
ked = json.loads(raw[:size].decode("utf-8"))
except Exception as ex:
raise ex
elif kind == Serials.mgpk:
try:
ked = msgpack.loads(raw[:size])
except Exception as ex:
raise ex
elif kind == Serials.cbor:
try:
ked = cbor.loads(raw[:size])
except Exception as ex:
raise ex
else:
ked = None
return (ked, kind, version, size)
def _exhale(self, ked, kind=None):
"""
ked is key event dict
kind is serialization if given else use one given in ked
Returns tuple of (raw, kind, ked, version) where:
raw is serialized event as bytes of kind
kind is serialzation kind
ked is key event dict
version is Versionage instance
Assumes only supports Version
"""
if "vs" not in ked:
raise ValueError("Missing or empty version string in key event dict = {}".format(ked))
knd, version, size = Deversify(ked['vs']) # extract kind and version
if version != Version:
raise VersionError("Unsupported version = {}.{}".format(version.major,
version.minor))
if not kind:
kind = knd
if kind not in Serials:
raise ValueError("Invalid serialization kind = {}".format(kind))
if kind == Serials.json:
raw = json.dumps(ked, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
elif kind == Serials.mgpk:
raw = msgpack.dumps(ked)
elif kind == Serials.cbor:
raw = cbor.dumps(ked)
else:
raise ValueError("Invalid serialization kind = {}".format(kind))
size = len(raw)
match = Rever.search(raw) # Rever's regex takes bytes
if not match or match.start() > 12:
raise ValueError("Invalid version string in raw = {}".format(raw))
fore, back = match.span() # full version string
# update vs with latest kind version size
vs = Versify(version=version, kind=kind, size=size)
# replace old version string in raw with new one
raw = b'%b%b%b' % (raw[:fore], vs.encode("utf-8"), raw[back:])
if size != len(raw): # substitution messed up
raise ValueError("Malformed version string size = {}".format(vs))
ked['vs'] = vs # update ked
return (raw, kind, ked, version)
@property
def raw(self):
""" raw property getter """
return self._raw
@raw.setter
def raw(self, raw):
""" raw property setter """
ked, kind, version, size = self._inhale(raw=raw)
self._raw = bytes(raw[:size]) # crypto ops require bytes not bytearray
self._ked = ked
self._kind = kind
self._version = version
self._size = size
self._diger = Diger(raw=blake3.blake3(self._raw).digest(),
code=CryOneDex.Blake3_256)
@property
def ked(self):
""" ked property getter"""
return self._ked
@ked.setter
def ked(self, ked):
""" ked property setter assumes ._kind """
raw, kind, ked, version = self._exhale(ked=ked, kind=self._kind)
size = len(raw)
self._raw = raw[:size]
self._ked = ked
self._kind = kind
self._size = size
self._version = version
self._diger = Diger(raw=blake3.blake3(self._raw).digest(),
code=CryOneDex.Blake3_256)
@property
def kind(self):
""" kind property getter"""
return self._kind
@kind.setter
def kind(self, kind):
""" kind property setter Assumes ._ked """
raw, kind, ked, version = self._exhale(ked=self._ked, kind=kind)
size = len(raw)
self._raw = raw[:size]
self._ked = ked
self._kind = kind
self._size = size
self._version = version
@property
def version(self):
""" version property getter"""
return self._version
@property
def size(self):
""" size property getter"""
return self._size
@property
def diger(self):
"""
Returns Diger of digest of self.raw
diger (digest material) property getter
"""
return self._diger
@property
def dig(self):
"""
Returns qualified Base64 digest of self.raw
dig (digest) property getter
"""
return self.diger.qb64
@property
def digb(self):
"""
Returns qualified Base64 digest of self.raw
dig (digest) property getter
"""
return self.diger.qb64b
@property
def verfers(self):
"""
Returns list of Verifier instances as converted from .ked.keys
verfers property getter
"""
if "keys" in self.ked: # establishment event
keys = self.ked["keys"]
else: # non-establishment event
keys = []
return [Verfer(qb64=key) for key in keys]
| 33.05
| 115
| 0.586007
|
30d4ca8758e8f5e1ee387ec5e1b8a137209e67fa
| 5,454
|
py
|
Python
|
env.clearblockd/lib/python2.7/site-packages/strict_rfc3339.py
|
Organizational-Proof-Of-Work/clearinghoused_build
|
7bab4ccb516015913bad41cfdc9eb15d3fbfcaf4
|
[
"MIT"
] | null | null | null |
env.clearblockd/lib/python2.7/site-packages/strict_rfc3339.py
|
Organizational-Proof-Of-Work/clearinghoused_build
|
7bab4ccb516015913bad41cfdc9eb15d3fbfcaf4
|
[
"MIT"
] | null | null | null |
env.clearblockd/lib/python2.7/site-packages/strict_rfc3339.py
|
Organizational-Proof-Of-Work/clearinghoused_build
|
7bab4ccb516015913bad41cfdc9eb15d3fbfcaf4
|
[
"MIT"
] | null | null | null |
# Copyright 2012 (C) Daniel Richman, Adam Greig
#
# This file is part of strict_rfc3339.
#
# strict_rfc3339 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# strict_rfc3339 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with strict_rfc3339. If not, see <http://www.gnu.org/licenses/>.
"""
Super simple lightweight RFC3339 functions
"""
import re
import time
import calendar
__all__ = ["validate_rfc3339",
"InvalidRFC3339Error",
"rfc3339_to_timestamp",
"timestamp_to_rfc3339_utcoffset",
"timestamp_to_rfc3339_localoffset",
"now_to_rfc3339_utcoffset",
"now_to_rfc3339_localoffset"]
rfc3339_regex = re.compile(
r"(\d\d\d\d)\-(\d\d)\-(\d\d)T"
r"(\d\d):(\d\d):(\d\d)(\.\d+)?(Z|([+\-])(\d\d):(\d\d))")
def validate_rfc3339(datestring):
"""Check an RFC3339 string is valid via a regex and some range checks"""
m = rfc3339_regex.match(datestring)
if m is None:
return False
groups = m.groups()
year, month, day, hour, minute, second = [int(i) for i in groups[:6]]
if not 1 <= year <= 9999:
# Have to reject this, unfortunately (despite it being OK by rfc3339):
# calendar.timegm/calendar.monthrange can't cope (since datetime can't)
return False
if not 1 <= month <= 12:
return False
(_, max_day) = calendar.monthrange(year, month)
if not 1 <= day <= max_day:
return False
if not (0 <= hour <= 23 and 0 <= minute <= 59 and 0 <= second <= 59):
# forbid leap seconds :-(. See README
return False
if groups[7] != "Z":
(offset_sign, offset_hours, offset_mins) = groups[8:]
if not (0 <= int(offset_hours) <= 23 and 0 <= int(offset_mins) <= 59):
return False
# all OK
return True
class InvalidRFC3339Error(ValueError):
"""Subclass of ValueError thrown by rfc3339_to_timestamp"""
pass
def rfc3339_to_timestamp(datestring):
"""Convert an RFC3339 date-time string to a UTC UNIX timestamp"""
if not validate_rfc3339(datestring):
raise InvalidRFC3339Error
groups = rfc3339_regex.match(datestring).groups()
time_tuple = [int(p) for p in groups[:6]]
timestamp = calendar.timegm(time_tuple)
seconds_part = groups[6]
if seconds_part is not None:
timestamp += float("0" + seconds_part)
if groups[7] != "Z":
(offset_sign, offset_hours, offset_mins) = groups[8:]
offset_seconds = int(offset_hours) * 3600 + int(offset_mins) * 60
if offset_sign == '-':
offset_seconds = -offset_seconds
timestamp -= offset_seconds
return timestamp
def _make_datestring_start(time_tuple, seconds_part):
ds_format = "{0:04d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}"
datestring = ds_format.format(*time_tuple)
seconds_part_str = "{0:06d}".format(int(round(seconds_part * 1e6)))
seconds_part_str = seconds_part_str.rstrip("0")
if seconds_part_str != "":
datestring += "." + seconds_part_str
return datestring
def timestamp_to_rfc3339_utcoffset(timestamp):
"""Convert a UTC UNIX timestamp to RFC3339, with the offset as 'Z'"""
timestamp_int = int(timestamp)
seconds_part = timestamp % 1
time_tuple = time.gmtime(timestamp_int)
datestring = _make_datestring_start(time_tuple, seconds_part)
datestring += "Z"
assert abs(rfc3339_to_timestamp(datestring) - timestamp) < 0.000001
return datestring
def timestamp_to_rfc3339_localoffset(timestamp):
"""
Convert a UTC UNIX timestamp to RFC3339, using the local offset.
localtime() provides the time parts. The difference between gmtime and
localtime tells us the offset.
"""
timestamp_int = int(timestamp)
seconds_part = timestamp % 1
time_tuple = time.localtime(timestamp_int)
datestring = _make_datestring_start(time_tuple, seconds_part)
gm_time_tuple = time.gmtime(timestamp_int)
offset = calendar.timegm(time_tuple) - calendar.timegm(gm_time_tuple)
if abs(offset) % 60 != 0:
raise ValueError("Your local offset is not a whole minute")
offset_minutes = abs(offset) // 60
offset_hours = offset_minutes // 60
offset_minutes %= 60
offset_string = "{0:02d}:{1:02d}".format(offset_hours, offset_minutes)
if offset < 0:
datestring += "-"
else:
datestring += "+"
datestring += offset_string
assert abs(rfc3339_to_timestamp(datestring) - timestamp) < 0.000001
return datestring
def now_to_rfc3339_utcoffset(integer=True):
"""Convert the current time to RFC3339, with the offset as 'Z'"""
timestamp = time.time()
if integer:
timestamp = int(timestamp)
return timestamp_to_rfc3339_utcoffset(timestamp)
def now_to_rfc3339_localoffset(integer=True):
"""Convert the current time to RFC3339, using the local offset."""
timestamp = time.time()
if integer:
timestamp = int(timestamp)
return timestamp_to_rfc3339_localoffset(timestamp)
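# Minimal usage sketch (added for clarity, not part of the original module); the
# expected values follow from the functions above for a plain UTC instant:
if __name__ == "__main__":
    assert validate_rfc3339("2012-01-01T00:00:00Z")
    assert rfc3339_to_timestamp("2012-01-01T00:00:00Z") == 1325376000
    # The same instant expressed with a +01:00 offset maps to the same timestamp.
    assert rfc3339_to_timestamp("2012-01-01T01:00:00+01:00") == 1325376000
    assert timestamp_to_rfc3339_utcoffset(1325376000) == "2012-01-01T00:00:00Z"
    print("strict_rfc3339 examples OK")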
| 29.803279
| 79
| 0.673817
|
ebf1284fe505b0ca5ef58367e84a214bd816ff4e
| 3,092
|
py
|
Python
|
resource_monitor_sdk/model/flowable_service/bpmn_end_event_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
resource_monitor_sdk/model/flowable_service/bpmn_end_event_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
resource_monitor_sdk/model/flowable_service/bpmn_end_event_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bpmn_end_event.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from resource_monitor_sdk.model.flowable_service import bpmn_links_pb2 as resource__monitor__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='bpmn_end_event.proto',
package='flowable_service',
syntax='proto3',
serialized_options=_b('ZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_service'),
serialized_pb=_b('\n\x14\x62pmn_end_event.proto\x12\x10\x66lowable_service\x1a<resource_monitor_sdk/model/flowable_service/bpmn_links.proto\"F\n\x0c\x42PMNEndEvent\x12\n\n\x02id\x18\x01 \x01(\t\x12*\n\x05links\x18\x02 \x01(\x0b\x32\x1b.flowable_service.BPMNLinksBLZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_serviceb\x06proto3')
,
dependencies=[resource__monitor__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2.DESCRIPTOR,])
_BPMNENDEVENT = _descriptor.Descriptor(
name='BPMNEndEvent',
full_name='flowable_service.BPMNEndEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='flowable_service.BPMNEndEvent.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='links', full_name='flowable_service.BPMNEndEvent.links', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=104,
serialized_end=174,
)
_BPMNENDEVENT.fields_by_name['links'].message_type = resource__monitor__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2._BPMNLINKS
DESCRIPTOR.message_types_by_name['BPMNEndEvent'] = _BPMNENDEVENT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
BPMNEndEvent = _reflection.GeneratedProtocolMessageType('BPMNEndEvent', (_message.Message,), {
'DESCRIPTOR' : _BPMNENDEVENT,
'__module__' : 'bpmn_end_event_pb2'
# @@protoc_insertion_point(class_scope:flowable_service.BPMNEndEvent)
})
_sym_db.RegisterMessage(BPMNEndEvent)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 37.707317
| 355
| 0.787516
|
ce29fd3f463d50e3ffd1af4b29f333c3abf050df
| 8,794
|
py
|
Python
|
lib/modules/collection/inveigh.py
|
asmc/Empire-Win
|
af27f437ee919c9b65e3c2dd7c006f11ff5d5b2a
|
[
"BSD-3-Clause"
] | 1
|
2019-03-04T20:30:41.000Z
|
2019-03-04T20:30:41.000Z
|
lib/modules/collection/inveigh.py
|
ninj4c0d3r/Empire
|
e07e7ca5398dce82f9ce10028355f909d102838b
|
[
"BSD-3-Clause"
] | null | null | null |
lib/modules/collection/inveigh.py
|
ninj4c0d3r/Empire
|
e07e7ca5398dce82f9ce10028355f909d102838b
|
[
"BSD-3-Clause"
] | 5
|
2017-10-17T08:16:13.000Z
|
2021-04-30T02:41:02.000Z
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-Inveigh',
'Author': ['Kevin Robertson'],
'Description': ('Inveigh is a Windows PowerShell LLMNR/NBNS spoofer/man-in-the-middle tool.'),
'Background' : True,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : True,
'MinPSVersion' : '2',
'Comments': [
'https://github.com/Kevin-Robertson/Inveigh'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'IP' : {
'Description' : 'Specific local IP address for listening. This IP address will also be used for LLMNR/NBNS spoofing if the SpooferIP parameter is not set.',
'Required' : False,
'Value' : ''
},
'SpooferIP' : {
'Description' : 'Specific IP address for LLMNR/NBNS spoofing. This parameter is only necessary when redirecting victims to a system other than the Inveigh host.',
'Required' : False,
'Value' : ''
},
'SpooferHostsReply' : {
'Description' : 'Comma separated list of requested hostnames to respond to when spoofing with LLMNR and NBNS.',
'Required' : False,
'Value' : ''
},
'SpooferHostsIgnore' : {
'Description' : 'Comma separated list of requested hostnames to ignore when spoofing with LLMNR and NBNS.',
'Required' : False,
'Value' : ''
},
'SpooferIPsReply' : {
'Description' : 'Comma separated list of source IP addresses to respond to when spoofing with LLMNR and NBNS.',
'Required' : False,
'Value' : ''
},
'SpooferIPsIgnore' : {
'Description' : 'Comma separated list of source IP addresses to ignore when spoofing with LLMNR and NBNS.',
'Required' : False,
'Value' : ''
},
'SpooferRepeat' : {
'Description' : 'Enable/Disable repeated LLMNR/NBNS spoofs to a victim system after one user challenge/response has been captured (Y/N).',
'Required' : False,
'Value' : 'Y'
},
'LLMNR' : {
'Description' : 'Enable/Disable LLMNR spoofing (Y/N).',
'Required' : False,
'Value' : 'Y'
},
'LLMNRTTL' : {
'Description' : 'Custom LLMNR TTL in seconds for the response packet.',
'Required' : False,
'Value' : ''
},
'NBNS' : {
'Description' : 'Enable/Disable NBNS spoofing (Y/N).',
'Required' : False,
'Value' : 'Y'
},
'NBNSTTL' : {
'Description' : 'Custom NBNS TTL in seconds for the response packet.',
'Required' : False,
'Value' : ''
},
'NBNSTypes' : {
'Description' : 'Comma separated list of NBNS types to spoof.',
'Required' : False,
'Value' : '00,20'
},
'HTTP' : {
'Description' : 'Enable/Disable HTTP challenge/response capture (Y/N).',
'Required' : False,
'Value' : 'Y'
},
'HTTPAuth' : {
'Description' : 'HTTP server authentication type. This setting does not apply to wpad.dat requests (Anonymous,Basic,NTLM).',
'Required' : False,
'Value' : 'NTLM'
},
'HTTPBasicRealm' : {
'Description' : 'Realm name for Basic authentication. This parameter applies to both HTTPAuth and WPADAuth.',
'Required' : False,
'Value' : 'IIS'
},
'HTTPResponse' : {
'Description' : 'String or HTML to serve as the default HTTP response. This response will not be used for wpad.dat requests. Do not wrap in quotes and use PowerShell character escapes where necessary.',
'Required' : False,
'Value' : ''
},
'WPADAuth' : {
'Description' : 'HTTP server authentication type for wpad.dat requests. Setting to Anonymous can prevent browser login prompts (Anonymous,Basic,NTLM).',
'Required' : False,
'Value' : 'NTLM'
},
'WPADIP' : {
'Description' : 'Proxy server IP to be included in a basic wpad.dat response for WPAD enabled browsers. This parameter must be used with WPADPort.',
'Required' : False,
'Value' : ''
},
'WPADPort' : {
'Description' : 'Proxy server port to be included in a basic wpad.dat response for WPAD enabled browsers. This parameter must be used with WPADIP.',
'Required' : False,
'Value' : ''
},
'WPADDirectHosts' : {
'Description' : 'Comma separated list of hosts to list as direct in the wpad.dat file. Listed hosts will not be routed through the defined proxy. Add the Empire host to avoid catching Empire HTTP traffic.',
'Required' : False,
'Value' : ''
},
'SMB' : {
'Description' : 'Enable/Disable SMB challenge/response capture (Y/N).',
'Required' : False,
'Value' : 'Y'
},
'Challenge' : {
'Description' : 'Specific 16 character hex NTLM challenge for use with the HTTP listener. If left blank, a random challenge will be generated for each request.',
'Required' : False,
'Value' : ''
},
'MachineAccounts' : {
'Description' : 'Enable/Disable showing NTLM challenge/response captures from machine accounts (Y/N).',
'Required' : False,
'Value' : 'N'
},
'RunTime' : {
'Description' : 'Run time duration in minutes.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/collection/Invoke-Inveigh.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
# set defaults for Empire
script += "\n" + 'Invoke-Inveigh -Tool "2" '
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
if "," in str(values['Value']):
quoted = '"' + str(values['Value']).replace(',', '","') + '"'
script += " -" + str(option) + " " + quoted
else:
script += " -" + str(option) + " \"" + str(values['Value']) + "\""
return script
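# Illustrative example (added for clarity, not produced verbatim by the module): with
# the default option values above, generate() returns the module source followed by a
# launcher line similar to
#   Invoke-Inveigh -Tool "2" -SpooferRepeat "Y" -LLMNR "Y" -NBNS "Y" -NBNSTypes "00","20" -HTTP "Y" ...
# Comma separated values such as NBNSTypes are re-quoted per element by the loop above;
# option ordering follows Python 2 dict iteration order and is not guaranteed.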
| 43.107843
| 226
| 0.461792
|
92ace722b516a8aebaa2ed8d1c2f5b62909aa4ae
| 4,999
|
py
|
Python
|
networkapi/ip/resource/SearchIPv6EnvironmentResource.py
|
vinicius-marinho/GloboNetworkAPI
|
94651d3b4dd180769bc40ec966814f3427ccfb5b
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/ip/resource/SearchIPv6EnvironmentResource.py
|
leopoldomauricio/GloboNetworkAPI
|
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/ip/resource/SearchIPv6EnvironmentResource.py
|
shildenbrand/GloboNetworkAPI
|
515d5e961456cee657c08c275faa1b69b7452719
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from networkapi.admin_permission import AdminPermission
from networkapi.ambiente.models import Ambiente
from networkapi.ambiente.models import AmbienteError
from networkapi.ambiente.models import AmbienteNotFoundError
from networkapi.auth import has_perm
from networkapi.exception import InvalidValueError
from networkapi.infrastructure.ipaddr import IPv6Address
from networkapi.infrastructure.xml_utils import dumps_networkapi
from networkapi.infrastructure.xml_utils import loads
from networkapi.ip.models import IpError
from networkapi.ip.models import IpNotFoundError
from networkapi.ip.models import Ipv6
from networkapi.rest import RestResource
from networkapi.rest import UserNotAuthorizedError
from networkapi.util import is_valid_int_greater_zero_param
from networkapi.util import is_valid_ipv6
class SearchIPv6EnvironmentResource(RestResource):
log = logging.getLogger('SearchIPv6EnvironmentResource')
def handle_post(self, request, user, *args, **kwargs):
"""Treat requests POST to verify that the IPv6 belongs to environment.
URL: /ipv6/environment/
"""
self.log.info('Verify that the IPv6 belongs to environment')
try:
# User permission
if not has_perm(user, AdminPermission.IPS, AdminPermission.READ_OPERATION):
self.log.error(
u'User does not have permission to perform the operation.')
raise UserNotAuthorizedError(None)
# Load XML data
xml_map, attrs_map = loads(request.raw_post_data)
# XML data format
networkapi_map = xml_map.get('networkapi')
if networkapi_map is None:
return self.response_error(3, u'There is no value to the networkapi tag of XML request.')
ipv6_map = networkapi_map.get('ipv6_map')
if ipv6_map is None:
return self.response_error(3, u'There is no value to the ipv6_map tag of XML request.')
# Get XML data
environment_id = ipv6_map.get('id_environment')
ipv6 = ipv6_map.get('ipv6')
# Valid Environment ID
if not is_valid_int_greater_zero_param(environment_id):
self.log.error(
u'The id_environment parameter is not a valid value: %s.', environment_id)
raise InvalidValueError(None, 'id_environment', environment_id)
# Valid IPv6 ID
if not is_valid_ipv6(ipv6):
self.log.error(
u'The ipv6 parameter is not a valid value: %s.', ipv6)
raise InvalidValueError(None, 'ipv6', ipv6)
blocks = str(IPv6Address(ipv6).exploded).split(':')
# Find Environment by ID to check if it exist
environment = Ambiente.get_by_pk(environment_id)
# Find Ipv6 by blocks to check if it exist
IPv6 = Ipv6.get_by_octs_and_environment(blocks[0], blocks[1], blocks[2], blocks[
3], blocks[4], blocks[5], blocks[6], blocks[7], environment_id)
# Generate return map
ipv6_map = dict()
ipv6_map['id'] = IPv6.id
ipv6_map['id_vlan'] = IPv6.networkipv6.vlan.id
ipv6_map['bloco1'] = IPv6.block1
ipv6_map['bloco2'] = IPv6.block2
ipv6_map['bloco3'] = IPv6.block3
ipv6_map['bloco4'] = IPv6.block4
ipv6_map['bloco5'] = IPv6.block5
ipv6_map['bloco6'] = IPv6.block6
ipv6_map['bloco7'] = IPv6.block7
ipv6_map['bloco8'] = IPv6.block8
ipv6_map['descricao'] = IPv6.description
return self.response(dumps_networkapi({'ipv6': ipv6_map}))
except UserNotAuthorizedError:
return self.not_authorized()
except InvalidValueError, e:
return self.response_error(269, e.param, e.value)
except IpNotFoundError:
return self.response_error(119)
except AmbienteNotFoundError:
return self.response_error(112)
except (IpError, AmbienteError):
return self.response_error(1)
| 42.008403
| 115
| 0.665333
|
52eea582cf2767e9a75e96eca815bb9a1d8eb01d
| 6,591
|
py
|
Python
|
Star Wars Lite/structs/elements.py
|
shinozaki1595/hephaestus
|
f93dd0bddf432664cebaa2793d601b18387625bb
|
[
"Apache-2.0"
] | 6
|
2020-10-05T17:46:15.000Z
|
2020-12-29T03:42:12.000Z
|
Star Wars Lite/structs/elements.py
|
shinozaki1595/hephaestus
|
f93dd0bddf432664cebaa2793d601b18387625bb
|
[
"Apache-2.0"
] | 20
|
2020-10-04T19:59:21.000Z
|
2022-01-03T14:07:30.000Z
|
Star Wars Lite/structs/elements.py
|
shinozaki1595/hephaestus
|
f93dd0bddf432664cebaa2793d601b18387625bb
|
[
"Apache-2.0"
] | 14
|
2020-10-02T13:10:38.000Z
|
2021-12-14T19:04:49.000Z
|
import pygame, sys
from pygame import mixer
import os
import main
from structs import root
sys.path.append(os.getcwd())
from structs import sprites
class Shuttle:
REST_TIME = 30
def __init__(self, posx, posy, health=100):
self.shuttle_image = None
self.laser_image = None
self.lasers = []
self.rest_timer = 0
self.posx = posx
self.posy = posy
self.health = health
def draw(self, window):
window.blit(self.shuttle_image, (self.posx, self.posy))
for laser in self.lasers:
laser.draw(window)
def fire_lasers(self, velocity, obj):
self.rest_time()
for laser in self.lasers:
laser.move(velocity)
if laser.screen_bound(sprites.HEIGHT):
self.lasers.remove(laser)
elif laser.collision(obj):
obj.health -= 10
self.lasers.remove(laser)
def rest_time(self):
if self.rest_timer >= self.REST_TIME:
self.rest_timer = 0
elif self.rest_timer > 0:
self.rest_timer += 1
def shoot(self):
if self.rest_timer == 0:
laser = Laser(self.posx, self.posy, self.laser_image)
self.lasers.append(laser)
self.rest_timer = 1
def get_width(self):
return self.shuttle_image.get_width()
def get_height(self):
return self.shuttle_image.get_height()
class Laser:
def __init__(self, posx, posy, image):
self.posx = posx
self.posy = posy
self.image = image
self.mask = pygame.mask.from_surface(self.image)
def draw(self, window):
window.blit(self.image, (self.posx, self.posy))
def move(self, velocity):
self.posy += velocity
def screen_bound(self, height):
return not (self.posy <= height and self.posy >= 0)
def collision(self, obj):
return object_collision(self, obj)
class Player(Shuttle):
def __init__(self, posx, posy, health=100):
super().__init__(posx, posy, health)
self.shuttle_image = sprites.PLAYER
self.laser_image = sprites.LASER_YELLOW
self.mask = pygame.mask.from_surface(self.shuttle_image)
self.max_health = health
def fire_lasers(self, velocity, objs, score):
self.rest_time()
for laser in self.lasers:
laser.move(velocity)
if laser.screen_bound(sprites.HEIGHT):
self.lasers.remove(laser)
else:
for obj in objs:
if laser.collision(obj):
collision_sound = mixer.Sound(r"sounds\explosion.wav")
collision_sound.play()
objs.remove(obj)
score.update()
if laser in self.lasers:
self.lasers.remove(laser)
def draw(self, window):
super().draw(window)
self.healthbar(window)
def healthbar(self, window):
pygame.draw.rect(
window,
(255, 0, 0),
(
self.posx,
self.posy + self.shuttle_image.get_height() + 10,
self.shuttle_image.get_width(),
10,
),
)
pygame.draw.rect(
window,
(255, 255, 0),
(
self.posx,
self.posy + self.shuttle_image.get_height() + 10,
self.shuttle_image.get_width() * (self.health / self.max_health),
10,
),
)
class Enemy(Shuttle):
# using rgbs as placeholders
COLOR_PLACEHOLDER = {
"red": (sprites.ENEMY0, sprites.LASER_RED),
"green": (sprites.ENEMY1, sprites.LASER_GREEN),
"blue": (sprites.ENEMY3, sprites.LASER_BLUE),
}
def __init__(self, posx, posy, color, health=100):
super().__init__(posx, posy, health)
self.shuttle_image, self.laser_image = self.COLOR_PLACEHOLDER[color]
self.mask = pygame.mask.from_surface(self.shuttle_image)
def move(self, velocity):
self.posy += velocity
def shoot(self):
if self.rest_timer == 0:
laser = Laser(self.posx - 20, self.posy, self.laser_image)
self.lasers.append(laser)
self.rest_timer = 1
class Score():
def __init__(self):
self.score = 0
self.high_scores = [int(line.rstrip("\n")) for line in open("leaderboard.txt")]
def update(self): #Ten points per ship destroyed
self.score += 10
def wave_score_update(self): #One hundred points per wave cleared
self.score += 100
def top_3_high_score(self): #Still needs to be added
return self.high_scores.sort(reverse=True)
def update_high_scores(self): #Still needs to be added
self.high_scores.sort()
if self.score > self.high_scores[0]:
del self.high_scores[0]
self.high_scores.append(self.score)
with open("leaderboard.txt", "w") as leaderboard:
leaderboard.writelines("%s\n" % high_score for high_score in self.high_scores)
return self.high_scores
def object_collision(obj1, obj2):
offset_x = obj2.posx - obj1.posx
offset_y = obj2.posy - obj1.posy
return obj1.mask.overlap(obj2.mask, (offset_x, offset_y)) != None
def button(active,inactive,get_top,action=None):
click = pygame.mouse.get_pressed()
music_paused = False
mx, my = pygame.mouse.get_pos()
display = pygame.image.load(inactive).convert_alpha()
display_rect = display.get_rect(center=sprites.WINDOW.get_rect().center, top=get_top)
if display_rect.collidepoint((mx, my)):
display = pygame.image.load(active).convert_alpha()
if click[0] == 1 and action != None:
if action == 'play':
root.game()
elif action == 'options':
root.options()
elif action == 'exit':
pygame.quit()
quit()
elif action == 'back':
main.game_menu()
elif action == 'soundon':
                mixer.music.play(-1)  # play() returns None, so just (re)start looping playback
elif action == 'soundoff':
if not music_paused:
pygame.mixer.music.pause()
music_paused = True
else:
pygame.mixer.music.unpause()
else:
display = pygame.image.load(inactive).convert_alpha()
sprites.WINDOW.blit(display, display_rect)
| 31.089623
| 90
| 0.570475
|
8d9017e92eee5163fc24782c6069e755e0abd799
| 962
|
py
|
Python
|
stage1/thread_demo/demo2.py
|
kaixiang1992/python-review
|
7f4f82b453f81b47af7ab1e8f3b3d07d1d75cbe4
|
[
"MIT"
] | null | null | null |
stage1/thread_demo/demo2.py
|
kaixiang1992/python-review
|
7f4f82b453f81b47af7ab1e8f3b3d07d1d75cbe4
|
[
"MIT"
] | null | null | null |
stage1/thread_demo/demo2.py
|
kaixiang1992/python-review
|
7f4f82b453f81b47af7ab1e8f3b3d07d1d75cbe4
|
[
"MIT"
] | null | null | null |
"""
2019/12/12 22:11
150.【多线程】使用Thread类创建多线程(线程)
"""
"""
查看线程数:
使用`threading.enumerate()`函数可以看到当前线程的数量.
查看当前线程的名字:
使用`threading.currentThread()`可以看到当前线程的信息.
继承自`threading.Thread`类:
为了让线程代码更好的封装。可以使用threading模块下的Thread类,继承自这个类,
然后实现run方法,线程就会自动运行run方法中的代码。
"""
import threading
import time
class codingThread(threading.Thread):
def run(self):
for x in range(0, 3):
            print('Writing code %s' % threading.currentThread())
time.sleep(1)
class drawingThread(threading.Thread):
def run(self):
for x in range(0, 3):
            print('Drawing %s' % threading.currentThread())
time.sleep(1)
if __name__ == '__main__':
coding = codingThread()
drawing = drawingThread()
coding.start()
drawing.start()
# TODO: [<_MainThread(MainThread, started 13016)>, <codingThread(Thread-1, started 3300)>, <drawingThread(
# Thread-2, started 8452)>]
    print('Current total thread count: %s' % threading.enumerate())
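# --- Hedged example (not part of the original demo above) --------------------------
# The docstring above describes subclassing threading.Thread; for comparison, this is
# a minimal sketch of the functional alternative: pass a target callable instead of
# overriding run(). The worker function and task names below are invented for
# illustration.
def worker(task_name):
    for _ in range(3):
        print('%s running in %s' % (task_name, threading.current_thread().name))
        time.sleep(1)

def run_functional_demo():
    threads = [threading.Thread(target=worker, args=(name,))
               for name in ('coding', 'drawing')]
    for t in threads:
        t.start()
    print('Current total thread count: %s' % len(threading.enumerate()))
    for t in threads:
        t.join()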
| 19.24
| 110
| 0.656965
|
cb3c49c02ea4d43945e27166ad8b16d366c097bb
| 13,840
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/network/v20200401/virtual_hub.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20200401/virtual_hub.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20200401/virtual_hub.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['VirtualHub']
class VirtualHub(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
address_prefix: Optional[pulumi.Input[str]] = None,
azure_firewall: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
express_route_gateway: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
p2_s_vpn_gateway: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
route_table: Optional[pulumi.Input[pulumi.InputType['VirtualHubRouteTableArgs']]] = None,
security_partner_provider: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
security_provider_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_hub_name: Optional[pulumi.Input[str]] = None,
virtual_hub_route_table_v2s: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualHubRouteTableV2Args']]]]] = None,
virtual_network_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['HubVirtualNetworkConnectionArgs']]]]] = None,
virtual_wan: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
vpn_gateway: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
VirtualHub Resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] address_prefix: Address-prefix for this VirtualHub.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] azure_firewall: The azureFirewall associated with this VirtualHub.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] express_route_gateway: The expressRouteGateway associated with this VirtualHub.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] p2_s_vpn_gateway: The P2SVpnGateway associated with this VirtualHub.
:param pulumi.Input[str] resource_group_name: The resource group name of the VirtualHub.
:param pulumi.Input[pulumi.InputType['VirtualHubRouteTableArgs']] route_table: The routeTable associated with this virtual hub.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] security_partner_provider: The securityPartnerProvider associated with this VirtualHub.
:param pulumi.Input[str] security_provider_name: The Security Provider name.
:param pulumi.Input[str] sku: The sku of this VirtualHub.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] virtual_hub_name: The name of the VirtualHub.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualHubRouteTableV2Args']]]] virtual_hub_route_table_v2s: List of all virtual hub route table v2s associated with this VirtualHub.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['HubVirtualNetworkConnectionArgs']]]] virtual_network_connections: List of all vnet connections with this VirtualHub.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] virtual_wan: The VirtualWAN to which the VirtualHub belongs.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] vpn_gateway: The VpnGateway associated with this VirtualHub.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['address_prefix'] = address_prefix
__props__['azure_firewall'] = azure_firewall
__props__['express_route_gateway'] = express_route_gateway
__props__['id'] = id
if location is None:
raise TypeError("Missing required property 'location'")
__props__['location'] = location
__props__['p2_s_vpn_gateway'] = p2_s_vpn_gateway
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['route_table'] = route_table
__props__['security_partner_provider'] = security_partner_provider
__props__['security_provider_name'] = security_provider_name
__props__['sku'] = sku
__props__['tags'] = tags
if virtual_hub_name is None:
raise TypeError("Missing required property 'virtual_hub_name'")
__props__['virtual_hub_name'] = virtual_hub_name
__props__['virtual_hub_route_table_v2s'] = virtual_hub_route_table_v2s
__props__['virtual_network_connections'] = virtual_network_connections
__props__['virtual_wan'] = virtual_wan
__props__['vpn_gateway'] = vpn_gateway
__props__['etag'] = None
__props__['name'] = None
__props__['provisioning_state'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20180401:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20180601:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20180701:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20180801:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20181001:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20181101:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20181201:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190201:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190401:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190601:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190701:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20191101:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VirtualHub"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VirtualHub")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VirtualHub, __self__).__init__(
'azure-nextgen:network/v20200401:VirtualHub',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualHub':
"""
Get an existing VirtualHub resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return VirtualHub(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> pulumi.Output[Optional[str]]:
"""
Address-prefix for this VirtualHub.
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter(name="azureFirewall")
def azure_firewall(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The azureFirewall associated with this VirtualHub.
"""
return pulumi.get(self, "azure_firewall")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="expressRouteGateway")
def express_route_gateway(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The expressRouteGateway associated with this VirtualHub.
"""
return pulumi.get(self, "express_route_gateway")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="p2SVpnGateway")
def p2_s_vpn_gateway(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The P2SVpnGateway associated with this VirtualHub.
"""
return pulumi.get(self, "p2_s_vpn_gateway")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the virtual hub resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="routeTable")
def route_table(self) -> pulumi.Output[Optional['outputs.VirtualHubRouteTableResponse']]:
"""
The routeTable associated with this virtual hub.
"""
return pulumi.get(self, "route_table")
@property
@pulumi.getter(name="securityPartnerProvider")
def security_partner_provider(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The securityPartnerProvider associated with this VirtualHub.
"""
return pulumi.get(self, "security_partner_provider")
@property
@pulumi.getter(name="securityProviderName")
def security_provider_name(self) -> pulumi.Output[Optional[str]]:
"""
The Security Provider name.
"""
return pulumi.get(self, "security_provider_name")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional[str]]:
"""
The sku of this VirtualHub.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="virtualHubRouteTableV2s")
def virtual_hub_route_table_v2s(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualHubRouteTableV2Response']]]:
"""
List of all virtual hub route table v2s associated with this VirtualHub.
"""
return pulumi.get(self, "virtual_hub_route_table_v2s")
@property
@pulumi.getter(name="virtualNetworkConnections")
def virtual_network_connections(self) -> pulumi.Output[Optional[Sequence['outputs.HubVirtualNetworkConnectionResponse']]]:
"""
List of all vnet connections with this VirtualHub.
"""
return pulumi.get(self, "virtual_network_connections")
@property
@pulumi.getter(name="virtualWan")
def virtual_wan(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The VirtualWAN to which the VirtualHub belongs.
"""
return pulumi.get(self, "virtual_wan")
@property
@pulumi.getter(name="vpnGateway")
def vpn_gateway(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The VpnGateway associated with this VirtualHub.
"""
return pulumi.get(self, "vpn_gateway")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
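# --- Hedged usage sketch (not part of the generated SDK module above) --------------
# How this resource might be declared from a Pulumi program, using only constructor
# arguments documented in __init__ above. The function below is hypothetical; the
# resource group, location and address prefix are placeholder values, and it is meant
# to be called from a Pulumi program (e.g. __main__.py), not at import time.
def _example_virtual_hub() -> 'VirtualHub':
    return VirtualHub(
        "example-hub",
        resource_group_name="example-rg",
        virtual_hub_name="example-hub",
        location="westeurope",
        address_prefix="10.1.0.0/24",
        sku="Standard",
    )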
| 48.904594
| 1,370
| 0.675145
|
53de5ab78261eaf1c6d00bfe486291e32a8eb5c3
| 291
|
py
|
Python
|
hit_me_please/hitters/tests/test_models.py
|
kittinan/hit-me-please
|
8f4ec834dec99d7ce832dbe7565c04b0b5137a68
|
[
"MIT"
] | 3
|
2019-06-04T02:51:50.000Z
|
2019-12-26T13:58:51.000Z
|
hit_me_please/hitters/tests/test_models.py
|
kittinan/hit-me-please
|
8f4ec834dec99d7ce832dbe7565c04b0b5137a68
|
[
"MIT"
] | 13
|
2019-12-04T23:05:20.000Z
|
2022-02-10T08:35:35.000Z
|
hit_me_please/hitters/tests/test_models.py
|
yothinix/hit-me-please
|
8f4ec834dec99d7ce832dbe7565c04b0b5137a68
|
[
"MIT"
] | 1
|
2019-06-04T04:21:54.000Z
|
2019-06-04T04:21:54.000Z
|
from django.test import TestCase
from ..models import Hitter
class HitterTest(TestCase):
def test_hitter_model_should_have_email_field(self):
hitter = Hitter.objects.create(
email='m@yothinix.com'
)
self.assertEqual(hitter.email, 'm@yothinix.com')
| 24.25
| 56
| 0.690722
|
9208273a9973bab72ec6e9a2fcf4a24d6d95d45a
| 4,122
|
py
|
Python
|
dtree/random_forest.py
|
zhaoyan1117/DecisionTree
|
f1db94197abf0e4c2d6da8c547109a24ee89907a
|
[
"BSD-2-Clause"
] | null | null | null |
dtree/random_forest.py
|
zhaoyan1117/DecisionTree
|
f1db94197abf0e4c2d6da8c547109a24ee89907a
|
[
"BSD-2-Clause"
] | null | null | null |
dtree/random_forest.py
|
zhaoyan1117/DecisionTree
|
f1db94197abf0e4c2d6da8c547109a24ee89907a
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from multiprocessing import cpu_count, Pool
import time, signal
import numpy as np
from .decision_tree import DecisionTree
from .util import iterate_with_progress
#################################
# Multi-process funcs & klasses #
#################################
class KeyboardInterruptError(Exception): pass
def train_tree(args):
try:
tree, data, labels = args
tree.train(data, labels)
return tree
except KeyboardInterrupt:
raise KeyboardInterruptError()
def prune_tree(args):
try:
tree, data, labels = args
tree.prune(data, labels)
return tree
except KeyboardInterrupt:
raise KeyboardInterruptError()
class RandomForest:
def __init__(self, impurity, segmentor, **kwargs):
self._impurity = impurity
self._segmentor = segmentor
self._num_trees = kwargs.get('num_trees', 10)
assert self._num_trees > 0
self._max_depth = kwargs.get('max_depth', None)
self._min_samples = kwargs.get('min_samples', 2)
self._trees = []
def train(self, data, labels):
self._klasses = np.unique(labels)
print 'Prepare parallel training.'
args_list = []
for _ in iterate_with_progress(xrange(self._num_trees)):
sampled_data, sampled_labels = self._sample_data_labels(data, labels)
tree = DecisionTree(self._impurity,
self._segmentor,
max_depth=self._max_depth,
min_samples=self._min_samples)
args_list.append([tree, sampled_data, sampled_labels])
num_processes = cpu_count()
print 'Train in parallel with {0} processes.'.format(num_processes)
pool = Pool(num_processes)
try:
start = time.time()
self._trees = pool.map(train_tree, args_list)
print 'Training takes {0} seconds.'.format(int(time.time() - start))
pool.close()
except KeyboardInterrupt:
pool.terminate()
except Exception, e:
pool.terminate()
finally:
pool.join()
def predict(self, data):
if not self._trees:
raise StandardError("Random forest has not been trained.")
def draw_votes(probs):
avg_probs = {}
for klass in self._klasses:
total_prob = sum([prob.get(klass, 0.0) for prob in probs])
avg_probs[klass] = total_prob / self._num_trees
return max(avg_probs, key=lambda klass : avg_probs[klass])
tree_results = np.array([tree.predict(data, True) for tree in self._trees])
return np.apply_along_axis(draw_votes, 0, tree_results)
def score(self, data, labels):
if not self._trees:
raise StandardError("Random forest has not been trained.")
predictions = self.predict(data)
correct_count = np.count_nonzero(predictions == labels)
return correct_count / labels.shape[0]
def prune(self, data, labels):
args_list = []
for tree in self._trees:
args_list.append([tree, data, labels])
num_processes = cpu_count()
print 'Prune in parallel with {0} processes.'.format(num_processes)
pool = Pool(num_processes)
try:
start = time.time()
self._trees = pool.map(prune_tree, args_list)
print 'Pruning takes {0} seconds.'.format(int(time.time() - start))
pool.close()
return self.score(data, labels)
except KeyboardInterrupt:
pool.terminate()
except Exception, e:
pool.terminate()
finally:
pool.join()
def _sample_data_labels(self, data, labels):
num_data = len(data)
assert num_data == len(labels)
data_indices = np.random.choice(num_data, num_data)
sampled_data = data[data_indices,:]
sampled_labels = labels[data_indices]
return sampled_data, sampled_labels
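# --- Hedged example (standalone; not part of the module above) ---------------------
# A minimal sketch of the bootstrap sampling done in _sample_data_labels(): each tree
# is trained on N rows drawn from the N original rows with replacement. The toy data
# below is invented purely for illustration.
def bootstrap_sample(data, labels):
    num_data = len(data)
    indices = np.random.choice(num_data, num_data)  # N draws with replacement
    return data[indices, :], labels[indices]

if __name__ == '__main__':
    toy_data = np.arange(12).reshape(6, 2)
    toy_labels = np.array([0, 1, 0, 1, 0, 1])
    sampled_data, sampled_labels = bootstrap_sample(toy_data, toy_labels)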
| 34.638655
| 83
| 0.604318
|
82c3dc4c9b70518ee5019118c7769d4579caf1a2
| 2,101
|
py
|
Python
|
eventsrouter/views/events.py
|
The-Politico/django-slack-events-router
|
a838d94a55f7be7afeafa19dad093c29e77ebe67
|
[
"MIT"
] | null | null | null |
eventsrouter/views/events.py
|
The-Politico/django-slack-events-router
|
a838d94a55f7be7afeafa19dad093c29e77ebe67
|
[
"MIT"
] | 6
|
2019-12-05T00:43:05.000Z
|
2021-06-09T18:39:48.000Z
|
eventsrouter/views/events.py
|
The-Politico/django-slack-events-router
|
a838d94a55f7be7afeafa19dad093c29e77ebe67
|
[
"MIT"
] | 1
|
2021-05-30T15:00:36.000Z
|
2021-05-30T15:00:36.000Z
|
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from toolbox.django.rest.authentication.slack import SlackSignedAuthentication
from ..models import Route
from ..celery import post_webhook
class Events(APIView):
authentication_classes = (SlackSignedAuthentication,)
permission_classes = ()
def post(self, request, *args, **kwargs):
slack_message = request.data
# Handle Slack's URL verification challenge
# https://api.slack.com/events/url_verification
if slack_message.get("type") == "url_verification":
return Response(
data=slack_message.get("challenge"), status=status.HTTP_200_OK
)
if "event" in slack_message:
channel_id = get_channel_id(slack_message)
event_name = slack_message.get("event", {}).get("type", "")
routes = Route.objects.all()
for route in routes:
if filter_route_by_channel(
route, channel_id
) and filter_route_by_event(route, event_name):
post_webhook.delay(
endpoint=route.endpoint, data=slack_message
)
return Response(status=status.HTTP_200_OK)
def get_channel_id(data):
"""Gets channel id in a few of the different ways Slack sends it."""
channel_id = data.get("event", {}).get("channel", None)
if channel_id is None:
channel_id = data.get("event", {}).get("item", {}).get("channel", None)
if channel_id is None:
channel_id = data.get("event", {}).get("channel_id", None)
return channel_id
def filter_route_by_channel(route, api_id):
if not api_id:
return True
filters = route.channel_filters
if filters.count() == 0:
return True
return filters.filter(api_id=api_id).count() > 0
def filter_route_by_event(route, name):
filters = route.event_filters
if filters.count() == 0:
return True
return filters.filter(name=name).count() > 0
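# --- Hedged example (standalone; not part of the view above) -----------------------
# A minimal sketch of the url_verification handshake handled at the top of
# Events.post(): when the events URL is configured, Slack POSTs a payload of roughly
# this shape and expects the "challenge" value echoed back. The payload below is a
# made-up illustration of that shape.
def echo_slack_challenge(payload):
    """Return the challenge string for a url_verification payload, else None."""
    if payload.get("type") == "url_verification":
        return payload.get("challenge")
    return None

if __name__ == "__main__":
    example_payload = {
        "token": "xxxx-not-a-real-token",
        "challenge": "some-opaque-challenge-string",
        "type": "url_verification",
    }
    assert echo_slack_challenge(example_payload) == "some-opaque-challenge-string"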
| 30.449275
| 79
| 0.641123
|
7258727332089ea605aa06e83f3e108fc62dbf7a
| 2,768
|
py
|
Python
|
testCircuit1.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | null | null | null |
testCircuit1.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | 1
|
2020-02-06T07:02:40.000Z
|
2021-03-01T14:44:40.000Z
|
testCircuit1.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | null | null | null |
from QuESTPy.QuESTBase import init_QuESTLib
from TNPy.TNBase import init_TNLib
# If set up
QuESTPath = "build/TN/QuEST/"
TNPath = "build/TN/"
init_QuESTLib(QuESTPath)
init_TNLib(TNPath)
#!/usr/bin/env python3
from QuESTPy.QuESTFunc import *
from TNPy.TNFunc import *
from TNPy.TNAdditionalGates import *
print("---------------------------------")
print("---- Create QuEST environment----")
print("---------------------------------")
env = createQuESTEnv()
reportQuESTEnv(env)
print("\n\n--------------------------------------------------------------")
print("---- QuEST example of a two qubit circuit: X(0), CNOT(0,1)----")
print("--------------------------------------------------------------")
print("Create qureg. It will be in the zero state by default")
qureg = createQureg(2, env);
print("qureg:")
print(qureg)
print("Apply PauliX to qubit 0")
pauliX(qureg, 0);
print("qureg:")
print(qureg)
print("Apply controlled not controlled by qubit 0 to qubit 1")
controlledNot(qureg, 0, 1);
print("qureg:")
print(qureg)
print("\n\n-----------------------------------------------------------")
print("---- TN example of a two qubit circuit: X(0), CNOT(0,1)----")
print("-----------------------------------------------------------")
print("Create tensor1. It will be in the zero state by default");
tensor1 = createTensor(1, 1, env)
print("Tensor1:");
print(tensor1.qureg)
print("Create tensor2. It will be in the zero state by default");
tensor2 = createTensor(1, 1, env)
print("Tensor2:");
print(tensor2.qureg)
print("Apply pauliX to global qubit 0, which is qubit 0 in tensor1");
TN_singleQubitGate(pauliX, tensor1, 0)
print("Tensor1:");
print(tensor1.qureg)
print("Apply a controlled not gate controlled by global qubit 0, to global qubit 1");
TN_controlledGateControlHalf(controlledNot, tensor1, 0, 1)
TN_controlledGateTargetHalf(controlledNot, tensor2, 1, 0)
print("Tensor1:");
print(tensor1.qureg)
print("Tensor2:");
print(tensor2.qureg)
print("Contract tensor1 and tensor2 across their virtual qubit index");
tensor1Contractions = [1]
tensor2Contractions = [1]
tensor1FreeIndices = [0]
tensor2FreeIndices = [0]
numContractions = 1
numTensor1FreeIndices = 1
numTensor2FreeIndices = 1
# Cast to correct type for c
tensor1Contractions = (c_int*numContractions)(*tensor1Contractions)
tensor2Contractions = (c_int*numContractions)(*tensor2Contractions)
tensor1FreeIndices = (c_int*numTensor1FreeIndices)(*tensor1FreeIndices)
tensor2FreeIndices = (c_int*numTensor2FreeIndices)(*tensor2FreeIndices)
outputTensor = contractIndices(tensor1, tensor2, tensor1Contractions, tensor2Contractions, numContractions,
        tensor1FreeIndices, numTensor1FreeIndices, tensor2FreeIndices, numTensor2FreeIndices, env)
print("output tensor:")
print(outputTensor.qureg)
| 29.763441
| 107
| 0.672688
|
6cad63b0c711acd10cdbc520ef269baad27cd1c6
| 204
|
py
|
Python
|
Task/N-queens-problem/Python/n-queens-problem-1.py
|
LaudateCorpus1/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 5
|
2021-01-29T20:08:05.000Z
|
2022-03-22T06:16:05.000Z
|
Task/N-queens-problem/Python/n-queens-problem-1.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | null | null | null |
Task/N-queens-problem/Python/n-queens-problem-1.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 1
|
2018-11-09T22:08:40.000Z
|
2018-11-09T22:08:40.000Z
|
from itertools import permutations
n = 8
cols = range(n)
# vec[i] is the row of the queen placed in column i; a permutation already guarantees
# distinct rows, so only the two diagonal directions (row+col and row-col) need to be
# checked for duplicates.
for vec in permutations(cols):
    if n == len(set(vec[i]+i for i in cols)) \
       == len(set(vec[i]-i for i in cols)):
        print(vec)
| 22.666667
| 46
| 0.588235
|
55a25c46553f58e10de78129b27b0d5197386cbd
| 5,986
|
py
|
Python
|
db/models/student.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | 1
|
2022-03-03T09:55:57.000Z
|
2022-03-03T09:55:57.000Z
|
db/models/student.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | 7
|
2022-02-09T10:44:53.000Z
|
2022-03-28T03:29:43.000Z
|
db/models/student.py
|
matchd-ch/matchd-backend
|
84be4aab1b4708cae50a8988301b15df877c8db0
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from django.apps import apps
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import Q
from wagtail.search import index
from .match import Match
from .profile_type import ProfileType
from .profile_state import ProfileState
def default_date():
return datetime.strptime('01.01.1970', '%m.%d.%Y').date()
class Student(models.Model, index.Indexed):
user = models.OneToOneField(to=get_user_model(),
on_delete=models.CASCADE,
related_name='student')
mobile = models.CharField(max_length=12,
blank=True,
validators=[RegexValidator(regex=settings.PHONE_REGEX)])
street = models.CharField(max_length=255, blank=True)
zip = models.CharField(max_length=255, blank=True)
city = models.CharField(max_length=255, blank=True)
date_of_birth = models.DateField(blank=True, null=True)
nickname = models.CharField(max_length=150, null=True, unique=True)
school_name = models.CharField(blank=True, null=True, max_length=255)
field_of_study = models.CharField(blank=False, null=False, max_length=255)
graduation = models.DateField(blank=True, null=True)
branch = models.ForeignKey('db.Branch', blank=True, null=True, on_delete=models.SET_NULL)
job_type = models.ForeignKey('db.JobType', blank=True, null=True, on_delete=models.SET_NULL)
job_from_date = models.DateField(null=True, blank=True)
job_to_date = models.DateField(null=True, blank=True)
skills = models.ManyToManyField('db.Skill', related_name='students')
distinction = models.TextField(max_length=1000, blank=True)
state = models.CharField(choices=ProfileState.choices,
max_length=255,
blank=False,
default=ProfileState.INCOMPLETE)
profile_step = models.IntegerField(default=1)
soft_skills = models.ManyToManyField('db.SoftSkill', blank=True, related_name='students')
cultural_fits = models.ManyToManyField('db.CulturalFit', blank=True, related_name='students')
slug = models.CharField(max_length=200, blank=True)
def __init__(self, *args, **kwargs):
self.possible_matches = {}
super().__init__(*args, **kwargs)
def get_profile_content_type(self):
return ContentType.objects.get(app_label='db', model='student')
def get_profile_id(self):
return self.id
@classmethod
def get_indexed_objects(cls):
query = Q(state=ProfileState.PUBLIC)
query |= Q(state=ProfileState.ANONYMOUS)
return cls.objects.filter(query).prefetch_related('user', 'languages', 'languages__language_level',
'cultural_fits', 'soft_skills', 'skills').\
select_related('branch', 'job_type')
def has_match(self, company):
if self.possible_matches.get(company.slug, None) is None:
model = apps.get_model('db', model_name='match')
self.possible_matches[company.slug] = model.objects.filter(student=self,
job_posting__company=company)
possible_matches = self.possible_matches.get(company.slug)
if len(possible_matches) > 0:
for possible_match in possible_matches:
if possible_match.initiator in ProfileType.valid_student_types(
) or possible_match.complete:
return True
return False
def get_match_hints(self, company):
has_requested_match = Match.objects.filter(initiator__in=ProfileType.valid_student_types(),
student=self,
job_posting__company=company).exists()
if not has_requested_match:
has_requested_match = Match.objects.filter(
initiator__in=ProfileType.valid_student_types(),
student=self,
project_posting__company=company).exists()
has_confirmed_match = Match.objects.filter(initiator__in=ProfileType.valid_company_types(),
student=self,
student_confirmed=True,
job_posting__company=company).exists()
if not has_confirmed_match:
has_confirmed_match = Match.objects.filter(initiator__in=ProfileType.valid_company_types(), student=self,
student_confirmed=True, project_posting__company=company).\
exists()
return {
'has_confirmed_match': has_confirmed_match,
'has_requested_match': has_requested_match
}
search_fields = [
index.FilterField('branch_id'),
index.FilterField('job_type_id'),
index.RelatedFields('cultural_fits', [
index.FilterField('id'),
]),
index.RelatedFields('soft_skills', [
index.FilterField('id'),
]),
index.RelatedFields('skills', [
index.FilterField('id'),
]),
index.FilterField('job_from_date',
es_extra={
'type': 'date',
'format': 'yyyy-MM-dd',
'null_value': default_date()
}),
index.FilterField('job_to_date',
es_extra={
'type': 'date',
'format': 'yyyy-MM-dd',
'null_value': default_date()
}),
]
| 46.046154
| 117
| 0.595389
|
dd57ea994f6418583b09563b3e00378f1f60bf9a
| 136
|
py
|
Python
|
config.py
|
doug1043/InstaUnfollow
|
5e0b90a8df6dc4a7c08f1fd50e38861710a5535e
|
[
"MIT"
] | 1
|
2021-12-11T20:55:24.000Z
|
2021-12-11T20:55:24.000Z
|
config.py
|
doug1043/InstaUnfollow
|
5e0b90a8df6dc4a7c08f1fd50e38861710a5535e
|
[
"MIT"
] | null | null | null |
config.py
|
doug1043/InstaUnfollow
|
5e0b90a8df6dc4a7c08f1fd50e38861710a5535e
|
[
"MIT"
] | null | null | null |
import instaloader
BOT_TOKEN = "YOUR_BOT_TOKEN"
USER, PASS = range(2)
L = instaloader.Instaloader()
user_profiles = {
}
| 11.333333
| 30
| 0.661765
|
87cd9a604489be77468250a8a45e8680231c356c
| 4,581
|
py
|
Python
|
msda-src/model_utils/domain_critic.py
|
jiangfeng1124/transfer
|
e58291cafd0aa07017db371a22be2ecaf4a08dcd
|
[
"MIT"
] | 51
|
2018-09-18T06:41:44.000Z
|
2022-03-21T00:45:04.000Z
|
msda-src/model_utils/domain_critic.py
|
zeinabbo/transfer
|
e58291cafd0aa07017db371a22be2ecaf4a08dcd
|
[
"MIT"
] | 3
|
2018-11-26T11:36:26.000Z
|
2020-08-30T03:02:32.000Z
|
msda-src/model_utils/domain_critic.py
|
zeinabbo/transfer
|
e58291cafd0aa07017db371a22be2ecaf4a08dcd
|
[
"MIT"
] | 20
|
2018-10-01T09:08:52.000Z
|
2022-01-12T03:53:41.000Z
|
import numpy as np
import torch
import torch.nn as nn
import torch.autograd as autograd
from torch.autograd import Variable
import torch.nn.functional as F
from .flip_gradient import flip_gradient
from functools import partial
from .utils import gaussian_kernel_matrix, maximum_mean_discrepancy
class ClassificationD(nn.Module):
@staticmethod
def add_config(cfgparser):
return
def __init__(self, encoder, configs):
super(ClassificationD, self).__init__()
self.n_in = encoder.n_out
self.n_out = 100
self.seq = nn.Sequential(
nn.Linear(self.n_in, self.n_out),
nn.ReLU(),
# nn.Linear(self.n_out, self.n_out),
# nn.ReLU(),
nn.Linear(self.n_out, 2)
)
for module in self.seq:
if isinstance(module, nn.Linear):
nn.init.xavier_normal_(module.weight)
nn.init.constant_(module.bias, 0.1)
# module.bias.requires_grad = False
def forward(self, x):
x_grl = flip_gradient(x)
return self.seq(x_grl)
def compute_loss(self, output, labels):
return nn.functional.cross_entropy(output, labels)
class MMD(nn.Module):
@staticmethod
def add_config(cfgparser):
return
def __init__(self, encoder, configs):
super(MMD, self).__init__()
self.sigmas = torch.FloatTensor([
1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1,
1, 5, 10, 15, 20, 25, 30, 35, 100,
1e3, 1e4, 1e5, 1e6
])
if configs.cuda:
self.sigmas = self.sigmas.cuda()
self.gaussian_kernel = partial(gaussian_kernel_matrix, sigmas=self.sigmas)
def forward(self, hs, ht):
loss_value = maximum_mean_discrepancy(hs, ht, kernel=self.gaussian_kernel)
return torch.clamp(loss_value, min=1e-4)
class CoralD(nn.Module):
@staticmethod
def add_config(cfgparser):
return
def __init__(self, encoder, configs):
super(CoralD, self).__init__()
def forward(self, hs, ht):
_D_s = torch.sum(hs, 0, keepdim=True)
_D_t = torch.sum(ht, 0, keepdim=True)
C_s = (torch.matmul(hs.t(), hs) - torch.matmul(_D_s.t(), _D_s) / hs.shape[0]) / (hs.shape[0] - 1)
C_t = (torch.matmul(ht.t(), ht) - torch.matmul(_D_t.t(), _D_t) / ht.shape[0]) / (ht.shape[0] - 1)
loss_value = torch.sum((C_s - C_t) * (C_s - C_t)) # can use `norm'
return loss_value
class WassersteinD(nn.Module):
@staticmethod
def add_config(cfgparser):
return
def __init__(self, encoder, configs):
super(WassersteinD, self).__init__()
if configs.cond == "concat": # concatenation or outer product
self.n_in = encoder.n_out + 2
elif configs.cond == "outer":
self.n_in = encoder.n_out * 2
else:
self.n_in = encoder.n_out
self.n_out = 100
# self.cuda = 1 if configs.cuda else 0
# self.lambda_gp = configs.lambda_gp
self.seq = nn.Sequential(
nn.Linear(self.n_in, self.n_out),
nn.ReLU(),
# nn.Linear(self.n_out, self.n_out),
# nn.ReLU(),
nn.Linear(self.n_out, 1)
)
for module in self.seq:
if isinstance(module, nn.Linear):
nn.init.xavier_normal_(module.weight)
nn.init.constant_(module.bias, 0.1)
def forward(self, hs, ht):
# self.clip_weights()
alpha = torch.FloatTensor(hs.shape[0], 1).uniform_(0., 1.)
# if self.cuda:
alpha = alpha.cuda()
alpha = Variable(alpha)
hdiff = hs - ht
interpolates = ht + (alpha * hdiff)
# interpolates.requires_grad = True
h_whole = torch.cat([hs, ht, interpolates])
wd_loss = self.seq(hs).mean() - self.seq(ht).mean()
# gradient penalty
# Ref implementation:
# - https://github.com/caogang/wgan-gp/blob/master/gan_mnist.py
disc_whole = self.seq(h_whole)
gradients = autograd.grad(outputs=disc_whole, inputs=h_whole,
grad_outputs=torch.ones(disc_whole.size()).cuda(), \
create_graph=True, retain_graph=True, only_inputs=True)[0]
        gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean()
        return (wd_loss, gradient_penalty)
# not as good as gradient penalty
def clip_weights(self, val_range=0.01):
for p in self.parameters():
p.data.clamp_(min=-val_range, max=val_range)
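# --- Hedged example (standalone; not part of the module above) ---------------------
# A minimal sketch of the CORAL-style loss computed in CoralD.forward(): build the
# covariance matrix of the source and target features and penalise their squared
# Frobenius distance. The random feature batches below are invented for illustration;
# torch is assumed to be installed (it is already imported at the top of this file).
def coral_loss(hs, ht):
    def covariance(h):
        col_sum = torch.sum(h, 0, keepdim=True)
        return (torch.matmul(h.t(), h)
                - torch.matmul(col_sum.t(), col_sum) / h.shape[0]) / (h.shape[0] - 1)
    diff = covariance(hs) - covariance(ht)
    return torch.sum(diff * diff)

if __name__ == "__main__":
    source_features = torch.randn(32, 8)
    target_features = torch.randn(32, 8)
    print(coral_loss(source_features, target_features))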
| 32.721429
| 105
| 0.589828
|
5d8d54e4bc7f5a49bcfa3b7a94fc560274f23bb2
| 801
|
py
|
Python
|
pylintplugins.py
|
magiskboy/instagram-clone
|
ed389438ad3c8a891f745311ba2f786aa1ccc2cc
|
[
"MIT"
] | null | null | null |
pylintplugins.py
|
magiskboy/instagram-clone
|
ed389438ad3c8a891f745311ba2f786aa1ccc2cc
|
[
"MIT"
] | null | null | null |
pylintplugins.py
|
magiskboy/instagram-clone
|
ed389438ad3c8a891f745311ba2f786aa1ccc2cc
|
[
"MIT"
] | null | null | null |
import sys
from astroid import MANAGER, scoped_nodes
from astroid.builder import AstroidBuilder
from sqlalchemy.orm import Session
def register(_linter):
pass
def transform(cls):
if cls.name == 'scoped_session':
builder = AstroidBuilder(MANAGER)
module_node = builder.module_build(sys.modules[Session.__module__])
session_cls_node = [
c for c in module_node.get_children()
if getattr(c, "type", None) == "class" and c.name == Session.__name__
][0]
for prop in Session.public_methods:
cls.locals[prop] = [
c for c in session_cls_node.get_children()
if getattr(c, "type", None) == "method" and c.name == prop
]
MANAGER.register_transform(scoped_nodes.Class, transform)
| 29.666667
| 81
| 0.644195
|
e11ce13d73f255e52909fdfa48a85613fcb79c7d
| 8,036
|
py
|
Python
|
multi_sokoban/searchclient.py
|
FjodBorg/Multi_Agent_AI
|
471e1d62eab93d3d234feeabb53e1b90f13f8c26
|
[
"OML"
] | 1
|
2020-05-27T17:59:14.000Z
|
2020-05-27T17:59:14.000Z
|
multi_sokoban/searchclient.py
|
FjodBorg/Multi_Agent_AI
|
471e1d62eab93d3d234feeabb53e1b90f13f8c26
|
[
"OML"
] | null | null | null |
multi_sokoban/searchclient.py
|
FjodBorg/Multi_Agent_AI
|
471e1d62eab93d3d234feeabb53e1b90f13f8c26
|
[
"OML"
] | null | null | null |
"""Client that receives messages from the server."""
import argparse
import re
import string
import sys
from typing import List
import numpy as np
from _io import TextIOWrapper
from multi_sokoban.actions import StateInit
from multi_sokoban.strategy import BestFirstSearch, aStarSearch, greedySearch
from multi_sokoban.manager import Manager
from multi_sokoban.utils import println
from heuristics import dGraph, EasyRule
class ParseError(Exception):
"""Define parsing error exception."""
pass
class SearchClient:
"""Contain the AI, strategy and parsing."""
def __init__(self, server_messages: TextIOWrapper, strategy: str):
"""Init object."""
self.colors_re = re.compile(r"^([a-z]+):\s*([0-9])\s*")
self.invalid_re = re.compile(r"[^A-Za-z0-9+]")
self.colors = {}
self.initial_state = self.parse_map(server_messages)
self._strategy = None
self.heuristic = dGraph(self.initial_state)
self.add_strategy(strategy)
sys.setrecursionlimit(1000000000)
@property
def strategy(self) -> BestFirstSearch:
"""Get strategy, the setter handles different types of inputs."""
return self._strategy
@strategy.setter
def strategy(self, strategy: str):
if isinstance(strategy, BestFirstSearch):
self._strategy = strategy
else:
if strategy == "astar":
self._strategy = aStarSearch
elif strategy == "wastar":
raise NotImplementedError
elif strategy == "greedy":
self._strategy = greedySearch
def add_strategy(self, strategy: str):
"""Initialize strategy, just for the __init__ method."""
self.strategy = strategy
def parse_map(self, server_messages: TextIOWrapper) -> StateInit:
"""Parse the initial server message into a map."""
# a level has a header with color specifications followed by the map
# the map starts after the line "#initial"
line = server_messages.readline().rstrip()
initial = False # mark start of level map
goal = False # mark start of level map
map = []
goal_state = []
col_count = 0
while line:
if goal:
if line.find("#end") != -1:
len_line = max(len(l) for l in map)
for i in range(len(map)):
map[i] += "+" * (len_line - len(map[i]))
goal_state[i] += "+" * (len_line - len(goal_state[i]))
println("\n".join(["".join(line) for line in map]))
return self.build_map(map, goal_state)
goal_state.append(list(self._formatl(line)))
elif initial:
if line.find("#goal") != -1:
goal = True
else:
map.append(list(self._formatl(line)))
else:
if line.find("#initial") != -1:
initial = True
else:
color_matched = self.colors_re.search(line)
if color_matched:
col_count += 1
color = color_matched[1]
self.colors[color_matched[2]] = color
for obj in line[len(color) + 5 :].split(", "):
self.colors[obj] = color
line = server_messages.readline().replace("\r", "")[:-1] # chop last
def _formatl(self, line: str):
prev = len(line)
new_line = re.sub(r'^.*?\+', '+', line)
while len(new_line) < prev:
new_line = "+" + new_line
return new_line
def build_map(self, map: List, goal_state: List) -> StateInit:
"""Build the StateInit from the parsed map.
addMap just parses rigid positions (not agent and boxes), so
get the positions of the agents and boxes and remove them from map
"""
state = StateInit()
all_objects = []
agent_n_boxes = string.digits + string.ascii_uppercase
possible_colors = set(self.colors.values())
println(possible_colors)
all_objects = self._locate_objects(np.array(map), agent_n_boxes)
# it is required to add the map first and then the rest level objects
state.addMap(map)
for obj, pos, color in all_objects:
row, col = pos
if obj in string.digits:
state.addAgent(obj, (row, col), color)
elif obj in string.ascii_uppercase:
if color in possible_colors:
state.addBox(obj, (row, col), color)
else:
state.map[row, col] = "+"
goals = string.ascii_uppercase
all_objects = self._locate_objects(np.array(goal_state), goals)
for obj, pos, color in all_objects:
row, col = pos
state.addGoal(obj, (row, col), color)
println(state)
return state
def _locate_objects(self, map: np.array, possible_objects: str) -> List:
all_objects = []
# print(map, file=sys.stderr, flush=True)
for obj in possible_objects:
agent_pos = np.where(map == obj)
if len(agent_pos) > 0:
for x, y in zip(agent_pos[0], agent_pos[1]):
color = self.colors[obj] if obj in self.colors else None
all_objects.append([obj, (x, y), color])
map[agent_pos] = " "
return all_objects
def search(self) -> List:
"""Apply search algorithm."""
println(f"Starting search with strategy {self.strategy}.")
boss = Manager(self.initial_state, self.strategy, self.heuristic)
paths = boss.run()
nodes_explored = boss.nodes_explored
return paths, nodes_explored
def parse_arguments() -> argparse.ArgumentParser:
"""Parse CLI arguments, such as strategy and nbn limit."""
parser = argparse.ArgumentParser(
description="Simple client based on state-space graph search."
)
parser.add_argument(
"--max-memory",
metavar="<MB>",
type=float,
default=2048.0,
help="The maximum memory usage allowed in MB (soft limit).",
)
strategy_group = parser.add_mutually_exclusive_group()
strategy_group.add_argument(
"-astar",
action="store_const",
dest="strategy",
const="astar",
help="Use the A* strategy.",
)
strategy_group.add_argument(
"-wastar",
action="store_const",
dest="strategy",
const="wastar",
help="Use the WA* strategy.",
)
strategy_group.add_argument(
"-greedy",
action="store_const",
dest="strategy",
const="greedy",
help="Use the Greedy strategy.",
)
args = parser.parse_args()
return args
def run_loop(strategy: str, memory: float):
"""Iterate over main loop Server->Client->Server."""
global MAX_USAGE
MAX_USAGE = memory
server_messages = sys.stdin
client = SearchClient(server_messages, strategy)
solution, nodes_explored = client.search()
if solution is None:
println("Unable to solve level.")
sys.exit(1)
else:
println("\nSummary for {}.".format(strategy))
println(f"Total nodes explored: {nodes_explored}")
println("Found solution of length {}.".format(len(solution)))
println(f"Solution -> {solution}")
for state in solution:
print(state, flush=True)
response = server_messages.readline().rstrip()
if "false" in response:
println(
f"Server responsed with '{response}' to the action"
f" '{state}' applied in:\n{solution}\n"
)
break
if __name__ == "__main__":
args = parse_arguments()
print("Karen\n", flush=True)
run_loop(args.strategy, args.max_memory)
| 35.715556
| 81
| 0.573668
|
5778b88168b08c981026027cdd50a5efdfa982fe
| 4,290
|
py
|
Python
|
sleuth/tests/test_auditor.py
|
trussworks/aws-iam-sleuth
|
8a3e570ca72331b032124edb22bcf1e1688251ee
|
[
"Apache-2.0"
] | 6
|
2020-05-25T18:58:39.000Z
|
2021-11-22T21:36:56.000Z
|
sleuth/tests/test_auditor.py
|
trussworks/terraform-aws-iam-sleuth
|
5c35c238e256c8cbf9f43d86a37114e4e1c3e7b3
|
[
"Apache-2.0"
] | 58
|
2020-05-14T19:30:10.000Z
|
2021-09-15T21:15:14.000Z
|
sleuth/tests/test_auditor.py
|
trussworks/aws-iam-sleuth
|
8a3e570ca72331b032124edb22bcf1e1688251ee
|
[
"Apache-2.0"
] | 3
|
2020-05-19T20:30:32.000Z
|
2021-08-18T06:15:45.000Z
|
from freezegun import freeze_time
import datetime
import pytest
from sleuth.auditor import Key
@freeze_time("2019-01-16")
class TestKey():
def test_normal(self):
"""Normal happy path, key is good"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
k = Key('username', 'keyid', 'Active', created, last_used)
k.audit(60, 80, 20, 19)
assert k.creation_age == 15
assert k.audit_state == 'good'
def test_rotate_expiration(self):
"""Key is past rotate age, key is marked as old"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('username', 'keyid', 'Active', created, last_used)
key.audit(10, 80, 20, 19)
assert key.audit_state == 'old'
def test_rotate_access(self):
"""Key is past last accessed age, key is marked as old"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('username', 'keyid', 'Active', created, last_used)
key.audit(60, 80, 20, 10)
assert key.audit_state == 'stagnant'
def test_old_expiration(self):
"""Key is past max expiration threshold, key is marked as expired"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('username', 'keyid', 'Active', created, last_used)
key.audit(10, 11, 10, 8)
assert key.audit_state == 'expire'
def test_old_access(self):
"""Key is past max access age threshold, key is marked as expired"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('username', 'keyid', 'Active', created, last_used)
key.audit(60, 80, 10, 9)
assert key.audit_state == 'stagnant_expire'
def test_no_disable(self, monkeypatch):
"""Key is disabled AWS status of Inactive, but disabling is turned off so key remains audit state expire"""
monkeypatch.setenv('ENABLE_AUTO_EXPIRE', 'false')
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('user2', 'ldasfkk', 'Inactive', created, last_used)
key.audit(10, 11, 10, 8)
assert key.audit_state == 'expire'
def test_last_used(self, monkeypatch):
"""Key has not been used in X days, key marked is disabled"""
monkeypatch.setenv('ENABLE_AUTO_EXPIRE', 'true')
monkeypatch.setenv('INACTIVITY_AGE', '10')
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('user3', 'kljin', 'Active', created, last_used)
key.audit(10, 11, 2, 1)
assert key.audit_state == 'expire'
key.audit(60, 80, 2, 1)
assert key.audit_state == 'stagnant_expire'
def test_disabled(self, monkeypatch):
"""Key is disabled AWS status of Inactive, key marked is disabled"""
monkeypatch.setenv('ENABLE_AUTO_EXPIRE', 'true')
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('user2', 'ldasfkk', 'Inactive', created, last_used)
key.audit(10, 11, 10, 8)
assert key.audit_state == 'disabled'
key.audit(60, 80, 30, 20)
assert key.audit_state == 'disabled'
def test_invalid(self):
"""Key is disabled AWS status of Inactive, key marked is disabled"""
created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)
last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)
key = Key('user2', 'ldasfkk', 'Inactive', created, last_used)
with pytest.raises(AssertionError):
key.audit(5, 1, 1, 1)
| 48.202247
| 115
| 0.646387
|
3cd6cda8c1180dcb98231e7d91e1ae2f0e7bbc28
| 171
|
py
|
Python
|
exercises/concept/inventory-management/dicts.py
|
gsilvapt/python
|
d675468b2437d4c09c358d023ef998a05a781f58
|
[
"MIT"
] | 200
|
2019-12-12T13:50:59.000Z
|
2022-02-20T22:38:42.000Z
|
exercises/concept/inventory-management/dicts.py
|
gsilvapt/python
|
d675468b2437d4c09c358d023ef998a05a781f58
|
[
"MIT"
] | 1,938
|
2019-12-12T08:07:10.000Z
|
2021-01-29T12:56:13.000Z
|
exercises/concept/inventory-management/dicts.py
|
gsilvapt/python
|
d675468b2437d4c09c358d023ef998a05a781f58
|
[
"MIT"
] | 239
|
2019-12-12T14:09:08.000Z
|
2022-03-18T00:04:07.000Z
|
def create_inventory(items):
pass
def add_items(inventory, items):
pass
def delete_items(inventory, items):
pass
def list_inventory(inventory):
pass
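# --- Hedged example (not part of the exercise stub above) --------------------------
# One possible way the stubs above could be filled in, assuming the "count item names
# in a dict" semantics their names suggest. The *_example names are hypothetical and
# kept separate so the original stubs stay untouched.
def create_inventory_example(items):
    return add_items_example({}, items)

def add_items_example(inventory, items):
    for item in items:
        inventory[item] = inventory.get(item, 0) + 1
    return inventory

def delete_items_example(inventory, items):
    for item in items:
        if inventory.get(item, 0) > 0:
            inventory[item] -= 1
    return inventory

def list_inventory_example(inventory):
    return [(item, count) for item, count in inventory.items() if count > 0]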
| 11.4
| 35
| 0.707602
|
ab2c186b0db95e3cb6b9f061be4420537fc69301
| 82,747
|
py
|
Python
|
spyder/app/mainwindow.py
|
dan123456-eng/spyder
|
e57751e01d09a35b8f0583f9efd8dce318b17b4e
|
[
"MIT"
] | 1
|
2022-02-23T16:50:02.000Z
|
2022-02-23T16:50:02.000Z
|
spyder/app/mainwindow.py
|
dan123456-eng/spyder
|
e57751e01d09a35b8f0583f9efd8dce318b17b4e
|
[
"MIT"
] | null | null | null |
spyder/app/mainwindow.py
|
dan123456-eng/spyder
|
e57751e01d09a35b8f0583f9efd8dce318b17b4e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Spyder, the Scientific Python Development Environment
=====================================================
Developed and maintained by the Spyder Project
Contributors
Copyright © Spyder Project Contributors
Licensed under the terms of the MIT License
(see spyder/__init__.py for details)
"""
# =============================================================================
# Stdlib imports
# =============================================================================
from collections import OrderedDict
from enum import Enum
import errno
import gc
import logging
import os
import os.path as osp
import shutil
import signal
import socket
import sys
import threading
import traceback
#==============================================================================
# Check requirements before proceeding
#==============================================================================
from spyder import requirements
requirements.check_path()
requirements.check_qt()
#==============================================================================
# Third-party imports
#==============================================================================
from qtpy.compat import from_qvariant
from qtpy.QtCore import (QCoreApplication, Qt, QTimer, Signal, Slot,
qInstallMessageHandler)
from qtpy.QtGui import QColor, QKeySequence
from qtpy.QtWidgets import (QApplication, QMainWindow, QMenu, QMessageBox,
QShortcut, QStyleFactory)
# Avoid a "Cannot mix incompatible Qt library" error on Windows platforms
from qtpy import QtSvg # analysis:ignore
# Avoid a bug in Qt: https://bugreports.qt.io/browse/QTBUG-46720
from qtpy import QtWebEngineWidgets # analysis:ignore
from qtawesome.iconic_font import FontError
#==============================================================================
# Local imports
# NOTE: Move (if possible) import's of widgets and plugins exactly where they
# are needed in MainWindow to speed up perceived startup time (i.e. the time
# from clicking the Spyder icon to showing the splash screen).
#==============================================================================
from spyder import __version__
from spyder import dependencies
from spyder.app.find_plugins import (
find_external_plugins, find_internal_plugins)
from spyder.app.utils import (
create_application, create_splash_screen, create_window, ORIGINAL_SYS_EXIT,
delete_debug_log_files, qt_message_handler, set_links_color, setup_logging,
set_opengl_implementation)
from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY
from spyder.config.base import (_, DEV, get_conf_path, get_debug_level,
get_home_dir, get_module_source_path,
is_pynsist, running_in_mac_app,
running_under_pytest, STDERR)
from spyder.config.gui import is_dark_font_color
from spyder.config.main import OPEN_FILES_PORT
from spyder.config.manager import CONF
from spyder.config.utils import IMPORT_EXT, is_gtk_desktop
from spyder.otherplugins import get_spyderplugins_mods
from spyder.py3compat import configparser as cp, PY3, to_text_string
from spyder.utils import encoding, programs
from spyder.utils.icon_manager import ima
from spyder.utils.misc import (select_port, getcwd_or_home,
get_python_executable)
from spyder.utils.palette import QStylePalette
from spyder.utils.qthelpers import (create_action, add_actions, file_uri,
qapplication, start_file)
from spyder.utils.stylesheet import APP_STYLESHEET
# Spyder API Imports
from spyder.api.exceptions import SpyderAPIError
from spyder.api.plugins import (
Plugins, SpyderPlugin, SpyderPluginV2, SpyderDockablePlugin,
SpyderPluginWidget)
#==============================================================================
# Windows only local imports
#==============================================================================
set_attached_console_visible = None
is_attached_console_visible = None
set_windows_appusermodelid = None
if os.name == 'nt':
from spyder.utils.windows import (set_attached_console_visible,
set_windows_appusermodelid)
#==============================================================================
# Constants
#==============================================================================
# Module logger
logger = logging.getLogger(__name__)
# Get the cwd before initializing WorkingDirectory, which sets it to the one
# used in the last session
CWD = getcwd_or_home()
#==============================================================================
# Install Qt messaage handler
#==============================================================================
qInstallMessageHandler(qt_message_handler)
#==============================================================================
# Main Window
#==============================================================================
class MainWindow(QMainWindow):
"""Spyder main window"""
DOCKOPTIONS = (
QMainWindow.AllowTabbedDocks | QMainWindow.AllowNestedDocks |
QMainWindow.AnimatedDocks
)
SPYDER_PATH = get_conf_path('path')
SPYDER_NOT_ACTIVE_PATH = get_conf_path('not_active_path')
DEFAULT_LAYOUTS = 4
# Signals
restore_scrollbar_position = Signal()
sig_setup_finished = Signal()
all_actions_defined = Signal()
# type: (OrderedDict, OrderedDict)
sig_pythonpath_changed = Signal(object, object)
sig_main_interpreter_changed = Signal()
sig_open_external_file = Signal(str)
sig_resized = Signal("QResizeEvent")
sig_moved = Signal("QMoveEvent")
sig_layout_setup_ready = Signal(object) # Related to default layouts
# ---- Plugin handling methods
# ------------------------------------------------------------------------
def get_plugin(self, plugin_name, error=True):
"""
Return a plugin instance by providing the plugin class.
"""
if plugin_name in PLUGIN_REGISTRY:
return PLUGIN_REGISTRY.get_plugin(plugin_name)
if error:
raise SpyderAPIError(f'Plugin "{plugin_name}" not found!')
return None
def get_dockable_plugins(self):
"""Get a list of all dockable plugins."""
dockable_plugins = []
for plugin_name in PLUGIN_REGISTRY:
plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
if isinstance(plugin, (SpyderDockablePlugin, SpyderPluginWidget)):
dockable_plugins.append((plugin_name, plugin))
return dockable_plugins
def is_plugin_enabled(self, plugin_name):
"""Determine if a given plugin is going to be loaded."""
return PLUGIN_REGISTRY.is_plugin_enabled(plugin_name)
def is_plugin_available(self, plugin_name):
"""Determine if a given plugin is available."""
return PLUGIN_REGISTRY.is_plugin_available(plugin_name)
def show_status_message(self, message, timeout):
"""
Show a status message in Spyder Main Window.
"""
status_bar = self.statusBar()
if status_bar.isVisible():
status_bar.showMessage(message, timeout)
def show_plugin_compatibility_message(self, message):
"""
Show a compatibility message.
"""
messageBox = QMessageBox(self)
messageBox.setWindowModality(Qt.NonModal)
messageBox.setAttribute(Qt.WA_DeleteOnClose)
messageBox.setWindowTitle(_('Compatibility Check'))
messageBox.setText(message)
messageBox.setStandardButtons(QMessageBox.Ok)
messageBox.show()
def register_plugin(self, plugin_name, external=False, omit_conf=False):
"""
Register a plugin in Spyder Main Window.
"""
plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
self.set_splash(_("Loading {}...").format(plugin.get_name()))
logger.info("Loading {}...".format(plugin.NAME))
# Check plugin compatibility
is_compatible, message = plugin.check_compatibility()
plugin.is_compatible = is_compatible
plugin.get_description()
if not is_compatible:
            self.show_plugin_compatibility_message(message)
return
# Connect Plugin Signals to main window methods
plugin.sig_exception_occurred.connect(self.handle_exception)
plugin.sig_free_memory_requested.connect(self.free_memory)
plugin.sig_quit_requested.connect(self.close)
plugin.sig_redirect_stdio_requested.connect(
self.redirect_internalshell_stdio)
plugin.sig_status_message_requested.connect(self.show_status_message)
if isinstance(plugin, SpyderDockablePlugin):
plugin.sig_focus_changed.connect(self.plugin_focus_changed)
plugin.sig_switch_to_plugin_requested.connect(
self.switch_to_plugin)
plugin.sig_update_ancestor_requested.connect(
lambda: plugin.set_ancestor(self))
# Connect Main window Signals to plugin signals
self.sig_moved.connect(plugin.sig_mainwindow_moved)
self.sig_resized.connect(plugin.sig_mainwindow_resized)
# Register plugin
plugin._register(omit_conf=omit_conf)
if isinstance(plugin, SpyderDockablePlugin):
# Add dockwidget
self.add_dockwidget(plugin)
# Update margins
margin = 0
if CONF.get('main', 'use_custom_margin'):
margin = CONF.get('main', 'custom_margin')
plugin.update_margins(margin)
if plugin_name == Plugins.Shortcuts:
for action, context, action_name in self.shortcut_queue:
self.register_shortcut(action, context, action_name)
self.shortcut_queue = []
logger.info("Registering shortcuts for {}...".format(plugin.NAME))
for action_name, action in plugin.get_actions().items():
context = (getattr(action, 'shortcut_context', plugin.NAME)
or plugin.NAME)
if getattr(action, 'register_shortcut', True):
if isinstance(action_name, Enum):
action_name = action_name.value
if Plugins.Shortcuts in PLUGIN_REGISTRY:
self.register_shortcut(action, context, action_name)
else:
self.shortcut_queue.append((action, context, action_name))
if isinstance(plugin, SpyderDockablePlugin):
try:
context = '_'
name = 'switch to {}'.format(plugin.CONF_SECTION)
shortcut = CONF.get_shortcut(context, name,
plugin_name=plugin.CONF_SECTION)
except (cp.NoSectionError, cp.NoOptionError):
shortcut = None
sc = QShortcut(QKeySequence(), self,
lambda: self.switch_to_plugin(plugin))
sc.setContext(Qt.ApplicationShortcut)
plugin._shortcut = sc
if Plugins.Shortcuts in PLUGIN_REGISTRY:
self.register_shortcut(sc, context, name)
self.register_shortcut(
plugin.toggle_view_action, context, name)
else:
self.shortcut_queue.append((sc, context, name))
self.shortcut_queue.append(
(plugin.toggle_view_action, context, name))
def unregister_plugin(self, plugin):
"""
Unregister a plugin from the Spyder Main Window.
"""
logger.info("Unloading {}...".format(plugin.NAME))
# Disconnect all slots
signals = [
plugin.sig_quit_requested,
plugin.sig_redirect_stdio_requested,
plugin.sig_status_message_requested,
]
for sig in signals:
try:
sig.disconnect()
except TypeError:
pass
# Unregister shortcuts for actions
logger.info("Unregistering shortcuts for {}...".format(plugin.NAME))
for action_name, action in plugin.get_actions().items():
context = (getattr(action, 'shortcut_context', plugin.NAME)
or plugin.NAME)
self.shortcuts.unregister_shortcut(action, context, action_name)
# Unregister switch to shortcut
shortcut = None
try:
context = '_'
name = 'switch to {}'.format(plugin.CONF_SECTION)
shortcut = CONF.get_shortcut(context, name,
plugin_name=plugin.CONF_SECTION)
except Exception:
pass
if shortcut is not None:
self.shortcuts.unregister_shortcut(
plugin._shortcut,
context,
"Switch to {}".format(plugin.CONF_SECTION),
)
# Remove dockwidget
logger.info("Removing {} dockwidget...".format(plugin.NAME))
self.remove_dockwidget(plugin)
plugin._unregister()
def create_plugin_conf_widget(self, plugin):
"""
Create configuration dialog box page widget.
"""
config_dialog = self.prefs_dialog_instance
if plugin.CONF_WIDGET_CLASS is not None and config_dialog is not None:
conf_widget = plugin.CONF_WIDGET_CLASS(plugin, config_dialog)
conf_widget.initialize()
return conf_widget
@property
def last_plugin(self):
"""
Get last plugin with focus if it is a dockable widget.
        If a non-dockable plugin has focus, this will return the Editor
        plugin by default.
"""
# Needed to prevent errors with the old API at
# spyder/plugins/base::_switch_to_plugin
return self.layouts.get_last_plugin()
def maximize_dockwidget(self, restore=False):
"""
This is needed to prevent errors with the old API at
spyder/plugins/base::_switch_to_plugin.
See spyder-ide/spyder#15164
Parameters
----------
restore : bool, optional
If the current dockwidget needs to be restored to its unmaximized
state. The default is False.
"""
self.layouts.maximize_dockwidget(restore=restore)
def switch_to_plugin(self, plugin, force_focus=None):
"""
Switch to this plugin.
Notes
-----
This operation unmaximizes the current plugin (if any), raises
this plugin to view (if it's hidden) and gives it focus (if
possible).
"""
last_plugin = self.last_plugin
try:
# New API
if (last_plugin is not None
and last_plugin.get_widget().is_maximized
and last_plugin is not plugin):
self.layouts.maximize_dockwidget()
except AttributeError:
# Old API
if (last_plugin is not None and self.last_plugin._ismaximized
and last_plugin is not plugin):
self.layouts.maximize_dockwidget()
try:
# New API
if not plugin.toggle_view_action.isChecked():
plugin.toggle_view_action.setChecked(True)
plugin.get_widget().is_visible = False
except AttributeError:
# Old API
if not plugin._toggle_view_action.isChecked():
plugin._toggle_view_action.setChecked(True)
plugin._widget._is_visible = False
plugin.change_visibility(True, force_focus=force_focus)
def remove_dockwidget(self, plugin):
"""
Remove a plugin QDockWidget from the main window.
"""
self.removeDockWidget(plugin.dockwidget)
try:
self.widgetlist.remove(plugin)
except ValueError:
pass
def tabify_plugins(self, first, second):
"""Tabify plugin dockwigdets."""
self.tabifyDockWidget(first.dockwidget, second.dockwidget)
def tabify_plugin(self, plugin, default=None):
"""
Tabify the plugin using the list of possible TABIFY options.
Only do this if the dockwidget does not have more dockwidgets
in the same position and if the plugin is using the New API.
"""
def tabify_helper(plugin, next_to_plugins):
for next_to_plugin in next_to_plugins:
try:
self.tabify_plugins(next_to_plugin, plugin)
break
except SpyderAPIError as err:
logger.error(err)
# If TABIFY not defined use the [default]
tabify = getattr(plugin, 'TABIFY', [default])
if not isinstance(tabify, list):
next_to_plugins = [tabify]
else:
next_to_plugins = tabify
# Check if TABIFY is not a list with None as unique value or a default
# list
if tabify in [[None], []]:
return False
# Get the actual plugins from the names
next_to_plugins = [self.get_plugin(p) for p in next_to_plugins]
# First time plugin starts
if plugin.get_conf('first_time', True):
if (isinstance(plugin, SpyderDockablePlugin)
and plugin.NAME != Plugins.Console):
logger.info(
"Tabify {} dockwidget for the first time...".format(
plugin.NAME))
tabify_helper(plugin, next_to_plugins)
# Show external plugins
if plugin.NAME in PLUGIN_REGISTRY.external_plugins:
plugin.get_widget().toggle_view(True)
plugin.set_conf('enable', True)
plugin.set_conf('first_time', False)
else:
# This is needed to ensure plugins are placed correctly when
# switching layouts.
logger.info("Tabify {} dockwidget...".format(plugin.NAME))
# Check if plugin has no other dockwidgets in the same position
if not bool(self.tabifiedDockWidgets(plugin.dockwidget)):
tabify_helper(plugin, next_to_plugins)
return True
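    # Illustration (not part of Spyder's code): a dockable plugin opts into the
    # tabify logic above by declaring a TABIFY attribute on its class. The
    # plugin name below is hypothetical; Plugins.Console is used only because
    # it is the default tabify target for external plugins in this file.
    #
    #     class MyPlugin(SpyderDockablePlugin):
    #         NAME = 'my_plugin'
    #         TABIFY = [Plugins.Console]
    #
    # With that attribute set, tabify_plugin() docks MyPlugin next to the
    # Console the first time the plugin is loaded.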
def handle_exception(self, error_data):
"""
This method will call the handle exception method of the Console
plugin. It is provided as a signal on the Plugin API for convenience,
        so that plugins do not need to explicitly call the Console plugin.
Parameters
----------
error_data: dict
The dictionary containing error data. The expected keys are:
>>> error_data= {
"text": str,
"is_traceback": bool,
"repo": str,
"title": str,
"label": str,
"steps": str,
}
Notes
-----
The `is_traceback` key indicates if `text` contains plain text or a
Python error traceback.
The `title` and `repo` keys indicate how the error data should
customize the report dialog and Github error submission.
The `label` and `steps` keys allow customizing the content of the
error dialog.
"""
console = self.get_plugin(Plugins.Console, error=False)
if console:
console.handle_exception(error_data)
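    # Illustration (not part of Spyder's code): a plugin reports an error by
    # emitting sig_exception_occurred with a dict shaped like the docstring
    # above. The values shown here are placeholders.
    #
    #     error_data = {
    #         "text": "Something went wrong",
    #         "is_traceback": False,
    #         "repo": "spyder-ide/spyder",
    #         "title": "Example error",
    #         "label": "",
    #         "steps": "",
    #     }
    #     self.sig_exception_occurred.emit(error_data)  # inside the plugin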
def __init__(self, splash=None, options=None):
QMainWindow.__init__(self)
qapp = QApplication.instance()
if running_under_pytest():
self._proxy_style = None
else:
from spyder.utils.qthelpers import SpyderProxyStyle
# None is needed, see: https://bugreports.qt.io/browse/PYSIDE-922
self._proxy_style = SpyderProxyStyle(None)
# Enabling scaling for high dpi
qapp.setAttribute(Qt.AA_UseHighDpiPixmaps)
# Set Windows app icon to use .ico file
if os.name == "nt":
qapp.setWindowIcon(ima.get_icon("windows_app_icon"))
self.default_style = str(qapp.style().objectName())
self.init_workdir = options.working_directory
self.profile = options.profile
self.multithreaded = options.multithreaded
self.new_instance = options.new_instance
if options.project is not None and not running_in_mac_app():
self.open_project = osp.normpath(osp.join(CWD, options.project))
else:
self.open_project = None
self.window_title = options.window_title
logger.info("Start of MainWindow constructor")
def signal_handler(signum, frame=None):
"""Handler for signals."""
sys.stdout.write('Handling signal: %s\n' % signum)
sys.stdout.flush()
QApplication.quit()
if os.name == "nt":
try:
import win32api
win32api.SetConsoleCtrlHandler(signal_handler, True)
except ImportError:
pass
else:
signal.signal(signal.SIGTERM, signal_handler)
if not DEV:
                # Make Spyder quit when pressing Ctrl+C in the console
# In DEV Ctrl+C doesn't quit, because it helps to
# capture the traceback when spyder freezes
signal.signal(signal.SIGINT, signal_handler)
# Use a custom Qt stylesheet
if sys.platform == 'darwin':
spy_path = get_module_source_path('spyder')
img_path = osp.join(spy_path, 'images')
mac_style = open(osp.join(spy_path, 'app', 'mac_stylesheet.qss')).read()
mac_style = mac_style.replace('$IMAGE_PATH', img_path)
self.setStyleSheet(mac_style)
# Shortcut management data
self.shortcut_data = []
self.shortcut_queue = []
# Handle Spyder path
self.path = ()
self.not_active_path = ()
self.project_path = ()
# New API
self._APPLICATION_TOOLBARS = OrderedDict()
self._STATUS_WIDGETS = OrderedDict()
        # Mapping of new plugin identifiers vs old attributes
# names given for plugins or to prevent collisions with other
# attributes, i.e layout (Qt) vs layout (SpyderPluginV2)
self._INTERNAL_PLUGINS_MAPPING = {
'console': Plugins.Console,
'maininterpreter': Plugins.MainInterpreter,
'outlineexplorer': Plugins.OutlineExplorer,
'variableexplorer': Plugins.VariableExplorer,
'ipyconsole': Plugins.IPythonConsole,
'workingdirectory': Plugins.WorkingDirectory,
'projects': Plugins.Projects,
'findinfiles': Plugins.Find,
'layouts': Plugins.Layout,
}
self.thirdparty_plugins = []
# File switcher
self.switcher = None
# Preferences
self.prefs_dialog_size = None
self.prefs_dialog_instance = None
# Actions
self.undo_action = None
self.redo_action = None
self.copy_action = None
self.cut_action = None
self.paste_action = None
self.selectall_action = None
# Menu bars
self.edit_menu = None
self.edit_menu_actions = []
self.search_menu = None
self.search_menu_actions = []
self.source_menu = None
self.source_menu_actions = []
self.run_menu = None
self.run_menu_actions = []
self.debug_menu = None
self.debug_menu_actions = []
# TODO: Move to corresponding Plugins
self.main_toolbar = None
self.main_toolbar_actions = []
self.file_toolbar = None
self.file_toolbar_actions = []
self.run_toolbar = None
self.run_toolbar_actions = []
self.debug_toolbar = None
self.debug_toolbar_actions = []
self.menus = []
if running_under_pytest():
# Show errors in internal console when testing.
CONF.set('main', 'show_internal_errors', False)
self.CURSORBLINK_OSDEFAULT = QApplication.cursorFlashTime()
if set_windows_appusermodelid != None:
res = set_windows_appusermodelid()
logger.info("appusermodelid: %s", res)
# Setting QTimer if running in travis
test_app = os.environ.get('TEST_CI_APP')
if test_app is not None:
app = qapplication()
timer_shutdown_time = 30000
self.timer_shutdown = QTimer(self)
self.timer_shutdown.timeout.connect(app.quit)
self.timer_shutdown.start(timer_shutdown_time)
# Showing splash screen
self.splash = splash
if CONF.get('main', 'current_version', '') != __version__:
CONF.set('main', 'current_version', __version__)
# Execute here the actions to be performed only once after
# each update (there is nothing there for now, but it could
# be useful some day...)
# List of satellite widgets (registered in add_dockwidget):
self.widgetlist = []
# Flags used if closing() is called by the exit() shell command
self.already_closed = False
self.is_starting_up = True
self.is_setting_up = True
self.floating_dockwidgets = []
self.window_size = None
self.window_position = None
# To keep track of the last focused widget
self.last_focused_widget = None
self.previous_focused_widget = None
# Server to open external files on a single instance
# This is needed in order to handle socket creation problems.
# See spyder-ide/spyder#4132.
if os.name == 'nt':
try:
self.open_files_server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
except OSError:
self.open_files_server = None
QMessageBox.warning(None, "Spyder",
_("An error occurred while creating a socket needed "
"by Spyder. Please, try to run as an Administrator "
"from cmd.exe the following command and then "
"restart your computer: <br><br><span "
"style=\'color: {color}\'><b>netsh winsock reset "
"</b></span><br>").format(
color=QStylePalette.COLOR_BACKGROUND_4))
else:
self.open_files_server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
# Apply main window settings
self.apply_settings()
# To set all dockwidgets tabs to be on top (in case we want to do it
# in the future)
# self.setTabPosition(Qt.AllDockWidgetAreas, QTabWidget.North)
logger.info("End of MainWindow constructor")
# ---- Window setup
def _update_shortcuts_in_panes_menu(self, show=True):
"""
Display the shortcut for the "Switch to plugin..." on the toggle view
action of the plugins displayed in the Help/Panes menu.
Notes
-----
SpyderDockablePlugins provide two actions that function as a single
action. The `Switch to Plugin...` action has an assignable shortcut
via the shortcut preferences. The `Plugin toggle View` in the `View`
application menu, uses a custom `Toggle view action` that displays the
shortcut assigned to the `Switch to Plugin...` action, but is not
triggered by that shortcut.
"""
for plugin_name in PLUGIN_REGISTRY:
plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
if isinstance(plugin, SpyderDockablePlugin):
try:
# New API
action = plugin.toggle_view_action
except AttributeError:
# Old API
action = plugin._toggle_view_action
if show:
section = plugin.CONF_SECTION
try:
context = '_'
name = 'switch to {}'.format(section)
shortcut = CONF.get_shortcut(
context, name, plugin_name=section)
except (cp.NoSectionError, cp.NoOptionError):
shortcut = QKeySequence()
else:
shortcut = QKeySequence()
action.setShortcut(shortcut)
def setup(self):
"""Setup main window."""
PLUGIN_REGISTRY.sig_plugin_ready.connect(
lambda plugin_name, omit_conf: self.register_plugin(
plugin_name, omit_conf=omit_conf))
PLUGIN_REGISTRY.set_main(self)
# TODO: Remove circular dependency between help and ipython console
# and remove this import. Help plugin should take care of it
from spyder.plugins.help.utils.sphinxify import CSS_PATH, DARK_CSS_PATH
logger.info("*** Start of MainWindow setup ***")
logger.info("Updating PYTHONPATH")
path_dict = self.get_spyder_pythonpath_dict()
self.update_python_path(path_dict)
logger.info("Applying theme configuration...")
ui_theme = CONF.get('appearance', 'ui_theme')
color_scheme = CONF.get('appearance', 'selected')
if ui_theme == 'dark':
if not running_under_pytest():
# Set style proxy to fix combobox popup on mac and qdark
qapp = QApplication.instance()
qapp.setStyle(self._proxy_style)
dark_qss = str(APP_STYLESHEET)
self.setStyleSheet(dark_qss)
self.statusBar().setStyleSheet(dark_qss)
css_path = DARK_CSS_PATH
elif ui_theme == 'light':
if not running_under_pytest():
# Set style proxy to fix combobox popup on mac and qdark
qapp = QApplication.instance()
qapp.setStyle(self._proxy_style)
light_qss = str(APP_STYLESHEET)
self.setStyleSheet(light_qss)
self.statusBar().setStyleSheet(light_qss)
css_path = CSS_PATH
elif ui_theme == 'automatic':
if not is_dark_font_color(color_scheme):
if not running_under_pytest():
# Set style proxy to fix combobox popup on mac and qdark
qapp = QApplication.instance()
qapp.setStyle(self._proxy_style)
dark_qss = str(APP_STYLESHEET)
self.setStyleSheet(dark_qss)
self.statusBar().setStyleSheet(dark_qss)
css_path = DARK_CSS_PATH
else:
light_qss = str(APP_STYLESHEET)
self.setStyleSheet(light_qss)
self.statusBar().setStyleSheet(light_qss)
css_path = CSS_PATH
# Set css_path as a configuration to be used by the plugins
CONF.set('appearance', 'css_path', css_path)
# Status bar
status = self.statusBar()
status.setObjectName("StatusBar")
status.showMessage(_("Welcome to Spyder!"), 5000)
# Switcher instance
logger.info("Loading switcher...")
self.create_switcher()
message = _(
"Spyder Internal Console\n\n"
"This console is used to report application\n"
"internal errors and to inspect Spyder\n"
"internals with the following commands:\n"
" spy.app, spy.window, dir(spy)\n\n"
"Please don't use it to run your code\n\n"
)
CONF.set('internal_console', 'message', message)
CONF.set('internal_console', 'multithreaded', self.multithreaded)
CONF.set('internal_console', 'profile', self.profile)
CONF.set('internal_console', 'commands', [])
CONF.set('internal_console', 'namespace', {})
CONF.set('internal_console', 'show_internal_errors', True)
# Working directory initialization
CONF.set('workingdir', 'init_workdir', self.init_workdir)
# Load and register internal and external plugins
external_plugins = find_external_plugins()
internal_plugins = find_internal_plugins()
all_plugins = external_plugins.copy()
all_plugins.update(internal_plugins.copy())
# Determine 'enable' config for the plugins that have it
enabled_plugins = {}
registry_internal_plugins = {}
registry_external_plugins = {}
for plugin in all_plugins.values():
plugin_name = plugin.NAME
plugin_main_attribute_name = (
self._INTERNAL_PLUGINS_MAPPING[plugin_name]
if plugin_name in self._INTERNAL_PLUGINS_MAPPING
else plugin_name)
if plugin_name in internal_plugins:
registry_internal_plugins[plugin_name] = (
plugin_main_attribute_name, plugin)
else:
registry_external_plugins[plugin_name] = (
plugin_main_attribute_name, plugin)
try:
if CONF.get(plugin_main_attribute_name, "enable"):
enabled_plugins[plugin_name] = plugin
PLUGIN_REGISTRY.set_plugin_enabled(plugin_name)
except (cp.NoOptionError, cp.NoSectionError):
enabled_plugins[plugin_name] = plugin
PLUGIN_REGISTRY.set_plugin_enabled(plugin_name)
PLUGIN_REGISTRY.set_all_internal_plugins(registry_internal_plugins)
PLUGIN_REGISTRY.set_all_external_plugins(registry_external_plugins)
# Instantiate internal Spyder 5 plugins
for plugin_name in internal_plugins:
if plugin_name in enabled_plugins:
PluginClass = internal_plugins[plugin_name]
if issubclass(PluginClass, SpyderPluginV2):
PLUGIN_REGISTRY.register_plugin(self, PluginClass,
external=False)
# Instantiate internal Spyder 4 plugins
for plugin_name in internal_plugins:
if plugin_name in enabled_plugins:
PluginClass = internal_plugins[plugin_name]
if issubclass(PluginClass, SpyderPlugin):
plugin_instance = PLUGIN_REGISTRY.register_plugin(
self, PluginClass, external=False)
self.preferences.register_plugin_preferences(
plugin_instance)
# Instantiate external Spyder 5 plugins
for plugin_name in external_plugins:
if plugin_name in enabled_plugins:
PluginClass = external_plugins[plugin_name]
try:
plugin_instance = PLUGIN_REGISTRY.register_plugin(
self, PluginClass, external=True)
except Exception as error:
print("%s: %s" % (PluginClass, str(error)), file=STDERR)
traceback.print_exc(file=STDERR)
self.set_splash(_("Loading old third-party plugins..."))
for mod in get_spyderplugins_mods():
try:
plugin = PLUGIN_REGISTRY.register_plugin(self, mod,
external=True)
if plugin.check_compatibility()[0]:
if hasattr(plugin, 'CONFIGWIDGET_CLASS'):
self.preferences.register_plugin_preferences(plugin)
if not hasattr(plugin, 'COMPLETION_PROVIDER_NAME'):
self.thirdparty_plugins.append(plugin)
# Add to dependencies dialog
module = mod.__name__
name = module.replace('_', '-')
if plugin.DESCRIPTION:
description = plugin.DESCRIPTION
else:
description = plugin.get_plugin_title()
dependencies.add(module, name, description,
'', None, kind=dependencies.PLUGIN)
except TypeError:
# Fixes spyder-ide/spyder#13977
pass
except Exception as error:
print("%s: %s" % (mod, str(error)), file=STDERR)
traceback.print_exc(file=STDERR)
# Set window title
self.set_window_title()
# Menus
# TODO: Remove when all menus are migrated to use the Main Menu Plugin
logger.info("Creating Menus...")
from spyder.plugins.mainmenu.api import (
ApplicationMenus, ToolsMenuSections, FileMenuSections)
mainmenu = self.mainmenu
self.edit_menu = mainmenu.get_application_menu("edit_menu")
self.search_menu = mainmenu.get_application_menu("search_menu")
self.source_menu = mainmenu.get_application_menu("source_menu")
self.source_menu.aboutToShow.connect(self.update_source_menu)
self.run_menu = mainmenu.get_application_menu("run_menu")
self.debug_menu = mainmenu.get_application_menu("debug_menu")
# Switcher shortcuts
self.file_switcher_action = create_action(
self,
_('File switcher...'),
icon=ima.icon('filelist'),
tip=_('Fast switch between files'),
triggered=self.open_switcher,
context=Qt.ApplicationShortcut,
id_='file_switcher')
self.register_shortcut(self.file_switcher_action, context="_",
name="File switcher")
self.symbol_finder_action = create_action(
self, _('Symbol finder...'),
icon=ima.icon('symbol_find'),
tip=_('Fast symbol search in file'),
triggered=self.open_symbolfinder,
context=Qt.ApplicationShortcut,
id_='symbol_finder')
self.register_shortcut(self.symbol_finder_action, context="_",
name="symbol finder", add_shortcut_to_tip=True)
def create_edit_action(text, tr_text, icon):
textseq = text.split(' ')
method_name = textseq[0].lower()+"".join(textseq[1:])
action = create_action(self, tr_text,
icon=icon,
triggered=self.global_callback,
data=method_name,
context=Qt.WidgetShortcut)
self.register_shortcut(action, "Editor", text)
return action
self.undo_action = create_edit_action('Undo', _('Undo'),
ima.icon('undo'))
self.redo_action = create_edit_action('Redo', _('Redo'),
ima.icon('redo'))
self.copy_action = create_edit_action('Copy', _('Copy'),
ima.icon('editcopy'))
self.cut_action = create_edit_action('Cut', _('Cut'),
ima.icon('editcut'))
self.paste_action = create_edit_action('Paste', _('Paste'),
ima.icon('editpaste'))
self.selectall_action = create_edit_action("Select All",
_("Select All"),
ima.icon('selectall'))
self.edit_menu_actions += [self.undo_action, self.redo_action,
None, self.cut_action, self.copy_action,
self.paste_action, self.selectall_action,
None]
if self.get_plugin(Plugins.Editor, error=False):
self.edit_menu_actions += self.editor.edit_menu_actions
switcher_actions = [
self.file_switcher_action,
self.symbol_finder_action
]
for switcher_action in switcher_actions:
mainmenu.add_item_to_application_menu(
switcher_action,
menu_id=ApplicationMenus.File,
section=FileMenuSections.Switcher,
before_section=FileMenuSections.Restart)
self.set_splash("")
# Toolbars
# TODO: Remove after finishing the migration
logger.info("Creating toolbars...")
toolbar = self.toolbar
self.file_toolbar = toolbar.get_application_toolbar("file_toolbar")
self.run_toolbar = toolbar.get_application_toolbar("run_toolbar")
self.debug_toolbar = toolbar.get_application_toolbar("debug_toolbar")
self.main_toolbar = toolbar.get_application_toolbar("main_toolbar")
# Tools + External Tools (some of this depends on the Application
# plugin)
logger.info("Creating Tools menu...")
spyder_path_action = create_action(
self,
_("PYTHONPATH manager"),
None, icon=ima.icon('pythonpath'),
triggered=self.show_path_manager,
tip=_("PYTHONPATH manager"),
id_='spyder_path_action')
from spyder.plugins.application.container import (
ApplicationActions, WinUserEnvDialog)
winenv_action = None
if WinUserEnvDialog:
winenv_action = ApplicationActions.SpyderWindowsEnvVariables
mainmenu.add_item_to_application_menu(
spyder_path_action,
menu_id=ApplicationMenus.Tools,
section=ToolsMenuSections.Tools,
before=winenv_action,
before_section=ToolsMenuSections.External
)
# Main toolbar
from spyder.plugins.toolbar.api import (
ApplicationToolbars, MainToolbarSections)
self.toolbar.add_item_to_application_toolbar(
spyder_path_action,
toolbar_id=ApplicationToolbars.Main,
section=MainToolbarSections.ApplicationSection
)
self.set_splash(_("Setting up main window..."))
# TODO: Migrate to use the MainMenu Plugin instead of list of actions
# Filling out menu/toolbar entries:
add_actions(self.edit_menu, self.edit_menu_actions)
add_actions(self.search_menu, self.search_menu_actions)
add_actions(self.source_menu, self.source_menu_actions)
add_actions(self.run_menu, self.run_menu_actions)
add_actions(self.debug_menu, self.debug_menu_actions)
# Emitting the signal notifying plugins that main window menu and
# toolbar actions are all defined:
self.all_actions_defined.emit()
def __getattr__(self, attr):
"""
Redefinition of __getattr__ to enable access to plugins.
        Loaded plugins can be accessed as attributes of the main window,
        e.g. self.console or self.main.console, preserving the same
        accessors as in previous versions.
"""
        # Mapping of new plugin identifiers vs old attributes
# names given for plugins
try:
if attr in self._INTERNAL_PLUGINS_MAPPING.keys():
return self.get_plugin(
self._INTERNAL_PLUGINS_MAPPING[attr], error=False)
return self.get_plugin(attr)
except SpyderAPIError:
pass
return super().__getattr__(attr)
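    # Illustration (not part of Spyder's code): thanks to __getattr__ and
    # _INTERNAL_PLUGINS_MAPPING, old-style attribute access keeps working,
    # e.g. main_window.ipyconsole resolves to
    # get_plugin(Plugins.IPythonConsole) and main_window.findinfiles resolves
    # to get_plugin(Plugins.Find).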
def pre_visible_setup(self):
"""
Actions to be performed before the main window is visible.
The actions here are related with setting up the main window.
"""
logger.info("Setting up window...")
for plugin_name in PLUGIN_REGISTRY:
plugin_instance = PLUGIN_REGISTRY.get_plugin(plugin_name)
try:
plugin_instance.before_mainwindow_visible()
except AttributeError:
pass
# Tabify external plugins which were installed after Spyder was
# installed.
# Note: This is only necessary the first time a plugin is loaded.
        # Afterwards, the plugin placement is recorded on the window hexstate,
# which is loaded by the layouts plugin during the next session.
for plugin_name in PLUGIN_REGISTRY.external_plugins:
plugin_instance = PLUGIN_REGISTRY.get_plugin(plugin_name)
if plugin_instance.get_conf('first_time', True):
self.tabify_plugin(plugin_instance, Plugins.Console)
if self.splash is not None:
self.splash.hide()
# Menu about to show
for child in self.menuBar().children():
if isinstance(child, QMenu):
try:
child.aboutToShow.connect(self.update_edit_menu)
child.aboutToShow.connect(self.update_search_menu)
except TypeError:
pass
# Register custom layouts
for plugin_name in PLUGIN_REGISTRY.external_plugins:
plugin_instance = PLUGIN_REGISTRY.get_plugin(plugin_name)
if hasattr(plugin_instance, 'CUSTOM_LAYOUTS'):
if isinstance(plugin_instance.CUSTOM_LAYOUTS, list):
for custom_layout in plugin_instance.CUSTOM_LAYOUTS:
self.layouts.register_layout(
self, custom_layout)
else:
logger.info(
'Unable to load custom layouts for {}. '
'Expecting a list of layout classes but got {}'
.format(plugin_name, plugin_instance.CUSTOM_LAYOUTS)
)
self.layouts.update_layout_menu_actions()
logger.info("*** End of MainWindow setup ***")
self.is_starting_up = False
def post_visible_setup(self):
"""
Actions to be performed only after the main window's `show` method
is triggered.
"""
# Required plugins
projects = self.get_plugin(Plugins.Projects, error=False)
editor = self.get_plugin(Plugins.Editor, error=False)
# Process pending events and hide splash before loading the
# previous session.
QApplication.processEvents()
if self.splash is not None:
self.splash.hide()
# Call on_mainwindow_visible for all plugins.
for plugin_name in PLUGIN_REGISTRY:
plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
try:
plugin.on_mainwindow_visible()
QApplication.processEvents()
except AttributeError:
pass
self.restore_scrollbar_position.emit()
# Workaround for spyder-ide/spyder#880.
# QDockWidget objects are not painted if restored as floating
# windows, so we must dock them before showing the mainwindow,
# then set them again as floating windows here.
for widget in self.floating_dockwidgets:
widget.setFloating(True)
# Server to maintain just one Spyder instance and open files in it if
# the user tries to start other instances with
# $ spyder foo.py
if (CONF.get('main', 'single_instance') and not self.new_instance
and self.open_files_server):
t = threading.Thread(target=self.start_open_files_server)
t.setDaemon(True)
t.start()
# Connect the window to the signal emitted by the previous server
# when it gets a client connected to it
self.sig_open_external_file.connect(self.open_external_file)
# Update plugins toggle actions to show the "Switch to" plugin shortcut
self._update_shortcuts_in_panes_menu()
# Load project, if any.
# TODO: Remove this reference to projects once we can send the command
# line options to the plugins.
if self.open_project:
if not running_in_mac_app():
if projects:
projects.open_project(
self.open_project, workdir=self.init_workdir
)
else:
# Load last project if a project was active when Spyder
# was closed
reopen_last_session = False
if projects:
projects.reopen_last_project()
if projects.get_active_project() is None:
reopen_last_session = True
else:
reopen_last_session = True
# If no project is active or Projects is disabled, load last
# session
if editor and reopen_last_session:
editor.setup_open_files(close_previous_files=False)
# Raise the menuBar to the top of the main window widget's stack
# Fixes spyder-ide/spyder#3887.
self.menuBar().raise_()
# To avoid regressions. We shouldn't have loaded the modules
# below at this point.
if DEV is not None:
assert 'pandas' not in sys.modules
assert 'matplotlib' not in sys.modules
# Restore undocked plugins
self.restore_undocked_plugins()
# Notify that the setup of the mainwindow was finished
self.is_setting_up = False
self.sig_setup_finished.emit()
def restore_undocked_plugins(self):
"""Restore plugins that were undocked in the previous session."""
logger.info("Restoring undocked plugins from the previous session")
for plugin_name in PLUGIN_REGISTRY:
plugin = PLUGIN_REGISTRY.get_plugin(plugin_name)
if isinstance(plugin, SpyderDockablePlugin):
if plugin.get_conf('undocked_on_window_close', default=False):
plugin.get_widget().create_window()
elif isinstance(plugin, SpyderPluginWidget):
if plugin.get_option('undocked_on_window_close',
default=False):
plugin._create_window()
def set_window_title(self):
"""Set window title."""
if DEV is not None:
title = u"Spyder %s (Python %s.%s)" % (__version__,
sys.version_info[0],
sys.version_info[1])
elif running_in_mac_app() or is_pynsist():
title = "Spyder"
else:
title = u"Spyder (Python %s.%s)" % (sys.version_info[0],
sys.version_info[1])
if get_debug_level():
title += u" [DEBUG MODE %d]" % get_debug_level()
if self.window_title is not None:
title += u' -- ' + to_text_string(self.window_title)
# TODO: Remove self.projects reference once there's an API for setting
# window title.
projects = self.get_plugin(Plugins.Projects, error=False)
if projects:
path = projects.get_active_project_path()
if path:
path = path.replace(get_home_dir(), u'~')
title = u'{0} - {1}'.format(path, title)
self.base_title = title
self.setWindowTitle(self.base_title)
# TODO: To be removed after all actions are moved to their corresponding
# plugins
def register_shortcut(self, qaction_or_qshortcut, context, name,
add_shortcut_to_tip=True, plugin_name=None):
shortcuts = self.get_plugin(Plugins.Shortcuts, error=False)
if shortcuts:
shortcuts.register_shortcut(
qaction_or_qshortcut,
context,
name,
add_shortcut_to_tip=add_shortcut_to_tip,
plugin_name=plugin_name,
)
# --- Other
def update_source_menu(self):
"""Update source menu options that vary dynamically."""
# This is necessary to avoid an error at startup.
# Fixes spyder-ide/spyder#14901
try:
editor = self.get_plugin(Plugins.Editor, error=False)
if editor:
editor.refresh_formatter_name()
except AttributeError:
pass
def free_memory(self):
"""Free memory after event."""
gc.collect()
def plugin_focus_changed(self):
"""Focus has changed from one plugin to another"""
self.update_edit_menu()
self.update_search_menu()
def show_shortcuts(self, menu):
"""Show action shortcuts in menu."""
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
# This is a SpyderAction
if action._shown_shortcut is not None:
action.setShortcut(action._shown_shortcut)
elif action.menu() is not None:
# This is submenu, so we need to call this again
self.show_shortcuts(action.menu())
else:
# We don't need to do anything for other elements
continue
def hide_shortcuts(self, menu):
"""Hide action shortcuts in menu."""
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
# This is a SpyderAction
if action._shown_shortcut is not None:
action.setShortcut(QKeySequence())
elif action.menu() is not None:
# This is submenu, so we need to call this again
self.hide_shortcuts(action.menu())
else:
# We don't need to do anything for other elements
continue
def hide_options_menus(self):
"""Hide options menu when menubar is pressed in macOS."""
for plugin in self.widgetlist + self.thirdparty_plugins:
if plugin.CONF_SECTION == 'editor':
editorstack = self.editor.get_current_editorstack()
editorstack.menu.hide()
else:
try:
# New API
plugin.options_menu.hide()
except AttributeError:
# Old API
plugin._options_menu.hide()
def get_focus_widget_properties(self):
"""Get properties of focus widget
Returns tuple (widget, properties) where properties is a tuple of
booleans: (is_console, not_readonly, readwrite_editor)"""
from spyder.plugins.editor.widgets.base import TextEditBaseWidget
from spyder.plugins.ipythonconsole.widgets import ControlWidget
widget = QApplication.focusWidget()
textedit_properties = None
if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
console = isinstance(widget, ControlWidget)
not_readonly = not widget.isReadOnly()
readwrite_editor = not_readonly and not console
textedit_properties = (console, not_readonly, readwrite_editor)
return widget, textedit_properties
def update_edit_menu(self):
"""Update edit menu"""
widget, textedit_properties = self.get_focus_widget_properties()
if textedit_properties is None: # widget is not an editor/console
return
# !!! Below this line, widget is expected to be a QPlainTextEdit
# instance
console, not_readonly, readwrite_editor = textedit_properties
if hasattr(self, 'editor'):
# Editor has focus and there is no file opened in it
if (not console and not_readonly and self.editor
and not self.editor.is_file_opened()):
return
# Disabling all actions to begin with
for child in self.edit_menu.actions():
child.setEnabled(False)
self.selectall_action.setEnabled(True)
# Undo, redo
self.undo_action.setEnabled( readwrite_editor \
and widget.document().isUndoAvailable() )
self.redo_action.setEnabled( readwrite_editor \
and widget.document().isRedoAvailable() )
# Copy, cut, paste, delete
has_selection = widget.has_selected_text()
self.copy_action.setEnabled(has_selection)
self.cut_action.setEnabled(has_selection and not_readonly)
self.paste_action.setEnabled(not_readonly)
# Comment, uncomment, indent, unindent...
if not console and not_readonly:
# This is the editor and current file is writable
if self.get_plugin(Plugins.Editor, error=False):
for action in self.editor.edit_menu_actions:
action.setEnabled(True)
def update_search_menu(self):
"""Update search menu"""
# Disabling all actions except the last one
# (which is Find in files) to begin with
for child in self.search_menu.actions()[:-1]:
child.setEnabled(False)
widget, textedit_properties = self.get_focus_widget_properties()
if textedit_properties is None: # widget is not an editor/console
return
# !!! Below this line, widget is expected to be a QPlainTextEdit
# instance
console, not_readonly, readwrite_editor = textedit_properties
# Find actions only trigger an effect in the Editor
if not console:
for action in self.search_menu.actions():
try:
action.setEnabled(True)
except RuntimeError:
pass
# Disable the replace action for read-only files
if len(self.search_menu_actions) > 3:
self.search_menu_actions[3].setEnabled(readwrite_editor)
def createPopupMenu(self):
return self.application.get_application_context_menu(parent=self)
def set_splash(self, message):
"""Set splash message"""
if self.splash is None:
return
if message:
logger.info(message)
self.splash.show()
self.splash.showMessage(message,
int(Qt.AlignBottom | Qt.AlignCenter |
Qt.AlignAbsolute),
QColor(Qt.white))
QApplication.processEvents()
def closeEvent(self, event):
"""closeEvent reimplementation"""
if self.closing(True):
event.accept()
else:
event.ignore()
def resizeEvent(self, event):
"""Reimplement Qt method"""
if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
self.window_size = self.size()
QMainWindow.resizeEvent(self, event)
# To be used by the tour to be able to resize
self.sig_resized.emit(event)
def moveEvent(self, event):
"""Reimplement Qt method"""
if hasattr(self, 'layouts'):
if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
self.window_position = self.pos()
QMainWindow.moveEvent(self, event)
# To be used by the tour to be able to move
self.sig_moved.emit(event)
def hideEvent(self, event):
"""Reimplement Qt method"""
try:
for plugin in (self.widgetlist + self.thirdparty_plugins):
# TODO: Remove old API
try:
# New API
if plugin.get_widget().isAncestorOf(
self.last_focused_widget):
plugin.change_visibility(True)
except AttributeError:
# Old API
if plugin.isAncestorOf(self.last_focused_widget):
plugin._visibility_changed(True)
QMainWindow.hideEvent(self, event)
except RuntimeError:
QMainWindow.hideEvent(self, event)
def change_last_focused_widget(self, old, now):
"""To keep track of to the last focused widget"""
if (now is None and QApplication.activeWindow() is not None):
QApplication.activeWindow().setFocus()
self.last_focused_widget = QApplication.focusWidget()
elif now is not None:
self.last_focused_widget = now
self.previous_focused_widget = old
def closing(self, cancelable=False, close_immediately=False):
"""Exit tasks"""
if self.already_closed or self.is_starting_up:
return True
if cancelable and CONF.get('main', 'prompt_on_exit'):
reply = QMessageBox.critical(self, 'Spyder',
'Do you really want to exit?',
QMessageBox.Yes, QMessageBox.No)
if reply == QMessageBox.No:
return False
if CONF.get('main', 'single_instance') and self.open_files_server:
self.open_files_server.close()
can_close = PLUGIN_REGISTRY.delete_all_plugins(
excluding={Plugins.Layout}, close_immediately=close_immediately)
if not can_close and not close_immediately:
return False
# Save window settings *after* closing all plugin windows, in order
# to show them in their previous locations in the next session.
# Fixes spyder-ide/spyder#12139
prefix = 'window' + '/'
if self.layouts is not None:
self.layouts.save_current_window_settings(prefix)
PLUGIN_REGISTRY.delete_plugin(Plugins.Layout)
self.already_closed = True
return True
def add_dockwidget(self, plugin):
"""
Add a plugin QDockWidget to the main window.
"""
try:
# New API
if plugin.is_compatible:
dockwidget, location = plugin.create_dockwidget(self)
self.addDockWidget(location, dockwidget)
self.widgetlist.append(plugin)
except AttributeError:
# Old API
if plugin._is_compatible:
dockwidget, location = plugin._create_dockwidget()
self.addDockWidget(location, dockwidget)
self.widgetlist.append(plugin)
def global_callback(self):
"""Global callback"""
widget = QApplication.focusWidget()
action = self.sender()
callback = from_qvariant(action.data(), to_text_string)
from spyder.plugins.editor.widgets.base import TextEditBaseWidget
from spyder.plugins.ipythonconsole.widgets import ControlWidget
if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
getattr(widget, callback)()
else:
return
def redirect_internalshell_stdio(self, state):
console = self.get_plugin(Plugins.Console, error=False)
if console:
if state:
console.redirect_stds()
else:
console.restore_stds()
def open_external_console(self, fname, wdir, args, interact, debug, python,
python_args, systerm, post_mortem=False):
"""Open external console"""
if systerm:
# Running script in an external system terminal
try:
if CONF.get('main_interpreter', 'default'):
executable = get_python_executable()
else:
executable = CONF.get('main_interpreter', 'executable')
programs.run_python_script_in_terminal(
fname, wdir, args, interact, debug, python_args,
executable)
except NotImplementedError:
QMessageBox.critical(self, _("Run"),
_("Running an external system terminal "
"is not supported on platform %s."
) % os.name)
def open_file(self, fname, external=False):
"""
Open filename with the appropriate application
Redirect to the right widget (txt -> editor, spydata -> workspace, ...)
or open file outside Spyder (if extension is not supported)
"""
fname = to_text_string(fname)
ext = osp.splitext(fname)[1]
editor = self.get_plugin(Plugins.Editor, error=False)
variableexplorer = self.get_plugin(
Plugins.VariableExplorer, error=False)
if encoding.is_text_file(fname):
if editor:
editor.load(fname)
elif variableexplorer is not None and ext in IMPORT_EXT:
variableexplorer.get_widget().import_data(fname)
elif not external:
fname = file_uri(fname)
start_file(fname)
def open_external_file(self, fname):
"""
Open external files that can be handled either by the Editor or the
variable explorer inside Spyder.
"""
# Check that file exists
fname = encoding.to_unicode_from_fs(fname)
if osp.exists(osp.join(CWD, fname)):
fpath = osp.join(CWD, fname)
elif osp.exists(fname):
fpath = fname
else:
return
# Don't open script that starts Spyder at startup.
# Fixes issue spyder-ide/spyder#14483
if sys.platform == 'darwin' and 'bin/spyder' in fname:
return
if osp.isfile(fpath):
self.open_file(fpath, external=True)
elif osp.isdir(fpath):
QMessageBox.warning(
self, _("Error"),
_('To open <code>{fpath}</code> as a project with Spyder, '
'please use <code>spyder -p "{fname}"</code>.')
.format(fpath=osp.normpath(fpath), fname=fname)
)
# --- Path Manager
# ------------------------------------------------------------------------
def load_python_path(self):
"""Load path stored in Spyder configuration folder."""
if osp.isfile(self.SPYDER_PATH):
with open(self.SPYDER_PATH, 'r', encoding='utf-8') as f:
path = f.read().splitlines()
self.path = tuple(name for name in path if osp.isdir(name))
if osp.isfile(self.SPYDER_NOT_ACTIVE_PATH):
with open(self.SPYDER_NOT_ACTIVE_PATH, 'r',
encoding='utf-8') as f:
not_active_path = f.read().splitlines()
self.not_active_path = tuple(name for name in not_active_path
if osp.isdir(name))
def save_python_path(self, new_path_dict):
"""
Save path in Spyder configuration folder.
`new_path_dict` is an OrderedDict that has the new paths as keys and
the state as values. The state is `True` for active and `False` for
inactive.
"""
path = [p for p in new_path_dict]
not_active_path = [p for p in new_path_dict if not new_path_dict[p]]
try:
encoding.writelines(path, self.SPYDER_PATH)
encoding.writelines(not_active_path, self.SPYDER_NOT_ACTIVE_PATH)
except EnvironmentError as e:
logger.error(str(e))
CONF.set('main', 'spyder_pythonpath', self.get_spyder_pythonpath())
def get_spyder_pythonpath_dict(self):
"""
Return Spyder PYTHONPATH.
The returned ordered dictionary has the paths as keys and the state
as values. The state is `True` for active and `False` for inactive.
Example:
OrderedDict([('/some/path, True), ('/some/other/path, False)])
"""
self.load_python_path()
path_dict = OrderedDict()
for path in self.path:
path_dict[path] = path not in self.not_active_path
for path in self.project_path:
path_dict[path] = True
return path_dict
def get_spyder_pythonpath(self):
"""
Return Spyder PYTHONPATH.
"""
path_dict = self.get_spyder_pythonpath_dict()
path = [k for k, v in path_dict.items() if v]
return path
def update_python_path(self, new_path_dict):
"""Update python path on Spyder interpreter and kernels."""
# Load previous path
path_dict = self.get_spyder_pythonpath_dict()
# Save path
if path_dict != new_path_dict:
# It doesn't include the project_path
self.save_python_path(new_path_dict)
# Load new path
new_path_dict_p = self.get_spyder_pythonpath_dict() # Includes project
# Update Spyder interpreter
for path in path_dict:
while path in sys.path:
sys.path.remove(path)
for path, active in reversed(new_path_dict_p.items()):
if active:
sys.path.insert(1, path)
# Any plugin that needs to do some work based on this signal should
# connect to it on plugin registration
self.sig_pythonpath_changed.emit(path_dict, new_path_dict_p)
@Slot()
def show_path_manager(self):
"""Show path manager dialog."""
from spyder.widgets.pathmanager import PathManager
projects = self.get_plugin(Plugins.Projects, error=False)
read_only_path = ()
if projects:
read_only_path = tuple(projects.get_pythonpath())
dialog = PathManager(self, self.path, read_only_path,
self.not_active_path, sync=True)
self._path_manager = dialog
dialog.sig_path_changed.connect(self.update_python_path)
dialog.redirect_stdio.connect(self.redirect_internalshell_stdio)
dialog.show()
def pythonpath_changed(self):
"""Project's PYTHONPATH contribution has changed."""
projects = self.get_plugin(Plugins.Projects, error=False)
self.project_path = ()
if projects:
self.project_path = tuple(projects.get_pythonpath())
path_dict = self.get_spyder_pythonpath_dict()
self.update_python_path(path_dict)
#---- Preferences
def apply_settings(self):
"""Apply main window settings."""
qapp = QApplication.instance()
# Set 'gtk+' as the default theme in Gtk-based desktops
# Fixes spyder-ide/spyder#2036.
if is_gtk_desktop() and ('GTK+' in QStyleFactory.keys()):
try:
qapp.setStyle('gtk+')
except:
pass
default = self.DOCKOPTIONS
if CONF.get('main', 'vertical_tabs'):
default = default|QMainWindow.VerticalTabs
self.setDockOptions(default)
self.apply_panes_settings()
if CONF.get('main', 'use_custom_cursor_blinking'):
qapp.setCursorFlashTime(
CONF.get('main', 'custom_cursor_blinking'))
else:
qapp.setCursorFlashTime(self.CURSORBLINK_OSDEFAULT)
def apply_panes_settings(self):
"""Update dockwidgets features settings."""
for plugin in (self.widgetlist + self.thirdparty_plugins):
features = plugin.dockwidget.FEATURES
plugin.dockwidget.setFeatures(features)
try:
# New API
margin = 0
if CONF.get('main', 'use_custom_margin'):
margin = CONF.get('main', 'custom_margin')
plugin.update_margins(margin)
except AttributeError:
# Old API
plugin._update_margins()
@Slot()
def show_preferences(self):
"""Edit Spyder preferences."""
self.preferences.open_dialog(self.prefs_dialog_size)
def set_prefs_size(self, size):
"""Save preferences dialog size."""
self.prefs_dialog_size = size
# -- Open files server
def start_open_files_server(self):
self.open_files_server.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
port = select_port(default_port=OPEN_FILES_PORT)
CONF.set('main', 'open_files_port', port)
self.open_files_server.bind(('127.0.0.1', port))
self.open_files_server.listen(20)
        while True:
try:
req, dummy = self.open_files_server.accept()
except socket.error as e:
# See spyder-ide/spyder#1275 for details on why errno EINTR is
# silently ignored here.
eintr = errno.WSAEINTR if os.name == 'nt' else errno.EINTR
# To avoid a traceback after closing on Windows
if e.args[0] == eintr:
continue
# handle a connection abort on close error
enotsock = (errno.WSAENOTSOCK if os.name == 'nt'
else errno.ENOTSOCK)
if e.args[0] in [errno.ECONNABORTED, enotsock]:
return
raise
fname = req.recv(1024)
fname = fname.decode('utf-8')
self.sig_open_external_file.emit(fname)
req.sendall(b' ')
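    # Illustration (not part of Spyder's code): the client side of this
    # single-instance protocol is just a TCP connection that sends the UTF-8
    # encoded file name and waits for the one-byte acknowledgement. The port
    # value is a placeholder; the real one is stored in the
    # 'main/open_files_port' config option set above.
    #
    #     import socket
    #     client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    #     client.connect(('127.0.0.1', port))
    #     client.sendall('/path/to/file.py'.encode('utf-8'))
    #     client.recv(1)
    #     client.close()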
# ---- Quit and restart, and reset spyder defaults
@Slot()
def reset_spyder(self):
"""
Quit and reset Spyder and then Restart application.
"""
answer = QMessageBox.warning(self, _("Warning"),
_("Spyder will restart and reset to default settings: <br><br>"
"Do you want to continue?"),
QMessageBox.Yes | QMessageBox.No)
if answer == QMessageBox.Yes:
self.restart(reset=True)
@Slot()
def restart(self, reset=False, close_immediately=False):
"""Wrapper to handle plugins request to restart Spyder."""
self.application.restart(
reset=reset, close_immediately=close_immediately)
# ---- Global Switcher
def open_switcher(self, symbol=False):
"""Open switcher dialog box."""
if self.switcher is not None and self.switcher.isVisible():
self.switcher.clear()
self.switcher.hide()
return
if symbol:
self.switcher.set_search_text('@')
else:
self.switcher.set_search_text('')
self.switcher.setup()
self.switcher.show()
# Note: The +6 pixel on the top makes it look better
# FIXME: Why is this using the toolbars menu? A: To not be on top of
# the toolbars.
# Probably toolbars should be taken into account for this 'delta' only
        # when they are visible
delta_top = (self.toolbar.toolbars_menu.geometry().height() +
self.menuBar().geometry().height() + 6)
self.switcher.set_position(delta_top)
def open_symbolfinder(self):
"""Open symbol list management dialog box."""
self.open_switcher(symbol=True)
def create_switcher(self):
"""Create switcher dialog instance."""
if self.switcher is None:
from spyder.widgets.switcher import Switcher
self.switcher = Switcher(self)
return self.switcher
# --- For OpenGL
def _test_setting_opengl(self, option):
"""Get the current OpenGL implementation in use"""
if option == 'software':
return QCoreApplication.testAttribute(Qt.AA_UseSoftwareOpenGL)
elif option == 'desktop':
return QCoreApplication.testAttribute(Qt.AA_UseDesktopOpenGL)
elif option == 'gles':
return QCoreApplication.testAttribute(Qt.AA_UseOpenGLES)
#==============================================================================
# Main
#==============================================================================
def main(options, args):
"""Main function"""
# **** For Pytest ****
if running_under_pytest():
if CONF.get('main', 'opengl') != 'automatic':
option = CONF.get('main', 'opengl')
set_opengl_implementation(option)
app = create_application()
window = create_window(MainWindow, app, None, options, None)
return window
# **** Handle hide_console option ****
if options.show_console:
print("(Deprecated) --show console does nothing, now the default "
" behavior is to show the console, use --hide-console if you "
"want to hide it")
if set_attached_console_visible is not None:
set_attached_console_visible(not options.hide_console
or options.reset_config_files
or options.reset_to_defaults
or options.optimize
or bool(get_debug_level()))
# **** Set OpenGL implementation to use ****
# This attribute must be set before creating the application.
# See spyder-ide/spyder#11227
if options.opengl_implementation:
option = options.opengl_implementation
set_opengl_implementation(option)
else:
if CONF.get('main', 'opengl') != 'automatic':
option = CONF.get('main', 'opengl')
set_opengl_implementation(option)
# **** Set high DPI scaling ****
# This attribute must be set before creating the application.
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling,
CONF.get('main', 'high_dpi_scaling'))
# **** Set debugging info ****
if get_debug_level() > 0:
delete_debug_log_files()
setup_logging(options)
# **** Create the application ****
app = create_application()
# **** Create splash screen ****
splash = create_splash_screen()
if splash is not None:
splash.show()
splash.showMessage(
_("Initializing..."),
int(Qt.AlignBottom | Qt.AlignCenter | Qt.AlignAbsolute),
QColor(Qt.white)
)
QApplication.processEvents()
if options.reset_to_defaults:
# Reset Spyder settings to defaults
CONF.reset_to_defaults()
return
elif options.optimize:
# Optimize the whole Spyder's source code directory
import spyder
programs.run_python_script(module="compileall",
args=[spyder.__path__[0]], p_args=['-O'])
return
# **** Read faulthandler log file ****
faulthandler_file = get_conf_path('faulthandler.log')
previous_crash = ''
if osp.exists(faulthandler_file):
with open(faulthandler_file, 'r') as f:
previous_crash = f.read()
# Remove file to not pick it up for next time.
try:
dst = get_conf_path('faulthandler.log.old')
shutil.move(faulthandler_file, dst)
except Exception:
pass
CONF.set('main', 'previous_crash', previous_crash)
# **** Set color for links ****
set_links_color(app)
# **** Create main window ****
mainwindow = None
try:
if PY3 and options.report_segfault:
import faulthandler
with open(faulthandler_file, 'w') as f:
faulthandler.enable(file=f)
mainwindow = create_window(
MainWindow, app, splash, options, args
)
else:
mainwindow = create_window(MainWindow, app, splash, options, args)
except FontError:
QMessageBox.information(None, "Spyder",
"Spyder was unable to load the <i>Spyder 3</i> "
"icon theme. That's why it's going to fallback to the "
"theme used in Spyder 2.<br><br>"
"For that, please close this window and start Spyder again.")
CONF.set('appearance', 'icon_theme', 'spyder 2')
if mainwindow is None:
# An exception occurred
if splash is not None:
splash.hide()
return
ORIGINAL_SYS_EXIT()
if __name__ == "__main__":
main()
| 40.542381 | 85 | 0.570111 |
1b4951b9a4059712d6b041ddd6946604dd5242b6 | 4,427 | py | Python | utils/io.py | LeftThink/deep_sort_pytorch | c84a117e3dc3c83a620b6ca49380ca9460a659d6 | ["MIT"] | null | null | null | utils/io.py | LeftThink/deep_sort_pytorch | c84a117e3dc3c83a620b6ca49380ca9460a659d6 | ["MIT"] | null | null | null | utils/io.py | LeftThink/deep_sort_pytorch | c84a117e3dc3c83a620b6ca49380ca9460a659d6 | ["MIT"] | null | null | null |
import os
from typing import Dict
import numpy as np
# from utils.log import get_logger
def write_results(filename, results, data_type):
if data_type == 'mot':
save_format = '{frame},{id},{x1},{y1},{w},{h},-1,-1,-1,-1\n'
elif data_type == 'kitti':
save_format = '{frame} {id} pedestrian 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n'
else:
raise ValueError(data_type)
with open(filename, 'w') as f:
for frame_id, tlwhs, track_ids in results:
if data_type == 'kitti':
frame_id -= 1
for tlwh, track_id in zip(tlwhs, track_ids):
if track_id < 0:
continue
x1, y1, w, h = tlwh
x2, y2 = x1 + w, y1 + h
line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h)
f.write(line)
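# Usage sketch (not part of the original file): writing one frame of MOT-style
# results. The box, id and output path below are made-up values; the function
# is defined here for illustration and never called.
def _write_results_example():
    example = [
        # (frame_id, [tlwh boxes], [track ids])
        (1, [(10.0, 20.0, 50.0, 80.0)], [7]),
    ]
    # Produces a line like "1,7,10.0,20.0,50.0,80.0,-1,-1,-1,-1" in results.txt
    write_results('results.txt', example, data_type='mot')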
# def write_results(filename, results_dict: Dict, data_type: str):
# if not filename:
# return
# path = os.path.dirname(filename)
# if not os.path.exists(path):
# os.makedirs(path)
# if data_type in ('mot', 'mcmot', 'lab'):
# save_format = '{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n'
# elif data_type == 'kitti':
# save_format = '{frame} {id} pedestrian -1 -1 -10 {x1} {y1} {x2} {y2} -1 -1 -1 -1000 -1000 -1000 -10 {score}\n'
# else:
# raise ValueError(data_type)
# with open(filename, 'w') as f:
# for frame_id, frame_data in results_dict.items():
# if data_type == 'kitti':
# frame_id -= 1
# for tlwh, track_id in frame_data:
# if track_id < 0:
# continue
# x1, y1, w, h = tlwh
# x2, y2 = x1 + w, y1 + h
# line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h, score=1.0)
# f.write(line)
# logger.info('Save results to {}'.format(filename))
#def read_results(filename, data_type: str, is_gt=False, is_ignore=False):
def read_results(filename, data_type, is_gt=False, is_ignore=False):
if data_type in ('mot', 'lab'):
read_fun = read_mot_results
else:
raise ValueError('Unknown data type: {}'.format(data_type))
return read_fun(filename, is_gt, is_ignore)
"""
labels={'ped', ... % 1
'person_on_vhcl', ... % 2
'car', ... % 3
'bicycle', ... % 4
'mbike', ... % 5
'non_mot_vhcl', ... % 6
'static_person', ... % 7
'distractor', ... % 8
'occluder', ... % 9
'occluder_on_grnd', ... %10
'occluder_full', ... % 11
'reflection', ... % 12
'crowd' ... % 13
};
"""
def read_mot_results(filename, is_gt, is_ignore):
valid_labels = {1}
ignore_labels = {2, 7, 8, 12}
results_dict = dict()
if os.path.isfile(filename):
with open(filename, 'r') as f:
for line in f.readlines():
linelist = line.split(',')
if len(linelist) < 7:
continue
fid = int(linelist[0])
if fid < 1:
continue
results_dict.setdefault(fid, list())
if is_gt:
if 'MOT16-' in filename or 'MOT17-' in filename:
label = int(float(linelist[7]))
mark = int(float(linelist[6]))
if mark == 0 or label not in valid_labels:
continue
score = 1
elif is_ignore:
if 'MOT16-' in filename or 'MOT17-' in filename:
label = int(float(linelist[7]))
vis_ratio = float(linelist[8])
if label not in ignore_labels and vis_ratio >= 0:
continue
else:
continue
score = 1
else:
score = float(linelist[6])
tlwh = tuple(map(float, linelist[2:6]))
target_id = int(linelist[1])
results_dict[fid].append((tlwh, target_id, score))
return results_dict
def unzip_objs(objs):
if len(objs) > 0:
tlwhs, ids, scores = zip(*objs)
else:
tlwhs, ids, scores = [], [], []
tlwhs = np.asarray(tlwhs, dtype=float).reshape(-1, 4)
return tlwhs, ids, scores
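# Usage sketch (not part of the original file): reading MOT results back and
# unpacking one frame with unzip_objs. 'results.txt' is a placeholder path;
# the function is defined here for illustration and never called.
def _read_results_example():
    results = read_results('results.txt', 'mot', is_gt=False)
    # results maps frame id -> list of (tlwh, track_id, score) tuples
    first_frame = min(results) if results else None
    if first_frame is not None:
        tlwhs, ids, scores = unzip_objs(results[first_frame])
        return tlwhs.shape, ids, scores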
| 33.037313 | 121 | 0.500791 |
a7476e5349831eaaca0a7a46b5f59bc666bd2981 | 644 | py | Python | 8b.py | tavaresdu/PythonNetworkOS-Assessment | dd713fca815ff0dbe836ed8952b45923a68e479d | ["MIT"] | null | null | null | 8b.py | tavaresdu/PythonNetworkOS-Assessment | dd713fca815ff0dbe836ed8952b45923a68e479d | ["MIT"] | null | null | null | 8b.py | tavaresdu/PythonNetworkOS-Assessment | dd713fca815ff0dbe836ed8952b45923a68e479d | ["MIT"] | null | null | null |
import threading
a = list(enumerate([183677, 186720, 176916, 186554, 113034,
193701, 131768, 142185, 131518, 105202]))
b = [0 for i in range(len(a))]
def fatorial(n):
fat = n
for i in range(n-1, 1, -1):
fat = fat * i
return fat
def main(thread):
    # "while a" followed by a.pop() can race with another thread emptying the
    # list in between, so pop inside try/except to handle the empty case.
    while True:
        try:
            entry = a.pop()
        except IndexError:
            break
        print('thread '+str(thread)+' input '+str(entry))
        b[entry[0]] = fatorial(entry[1])
        print('terminado thread '+str(thread)+' input '+str(entry))
threads = []
for i in range(4):
t = threading.Thread(target=main, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
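# Alternative sketch (not part of the original exercise): the same fan-out with
# queue.Queue, which makes the hand-off between threads explicit. The numbers
# and thread count below are illustrative; the function is never called here.
def _queue_variant(numbers=(183677, 186720), n_threads=2):
    import queue
    q = queue.Queue()
    results = {}
    for idx, n in enumerate(numbers):
        q.put((idx, n))
    def worker():
        while True:
            try:
                idx, n = q.get_nowait()
            except queue.Empty:
                return
            results[idx] = fatorial(n)
    workers = [threading.Thread(target=worker) for _ in range(n_threads)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    return [results[i] for i in range(len(numbers))]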
| 22.206897 | 67 | 0.56677 |
1457114d7427a190d9754d8e584e2ccbcfa65b78 | 4,417 | py | Python | audio_encoder/audio.py | Domhnall-Liopa/Lip2Wav | 236ae24cd7945da8a75ddea1cfdc3da271c3c59f | ["MIT"] | null | null | null | audio_encoder/audio.py | Domhnall-Liopa/Lip2Wav | 236ae24cd7945da8a75ddea1cfdc3da271c3c59f | ["MIT"] | null | null | null | audio_encoder/audio.py | Domhnall-Liopa/Lip2Wav | 236ae24cd7945da8a75ddea1cfdc3da271c3c59f | ["MIT"] | null | null | null |
from scipy.ndimage.morphology import binary_dilation
from audio_encoder.params_data import *
from pathlib import Path
from typing import Optional, Union
import numpy as np
import webrtcvad
import librosa
import struct
int16_max = (2 ** 15) - 1
def preprocess_wav(fpath_or_wav: Union[str, Path, np.ndarray],
source_sr: Optional[int] = None):
"""
Applies the preprocessing operations used in training the Speaker Encoder to a waveform
either on disk or in memory. The waveform will be resampled to match the data hyperparameters.
:param fpath_or_wav: either a filepath to an audio file (many extensions are supported, not
    just .wav), or the waveform itself as a numpy array of floats.
:param source_sr: if passing an audio waveform, the sampling rate of the waveform before
preprocessing. After preprocessing, the waveform's sampling rate will match the data
hyperparameters. If passing a filepath, the sampling rate will be automatically detected and
this argument will be ignored.
"""
# Load the wav from disk if needed
if isinstance(fpath_or_wav, str) or isinstance(fpath_or_wav, Path):
wav, source_sr = librosa.load(fpath_or_wav, sr=None)
else:
wav = fpath_or_wav
# Resample the wav if needed
if source_sr is not None and source_sr != sampling_rate:
wav = librosa.resample(wav, source_sr, sampling_rate)
# Apply the preprocessing: normalize volume and shorten long silences
wav = normalize_volume(wav, audio_norm_target_dBFS, increase_only=True)
wav = trim_long_silences(wav)
return wav
def wav_to_mel_spectrogram(wav):
"""
Derives a mel spectrogram ready to be used by the encoder from a preprocessed audio waveform.
    Note: this is not a log-mel spectrogram.
"""
frames = librosa.feature.melspectrogram(
wav,
sampling_rate,
n_fft=int(sampling_rate * mel_window_length / 1000),
hop_length=int(sampling_rate * mel_window_step / 1000),
n_mels=mel_n_channels
)
return frames.astype(np.float32).T
def trim_long_silences(wav):
"""
Ensures that segments without voice in the waveform remain no longer than a
threshold determined by the VAD parameters in params.py.
:param wav: the raw waveform as a numpy array of floats
:return: the same waveform with silences trimmed away (length <= original wav length)
"""
# Compute the voice detection window size
samples_per_window = (vad_window_length * sampling_rate) // 1000
# Trim the end of the audio to have a multiple of the window size
wav = wav[:len(wav) - (len(wav) % samples_per_window)]
# Convert the float waveform to 16-bit mono PCM
pcm_wave = struct.pack("%dh" % len(wav), *(np.round(wav * int16_max)).astype(np.int16))
# Perform voice activation detection
voice_flags = []
vad = webrtcvad.Vad(mode=3)
for window_start in range(0, len(wav), samples_per_window):
window_end = window_start + samples_per_window
voice_flags.append(vad.is_speech(pcm_wave[window_start * 2:window_end * 2],
sample_rate=sampling_rate))
voice_flags = np.array(voice_flags)
# Smooth the voice detection with a moving average
def moving_average(array, width):
array_padded = np.concatenate((np.zeros((width - 1) // 2), array, np.zeros(width // 2)))
ret = np.cumsum(array_padded, dtype=float)
ret[width:] = ret[width:] - ret[:-width]
return ret[width - 1:] / width
audio_mask = moving_average(voice_flags, vad_moving_average_width)
    audio_mask = np.round(audio_mask).astype(bool)  # the np.bool alias is removed in recent NumPy
# Dilate the voiced regions
audio_mask = binary_dilation(audio_mask, np.ones(vad_max_silence_length + 1))
audio_mask = np.repeat(audio_mask, samples_per_window)
return wav[audio_mask == True]
def normalize_volume(wav, target_dBFS, increase_only=False, decrease_only=False):
if increase_only and decrease_only:
raise ValueError("Both increase only and decrease only are set")
rms = np.sqrt(np.mean((wav * int16_max) ** 2))
wave_dBFS = 20 * np.log10(rms / int16_max)
dBFS_change = target_dBFS - wave_dBFS
if dBFS_change < 0 and increase_only or dBFS_change > 0 and decrease_only:
return wav
return wav * (10 ** (dBFS_change / 20))
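# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# Chains the helpers above; "some_utterance.wav" is a hypothetical file and the
# mel parameters come from params_data, as assumed by the imports at the top.
if __name__ == "__main__":
    example_wav = preprocess_wav("some_utterance.wav")   # load, resample, normalize volume, trim silences
    example_mel = wav_to_mel_spectrogram(example_wav)    # (n_frames, mel_n_channels) float32 array
    print(example_wav.shape, example_mel.shape)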
| 40.154545
| 98
| 0.696174
|
6922dc3f892e2547a3e4e8a7e0feaf7cb0bf9a55
| 918
|
py
|
Python
|
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/EXT/framebuffer_multisample.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/EXT/framebuffer_multisample.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/EXT/framebuffer_multisample.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_EXT_framebuffer_multisample'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_EXT_framebuffer_multisample',error_checker=_errors._error_checker)
GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT=_C('GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_EXT',0x8D56)
GL_MAX_SAMPLES_EXT=_C('GL_MAX_SAMPLES_EXT',0x8D57)
GL_RENDERBUFFER_SAMPLES_EXT=_C('GL_RENDERBUFFER_SAMPLES_EXT',0x8CAB)
@_f
@_p.types(None,_cs.GLenum,_cs.GLsizei,_cs.GLenum,_cs.GLsizei,_cs.GLsizei)
def glRenderbufferStorageMultisampleEXT(target,samples,internalformat,width,height):pass
| 45.9
| 125
| 0.816993
|
4edd4b967cac8b5727ae2f3650cf87f0de35215b
| 5,582
|
py
|
Python
|
plipify/_deprecated.py
|
volkamerlab/plipify
|
c7b68bbf325aac904969867c7fbed5179f9670b7
|
[
"MIT"
] | 6
|
2022-01-19T13:31:57.000Z
|
2022-02-09T05:48:08.000Z
|
plipify/_deprecated.py
|
volkamerlab/plipify
|
c7b68bbf325aac904969867c7fbed5179f9670b7
|
[
"MIT"
] | 4
|
2021-10-13T08:45:33.000Z
|
2022-02-11T15:56:09.000Z
|
plipify/_deprecated.py
|
volkamerlab/plipify
|
c7b68bbf325aac904969867c7fbed5179f9670b7
|
[
"MIT"
] | 2
|
2022-01-25T03:01:56.000Z
|
2022-03-21T09:26:31.000Z
|
import csv
import os
import pandas as pd
from tqdm.auto import tqdm
from plip.structure.preparation import PDBComplex
from plip.exchange.report import BindingSiteReport
from IPython.display import display, Markdown
##### PREPARATION ######
# fixed for now
interaction_types = [
"hydrophobic",
"hbond",
"waterbridge",
"saltbridge",
"pistacking",
"pication",
"halogen",
"metal",
]
def read_residues(path):
"""
Reads residue file.
"""
with open(path) as file:
residue_reader = csv.reader(file, delimiter=",")
residues = next(residue_reader)
residues = list(map(int, residues))
return residues
def divide_list(list_name, n):
"""
Split list into lists of given size.
"""
for i in range(0, len(list_name), n):
yield list_name[i : i + n]
def residue_dictionary(residues):
"""
Create index dictionary for residues.
"""
residue_i = range(0, len(residues) * len(interaction_types))
residue_i = list(divide_list(residue_i, len(interaction_types)))
residue_dict = dict(zip(residues, residue_i))
return residue_dict
def interaction_dictionary(interaction_types):
"""
Create index dictionary for interactions.
"""
interaction_i = range(0, len(interaction_types))
interaction_dict = dict(zip(interaction_types, interaction_i))
return interaction_dict
###### PLIP DATA ######
def analyze_interactions(pdbfile):
protlig = PDBComplex()
protlig.load_pdb(pdbfile) # load the pdb
for ligand in protlig.ligands:
protlig.characterize_complex(ligand) # find ligands and analyze interactions
sites = {}
for key, site in sorted(protlig.interaction_sets.items()):
binding_site = BindingSiteReport(site) # collect data about interactions
# tuples of *_features and *_info will be converted to pandas df
keys = (
"hydrophobic",
"hbond",
"waterbridge",
"saltbridge",
"pistacking",
"pication",
"halogen",
"metal",
)
interactions = {
k: [getattr(binding_site, k + "_features")] + getattr(binding_site, k + "_info")
for k in keys
}
sites[key] = interactions
return sites
# for displaying data
def site_to_dataframes(site):
keys = [
"hydrophobic",
"hbond",
"waterbridge",
"saltbridge",
"pistacking",
"pication",
"halogen",
"metal",
]
dfs = {}
for key in keys:
records = site[key][1:]
if not records:
dfs[key] = None
else:
dfs[key] = pd.DataFrame.from_records(records, columns=site[key][0])
return dfs
def get_plip_data(data, name_file):
"""
Get data from plip and store it in a dictionary.
"""
with open(os.path.join(data, name_file)) as f:
non_covalent_filenames = [line.strip() for line in f if line.strip()]
interactions = {}
for filename in tqdm(non_covalent_filenames):
full_filename = os.path.join(data, filename)
if not os.path.isfile(full_filename):
print("File", full_filename, "does not exist?")
continue
interactions[filename] = analyze_interactions(full_filename)
return interactions
def show_plip_data(interactions):
for structure, sites in interactions.items():
display(Markdown("# Structure {}".format(structure)))
for site_name, site_interactions in sites.items():
if not site_name.startswith("LIG"):
continue # fragments are labeled as LIG; other "sites" detected by PLIP are XRC artefacts
display(Markdown("## Site {}".format(site_name)))
for interaction_type, dataframe in site_to_dataframes(site_interactions).items():
if dataframe is not None:
display(Markdown("### {}".format(interaction_type)))
display(dataframe)
###### FINGERPRINTS ######
def interaction_fingerprint(residue_dictionary, interaction_dict, residue_list, interaction_type):
"""
Create one indivual fingerprint for one structure.
"""
fp_length = len(residue_dictionary) * len(interaction_dict)
fp = [0] * fp_length
for residue in residue_list:
fp_index = residue_dictionary[residue][interaction_dict[interaction_type]]
fp[fp_index] = fp[fp_index] + 1
return fp
def interaction_fingerprint_list(interactions, residue_dict, interaction_dict):
"""
Create list of fingerprints for all given structures.
"""
fp_list = []
    for structure, sites in interactions.items():  # unpack as in show_plip_data; sites is the per-structure dict
for site_name, site_interactions in sites.items():
if not site_name.startswith("LIG"):
continue # fragments are labeled as LIG; other "sites" detected by PLIP are XRC artefacts
for interaction_type, dataframe in site_to_dataframes(site_interactions).items():
if dataframe is not None:
residue_nos = dataframe["RESNR"].tolist()
fp = interaction_fingerprint(
residue_dict, interaction_dict, residue_nos, interaction_type
)
fp_list.append(fp)
return fp_list
def frequency_interaction_fingerprint(fp_list):
"""
Create frequency fingerprint from list of fingerprints.
"""
count_fp = [sum(i) for i in zip(*fp_list)]
frequency_fp = [float(i) / len(fp_list) for i in count_fp]
return frequency_fp
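# --- Hedged toy example (illustrative only, not part of the original module) ---
# Shows how the index dictionaries map (residue, interaction type) pairs onto
# fingerprint positions; the residue numbers 10 and 42 are made up.
if __name__ == "__main__":
    example_residues = [10, 42]
    res_dict = residue_dictionary(example_residues)
    int_dict = interaction_dictionary(interaction_types)
    fp = interaction_fingerprint(res_dict, int_dict, [10, 10, 42], "hbond")
    print(len(fp), fp)  # 16 slots: 2 residues x 8 interaction types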
| 31.184358
| 106
| 0.629703
|
83052e1afca2d962e560e396f8ff4b72430b7b0c
| 907
|
py
|
Python
|
tfc_client/models/notification_configuration.py
|
spiresystems/iwc-tfc-client
|
15e86fef7a8090f6a8c3f792ba46c60d11f7b0ef
|
[
"MIT"
] | 9
|
2019-11-18T13:38:10.000Z
|
2021-09-24T21:59:10.000Z
|
tfc_client/models/notification_configuration.py
|
spiresystems/iwc-tfc-client
|
15e86fef7a8090f6a8c3f792ba46c60d11f7b0ef
|
[
"MIT"
] | 10
|
2019-11-10T23:46:54.000Z
|
2022-03-30T15:46:56.000Z
|
tfc_client/models/notification_configuration.py
|
spiresystems/iwc-tfc-client
|
15e86fef7a8090f6a8c3f792ba46c60d11f7b0ef
|
[
"MIT"
] | 4
|
2019-11-18T14:06:04.000Z
|
2021-11-09T15:42:44.000Z
|
from datetime import datetime
from typing import Optional, List, Any, Dict
try:
from pydantic import HttpUrl
except ImportError:
from typing import AnyStr as HttpUrl
from . import KebabCaseBaseModel
from .data import AttributesModel
from ..enums import NotificationTrigger, NotificationsDestinationType
class DeliveryResponseModel(KebabCaseBaseModel):
url: HttpUrl
body: str
code: int
headers: Dict[str, List[str]]
sent_at: datetime
successful: bool
class NotificationConfigurationModel(AttributesModel):
enabled: Optional[bool] = True
name: Optional[str]
url: Optional[HttpUrl]
destination_type: Optional[NotificationsDestinationType]
    token: Optional[str]
triggers: Optional[List[NotificationTrigger]]
delivery_responses: Optional[List[DeliveryResponseModel]]
created_at: Optional[datetime]
updated_at: Optional[datetime]
| 27.484848
| 69
| 0.76957
|
1d592159a84cfd86c0d34bfdf1ab9036d63f4b89
| 4,790
|
py
|
Python
|
karesansui/gadget/hostby1iptablesstatus.py
|
GOMYWAY-NETWORKS-LLC/karesansui
|
f4ba1cf6f88cf76c3e4dbc444139d73134f7c9d1
|
[
"MIT"
] | 54
|
2015-01-09T16:49:34.000Z
|
2022-02-03T10:08:30.000Z
|
karesansui/gadget/hostby1iptablesstatus.py
|
GOMYWAY-NETWORKS-LLC/karesansui
|
f4ba1cf6f88cf76c3e4dbc444139d73134f7c9d1
|
[
"MIT"
] | 2
|
2016-02-22T08:23:36.000Z
|
2021-05-10T04:13:01.000Z
|
karesansui/gadget/hostby1iptablesstatus.py
|
GOMYWAY-NETWORKS-LLC/karesansui
|
f4ba1cf6f88cf76c3e4dbc444139d73134f7c9d1
|
[
"MIT"
] | 27
|
2015-08-21T13:33:49.000Z
|
2021-11-19T02:56:20.000Z
|
# -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import simplejson as json
import web
import karesansui
from karesansui.lib.const import IPTABLES_COMMAND_CONTROL
from karesansui.lib.rest import Rest, auth
from karesansui.lib.utils import is_param
from karesansui.lib.checker import \
Checker, CHECK_EMPTY, CHECK_VALID
from karesansui.db.access.machine2jobgroup import new as m2j_new
from karesansui.db.access._2pysilhouette import save_job_collaboration
from karesansui.db.access.machine import findbyhost1
from karesansui.db.access._2pysilhouette import jg_save
from karesansui.db.model._2pysilhouette import Job, JobGroup
from pysilhouette.command import dict2command
IPTABLES_ACTION_START = 1<<0
IPTABLES_ACTION_STOP = 1<<1
IPTABLES_STATUS = {
"START" :1,
"STOP" :2,
"RESTART" :3
}
# lib public
def iptables_control(obj, model, action='',options={}):
if action != "":
options['action'] = action
if action == "restart":
action_msg = "Restart iptables"
elif action == "start":
action_msg = "Start iptables"
msg = "Start iptables"
elif action == "stop":
action_msg = "Stop iptables"
_cmd = dict2command(
"%s/%s" % (karesansui.config['application.bin.dir'], IPTABLES_COMMAND_CONTROL), options)
_jobgroup = JobGroup(action_msg, karesansui.sheconf['env.uniqkey'])
_jobgroup.jobs.append(Job("%s command" % action_msg, 0, _cmd))
_machine2jobgroup = m2j_new(machine=model,
jobgroup_id=-1,
uniq_key=karesansui.sheconf['env.uniqkey'],
created_user=obj.me,
modified_user=obj.me,
)
save_job_collaboration(obj.orm,
obj.pysilhouette.orm,
_machine2jobgroup,
_jobgroup,
)
return True
def validates_iptables_status(obj):
checker = Checker()
check = True
_ = obj._
checker.errors = []
if is_param(obj.input, 'status'):
check = checker.check_status(
_('Status'),
obj.input.status,
CHECK_EMPTY | CHECK_VALID,
IPTABLES_STATUS.values()
) and check
else:
check = False
checker.add_error(_('"%s" is required.') % _('Status'))
obj.view.alert = checker.errors
return check
class HostBy1IptablesStatus(Rest):
@auth
def _PUT(self, *param, **params):
"""<comment-ja>
ステータス更新
- param
- read = 0
- start = 1
- stop = 2
- restart = 3
</comment-ja>
<comment-en>
TODO: English Comment
</comment-en>
"""
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
if not validates_iptables_status(self):
return web.badrequest(self.view.alert)
status = int(self.input.status)
model = findbyhost1(self.orm, host_id)
ret = False
if status & IPTABLES_ACTION_STOP and status & IPTABLES_ACTION_START:
ret = iptables_control(self, model, 'restart')
elif status & IPTABLES_ACTION_STOP:
ret = iptables_control(self, model, 'stop')
elif status & IPTABLES_ACTION_START:
ret = iptables_control(self, model, 'start')
if ret is True:
return web.accepted(url=web.ctx.path)
else:
return False
urls = (
'/host/(\d+)/iptables/status/?(\.part)$', HostBy1IptablesStatus,
)
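# --- Hedged illustration (not part of the original module) ---
# The PUT handler decodes the submitted status as a bit field: bit 0 requests a
# start, bit 1 a stop, and both bits together a restart.
if __name__ == "__main__":
    for name, status in IPTABLES_STATUS.items():
        print(name, bool(status & IPTABLES_ACTION_START), bool(status & IPTABLES_ACTION_STOP))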
| 31.30719
| 96
| 0.630898
|
1a8a9dfaa17550c7fbc72f01c7a82fd81b97e67b
| 6,877
|
py
|
Python
|
robot/client_vision.py
|
cosmoharrigan/rc-nfq
|
0b18d27d0b95644bded258d5a9fbb5cb4f895c91
|
[
"MIT"
] | 15
|
2016-04-06T10:30:26.000Z
|
2020-03-30T04:26:27.000Z
|
robot/client_vision.py
|
cosmoharrigan/rc-nfq
|
0b18d27d0b95644bded258d5a9fbb5cb4f895c91
|
[
"MIT"
] | null | null | null |
robot/client_vision.py
|
cosmoharrigan/rc-nfq
|
0b18d27d0b95644bded258d5a9fbb5cb4f895c91
|
[
"MIT"
] | 8
|
2016-08-26T01:59:51.000Z
|
2019-10-18T07:48:53.000Z
|
'''Implementation of a client for a custom LEGO Mindstorms EV3 robot.
This script runs on the EV3 and requires the following custom firmware:
- http://www.ev3dev.org/
- https://github.com/topikachu/python-ev3
'''
from time import sleep
import curses
import random
from ev3dev.auto import *
import urllib
import urllib2
import random
import math
# Configure the following parameter:
IP_ADDRESS = '192.168.0.38'
front_lmotor, front_rmotor = [LargeMotor(port) for port in (OUTPUT_D, OUTPUT_A)]
rear_lmotor, rear_rmotor = [LargeMotor(port) for port in (OUTPUT_C, OUTPUT_B)]
# Check that the motors are actually connected
assert front_lmotor.connected
assert front_rmotor.connected
assert rear_lmotor.connected
assert rear_rmotor.connected
# Connect touch sensor and remote control
ts = TouchSensor(); assert ts.connected
cs = ColorSensor(); assert cs.connected
# Put the color sensor into color mode.
cs.mode = 'COL-COLOR'
colors = {0: 'none', 1: 'black', 2: 'blue', 3: 'green', 4: 'yellow', 5: 'red',
6: 'white', 7: 'brown'}
# Define the mapping from action indices to motor torques
MAX_SPEED = 80
SMALL_SPEED = 30
# Tuple ordering: front left, front right, rear left, rear right
actions = {0: (MAX_SPEED, MAX_SPEED, -MAX_SPEED, -MAX_SPEED), # Forward
1: (-MAX_SPEED, -MAX_SPEED, SMALL_SPEED, SMALL_SPEED), # Reverse
2: (-MAX_SPEED, MAX_SPEED, MAX_SPEED, -MAX_SPEED), # Turn Left
3: (MAX_SPEED, -MAX_SPEED, -MAX_SPEED, MAX_SPEED), # Turn Right
           4: (0, 0, 0, 0)}            # Stop (all four motors idle)
stdscr = curses.initscr()
# Robot State
state = {}
state['dc_left'] = 0
state['dc_right'] = 0
environment = {}
environment['collision'] = 0
environment['color_sensor'] = 0
environment['cumulative_reward'] = 0
front_lmotor.duty_cycle_sp = 0
front_rmotor.duty_cycle_sp = 0
front_lmotor.position = 0
front_rmotor.position = 0
front_lmotor.run_direct()
front_rmotor.run_direct()
rear_lmotor.duty_cycle_sp = 0
rear_rmotor.duty_cycle_sp = 0
rear_lmotor.position = 0
rear_rmotor.position = 0
rear_lmotor.run_direct()
rear_rmotor.run_direct()
step = 0
time = 0
action = 0 # Initial action before the first (s, a, r, s') tuple is generated
collision_this_timestep = False
color_sensor_fsm_state = 'waiting'
debug_colors = []
# Main loop
print('Starting episode.')
while True:
# ---------------- Update instantaneous state ----------------
# A collision can occur at any time during a time step
ts_value = ts.value()
if ts_value:
collision_this_timestep = True
environment['color_sensor'] = cs.value()
# A finite state machine monitors track progress
if color_sensor_fsm_state == 'waiting':
if colors[environment['color_sensor']] == 'yellow':
color_sensor_fsm_state = 'saw_yellow'
elif colors[environment['color_sensor']] == 'red':
color_sensor_fsm_state = 'saw_red'
elif color_sensor_fsm_state == 'saw_yellow':
if colors[environment['color_sensor']] == 'red':
color_sensor_fsm_state = 'positive_reward'
elif colors[environment['color_sensor']] != 'yellow':
color_sensor_fsm_state = 'waiting'
elif color_sensor_fsm_state == 'saw_red':
if colors[environment['color_sensor']] == 'yellow':
color_sensor_fsm_state = 'negative_reward'
elif colors[environment['color_sensor']] != 'red':
color_sensor_fsm_state = 'waiting'
speed_left = front_lmotor.speed
speed_right = front_rmotor.speed
state['position_left'] = front_lmotor.position
state['position_right'] = front_rmotor.position
stdscr.addstr(1, 0, "Robot State", curses.A_REVERSE)
stdscr.addstr(4, 0, "DC Left: {}".format(str(state['dc_left']).zfill(4)), curses.A_NORMAL)
stdscr.addstr(5, 0, "DC Right: {}".format(str(state['dc_right']).zfill(4)), curses.A_NORMAL)
    stdscr.addstr(8, 0, "Cumulative Reward: {}".format(str(environment['cumulative_reward']).zfill(8)), curses.A_BOLD)
stdscr.addstr(10, 0, "Environment Information", curses.A_REVERSE)
stdscr.addstr(12, 0, "Touch sensor: {}".format(ts_value), curses.A_NORMAL)
stdscr.addstr(13, 0, "Color sensor: {}".format(colors[environment['color_sensor']].ljust(20)), curses.A_NORMAL)
nc = min(len(debug_colors), 5)
stdscr.addstr(14, 0, "Color history: {}".format(debug_colors[-nc:]), curses.A_NORMAL)
stdscr.addstr(16, 0, "Speed Left: {}".format(str(speed_left).zfill(5)), curses.A_NORMAL)
stdscr.addstr(17, 0, "Speed Right: {}".format(str(speed_right).zfill(5)), curses.A_NORMAL)
stdscr.addstr(18, 0, "Position Left: {}".format(str(state['position_left']).zfill(7)), curses.A_NORMAL)
stdscr.addstr(19, 0, "Position Right: {}".format(str(state['position_right']).zfill(7)), curses.A_NORMAL)
stdscr.addstr(25, 0, "Step: {}".format(str(step).zfill(8)), curses.A_NORMAL)
stdscr.addstr(26, 0, "Time: {}".format(str(time).zfill(8)), curses.A_NORMAL)
if action is not None:
stdscr.addstr(27, 0, "Action: {}".format(action), curses.A_NORMAL)
stdscr.refresh()
# Number of counts to wait between time units
num_counts_per_time_unit = 5
if step == num_counts_per_time_unit:
# --------------- Perform update ---------------
# If not first step:
if time > 0:
a = action
progress_reward = 0
if color_sensor_fsm_state == 'positive_reward':
progress_reward = 5
color_sensor_fsm_state = 'waiting'
elif color_sensor_fsm_state == 'negative_reward':
progress_reward = -2
color_sensor_fsm_state = 'waiting'
# Calculate reward
r = progress_reward - 2 * collision_this_timestep #- 0.1
# Send (_, a, r, _) tuple to server
url = 'http://{}:5000/update/'.format(IP_ADDRESS)
data = {'a': action,
'r': r}
u = urllib2.urlopen(url, data=urllib.urlencode(data))
# Consult the policy to obtain the next action
        url = 'http://{}:5000/policy/'.format(IP_ADDRESS)
action_response = urllib2.urlopen(url)
action = int(action_response.read())
# Apply the action to the robot's actuators
action_front_l_motor, action_front_r_motor, \
action_rear_l_motor, action_rear_r_motor = actions[action]
front_lmotor.duty_cycle_sp = action_front_l_motor
front_rmotor.duty_cycle_sp = action_front_r_motor
rear_lmotor.duty_cycle_sp = action_rear_l_motor
rear_rmotor.duty_cycle_sp = action_rear_r_motor
# --------------- End perform update ---------------
# Reset step variables
collision_this_timestep = False
time += 1
step = 0
previous_state = state.copy()
else:
step += 1
| 36.775401
| 118
| 0.654501
|
254b655a3cf1e662f704872918fab5ac4affc678
| 2,129
|
py
|
Python
|
abtest/settings.py
|
SchuylerGoodman/topicalguide
|
7c26c8be8e1dddb7bf2be33ea9a7ba59034bf620
|
[
"PostgreSQL"
] | null | null | null |
abtest/settings.py
|
SchuylerGoodman/topicalguide
|
7c26c8be8e1dddb7bf2be33ea9a7ba59034bf620
|
[
"PostgreSQL"
] | null | null | null |
abtest/settings.py
|
SchuylerGoodman/topicalguide
|
7c26c8be8e1dddb7bf2be33ea9a7ba59034bf620
|
[
"PostgreSQL"
] | null | null | null |
# The Topical Guide
# Copyright 2010-2011 Brigham Young University
#
# This file is part of the Topical Guide <http://nlp.cs.byu.edu/topic_browser>.
#
# The Topical Guide is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# The Topical Guide is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
# for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with the Topical Guide. If not, see <http://www.gnu.org/licenses/>.
#
# If you have inquiries regarding any further use of the Topical Guide, please
# contact the Copyright Licensing Office, Brigham Young University, 3760 HBLL,
# Provo, UT 84602, (801) 422-9339 or 422-3821, e-mail copyright@byu.edu.
import os
import topicalguide.settings as settings
from topicalguide.settings import BASE_DIR
"""
TEST_LIST is a dict of test name to test directory information.
Example:
TEST_LIST = {
'a' : {
'BASE_DIR' : os.path.join(BASE_DIR, 'a'),
'TEMPLATE_DIR' : 'templates',
'TEMPLATE' : 'root.html',
'VIEW_PACKAGE' : 'a.root.root',
}
}
This example creates a test at http://example.edu/a/
It uses the full path to BASE_DIR/a/ as the base directory for all files needed by this test's template and view
The templates for the test are in BASE_DIR/a/templates/
The template to test is BASE_DIR/a/templates/root.html
The package name of the view that is rendering that template is a.root.root.
(In other words it is the function root() at BASE_DIR/a/root.py)
"""
TEST_LIST = {
}
# this adds the template paths to the global site settings, so that the Django loader can find them
new_template_dirs = [os.path.join(TEST_LIST[i]['BASE_DIR'], TEST_LIST[i]['TEMPLATE_DIR']) for i in TEST_LIST]
settings.TEMPLATE_DIRS.extend(new_template_dirs)
| 39.425926
| 112
| 0.744481
|
2290e4b60991083c8ca8db3553f944d5cb115c84
| 126
|
py
|
Python
|
tests/test_data/test_data_utils/test_adj_matrix.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 2
|
2021-12-13T17:52:17.000Z
|
2021-12-13T17:52:18.000Z
|
tests/test_data/test_data_utils/test_adj_matrix.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 4
|
2020-10-06T21:06:15.000Z
|
2020-10-10T01:18:23.000Z
|
tests/test_data/test_data_utils/test_adj_matrix.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | null | null | null |
from caldera.data.utils import adj_matrix
def test_adj_matrix(random_data_example):
M = adj_matrix(random_data_example)
| 21
| 41
| 0.81746
|
2af54c222a2d7e10b01408a78e2976cfb5c94ea3
| 204
|
py
|
Python
|
tests/app/urls.py
|
gasman/wagtailmodelchooser
|
1aef9c0f3589d9ad81fe04dadeacc90a27e315d8
|
[
"BSD-2-Clause"
] | 49
|
2019-03-01T15:50:32.000Z
|
2022-03-01T10:47:57.000Z
|
tests/app/urls.py
|
gasman/wagtailmodelchooser
|
1aef9c0f3589d9ad81fe04dadeacc90a27e315d8
|
[
"BSD-2-Clause"
] | 15
|
2019-08-08T11:47:27.000Z
|
2022-02-15T06:18:48.000Z
|
tests/app/urls.py
|
gasman/wagtailmodelchooser
|
1aef9c0f3589d9ad81fe04dadeacc90a27e315d8
|
[
"BSD-2-Clause"
] | 18
|
2019-03-11T19:30:49.000Z
|
2022-03-02T13:07:13.000Z
|
import wagtail.admin.urls
import wagtail.core.urls
from django.conf.urls import include, url
urlpatterns = [
url(r'^admin/', include(wagtail.admin.urls)),
url(r'', include(wagtail.core.urls)),
]
| 22.666667
| 49
| 0.715686
|
8c02c3283e50ade5af27c52730541e89a2f548c4
| 712
|
py
|
Python
|
euchre/abstract/team.py
|
bradleycwojcik/euchre-cli
|
e4ffcdb16720d8dafe6b5b00b50eb923c1fcfe27
|
[
"MIT"
] | 3
|
2020-10-07T08:23:12.000Z
|
2021-11-20T16:33:40.000Z
|
euchre/abstract/team.py
|
bradleycwojcik/euchre-cli
|
e4ffcdb16720d8dafe6b5b00b50eb923c1fcfe27
|
[
"MIT"
] | 28
|
2020-07-14T01:29:33.000Z
|
2021-11-20T04:48:09.000Z
|
euchre/abstract/team.py
|
boldandbrad/euchre-cli
|
6e03f76c5feb50d677ab2558707182fa7dd5d127
|
[
"MIT"
] | 4
|
2020-09-07T04:25:04.000Z
|
2021-11-11T07:20:01.000Z
|
class Team:
"""Representation of a team of players."""
def __init__(self, name: str) -> None:
self.name = name
self.game_score = 0
# reset every hand
self.called_trump = False
self.trick_score = 0
def won_hand(self, points: int) -> None:
"""Increment game_score for winning hand.
Args:
points (int): Number of points to add to score.
"""
self.game_score += points
def won_trick(self) -> None:
"""Increment trick_score for winning trick."""
self.trick_score += 1
def __repr__(self) -> str:
return f"Team({self.name})"
def __str__(self) -> str:
return f"{self.name}"
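# --- Hedged usage sketch (illustrative only, not part of the original module) ---
if __name__ == "__main__":
    team = Team("Blue")
    team.won_trick()
    team.won_hand(points=2)
    print(repr(team), team.trick_score, team.game_score)  # Team(Blue) 1 2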
| 24.551724
| 59
| 0.566011
|
68df5b3d02c789002b5a9bdf336d6c4a321e780f
| 6,684
|
py
|
Python
|
bin/charts.py
|
grinchios/college_project-commented
|
3cc5ccd6c1b946b625e1dcc0689c9273f6ba2178
|
[
"MIT"
] | null | null | null |
bin/charts.py
|
grinchios/college_project-commented
|
3cc5ccd6c1b946b625e1dcc0689c9273f6ba2178
|
[
"MIT"
] | null | null | null |
bin/charts.py
|
grinchios/college_project-commented
|
3cc5ccd6c1b946b625e1dcc0689c9273f6ba2178
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtGui, QtWidgets
from matplotlib.backends.backend_qt5agg \
import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from PyQt5.QtWidgets import *
from datetime import date, timedelta, datetime
'''
P1
Creating graphs for key data. The graphs on the manager's panel will be the week's intake and a bar chart for stock, which will load with the page. There will be graphs for the most common treatment, income for a time period, and outgoings per stock type; these will be on a separate page with buttons to generate them.
O4
Data will be shown in tables; all the graphs on the manager panel will load with the page, but any on the main chart form will have buttons to generate them.
'''
class createCharts():
'''
P1
    Creating graphs for key data. The graphs on the manager's panel will be the week's intake and a bar chart for stock, which will load with the page. There will be graphs for the most common treatment, income for a time period, and outgoings per stock type; these will be on a separate page with buttons to generate them.
'''
'''
O4
    Data will be shown in tables; all the graphs on the manager panel will load with the page, but any on the main chart form will have buttons to generate them.
'''
def __init__(self, fromDate, toDate):
self.fromDate = fromDate
self.toDate = toDate
def mostPopTreatment(self, appointments, treatments):
tmpdict = {}
# adds each treatment type to the dictionary
# and increments on each occasion of it
for i in range(len(appointments)):
# gets the id and name of the performed treatment to be displayed
treatmentID = appointments[i][str(i)]['treatment']
treatmentName = treatments[int(treatmentID)][treatmentID]['name']
# gets the appointment date for checking
appointmentDate = appointments[i][str(i)]['date']
#todo
# date validation
if self.fromDate <= appointmentDate <= self.toDate:
try:
tmpdict[treatmentName] += 1
except:
tmpdict[treatmentName] = 1
# creates the list for the graph to display
returnList = []
# initialises an inner list of
# [x value, y value]
innerList = ['', 0]
for treatment in list(tmpdict.keys()):
innerList = [treatment, int(tmpdict[treatment])]
returnList.append(innerList)
return returnList
def income(self, appointments):
d1 = date(int(self.toDate[:4]), int(self.toDate[5:7]), int(self.toDate[8:]))
d2 = date(int(self.fromDate[:4]), int(self.fromDate[5:7]), int(self.fromDate[8:]))
delta = d1 - d2
tmpdict = {}
# creates a dictionary of each day
for i in range(delta.days + 1):
tmpdict[(d2 + timedelta(days=i)).strftime('%Y-%m-%d')] = 0
for i in range(len(appointments)):
# gets the appointment date for checking
appointmentDate = appointments[i][str(i)]['date']
appointmentPrice = appointments[i][str(i)]['cost']
if self.fromDate <= appointmentDate <= self.toDate:
tmpdict[appointmentDate] += appointmentPrice
# creates the list for the graph to display
returnList = []
# initialises an inner list of
# [x value, y value]
innerList = ['', 0]
keys = list(tmpdict.keys())
for treatment in keys:
innerList = [treatment, int(tmpdict[treatment])]
returnList.append(innerList)
returnList = sorted(returnList, key=lambda x: datetime.strptime(x[0], '%Y-%m-%d'))
return returnList
def outgoings(self, appointments, treatments):
tmpdict = {}
# adds each treatment type to the dictionary
# and increments on each occasion of it
# todo
# date validation
for i in range(len(appointments)):
# gets the appointment date for checking
appointmentDate = appointments[i][str(i)]['date']
if self.fromDate <= appointmentDate <= self.toDate:
# gets the id and name of the performed treatment to be displayed
treatmentID = appointments[i][str(i)]['treatment']
stockUsed = treatments[int(treatmentID)][treatmentID]['stock']
# for each piece of stock used
# add total outgoing to the graph data
for stock in stockUsed:
try:
stockName = stock[1]
stockPrice = float(stock[2])
stockAmount = float(stock[3])
tmpdict[stockName] += int(stockPrice) * int(stockAmount)
except:
tmpdict[stockName] = int(stockPrice) * int(stockAmount)
# creates the list for the graph to display
returnList = []
# initialises an inner list of
# [x value, y value]
innerList = ['', 0]
for treatment in list(tmpdict.keys()):
innerList = [treatment, int(tmpdict[treatment])]
returnList.append(innerList)
return returnList
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
self.ax = self.fig.add_subplot(111)
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
class MplWidget(QtWidgets.QWidget):
def __init__(self, parent = None):
QtWidgets.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.vbl = QVBoxLayout()
self.vbl.addWidget(self.canvas)
self.setLayout(self.vbl)
def update_graph(self, graphValues, rot, ha):
# Updates the graph with new letters frequencies
self.canvas.ax.clear()
# initialises the xaxis labels
xlabel = [graphValues[i][0] for i in range(len(graphValues))]
for i in range(len(graphValues)):
j = graphValues[i]
# j[0] = x position/label
# j[1] = y value to bar up to
self.canvas.ax.bar(j[0], j[1])
self.canvas.ax.set_xticks(xlabel)
# writes the xaxis labels on an angle
self.canvas.ax.set_xticklabels(xlabel, rotation=rot, ha=ha)
# enable grid only on the Y axis
self.canvas.ax.get_yaxis().grid(True)
# force an image redraw
self.canvas.draw()
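# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# update_graph expects a list of [label, value] pairs, as produced by the
# createCharts methods above; the treatment names here are made up.
if __name__ == "__main__":
    import sys
    app = QApplication(sys.argv)
    widget = MplWidget()
    widget.update_graph([["Checkup", 7], ["Filling", 3]], rot=45, ha="right")
    widget.show()
    sys.exit(app.exec_())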
| 36.725275
| 315
| 0.605326
|
b7fcd02753024a6e184594a08694e410eeac6f79
| 8,488
|
py
|
Python
|
generateattcks/generateattcks/generateattcks.py
|
randomaccess3/pyattck
|
3837599df542456d8b788e0e7bea67ed34e130cc
|
[
"MIT"
] | null | null | null |
generateattcks/generateattcks/generateattcks.py
|
randomaccess3/pyattck
|
3837599df542456d8b788e0e7bea67ed34e130cc
|
[
"MIT"
] | null | null | null |
generateattcks/generateattcks/generateattcks.py
|
randomaccess3/pyattck
|
3837599df542456d8b788e0e7bea67ed34e130cc
|
[
"MIT"
] | null | null | null |
import datetime, json, os
from .adversaryemulation import AdversaryEmulation
from .atomicredteam import AtomicRedTeam
from .stockpile import MitreStockpile
from .threathuntingtables import ThreatHuntingTables
from .sysmonhunter import SysmonHunter
from .blueteamlabs import BlueTeamLabs
from .atomicthreatcoverage import AtomicThreatCoverage
from .osqueryattack import OsqueryAttack
from .attckempire import AttckEmpire
from .threathuntingbook import ThreatHuntingBook
from .nsmattck import NSMAttck
from .litmustest import LitmusTest
from .c2matrix import C2Matrix
from .aptthreattracking import APTThreatTracking
from .elemental import ElementalAttack
from .malwarearchaeology import MalwareArchaeology
from .newbeeattackdata import NewBeeAttackDataset
class GenerateAttcks(object):
__conversion_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'conversion' + '.json'))
conversion_data = None
def __init__(self):
self._datasets = {}
self._datasets['last_updated'] = datetime.datetime.now().isoformat()
self._datasets['techniques'] = []
self.conversion_data = self.__get_conversion_data()
def __get_conversion_data(self):
if not self.conversion_data:
with open(self.__conversion_file, 'r') as file:
self.conversion_data = json.load(file)
return self.conversion_data
def get(self):
self.add_adversary_emulation()
self.add_atomic_red_team()
self.add_mitre_stockpile()
self.add_threat_hunting_tables()
self.add_sysmon_hunter()
self.add_blue_team_labs()
self.add_atomic_threat_coverage()
self.add_osquery_attack()
self.add_attck_empire()
self.add_threat_hunting_book()
self.add_nsm_attck()
self.add_litmust_test()
self.add_c2_matrix()
self.add_apt_threat_tracking()
self.add_elemental_attack()
self.add_malware_archaeology()
self.add_new_bee_attack_data()
technique_list = []
for technique in self._datasets['techniques']:
if self.conversion_data.get(technique['technique_id']):
for t in self.conversion_data[technique['technique_id']]:
                    clone_technique = dict(technique)  # shallow copy so each converted ID gets its own entry
clone_technique['technique_id'] = t
technique_list.append(clone_technique)
else:
technique_list.append(technique)
self._datasets['techniques'] = technique_list
return self._datasets
def add_new_bee_attack_data(self):
for item in NewBeeAttackDataset().get():
self.__add_to_output(item)
def add_malware_archaeology(self):
for item in MalwareArchaeology().get():
self.__add_to_output(item)
def add_elemental_attack(self):
for item in ElementalAttack().get():
self.__add_to_output(item)
def add_apt_threat_tracking(self):
apt_threat_tracking = APTThreatTracking().get()
for item in apt_threat_tracking:
if item:
for key,val in item.items():
if key == 'malware':
self._datasets['tools'] = val
del item[key]
break
self._datasets['actors'] = apt_threat_tracking
def add_c2_matrix(self):
c2_dict = {}
c2 = C2Matrix().get()
for item in c2['c2_data']:
c2_dict[item['name']] = item['data']
self._datasets['c2_data'] = c2_dict
def save(self):
with open('generated_attck_data.json', 'w') as f:
f.write(json.dumps(self.get()))
def add_adversary_emulation(self):
for item in AdversaryEmulation().get():
self.__add_to_output(item)
def add_atomic_red_team(self):
for item in AtomicRedTeam().get():
self.__add_to_output(item)
def add_mitre_stockpile(self):
stockpile = MitreStockpile()
stockpile.run()
for item in stockpile.get_stockpile():
self.__add_to_output(item)
def add_threat_hunting_tables(self):
for item in ThreatHuntingTables().get():
self.__add_to_output(item)
def add_sysmon_hunter(self):
for item in SysmonHunter().get():
self.__add_to_output(item)
def add_blue_team_labs(self):
for item in BlueTeamLabs().get():
self.__add_to_output(item)
def add_atomic_threat_coverage(self):
for item in AtomicThreatCoverage().get():
self.__add_to_output(item)
def add_osquery_attack(self):
for item in OsqueryAttack().get():
self.__add_to_output(item)
def add_attck_empire(self):
for item in AttckEmpire().get():
self.__add_to_output(item)
def add_threat_hunting_book(self):
for item in ThreatHuntingBook().get():
self.__add_to_output(item)
def add_nsm_attck(self):
for item in NSMAttck().get():
self.__add_to_output(item)
def add_litmust_test(self):
for item in LitmusTest().get():
self.__add_to_output(item)
def __add_to_output(self, data):
status = False
for t in self._datasets['techniques']:
if 'technique_id' in data:
if data['technique_id'].startswith('T') and len(data['technique_id']) == 5:
if data['technique_id'] == t['technique_id']:
status = True
if 'commands' in data:
if 'commands' not in t:
t['commands'] = []
for item in data['commands']:
t['commands'].append(item)
if 'parsed_datasets' in data:
if 'parsed_datasets' not in t:
t['parsed_datasets'] = []
for item in data['parsed_datasets']:
t['parsed_datasets'].append(item)
if 'command_list' in data:
if 'command_list' not in t:
t['command_list'] = []
for item in data['command_list']:
t['command_list'].append(item)
if 'attack_paths' in data:
if 'attack_paths' not in t:
t['attack_paths'] = []
for item in data['attack_paths']:
t['attack_paths'].append(item)
if 'queries' in data:
if 'queries' not in t:
t['queries'] = []
for item in data['queries']:
t['queries'].append(item)
if 'possible_detections' in data:
if 'possible_detections' not in t:
t['possible_detections'] = []
for item in data['possible_detections']:
t['possible_detections'].append(item)
if 'external_reference' in data:
if 'external_reference' not in t:
t['external_reference'] = []
for item in data['external_reference']:
t['external_reference'].append(item)
if not status:
if 'technique_id' in data:
self._datasets['techniques'].append({
'technique_id': data['technique_id'],
'commands': [] if 'commands' not in data else data['commands'],
'parsed_datasets': [] if 'parsed_datasets' not in data else data['parsed_datasets'],
'command_list': [] if 'command_list' not in data else data['command_list'],
'attack_paths': [] if 'attack_paths' not in data else data['attack_paths'],
'queries': [] if 'queries' not in data else data['queries'],
'possible_detections': [] if 'possible_detections' not in data else data['possible_detections'],
'external_reference': [] if 'external_reference' not in data else data['external_reference']
})
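# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# Each add_* helper fetches an upstream dataset over the network, so a full run
# needs connectivity and can take a while.
if __name__ == '__main__':
    GenerateAttcks().save()  # writes generated_attck_data.json to the working directory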
| 40.61244
| 116
| 0.564915
|
213b25286583edfcdcb6369ea84354111b229635
| 3,806
|
py
|
Python
|
longaudio.py
|
catherine8224/Transcribe_Audio
|
2f977b8e2eaeb45363c572d121692d38ce1c2cb8
|
[
"Apache-2.0"
] | null | null | null |
longaudio.py
|
catherine8224/Transcribe_Audio
|
2f977b8e2eaeb45363c572d121692d38ce1c2cb8
|
[
"Apache-2.0"
] | 1
|
2020-03-28T01:57:26.000Z
|
2020-03-28T01:57:26.000Z
|
longaudio.py
|
catherine8224/Transcribe_Audio
|
2f977b8e2eaeb45363c572d121692d38ce1c2cb8
|
[
"Apache-2.0"
] | null | null | null |
# importing libraries
import speech_recognition as sr; import os; from pydub import AudioSegment
import io; from scipy.io.wavfile import write; import numpy as np
#from pydub.silence import split_on_silence
from pydub.utils import make_chunks; import re; from google.cloud import storage
def length(fname): #finding the length of audio file
storage_client = storage.Client()
bucket = storage_client.get_bucket('awesome-bucketness')
bb = bucket.get_blob(fname) #Retrieve blob after uploading to GCS
bblength = bb.download_as_string()
audio = AudioSegment.from_file(io.BytesIO(bblength), format=re.split('\.', fname)[1]) #also good with wav, m4a, ogg
if audio is None: #var = os.system("ffmpeg -i /Users/catherineng/Downloads/0aeaedfc-b0ee-4ad1-a6b7-85ed8e588400.ogg -loglevel quiet -stats -f null - 2>&1 | awk '{print $2}' | sed s/://g | sed s/[a-z]//g | sed s/=//g")
var = system("ffmpeg -i " + fname + " -loglevel quiet -stats -f null - 2>&1 | awk '{print $2}' | sed s/://g | sed s/[a-z]//g | sed s/=//g")
return var
else:
return audio.duration_seconds #return audio.info.length
def transcribe_audio(f, name, lang):
song = AudioSegment.from_file(io.BytesIO(f), re.split('\.', name)[1])
audio_chunk = np.array(song.get_array_of_samples()) #convert it to array.array
audio_chunk = audio_chunk.reshape(song.channels, -1, order='F').T
output = io.BytesIO(); rate = song.frame_rate
write(output, rate, audio_chunk)
r = sr.Recognizer()
with sr.AudioFile(output) as source:
audio = r.record(source)
transcription = r.recognize_google(audio, language=lang) #class 'str'
return transcription
# a function that splits the audio file into chunks and applies speech recognition
def silence_based_conversion(path, name, lang):
# open the audio file stored in the local system as a wav file.
song = AudioSegment.from_file(io.BytesIO(path), format=re.split('\.', name)[1])
# move into the directory to store the audio files. #os.chdir('audio_chunks')
chunks = make_chunks(song, 30000)
transcription = ""
# process each chunk
for i, chunk in enumerate(chunks):
        # Create a 10 ms silence chunk
chunk_silent = AudioSegment.silent(duration = 10)
        # add the short silence to the beginning and end of the audio chunk so it doesn't seem abruptly sliced.
audio_chunk = chunk_silent + chunk + chunk_silent
audio_chunk = audio_chunk.get_array_of_samples() #convert it to array.array
audio_chunk= np.array(audio_chunk) #convert it to numpy.ndarray
#print(audio_chunk.shape) #(5293748,)
audio_chunk = audio_chunk.reshape(song.channels, -1, order='F').T #reshape to (num of samples, num of channels)
#print(audio_chunk.shape) #(2, 2646874) --> (2646874, 2)
#specify the bitrate to be 192 kbps #audio_chunk.export("./chunk" + str(i) + ".wav", format ="wav")
output = io.BytesIO()
#audio_chunk.export(output, formsong.dtyat="wav")
rate = song.frame_rate
write(output, rate, audio_chunk)
# create a speech recognition object
r = sr.Recognizer()
# recognize the chunk
with sr.AudioFile(output) as source:
# remove this if it is not working correctly.
#r.adjust_for_ambient_noise(source, duration=5)
#audio_listened = r.listen(source)
audio = r.record(source)
try:
#try converting it to text
rec = r.recognize_google(audio, language=lang) #write the output to the file. #fh.write(rec+". ")
# catch any errors.
except sr.UnknownValueError:
return "Could not understand audio"
except sr.RequestError as e:
return "Could not request results. check your internet connection"
transcription += rec + " "
return transcription
#os.chdir('..')
if __name__ == '__main__':
    print('Enter the audio file path')
    path = input()
    # silence_based_conversion expects the raw audio bytes, the file name (for its
    # extension) and a language code, so read the file before calling it.
    with open(path, 'rb') as audio_file:
        print(silence_based_conversion(audio_file.read(), path, 'en-US'))
| 44.776471
| 218
| 0.715975
|
17722e949df1e297e4d7f5264ecea08fc2049e11
| 12,656
|
py
|
Python
|
pandas/tests/indexing/test_chaining_and_caching.py
|
garyteofanus/pandas
|
cc51219fad8add8f442b847ccdabd3f9e9077cb6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 6
|
2020-09-10T15:03:25.000Z
|
2021-04-01T22:48:33.000Z
|
pandas/tests/indexing/test_chaining_and_caching.py
|
garyteofanus/pandas
|
cc51219fad8add8f442b847ccdabd3f9e9077cb6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3
|
2020-03-09T13:15:03.000Z
|
2020-03-20T10:07:10.000Z
|
pandas/tests/indexing/test_chaining_and_caching.py
|
garyteofanus/pandas
|
cc51219fad8add8f442b847ccdabd3f9e9077cb6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 4
|
2020-02-07T05:05:32.000Z
|
2020-05-11T06:06:17.000Z
|
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Series, Timestamp, date_range, option_context
import pandas._testing as tm
import pandas.core.common as com
class TestCaching:
def test_slice_consolidate_invalidate_item_cache(self):
# this is chained assignment, but will 'work'
with option_context("chained_assignment", None):
# #3970
df = DataFrame({"aa": np.arange(5), "bb": [2.2] * 5})
# Creates a second float block
df["cc"] = 0.0
# caches a reference to the 'bb' series
df["bb"]
# repr machinery triggers consolidation
repr(df)
# Assignment to wrong series
df["bb"].iloc[0] = 0.17
df._clear_item_cache()
tm.assert_almost_equal(df["bb"][0], 0.17)
def test_setitem_cache_updating(self):
# GH 5424
cont = ["one", "two", "three", "four", "five", "six", "seven"]
for do_ref in [False, False]:
df = DataFrame({"a": cont, "b": cont[3:] + cont[:3], "c": np.arange(7)})
# ref the cache
if do_ref:
df.loc[0, "c"]
# set it
df.loc[7, "c"] = 1
assert df.loc[0, "c"] == 0.0
assert df.loc[7, "c"] == 1.0
# GH 7084
# not updating cache on series setting with slices
expected = DataFrame(
{"A": [600, 600, 600]}, index=date_range("5/7/2014", "5/9/2014")
)
out = DataFrame({"A": [0, 0, 0]}, index=date_range("5/7/2014", "5/9/2014"))
df = DataFrame({"C": ["A", "A", "A"], "D": [100, 200, 300]})
# loop through df to update out
six = Timestamp("5/7/2014")
eix = Timestamp("5/9/2014")
for ix, row in df.iterrows():
out.loc[six:eix, row["C"]] = out.loc[six:eix, row["C"]] + row["D"]
tm.assert_frame_equal(out, expected)
tm.assert_series_equal(out["A"], expected["A"])
# try via a chain indexing
# this actually works
out = DataFrame({"A": [0, 0, 0]}, index=date_range("5/7/2014", "5/9/2014"))
for ix, row in df.iterrows():
v = out[row["C"]][six:eix] + row["D"]
out[row["C"]][six:eix] = v
tm.assert_frame_equal(out, expected)
tm.assert_series_equal(out["A"], expected["A"])
out = DataFrame({"A": [0, 0, 0]}, index=date_range("5/7/2014", "5/9/2014"))
for ix, row in df.iterrows():
out.loc[six:eix, row["C"]] += row["D"]
tm.assert_frame_equal(out, expected)
tm.assert_series_equal(out["A"], expected["A"])
class TestChaining:
def test_setitem_chained_setfault(self):
# GH6026
data = ["right", "left", "left", "left", "right", "left", "timeout"]
mdata = ["right", "left", "left", "left", "right", "left", "none"]
df = DataFrame({"response": np.array(data)})
mask = df.response == "timeout"
df.response[mask] = "none"
tm.assert_frame_equal(df, DataFrame({"response": mdata}))
recarray = np.rec.fromarrays([data], names=["response"])
df = DataFrame(recarray)
mask = df.response == "timeout"
df.response[mask] = "none"
tm.assert_frame_equal(df, DataFrame({"response": mdata}))
df = DataFrame({"response": data, "response1": data})
mask = df.response == "timeout"
df.response[mask] = "none"
tm.assert_frame_equal(df, DataFrame({"response": mdata, "response1": data}))
# GH 6056
expected = DataFrame(dict(A=[np.nan, "bar", "bah", "foo", "bar"]))
df = DataFrame(dict(A=np.array(["foo", "bar", "bah", "foo", "bar"])))
df["A"].iloc[0] = np.nan
result = df.head()
tm.assert_frame_equal(result, expected)
df = DataFrame(dict(A=np.array(["foo", "bar", "bah", "foo", "bar"])))
df.A.iloc[0] = np.nan
result = df.head()
tm.assert_frame_equal(result, expected)
def test_detect_chained_assignment(self):
pd.set_option("chained_assignment", "raise")
# work with the chain
expected = DataFrame([[-5, 1], [-6, 3]], columns=list("AB"))
df = DataFrame(np.arange(4).reshape(2, 2), columns=list("AB"), dtype="int64")
assert df._is_copy is None
df["A"][0] = -5
df["A"][1] = -6
tm.assert_frame_equal(df, expected)
# test with the chaining
df = DataFrame(
{
"A": Series(range(2), dtype="int64"),
"B": np.array(np.arange(2, 4), dtype=np.float64),
}
)
assert df._is_copy is None
with pytest.raises(com.SettingWithCopyError):
df["A"][0] = -5
with pytest.raises(com.SettingWithCopyError):
df["A"][1] = np.nan
assert df["A"]._is_copy is None
# Using a copy (the chain), fails
df = DataFrame(
{
"A": Series(range(2), dtype="int64"),
"B": np.array(np.arange(2, 4), dtype=np.float64),
}
)
with pytest.raises(com.SettingWithCopyError):
df.loc[0]["A"] = -5
# Doc example
df = DataFrame(
{
"a": ["one", "one", "two", "three", "two", "one", "six"],
"c": Series(range(7), dtype="int64"),
}
)
assert df._is_copy is None
with pytest.raises(com.SettingWithCopyError):
indexer = df.a.str.startswith("o")
df[indexer]["c"] = 42
expected = DataFrame({"A": [111, "bbb", "ccc"], "B": [1, 2, 3]})
df = DataFrame({"A": ["aaa", "bbb", "ccc"], "B": [1, 2, 3]})
with pytest.raises(com.SettingWithCopyError):
df["A"][0] = 111
with pytest.raises(com.SettingWithCopyError):
df.loc[0]["A"] = 111
df.loc[0, "A"] = 111
tm.assert_frame_equal(df, expected)
# gh-5475: Make sure that is_copy is picked up reconstruction
df = DataFrame({"A": [1, 2]})
assert df._is_copy is None
with tm.ensure_clean("__tmp__pickle") as path:
df.to_pickle(path)
df2 = pd.read_pickle(path)
df2["B"] = df2["A"]
df2["B"] = df2["A"]
# gh-5597: a spurious raise as we are setting the entire column here
from string import ascii_letters as letters
def random_text(nobs=100):
df = []
for i in range(nobs):
idx = np.random.randint(len(letters), size=2)
idx.sort()
df.append([letters[idx[0] : idx[1]]])
return DataFrame(df, columns=["letters"])
df = random_text(100000)
# Always a copy
x = df.iloc[[0, 1, 2]]
assert x._is_copy is not None
x = df.iloc[[0, 1, 2, 4]]
assert x._is_copy is not None
# Explicitly copy
indexer = df.letters.apply(lambda x: len(x) > 10)
df = df.loc[indexer].copy()
assert df._is_copy is None
df["letters"] = df["letters"].apply(str.lower)
# Implicitly take
df = random_text(100000)
indexer = df.letters.apply(lambda x: len(x) > 10)
df = df.loc[indexer]
assert df._is_copy is not None
df["letters"] = df["letters"].apply(str.lower)
# Implicitly take 2
df = random_text(100000)
indexer = df.letters.apply(lambda x: len(x) > 10)
df = df.loc[indexer]
assert df._is_copy is not None
df.loc[:, "letters"] = df["letters"].apply(str.lower)
# Should be ok even though it's a copy!
assert df._is_copy is None
df["letters"] = df["letters"].apply(str.lower)
assert df._is_copy is None
df = random_text(100000)
indexer = df.letters.apply(lambda x: len(x) > 10)
df.loc[indexer, "letters"] = df.loc[indexer, "letters"].apply(str.lower)
# an identical take, so no copy
df = DataFrame({"a": [1]}).dropna()
assert df._is_copy is None
df["a"] += 1
df = DataFrame(np.random.randn(10, 4))
s = df.iloc[:, 0].sort_values()
tm.assert_series_equal(s, df.iloc[:, 0].sort_values())
tm.assert_series_equal(s, df[0].sort_values())
# see gh-6025: false positives
df = DataFrame({"column1": ["a", "a", "a"], "column2": [4, 8, 9]})
str(df)
df["column1"] = df["column1"] + "b"
str(df)
df = df[df["column2"] != 8]
str(df)
df["column1"] = df["column1"] + "c"
str(df)
# from SO:
# https://stackoverflow.com/questions/24054495/potential-bug-setting-value-for-undefined-column-using-iloc
df = DataFrame(np.arange(0, 9), columns=["count"])
df["group"] = "b"
with pytest.raises(com.SettingWithCopyError):
df.iloc[0:5]["group"] = "a"
# Mixed type setting but same dtype & changing dtype
df = DataFrame(
dict(
A=date_range("20130101", periods=5),
B=np.random.randn(5),
C=np.arange(5, dtype="int64"),
D=list("abcde"),
)
)
with pytest.raises(com.SettingWithCopyError):
df.loc[2]["D"] = "foo"
with pytest.raises(com.SettingWithCopyError):
df.loc[2]["C"] = "foo"
with pytest.raises(com.SettingWithCopyError):
df["C"][2] = "foo"
def test_setting_with_copy_bug(self):
# operating on a copy
df = DataFrame(
{"a": list(range(4)), "b": list("ab.."), "c": ["a", "b", np.nan, "d"]}
)
mask = pd.isna(df.c)
msg = "A value is trying to be set on a copy of a slice from a DataFrame"
with pytest.raises(com.SettingWithCopyError, match=msg):
df[["c"]][mask] = df[["b"]][mask]
# invalid warning as we are returning a new object
# GH 8730
df1 = DataFrame({"x": Series(["a", "b", "c"]), "y": Series(["d", "e", "f"])})
df2 = df1[["x"]]
# this should not raise
df2["y"] = ["g", "h", "i"]
def test_detect_chained_assignment_warnings(self):
with option_context("chained_assignment", "warn"):
df = DataFrame({"A": ["aaa", "bbb", "ccc"], "B": [1, 2, 3]})
with tm.assert_produces_warning(com.SettingWithCopyWarning):
df.loc[0]["A"] = 111
def test_detect_chained_assignment_warnings_filter_and_dupe_cols(self):
# xref gh-13017.
with option_context("chained_assignment", "warn"):
df = pd.DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, -9]], columns=["a", "a", "c"]
)
with tm.assert_produces_warning(com.SettingWithCopyWarning):
df.c.loc[df.c > 0] = None
expected = pd.DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, -9]], columns=["a", "a", "c"]
)
tm.assert_frame_equal(df, expected)
def test_chained_getitem_with_lists(self):
# GH6394
# Regression in chained getitem indexing with embedded list-like from
# 0.12
df = DataFrame({"A": 5 * [np.zeros(3)], "B": 5 * [np.ones(3)]})
expected = df["A"].iloc[2]
result = df.loc[2, "A"]
tm.assert_numpy_array_equal(result, expected)
result2 = df.iloc[2]["A"]
tm.assert_numpy_array_equal(result2, expected)
result3 = df["A"].loc[2]
tm.assert_numpy_array_equal(result3, expected)
result4 = df["A"].iloc[2]
tm.assert_numpy_array_equal(result4, expected)
def test_cache_updating(self):
# GH 4939, make sure to update the cache on setitem
df = tm.makeDataFrame()
df["A"] # cache series
df.loc["Hello Friend"] = df.iloc[0]
assert "Hello Friend" in df["A"].index
assert "Hello Friend" in df["B"].index
# 10264
df = DataFrame(
np.zeros((5, 5), dtype="int64"),
columns=["a", "b", "c", "d", "e"],
index=range(5),
)
df["f"] = 0
df.f.values[3] = 1
# TODO(wesm): unused?
# y = df.iloc[np.arange(2, len(df))]
df.f.values[3] = 2
expected = DataFrame(
np.zeros((5, 6), dtype="int64"),
columns=["a", "b", "c", "d", "e", "f"],
index=range(5),
)
expected.at[3, "f"] = 2
tm.assert_frame_equal(df, expected)
expected = Series([0, 0, 0, 2, 0], name="f")
tm.assert_series_equal(df.f, expected)
| 32.285714
| 114
| 0.521413
|
5cc03aa13b15c899d731e7f2e7bd81a78284709e
| 155
|
py
|
Python
|
scrape.py
|
hamesjan/SurfForecastBot
|
83429cdbe0d569b75478f5318a7db84d993bdcb4
|
[
"MIT"
] | null | null | null |
scrape.py
|
hamesjan/SurfForecastBot
|
83429cdbe0d569b75478f5318a7db84d993bdcb4
|
[
"MIT"
] | null | null | null |
scrape.py
|
hamesjan/SurfForecastBot
|
83429cdbe0d569b75478f5318a7db84d993bdcb4
|
[
"MIT"
] | null | null | null |
class Scrapper:
# Class attribute
species = "Canis familiaris"
def __init__(self, name, age):
self.name = name
self.age = age
| 19.375
| 34
| 0.6
|
93e6b439ced8c788e5889e43f05a5693c6bbcd6d
| 255,535
|
py
|
Python
|
google_appengine/google/appengine/api/taskqueue/taskqueue_service_pb.py
|
iTrollYou/WEB_Spotify_Youtube
|
5315cdf78361942bba0b52daa8b65d74998d2db5
|
[
"MIT"
] | null | null | null |
google_appengine/google/appengine/api/taskqueue/taskqueue_service_pb.py
|
iTrollYou/WEB_Spotify_Youtube
|
5315cdf78361942bba0b52daa8b65d74998d2db5
|
[
"MIT"
] | null | null | null |
google_appengine/google/appengine/api/taskqueue/taskqueue_service_pb.py
|
iTrollYou/WEB_Spotify_Youtube
|
5315cdf78361942bba0b52daa8b65d74998d2db5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import abc
import array
try:
from thread import allocate_lock as _Lock
except ImportError:
from threading import Lock as _Lock
if hasattr(__builtins__, 'xrange'): range = xrange
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.datastore.datastore_v3_pb import *
import google.appengine.datastore.datastore_v3_pb
google_dot_apphosting_dot_datastore_dot_datastore__v3__pb = __import__('google.appengine.datastore.datastore_v3_pb', {}, {}, [''])
from google.net.proto.message_set import MessageSet
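# Editorial note (hedged): the classes below are generated-style messages built on the
# legacy google.net.proto ProtocolBuffer API rather than on proto2 descriptors. Each
# field is written with a wire tag computed as (field_number << 3) | wire_type, which
# is why the Output*/TryMerge methods compare against literal varints such as
# 8 (field 1, varint) or 25 (field 3, 64-bit double) in the classes that follow.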
class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
OK = 0
UNKNOWN_QUEUE = 1
TRANSIENT_ERROR = 2
INTERNAL_ERROR = 3
TASK_TOO_LARGE = 4
INVALID_TASK_NAME = 5
INVALID_QUEUE_NAME = 6
INVALID_URL = 7
INVALID_QUEUE_RATE = 8
PERMISSION_DENIED = 9
TASK_ALREADY_EXISTS = 10
TOMBSTONED_TASK = 11
INVALID_ETA = 12
INVALID_REQUEST = 13
UNKNOWN_TASK = 14
TOMBSTONED_QUEUE = 15
DUPLICATE_TASK_NAME = 16
SKIPPED = 17
TOO_MANY_TASKS = 18
INVALID_PAYLOAD = 19
INVALID_RETRY_PARAMETERS = 20
INVALID_QUEUE_MODE = 21
ACL_LOOKUP_ERROR = 22
TRANSACTIONAL_REQUEST_TOO_LARGE = 23
INCORRECT_CREATOR_NAME = 24
TASK_LEASE_EXPIRED = 25
QUEUE_PAUSED = 26
INVALID_TAG = 27
INVALID_LOGGING_CONFIG = 28
DATASTORE_ERROR = 10000
_ErrorCode_NAMES = {
0: "OK",
1: "UNKNOWN_QUEUE",
2: "TRANSIENT_ERROR",
3: "INTERNAL_ERROR",
4: "TASK_TOO_LARGE",
5: "INVALID_TASK_NAME",
6: "INVALID_QUEUE_NAME",
7: "INVALID_URL",
8: "INVALID_QUEUE_RATE",
9: "PERMISSION_DENIED",
10: "TASK_ALREADY_EXISTS",
11: "TOMBSTONED_TASK",
12: "INVALID_ETA",
13: "INVALID_REQUEST",
14: "UNKNOWN_TASK",
15: "TOMBSTONED_QUEUE",
16: "DUPLICATE_TASK_NAME",
17: "SKIPPED",
18: "TOO_MANY_TASKS",
19: "INVALID_PAYLOAD",
20: "INVALID_RETRY_PARAMETERS",
21: "INVALID_QUEUE_MODE",
22: "ACL_LOOKUP_ERROR",
23: "TRANSACTIONAL_REQUEST_TOO_LARGE",
24: "INCORRECT_CREATOR_NAME",
25: "TASK_LEASE_EXPIRED",
26: "QUEUE_PAUSED",
27: "INVALID_TAG",
28: "INVALID_LOGGING_CONFIG",
10000: "DATASTORE_ERROR",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueServiceError'
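# Usage sketch (uses only the accessors defined above): error codes map to readable
# names through the class-level lookup table, e.g.
#   TaskQueueServiceError.ErrorCode_Name(2)    # -> "TRANSIENT_ERROR"
#   TaskQueueServiceError.ErrorCode_Name(999)  # -> "" (unknown codes fall back to "")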
class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
has_retry_limit_ = 0
retry_limit_ = 0
has_age_limit_sec_ = 0
age_limit_sec_ = 0
has_min_backoff_sec_ = 0
min_backoff_sec_ = 0.1
has_max_backoff_sec_ = 0
max_backoff_sec_ = 3600.0
has_max_doublings_ = 0
max_doublings_ = 16
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def retry_limit(self): return self.retry_limit_
def set_retry_limit(self, x):
self.has_retry_limit_ = 1
self.retry_limit_ = x
def clear_retry_limit(self):
if self.has_retry_limit_:
self.has_retry_limit_ = 0
self.retry_limit_ = 0
def has_retry_limit(self): return self.has_retry_limit_
def age_limit_sec(self): return self.age_limit_sec_
def set_age_limit_sec(self, x):
self.has_age_limit_sec_ = 1
self.age_limit_sec_ = x
def clear_age_limit_sec(self):
if self.has_age_limit_sec_:
self.has_age_limit_sec_ = 0
self.age_limit_sec_ = 0
def has_age_limit_sec(self): return self.has_age_limit_sec_
def min_backoff_sec(self): return self.min_backoff_sec_
def set_min_backoff_sec(self, x):
self.has_min_backoff_sec_ = 1
self.min_backoff_sec_ = x
def clear_min_backoff_sec(self):
if self.has_min_backoff_sec_:
self.has_min_backoff_sec_ = 0
self.min_backoff_sec_ = 0.1
def has_min_backoff_sec(self): return self.has_min_backoff_sec_
def max_backoff_sec(self): return self.max_backoff_sec_
def set_max_backoff_sec(self, x):
self.has_max_backoff_sec_ = 1
self.max_backoff_sec_ = x
def clear_max_backoff_sec(self):
if self.has_max_backoff_sec_:
self.has_max_backoff_sec_ = 0
self.max_backoff_sec_ = 3600.0
def has_max_backoff_sec(self): return self.has_max_backoff_sec_
def max_doublings(self): return self.max_doublings_
def set_max_doublings(self, x):
self.has_max_doublings_ = 1
self.max_doublings_ = x
def clear_max_doublings(self):
if self.has_max_doublings_:
self.has_max_doublings_ = 0
self.max_doublings_ = 16
def has_max_doublings(self): return self.has_max_doublings_
def MergeFrom(self, x):
assert x is not self
if (x.has_retry_limit()): self.set_retry_limit(x.retry_limit())
if (x.has_age_limit_sec()): self.set_age_limit_sec(x.age_limit_sec())
if (x.has_min_backoff_sec()): self.set_min_backoff_sec(x.min_backoff_sec())
if (x.has_max_backoff_sec()): self.set_max_backoff_sec(x.max_backoff_sec())
if (x.has_max_doublings()): self.set_max_doublings(x.max_doublings())
def Equals(self, x):
if x is self: return 1
if self.has_retry_limit_ != x.has_retry_limit_: return 0
if self.has_retry_limit_ and self.retry_limit_ != x.retry_limit_: return 0
if self.has_age_limit_sec_ != x.has_age_limit_sec_: return 0
if self.has_age_limit_sec_ and self.age_limit_sec_ != x.age_limit_sec_: return 0
if self.has_min_backoff_sec_ != x.has_min_backoff_sec_: return 0
if self.has_min_backoff_sec_ and self.min_backoff_sec_ != x.min_backoff_sec_: return 0
if self.has_max_backoff_sec_ != x.has_max_backoff_sec_: return 0
if self.has_max_backoff_sec_ and self.max_backoff_sec_ != x.max_backoff_sec_: return 0
if self.has_max_doublings_ != x.has_max_doublings_: return 0
if self.has_max_doublings_ and self.max_doublings_ != x.max_doublings_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_): n += 9
if (self.has_max_backoff_sec_): n += 9
if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_): n += 9
if (self.has_max_backoff_sec_): n += 9
if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
return n
def Clear(self):
self.clear_retry_limit()
self.clear_age_limit_sec()
self.clear_min_backoff_sec()
self.clear_max_backoff_sec()
self.clear_max_doublings()
def OutputUnchecked(self, out):
if (self.has_retry_limit_):
out.putVarInt32(8)
out.putVarInt32(self.retry_limit_)
if (self.has_age_limit_sec_):
out.putVarInt32(16)
out.putVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_):
out.putVarInt32(25)
out.putDouble(self.min_backoff_sec_)
if (self.has_max_backoff_sec_):
out.putVarInt32(33)
out.putDouble(self.max_backoff_sec_)
if (self.has_max_doublings_):
out.putVarInt32(40)
out.putVarInt32(self.max_doublings_)
def OutputPartial(self, out):
if (self.has_retry_limit_):
out.putVarInt32(8)
out.putVarInt32(self.retry_limit_)
if (self.has_age_limit_sec_):
out.putVarInt32(16)
out.putVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_):
out.putVarInt32(25)
out.putDouble(self.min_backoff_sec_)
if (self.has_max_backoff_sec_):
out.putVarInt32(33)
out.putDouble(self.max_backoff_sec_)
if (self.has_max_doublings_):
out.putVarInt32(40)
out.putVarInt32(self.max_doublings_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_retry_limit(d.getVarInt32())
continue
if tt == 16:
self.set_age_limit_sec(d.getVarInt64())
continue
if tt == 25:
self.set_min_backoff_sec(d.getDouble())
continue
if tt == 33:
self.set_max_backoff_sec(d.getDouble())
continue
if tt == 40:
self.set_max_doublings(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_retry_limit_: res+=prefix+("retry_limit: %s\n" % self.DebugFormatInt32(self.retry_limit_))
if self.has_age_limit_sec_: res+=prefix+("age_limit_sec: %s\n" % self.DebugFormatInt64(self.age_limit_sec_))
if self.has_min_backoff_sec_: res+=prefix+("min_backoff_sec: %s\n" % self.DebugFormat(self.min_backoff_sec_))
if self.has_max_backoff_sec_: res+=prefix+("max_backoff_sec: %s\n" % self.DebugFormat(self.max_backoff_sec_))
if self.has_max_doublings_: res+=prefix+("max_doublings: %s\n" % self.DebugFormatInt32(self.max_doublings_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kretry_limit = 1
kage_limit_sec = 2
kmin_backoff_sec = 3
kmax_backoff_sec = 4
kmax_doublings = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "retry_limit",
2: "age_limit_sec",
3: "min_backoff_sec",
4: "max_backoff_sec",
5: "max_doublings",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.DOUBLE,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueRetryParameters'
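# Usage sketch (uses only methods defined on TaskQueueRetryParameters above): the
# generated has_/set_/clear_ accessors track field presence explicitly.
#   params = TaskQueueRetryParameters()
#   params.set_retry_limit(5)
#   params.set_min_backoff_sec(0.5)
#   assert params.has_retry_limit() and params.has_min_backoff_sec()
#   copy = TaskQueueRetryParameters()
#   copy.MergeFrom(params)             # copies only fields whose has_* flag is set
#   assert copy.Equals(params)
#   params.clear_min_backoff_sec()     # restores the 0.1 default and clears the flag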
class TaskQueueAcl(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.user_email_ = []
self.writer_email_ = []
if contents is not None: self.MergeFromString(contents)
def user_email_size(self): return len(self.user_email_)
def user_email_list(self): return self.user_email_
def user_email(self, i):
return self.user_email_[i]
def set_user_email(self, i, x):
self.user_email_[i] = x
def add_user_email(self, x):
self.user_email_.append(x)
def clear_user_email(self):
self.user_email_ = []
def writer_email_size(self): return len(self.writer_email_)
def writer_email_list(self): return self.writer_email_
def writer_email(self, i):
return self.writer_email_[i]
def set_writer_email(self, i, x):
self.writer_email_[i] = x
def add_writer_email(self, x):
self.writer_email_.append(x)
def clear_writer_email(self):
self.writer_email_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.user_email_size()): self.add_user_email(x.user_email(i))
for i in range(x.writer_email_size()): self.add_writer_email(x.writer_email(i))
def Equals(self, x):
if x is self: return 1
if len(self.user_email_) != len(x.user_email_): return 0
for e1, e2 in zip(self.user_email_, x.user_email_):
if e1 != e2: return 0
if len(self.writer_email_) != len(x.writer_email_): return 0
for e1, e2 in zip(self.writer_email_, x.writer_email_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.user_email_)
for i in range(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
n += 1 * len(self.writer_email_)
for i in range(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.user_email_)
for i in range(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
n += 1 * len(self.writer_email_)
for i in range(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
return n
def Clear(self):
self.clear_user_email()
self.clear_writer_email()
def OutputUnchecked(self, out):
for i in range(len(self.user_email_)):
out.putVarInt32(10)
out.putPrefixedString(self.user_email_[i])
for i in range(len(self.writer_email_)):
out.putVarInt32(18)
out.putPrefixedString(self.writer_email_[i])
def OutputPartial(self, out):
for i in range(len(self.user_email_)):
out.putVarInt32(10)
out.putPrefixedString(self.user_email_[i])
for i in range(len(self.writer_email_)):
out.putVarInt32(18)
out.putPrefixedString(self.writer_email_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.add_user_email(d.getPrefixedString())
continue
if tt == 18:
self.add_writer_email(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.user_email_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("user_email%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.writer_email_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("writer_email%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kuser_email = 1
kwriter_email = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "user_email",
2: "writer_email",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAcl'
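# Usage sketch (accessors as defined above; the addresses are illustrative placeholders):
# repeated string fields expose add_/size/list helpers instead of set_ with presence flags.
#   acl = TaskQueueAcl()
#   acl.add_user_email("reader@example.com")
#   acl.add_writer_email("writer@example.com")
#   assert acl.user_email_size() == 1 and acl.writer_email(0) == "writer@example.com"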
class TaskQueueHttpHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_key(d.getPrefixedString())
continue
if tt == 18:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kkey = 1
kvalue = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "value",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueHttpHeader'
class TaskQueueMode(ProtocolBuffer.ProtocolMessage):
PUSH = 0
PULL = 1
_Mode_NAMES = {
0: "PUSH",
1: "PULL",
}
def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
Mode_Name = classmethod(Mode_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueMode'
class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(58)
out.putPrefixedString(self.key_)
out.putVarInt32(66)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(58)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(66)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 52: break
if tt == 58:
self.set_key(d.getPrefixedString())
continue
if tt == 66:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
class TaskQueueAddRequest_CronTimetable(ProtocolBuffer.ProtocolMessage):
has_schedule_ = 0
schedule_ = ""
has_timezone_ = 0
timezone_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def schedule(self): return self.schedule_
def set_schedule(self, x):
self.has_schedule_ = 1
self.schedule_ = x
def clear_schedule(self):
if self.has_schedule_:
self.has_schedule_ = 0
self.schedule_ = ""
def has_schedule(self): return self.has_schedule_
def timezone(self): return self.timezone_
def set_timezone(self, x):
self.has_timezone_ = 1
self.timezone_ = x
def clear_timezone(self):
if self.has_timezone_:
self.has_timezone_ = 0
self.timezone_ = ""
def has_timezone(self): return self.has_timezone_
def MergeFrom(self, x):
assert x is not self
if (x.has_schedule()): self.set_schedule(x.schedule())
if (x.has_timezone()): self.set_timezone(x.timezone())
def Equals(self, x):
if x is self: return 1
if self.has_schedule_ != x.has_schedule_: return 0
if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
if self.has_timezone_ != x.has_timezone_: return 0
if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_schedule_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: schedule not set.')
if (not self.has_timezone_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: timezone not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.schedule_))
n += self.lengthString(len(self.timezone_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_schedule_):
n += 1
n += self.lengthString(len(self.schedule_))
if (self.has_timezone_):
n += 1
n += self.lengthString(len(self.timezone_))
return n
def Clear(self):
self.clear_schedule()
self.clear_timezone()
def OutputUnchecked(self, out):
out.putVarInt32(106)
out.putPrefixedString(self.schedule_)
out.putVarInt32(114)
out.putPrefixedString(self.timezone_)
def OutputPartial(self, out):
if (self.has_schedule_):
out.putVarInt32(106)
out.putPrefixedString(self.schedule_)
if (self.has_timezone_):
out.putVarInt32(114)
out.putPrefixedString(self.timezone_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 100: break
if tt == 106:
self.set_schedule(d.getPrefixedString())
continue
if tt == 114:
self.set_timezone(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
return res
class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5
_RequestMethod_NAMES = {
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
}
def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
RequestMethod_Name = classmethod(RequestMethod_Name)
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_method_ = 0
method_ = 2
has_url_ = 0
url_ = ""
has_body_ = 0
body_ = ""
has_transaction_ = 0
transaction_ = None
has_datastore_transaction_ = 0
datastore_transaction_ = ""
has_app_id_ = 0
app_id_ = ""
has_crontimetable_ = 0
crontimetable_ = None
has_description_ = 0
description_ = ""
has_payload_ = 0
payload_ = None
has_retry_parameters_ = 0
retry_parameters_ = None
has_mode_ = 0
mode_ = 0
has_tag_ = 0
tag_ = ""
has_cron_retry_parameters_ = 0
cron_retry_parameters_ = None
def __init__(self, contents=None):
self.header_ = []
self.lazy_init_lock_ = _Lock()
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = 2
def has_method(self): return self.has_method_
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def header_size(self): return len(self.header_)
def header_list(self): return self.header_
def header(self, i):
return self.header_[i]
def mutable_header(self, i):
return self.header_[i]
def add_header(self):
x = TaskQueueAddRequest_Header()
self.header_.append(x)
return x
def clear_header(self):
self.header_ = []
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def datastore_transaction(self): return self.datastore_transaction_
def set_datastore_transaction(self, x):
self.has_datastore_transaction_ = 1
self.datastore_transaction_ = x
def clear_datastore_transaction(self):
if self.has_datastore_transaction_:
self.has_datastore_transaction_ = 0
self.datastore_transaction_ = ""
def has_datastore_transaction(self): return self.has_datastore_transaction_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def crontimetable(self):
if self.crontimetable_ is None:
self.lazy_init_lock_.acquire()
try:
if self.crontimetable_ is None: self.crontimetable_ = TaskQueueAddRequest_CronTimetable()
finally:
self.lazy_init_lock_.release()
return self.crontimetable_
def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
def clear_crontimetable(self):
if self.has_crontimetable_:
self.has_crontimetable_ = 0;
if self.crontimetable_ is not None: self.crontimetable_.Clear()
def has_crontimetable(self): return self.has_crontimetable_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def payload(self):
if self.payload_ is None:
self.lazy_init_lock_.acquire()
try:
if self.payload_ is None: self.payload_ = MessageSet()
finally:
self.lazy_init_lock_.release()
return self.payload_
def mutable_payload(self): self.has_payload_ = 1; return self.payload()
def clear_payload(self):
if self.has_payload_:
self.has_payload_ = 0;
if self.payload_ is not None: self.payload_.Clear()
def has_payload(self): return self.has_payload_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
if self.has_retry_parameters_:
self.has_retry_parameters_ = 0;
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def cron_retry_parameters(self):
if self.cron_retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cron_retry_parameters_ is None: self.cron_retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.cron_retry_parameters_
def mutable_cron_retry_parameters(self): self.has_cron_retry_parameters_ = 1; return self.cron_retry_parameters()
def clear_cron_retry_parameters(self):
if self.has_cron_retry_parameters_:
self.has_cron_retry_parameters_ = 0;
if self.cron_retry_parameters_ is not None: self.cron_retry_parameters_.Clear()
def has_cron_retry_parameters(self): return self.has_cron_retry_parameters_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_method()): self.set_method(x.method())
if (x.has_url()): self.set_url(x.url())
for i in range(x.header_size()): self.add_header().CopyFrom(x.header(i))
if (x.has_body()): self.set_body(x.body())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_datastore_transaction()): self.set_datastore_transaction(x.datastore_transaction())
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_description()): self.set_description(x.description())
if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_tag()): self.set_tag(x.tag())
if (x.has_cron_retry_parameters()): self.mutable_cron_retry_parameters().MergeFrom(x.cron_retry_parameters())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if len(self.header_) != len(x.header_): return 0
for e1, e2 in zip(self.header_, x.header_):
if e1 != e2: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_datastore_transaction_ != x.has_datastore_transaction_: return 0
if self.has_datastore_transaction_ and self.datastore_transaction_ != x.datastore_transaction_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_crontimetable_ != x.has_crontimetable_: return 0
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
if self.has_payload_ != x.has_payload_: return 0
if self.has_payload_ and self.payload_ != x.payload_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
if self.has_cron_retry_parameters_ != x.has_cron_retry_parameters_: return 0
if self.has_cron_retry_parameters_ and self.cron_retry_parameters_ != x.cron_retry_parameters_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
for p in self.header_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_cron_retry_parameters_ and not self.cron_retry_parameters_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += 2 * len(self.header_)
for i in range(len(self.header_)): n += self.header_[i].ByteSize()
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSize())
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += 2 * len(self.header_)
for i in range(len(self.header_)): n += self.header_[i].ByteSizePartial()
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSizePartial())
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_eta_usec()
self.clear_method()
self.clear_url()
self.clear_header()
self.clear_body()
self.clear_transaction()
self.clear_datastore_transaction()
self.clear_app_id()
self.clear_crontimetable()
self.clear_description()
self.clear_payload()
self.clear_retry_parameters()
self.clear_mode()
self.clear_tag()
self.clear_cron_retry_parameters()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
for i in range(len(self.header_)):
out.putVarInt32(51)
self.header_[i].OutputUnchecked(out)
out.putVarInt32(52)
if (self.has_body_):
out.putVarInt32(74)
out.putPrefixedString(self.body_)
if (self.has_transaction_):
out.putVarInt32(82)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_app_id_):
out.putVarInt32(90)
out.putPrefixedString(self.app_id_)
if (self.has_crontimetable_):
out.putVarInt32(99)
self.crontimetable_.OutputUnchecked(out)
out.putVarInt32(100)
if (self.has_description_):
out.putVarInt32(122)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(130)
out.putVarInt32(self.payload_.ByteSize())
self.payload_.OutputUnchecked(out)
if (self.has_retry_parameters_):
out.putVarInt32(138)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_mode_):
out.putVarInt32(144)
out.putVarInt32(self.mode_)
if (self.has_tag_):
out.putVarInt32(154)
out.putPrefixedString(self.tag_)
if (self.has_cron_retry_parameters_):
out.putVarInt32(162)
out.putVarInt32(self.cron_retry_parameters_.ByteSize())
self.cron_retry_parameters_.OutputUnchecked(out)
if (self.has_datastore_transaction_):
out.putVarInt32(170)
out.putPrefixedString(self.datastore_transaction_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
for i in range(len(self.header_)):
out.putVarInt32(51)
self.header_[i].OutputPartial(out)
out.putVarInt32(52)
if (self.has_body_):
out.putVarInt32(74)
out.putPrefixedString(self.body_)
if (self.has_transaction_):
out.putVarInt32(82)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_app_id_):
out.putVarInt32(90)
out.putPrefixedString(self.app_id_)
if (self.has_crontimetable_):
out.putVarInt32(99)
self.crontimetable_.OutputPartial(out)
out.putVarInt32(100)
if (self.has_description_):
out.putVarInt32(122)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(130)
out.putVarInt32(self.payload_.ByteSizePartial())
self.payload_.OutputPartial(out)
if (self.has_retry_parameters_):
out.putVarInt32(138)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_mode_):
out.putVarInt32(144)
out.putVarInt32(self.mode_)
if (self.has_tag_):
out.putVarInt32(154)
out.putPrefixedString(self.tag_)
if (self.has_cron_retry_parameters_):
out.putVarInt32(162)
out.putVarInt32(self.cron_retry_parameters_.ByteSizePartial())
self.cron_retry_parameters_.OutputPartial(out)
if (self.has_datastore_transaction_):
out.putVarInt32(170)
out.putPrefixedString(self.datastore_transaction_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 34:
self.set_url(d.getPrefixedString())
continue
if tt == 40:
self.set_method(d.getVarInt32())
continue
if tt == 51:
self.add_header().TryMerge(d)
continue
if tt == 74:
self.set_body(d.getPrefixedString())
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 90:
self.set_app_id(d.getPrefixedString())
continue
if tt == 99:
self.mutable_crontimetable().TryMerge(d)
continue
if tt == 122:
self.set_description(d.getPrefixedString())
continue
if tt == 130:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_payload().TryMerge(tmp)
continue
if tt == 138:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 144:
self.set_mode(d.getVarInt32())
continue
if tt == 154:
self.set_tag(d.getPrefixedString())
continue
if tt == 162:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cron_retry_parameters().TryMerge(tmp)
continue
if tt == 170:
self.set_datastore_transaction(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
cnt=0
for e in self.header_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Header%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_datastore_transaction_: res+=prefix+("datastore_transaction: %s\n" % self.DebugFormatString(self.datastore_transaction_))
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_crontimetable_:
res+=prefix+"CronTimetable {\n"
res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
if self.has_payload_:
res+=prefix+"payload <\n"
res+=self.payload_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
if self.has_cron_retry_parameters_:
res+=prefix+"cron_retry_parameters <\n"
res+=self.cron_retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
keta_usec = 3
kmethod = 5
kurl = 4
kHeaderGroup = 6
kHeaderkey = 7
kHeadervalue = 8
kbody = 9
ktransaction = 10
kdatastore_transaction = 21
kapp_id = 11
kCronTimetableGroup = 12
kCronTimetableschedule = 13
kCronTimetabletimezone = 14
kdescription = 15
kpayload = 16
kretry_parameters = 17
kmode = 18
ktag = 19
kcron_retry_parameters = 20
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "eta_usec",
4: "url",
5: "method",
6: "Header",
7: "key",
8: "value",
9: "body",
10: "transaction",
11: "app_id",
12: "CronTimetable",
13: "schedule",
14: "timezone",
15: "description",
16: "payload",
17: "retry_parameters",
18: "mode",
19: "tag",
20: "cron_retry_parameters",
21: "datastore_transaction",
}, 21)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STARTGROUP,
7: ProtocolBuffer.Encoder.STRING,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.STARTGROUP,
13: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.STRING,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.STRING,
21: ProtocolBuffer.Encoder.STRING,
}, 21, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddRequest'
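# Usage sketch (uses only accessors defined on TaskQueueAddRequest above; queue name,
# task name, URL and body are illustrative placeholders):
#   req = TaskQueueAddRequest()
#   req.set_queue_name("default")
#   req.set_task_name("task-0001")
#   req.set_eta_usec(0)
#   req.set_url("/work")
#   hdr = req.add_header()
#   hdr.set_key("Content-Type")
#   hdr.set_value("text/plain")
#   req.set_body("payload bytes")
#   assert req.IsInitialized()   # queue_name, task_name and eta_usec are the required fields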
class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
has_chosen_task_name_ = 0
chosen_task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def chosen_task_name(self): return self.chosen_task_name_
def set_chosen_task_name(self, x):
self.has_chosen_task_name_ = 1
self.chosen_task_name_ = x
def clear_chosen_task_name(self):
if self.has_chosen_task_name_:
self.has_chosen_task_name_ = 0
self.chosen_task_name_ = ""
def has_chosen_task_name(self): return self.has_chosen_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
def Equals(self, x):
if x is self: return 1
if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def Clear(self):
self.clear_chosen_task_name()
def OutputUnchecked(self, out):
if (self.has_chosen_task_name_):
out.putVarInt32(10)
out.putPrefixedString(self.chosen_task_name_)
def OutputPartial(self, out):
if (self.has_chosen_task_name_):
out.putVarInt32(10)
out.putPrefixedString(self.chosen_task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_chosen_task_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kchosen_task_name = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "chosen_task_name",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddResponse'
class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.add_request_ = []
if contents is not None: self.MergeFromString(contents)
def add_request_size(self): return len(self.add_request_)
def add_request_list(self): return self.add_request_
def add_request(self, i):
return self.add_request_[i]
def mutable_add_request(self, i):
return self.add_request_[i]
def add_add_request(self):
x = TaskQueueAddRequest()
self.add_request_.append(x)
return x
def clear_add_request(self):
self.add_request_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.add_request_size()): self.add_add_request().CopyFrom(x.add_request(i))
def Equals(self, x):
if x is self: return 1
if len(self.add_request_) != len(x.add_request_): return 0
for e1, e2 in zip(self.add_request_, x.add_request_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.add_request_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.add_request_)
for i in range(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.add_request_)
for i in range(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_add_request()
def OutputUnchecked(self, out):
for i in range(len(self.add_request_)):
out.putVarInt32(10)
out.putVarInt32(self.add_request_[i].ByteSize())
self.add_request_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in range(len(self.add_request_)):
out.putVarInt32(10)
out.putVarInt32(self.add_request_[i].ByteSizePartial())
self.add_request_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_add_request().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.add_request_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("add_request%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kadd_request = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "add_request",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddRequest'
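# Usage sketch (accessors as defined above): a bulk request owns its child
# TaskQueueAddRequest messages through add_add_request().
#   bulk = TaskQueueBulkAddRequest()
#   child = bulk.add_add_request()
#   child.set_queue_name("default")
#   child.set_task_name("task-0001")
#   child.set_eta_usec(0)
#   assert bulk.add_request_size() == 1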
class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
has_chosen_task_name_ = 0
chosen_task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def result(self): return self.result_
def set_result(self, x):
self.has_result_ = 1
self.result_ = x
def clear_result(self):
if self.has_result_:
self.has_result_ = 0
self.result_ = 0
def has_result(self): return self.has_result_
def chosen_task_name(self): return self.chosen_task_name_
def set_chosen_task_name(self, x):
self.has_chosen_task_name_ = 1
self.chosen_task_name_ = x
def clear_chosen_task_name(self):
if self.has_chosen_task_name_:
self.has_chosen_task_name_ = 0
self.chosen_task_name_ = ""
def has_chosen_task_name(self): return self.has_chosen_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_result()): self.set_result(x.result())
if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
def Equals(self, x):
if x is self: return 1
if self.has_result_ != x.has_result_: return 0
if self.has_result_ and self.result_ != x.result_: return 0
if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_result_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.result_)
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_result_):
n += 1
n += self.lengthVarInt64(self.result_)
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def Clear(self):
self.clear_result()
self.clear_chosen_task_name()
def OutputUnchecked(self, out):
out.putVarInt32(16)
out.putVarInt32(self.result_)
if (self.has_chosen_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.chosen_task_name_)
def OutputPartial(self, out):
if (self.has_result_):
out.putVarInt32(16)
out.putVarInt32(self.result_)
if (self.has_chosen_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.chosen_task_name_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 16:
self.set_result(d.getVarInt32())
continue
if tt == 26:
self.set_chosen_task_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
return res
class TaskQueueBulkAddResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.taskresult_ = []
if contents is not None: self.MergeFromString(contents)
def taskresult_size(self): return len(self.taskresult_)
def taskresult_list(self): return self.taskresult_
def taskresult(self, i):
return self.taskresult_[i]
def mutable_taskresult(self, i):
return self.taskresult_[i]
def add_taskresult(self):
x = TaskQueueBulkAddResponse_TaskResult()
self.taskresult_.append(x)
return x
def clear_taskresult(self):
self.taskresult_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.taskresult_size()): self.add_taskresult().CopyFrom(x.taskresult(i))
def Equals(self, x):
if x is self: return 1
if len(self.taskresult_) != len(x.taskresult_): return 0
for e1, e2 in zip(self.taskresult_, x.taskresult_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.taskresult_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.taskresult_)
for i in range(len(self.taskresult_)): n += self.taskresult_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.taskresult_)
for i in range(len(self.taskresult_)): n += self.taskresult_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_taskresult()
def OutputUnchecked(self, out):
for i in range(len(self.taskresult_)):
out.putVarInt32(11)
self.taskresult_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in range(len(self.taskresult_)):
out.putVarInt32(11)
self.taskresult_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_taskresult().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.taskresult_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("TaskResult%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kTaskResultGroup = 1
kTaskResultresult = 2
kTaskResultchosen_task_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "TaskResult",
2: "result",
3: "chosen_task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddResponse'
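# Request to delete tasks from a queue: queue_name is required, task_name is
# repeated (one entry per task to delete), app_id is optional.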
class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
self.task_name_ = []
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name_size(self): return len(self.task_name_)
def task_name_list(self): return self.task_name_
def task_name(self, i):
return self.task_name_[i]
def set_task_name(self, i, x):
self.task_name_[i] = x
def add_task_name(self, x):
self.task_name_.append(x)
def clear_task_name(self):
self.task_name_ = []
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
for i in range(x.task_name_size()): self.add_task_name(x.task_name(i))
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if len(self.task_name_) != len(x.task_name_): return 0
for e1, e2 in zip(self.task_name_, x.task_name_):
if e1 != e2: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += 1 * len(self.task_name_)
for i in range(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
n += 1 * len(self.task_name_)
for i in range(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
for i in range(len(self.task_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_[i])
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
for i in range(len(self.task_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_[i])
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.add_task_name(d.getPrefixedString())
continue
if tt == 26:
self.set_app_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
cnt=0
for e in self.task_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("task_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
kapp_id = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "app_id",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteRequest'
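# Response to a delete request: result is a repeated status code.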
class TaskQueueDeleteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.result_ = []
if contents is not None: self.MergeFromString(contents)
def result_size(self): return len(self.result_)
def result_list(self): return self.result_
def result(self, i):
return self.result_[i]
def set_result(self, i, x):
self.result_[i] = x
def add_result(self, x):
self.result_.append(x)
def clear_result(self):
self.result_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.result_size()): self.add_result(x.result(i))
def Equals(self, x):
if x is self: return 1
if len(self.result_) != len(x.result_): return 0
for e1, e2 in zip(self.result_, x.result_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.result_)
for i in range(len(self.result_)): n += self.lengthVarInt64(self.result_[i])
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.result_)
for i in range(len(self.result_)): n += self.lengthVarInt64(self.result_[i])
return n
def Clear(self):
self.clear_result()
def OutputUnchecked(self, out):
for i in range(len(self.result_)):
out.putVarInt32(24)
out.putVarInt32(self.result_[i])
def OutputPartial(self, out):
for i in range(len(self.result_)):
out.putVarInt32(24)
out.putVarInt32(self.result_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 24:
self.add_result(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.result_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result%s: %s\n" % (elm, self.DebugFormatInt32(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kresult = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
3: "result",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteResponse'
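# Request to force-run a single task: queue_name and task_name are required,
# app_id is optional.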
class TaskQueueForceRunRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_task_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_task_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
ktask_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunRequest'
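# Response to a force-run request: carries a single required result code.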
class TaskQueueForceRunResponse(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def result(self): return self.result_
def set_result(self, x):
self.has_result_ = 1
self.result_ = x
def clear_result(self):
if self.has_result_:
self.has_result_ = 0
self.result_ = 0
def has_result(self): return self.has_result_
def MergeFrom(self, x):
assert x is not self
if (x.has_result()): self.set_result(x.result())
def Equals(self, x):
if x is self: return 1
if self.has_result_ != x.has_result_: return 0
if self.has_result_ and self.result_ != x.result_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_result_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.result_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_result_):
n += 1
n += self.lengthVarInt64(self.result_)
return n
def Clear(self):
self.clear_result()
def OutputUnchecked(self, out):
out.putVarInt32(24)
out.putVarInt32(self.result_)
def OutputPartial(self, out):
if (self.has_result_):
out.putVarInt32(24)
out.putVarInt32(self.result_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 24:
self.set_result(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kresult = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
3: "result",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunResponse'
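# Request to create or update a queue definition: queue_name,
# bucket_refill_per_second and bucket_capacity are required; rate, retry,
# concurrency, mode, ACL and header-override settings are optional.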
class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_bucket_refill_per_second_ = 0
bucket_refill_per_second_ = 0.0
has_bucket_capacity_ = 0
bucket_capacity_ = 0
has_user_specified_rate_ = 0
user_specified_rate_ = ""
has_retry_parameters_ = 0
retry_parameters_ = None
has_max_concurrent_requests_ = 0
max_concurrent_requests_ = 0
has_mode_ = 0
mode_ = 0
has_acl_ = 0
acl_ = None
def __init__(self, contents=None):
self.header_override_ = []
self.lazy_init_lock_ = _Lock()
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def bucket_refill_per_second(self): return self.bucket_refill_per_second_
def set_bucket_refill_per_second(self, x):
self.has_bucket_refill_per_second_ = 1
self.bucket_refill_per_second_ = x
def clear_bucket_refill_per_second(self):
if self.has_bucket_refill_per_second_:
self.has_bucket_refill_per_second_ = 0
self.bucket_refill_per_second_ = 0.0
def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
def bucket_capacity(self): return self.bucket_capacity_
def set_bucket_capacity(self, x):
self.has_bucket_capacity_ = 1
self.bucket_capacity_ = x
def clear_bucket_capacity(self):
if self.has_bucket_capacity_:
self.has_bucket_capacity_ = 0
self.bucket_capacity_ = 0
def has_bucket_capacity(self): return self.has_bucket_capacity_
def user_specified_rate(self): return self.user_specified_rate_
def set_user_specified_rate(self, x):
self.has_user_specified_rate_ = 1
self.user_specified_rate_ = x
def clear_user_specified_rate(self):
if self.has_user_specified_rate_:
self.has_user_specified_rate_ = 0
self.user_specified_rate_ = ""
def has_user_specified_rate(self): return self.has_user_specified_rate_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def max_concurrent_requests(self): return self.max_concurrent_requests_
def set_max_concurrent_requests(self, x):
self.has_max_concurrent_requests_ = 1
self.max_concurrent_requests_ = x
def clear_max_concurrent_requests(self):
if self.has_max_concurrent_requests_:
self.has_max_concurrent_requests_ = 0
self.max_concurrent_requests_ = 0
def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def acl(self):
if self.acl_ is None:
self.lazy_init_lock_.acquire()
try:
if self.acl_ is None: self.acl_ = TaskQueueAcl()
finally:
self.lazy_init_lock_.release()
return self.acl_
def mutable_acl(self): self.has_acl_ = 1; return self.acl()
def clear_acl(self):
if self.has_acl_:
      self.has_acl_ = 0
if self.acl_ is not None: self.acl_.Clear()
def has_acl(self): return self.has_acl_
def header_override_size(self): return len(self.header_override_)
def header_override_list(self): return self.header_override_
def header_override(self, i):
return self.header_override_[i]
def mutable_header_override(self, i):
return self.header_override_[i]
def add_header_override(self):
x = TaskQueueHttpHeader()
self.header_override_.append(x)
return x
def clear_header_override(self):
self.header_override_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
for i in range(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_acl_ != x.has_acl_: return 0
if self.has_acl_ and self.acl_ != x.acl_: return 0
if len(self.header_override_) != len(x.header_override_): return 0
for e1, e2 in zip(self.header_override_, x.header_override_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_bucket_refill_per_second_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_refill_per_second not set.')
if (not self.has_bucket_capacity_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_capacity not set.')
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
for p in self.header_override_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthVarInt64(self.bucket_capacity_)
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
n += 1 * len(self.header_override_)
for i in range(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_bucket_refill_per_second_):
n += 9
if (self.has_bucket_capacity_):
n += 1
n += self.lengthVarInt64(self.bucket_capacity_)
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
n += 1 * len(self.header_override_)
for i in range(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_bucket_refill_per_second()
self.clear_bucket_capacity()
self.clear_user_specified_rate()
self.clear_retry_parameters()
self.clear_max_concurrent_requests()
self.clear_mode()
self.clear_acl()
self.clear_header_override()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
out.putVarInt32(32)
out.putVarInt32(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_retry_parameters_):
out.putVarInt32(50)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(56)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(64)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(74)
out.putVarInt32(self.acl_.ByteSize())
self.acl_.OutputUnchecked(out)
for i in range(len(self.header_override_)):
out.putVarInt32(82)
out.putVarInt32(self.header_override_[i].ByteSize())
self.header_override_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_bucket_refill_per_second_):
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
if (self.has_bucket_capacity_):
out.putVarInt32(32)
out.putVarInt32(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_retry_parameters_):
out.putVarInt32(50)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(56)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(64)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(74)
out.putVarInt32(self.acl_.ByteSizePartial())
self.acl_.OutputPartial(out)
for i in range(len(self.header_override_)):
out.putVarInt32(82)
out.putVarInt32(self.header_override_[i].ByteSizePartial())
self.header_override_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 25:
self.set_bucket_refill_per_second(d.getDouble())
continue
if tt == 32:
self.set_bucket_capacity(d.getVarInt32())
continue
if tt == 42:
self.set_user_specified_rate(d.getPrefixedString())
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 56:
self.set_max_concurrent_requests(d.getVarInt32())
continue
if tt == 64:
self.set_mode(d.getVarInt32())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_acl().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_header_override().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormatInt32(self.bucket_capacity_))
if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_acl_:
res+=prefix+"acl <\n"
res+=self.acl_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.header_override_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("header_override%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kbucket_refill_per_second = 3
kbucket_capacity = 4
kuser_specified_rate = 5
kretry_parameters = 6
kmax_concurrent_requests = 7
kmode = 8
kacl = 9
kheader_override = 10
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "bucket_refill_per_second",
4: "bucket_capacity",
5: "user_specified_rate",
6: "retry_parameters",
7: "max_concurrent_requests",
8: "mode",
9: "acl",
10: "header_override",
}, 10)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
}, 10, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueRequest'
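# Empty response message for an UpdateQueue request; it defines no fields.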
class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueResponse'
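# Request to list queue definitions: max_rows is required, app_id is optional.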
class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_max_rows_ = 0
max_rows_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def max_rows(self): return self.max_rows_
def set_max_rows(self, x):
self.has_max_rows_ = 1
self.max_rows_ = x
def clear_max_rows(self):
if self.has_max_rows_:
self.has_max_rows_ = 0
self.max_rows_ = 0
def has_max_rows(self): return self.has_max_rows_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_max_rows()): self.set_max_rows(x.max_rows())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_max_rows_ != x.has_max_rows_: return 0
if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_max_rows_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_rows not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthVarInt64(self.max_rows_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_max_rows_):
n += 1
n += self.lengthVarInt64(self.max_rows_)
return n
def Clear(self):
self.clear_app_id()
self.clear_max_rows()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(16)
out.putVarInt32(self.max_rows_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_max_rows_):
out.putVarInt32(16)
out.putVarInt32(self.max_rows_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 16:
self.set_max_rows(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kmax_rows = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "max_rows",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesRequest'
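# A single queue entry in a FetchQueues response: queue_name,
# bucket_refill_per_second, bucket_capacity and paused are required; the
# remaining throttling, retry, ACL and header-override settings are optional.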
class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_bucket_refill_per_second_ = 0
bucket_refill_per_second_ = 0.0
has_bucket_capacity_ = 0
bucket_capacity_ = 0.0
has_user_specified_rate_ = 0
user_specified_rate_ = ""
has_paused_ = 0
paused_ = 0
has_retry_parameters_ = 0
retry_parameters_ = None
has_max_concurrent_requests_ = 0
max_concurrent_requests_ = 0
has_mode_ = 0
mode_ = 0
has_acl_ = 0
acl_ = None
has_creator_name_ = 0
creator_name_ = "apphosting"
def __init__(self, contents=None):
self.header_override_ = []
self.lazy_init_lock_ = _Lock()
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def bucket_refill_per_second(self): return self.bucket_refill_per_second_
def set_bucket_refill_per_second(self, x):
self.has_bucket_refill_per_second_ = 1
self.bucket_refill_per_second_ = x
def clear_bucket_refill_per_second(self):
if self.has_bucket_refill_per_second_:
self.has_bucket_refill_per_second_ = 0
self.bucket_refill_per_second_ = 0.0
def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
def bucket_capacity(self): return self.bucket_capacity_
def set_bucket_capacity(self, x):
self.has_bucket_capacity_ = 1
self.bucket_capacity_ = x
def clear_bucket_capacity(self):
if self.has_bucket_capacity_:
self.has_bucket_capacity_ = 0
self.bucket_capacity_ = 0.0
def has_bucket_capacity(self): return self.has_bucket_capacity_
def user_specified_rate(self): return self.user_specified_rate_
def set_user_specified_rate(self, x):
self.has_user_specified_rate_ = 1
self.user_specified_rate_ = x
def clear_user_specified_rate(self):
if self.has_user_specified_rate_:
self.has_user_specified_rate_ = 0
self.user_specified_rate_ = ""
def has_user_specified_rate(self): return self.has_user_specified_rate_
def paused(self): return self.paused_
def set_paused(self, x):
self.has_paused_ = 1
self.paused_ = x
def clear_paused(self):
if self.has_paused_:
self.has_paused_ = 0
self.paused_ = 0
def has_paused(self): return self.has_paused_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def max_concurrent_requests(self): return self.max_concurrent_requests_
def set_max_concurrent_requests(self, x):
self.has_max_concurrent_requests_ = 1
self.max_concurrent_requests_ = x
def clear_max_concurrent_requests(self):
if self.has_max_concurrent_requests_:
self.has_max_concurrent_requests_ = 0
self.max_concurrent_requests_ = 0
def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def acl(self):
if self.acl_ is None:
self.lazy_init_lock_.acquire()
try:
if self.acl_ is None: self.acl_ = TaskQueueAcl()
finally:
self.lazy_init_lock_.release()
return self.acl_
def mutable_acl(self): self.has_acl_ = 1; return self.acl()
def clear_acl(self):
if self.has_acl_:
      self.has_acl_ = 0
if self.acl_ is not None: self.acl_.Clear()
def has_acl(self): return self.has_acl_
def header_override_size(self): return len(self.header_override_)
def header_override_list(self): return self.header_override_
def header_override(self, i):
return self.header_override_[i]
def mutable_header_override(self, i):
return self.header_override_[i]
def add_header_override(self):
x = TaskQueueHttpHeader()
self.header_override_.append(x)
return x
def clear_header_override(self):
self.header_override_ = []
def creator_name(self): return self.creator_name_
def set_creator_name(self, x):
self.has_creator_name_ = 1
self.creator_name_ = x
def clear_creator_name(self):
if self.has_creator_name_:
self.has_creator_name_ = 0
self.creator_name_ = "apphosting"
def has_creator_name(self): return self.has_creator_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
if (x.has_paused()): self.set_paused(x.paused())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
for i in range(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
if (x.has_creator_name()): self.set_creator_name(x.creator_name())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
if self.has_paused_ != x.has_paused_: return 0
if self.has_paused_ and self.paused_ != x.paused_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_acl_ != x.has_acl_: return 0
if self.has_acl_ and self.acl_ != x.acl_: return 0
if len(self.header_override_) != len(x.header_override_): return 0
for e1, e2 in zip(self.header_override_, x.header_override_):
if e1 != e2: return 0
if self.has_creator_name_ != x.has_creator_name_: return 0
if self.has_creator_name_ and self.creator_name_ != x.creator_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_bucket_refill_per_second_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_refill_per_second not set.')
if (not self.has_bucket_capacity_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_capacity not set.')
if (not self.has_paused_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: paused not set.')
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
for p in self.header_override_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
n += 1 * len(self.header_override_)
for i in range(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
if (self.has_creator_name_): n += 1 + self.lengthString(len(self.creator_name_))
return n + 21
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_bucket_refill_per_second_):
n += 9
if (self.has_bucket_capacity_):
n += 9
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_paused_):
n += 2
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
n += 1 * len(self.header_override_)
for i in range(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
if (self.has_creator_name_): n += 1 + self.lengthString(len(self.creator_name_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_bucket_refill_per_second()
self.clear_bucket_capacity()
self.clear_user_specified_rate()
self.clear_paused()
self.clear_retry_parameters()
self.clear_max_concurrent_requests()
self.clear_mode()
self.clear_acl()
self.clear_header_override()
self.clear_creator_name()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
out.putVarInt32(33)
out.putDouble(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
out.putVarInt32(48)
out.putBoolean(self.paused_)
if (self.has_retry_parameters_):
out.putVarInt32(58)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(64)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(72)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(82)
out.putVarInt32(self.acl_.ByteSize())
self.acl_.OutputUnchecked(out)
for i in range(len(self.header_override_)):
out.putVarInt32(90)
out.putVarInt32(self.header_override_[i].ByteSize())
self.header_override_[i].OutputUnchecked(out)
if (self.has_creator_name_):
out.putVarInt32(98)
out.putPrefixedString(self.creator_name_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_bucket_refill_per_second_):
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
if (self.has_bucket_capacity_):
out.putVarInt32(33)
out.putDouble(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_paused_):
out.putVarInt32(48)
out.putBoolean(self.paused_)
if (self.has_retry_parameters_):
out.putVarInt32(58)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(64)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(72)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(82)
out.putVarInt32(self.acl_.ByteSizePartial())
self.acl_.OutputPartial(out)
for i in range(len(self.header_override_)):
out.putVarInt32(90)
out.putVarInt32(self.header_override_[i].ByteSizePartial())
self.header_override_[i].OutputPartial(out)
if (self.has_creator_name_):
out.putVarInt32(98)
out.putPrefixedString(self.creator_name_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 25:
self.set_bucket_refill_per_second(d.getDouble())
continue
if tt == 33:
self.set_bucket_capacity(d.getDouble())
continue
if tt == 42:
self.set_user_specified_rate(d.getPrefixedString())
continue
if tt == 48:
self.set_paused(d.getBoolean())
continue
if tt == 58:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 64:
self.set_max_concurrent_requests(d.getVarInt32())
continue
if tt == 72:
self.set_mode(d.getVarInt32())
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_acl().TryMerge(tmp)
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_header_override().TryMerge(tmp)
continue
if tt == 98:
self.set_creator_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormat(self.bucket_capacity_))
if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
if self.has_paused_: res+=prefix+("paused: %s\n" % self.DebugFormatBool(self.paused_))
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_acl_:
res+=prefix+"acl <\n"
res+=self.acl_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.header_override_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("header_override%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_creator_name_: res+=prefix+("creator_name: %s\n" % self.DebugFormatString(self.creator_name_))
return res
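# Response to a FetchQueues request: one repeated Queue group per queue returned.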
class TaskQueueFetchQueuesResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.queue_ = []
if contents is not None: self.MergeFromString(contents)
def queue_size(self): return len(self.queue_)
def queue_list(self): return self.queue_
def queue(self, i):
return self.queue_[i]
def mutable_queue(self, i):
return self.queue_[i]
def add_queue(self):
x = TaskQueueFetchQueuesResponse_Queue()
self.queue_.append(x)
return x
def clear_queue(self):
self.queue_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.queue_size()): self.add_queue().CopyFrom(x.queue(i))
def Equals(self, x):
if x is self: return 1
if len(self.queue_) != len(x.queue_): return 0
for e1, e2 in zip(self.queue_, x.queue_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.queue_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.queue_)
for i in range(len(self.queue_)): n += self.queue_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.queue_)
for i in range(len(self.queue_)): n += self.queue_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_queue()
def OutputUnchecked(self, out):
for i in range(len(self.queue_)):
out.putVarInt32(11)
self.queue_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in range(len(self.queue_)):
out.putVarInt32(11)
self.queue_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_queue().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.queue_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Queue%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kQueueGroup = 1
kQueuequeue_name = 2
kQueuebucket_refill_per_second = 3
kQueuebucket_capacity = 4
kQueueuser_specified_rate = 5
kQueuepaused = 6
kQueueretry_parameters = 7
kQueuemax_concurrent_requests = 8
kQueuemode = 9
kQueueacl = 10
kQueueheader_override = 11
kQueuecreator_name = 12
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Queue",
2: "queue_name",
3: "bucket_refill_per_second",
4: "bucket_capacity",
5: "user_specified_rate",
6: "paused",
7: "retry_parameters",
8: "max_concurrent_requests",
9: "mode",
10: "acl",
11: "header_override",
12: "creator_name",
}, 12)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.DOUBLE,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STRING,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.NUMERIC,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.STRING,
}, 12, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesResponse'
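# Request for per-queue statistics: queue_name is repeated, app_id and
# max_num_tasks are optional.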
class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_max_num_tasks_ = 0
max_num_tasks_ = 0
def __init__(self, contents=None):
self.queue_name_ = []
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name_size(self): return len(self.queue_name_)
def queue_name_list(self): return self.queue_name_
def queue_name(self, i):
return self.queue_name_[i]
def set_queue_name(self, i, x):
self.queue_name_[i] = x
def add_queue_name(self, x):
self.queue_name_.append(x)
def clear_queue_name(self):
self.queue_name_ = []
def max_num_tasks(self): return self.max_num_tasks_
def set_max_num_tasks(self, x):
self.has_max_num_tasks_ = 1
self.max_num_tasks_ = x
def clear_max_num_tasks(self):
if self.has_max_num_tasks_:
self.has_max_num_tasks_ = 0
self.max_num_tasks_ = 0
def has_max_num_tasks(self): return self.has_max_num_tasks_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
for i in range(x.queue_name_size()): self.add_queue_name(x.queue_name(i))
if (x.has_max_num_tasks()): self.set_max_num_tasks(x.max_num_tasks())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if len(self.queue_name_) != len(x.queue_name_): return 0
for e1, e2 in zip(self.queue_name_, x.queue_name_):
if e1 != e2: return 0
if self.has_max_num_tasks_ != x.has_max_num_tasks_: return 0
if self.has_max_num_tasks_ and self.max_num_tasks_ != x.max_num_tasks_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += 1 * len(self.queue_name_)
for i in range(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += 1 * len(self.queue_name_)
for i in range(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_max_num_tasks()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
for i in range(len(self.queue_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_[i])
if (self.has_max_num_tasks_):
out.putVarInt32(24)
out.putVarInt32(self.max_num_tasks_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
for i in range(len(self.queue_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_[i])
if (self.has_max_num_tasks_):
out.putVarInt32(24)
out.putVarInt32(self.max_num_tasks_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.add_queue_name(d.getPrefixedString())
continue
if tt == 24:
self.set_max_num_tasks(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
cnt=0
for e in self.queue_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("queue_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kmax_num_tasks = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "max_num_tasks",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsRequest'
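# Scanner-side statistics for a queue: executed_last_minute, executed_last_hour
# and sampling_duration_seconds are required; requests_in_flight and
# enforced_rate are optional.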
class TaskQueueScannerQueueInfo(ProtocolBuffer.ProtocolMessage):
has_executed_last_minute_ = 0
executed_last_minute_ = 0
has_executed_last_hour_ = 0
executed_last_hour_ = 0
has_sampling_duration_seconds_ = 0
sampling_duration_seconds_ = 0.0
has_requests_in_flight_ = 0
requests_in_flight_ = 0
has_enforced_rate_ = 0
enforced_rate_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def executed_last_minute(self): return self.executed_last_minute_
def set_executed_last_minute(self, x):
self.has_executed_last_minute_ = 1
self.executed_last_minute_ = x
def clear_executed_last_minute(self):
if self.has_executed_last_minute_:
self.has_executed_last_minute_ = 0
self.executed_last_minute_ = 0
def has_executed_last_minute(self): return self.has_executed_last_minute_
def executed_last_hour(self): return self.executed_last_hour_
def set_executed_last_hour(self, x):
self.has_executed_last_hour_ = 1
self.executed_last_hour_ = x
def clear_executed_last_hour(self):
if self.has_executed_last_hour_:
self.has_executed_last_hour_ = 0
self.executed_last_hour_ = 0
def has_executed_last_hour(self): return self.has_executed_last_hour_
def sampling_duration_seconds(self): return self.sampling_duration_seconds_
def set_sampling_duration_seconds(self, x):
self.has_sampling_duration_seconds_ = 1
self.sampling_duration_seconds_ = x
def clear_sampling_duration_seconds(self):
if self.has_sampling_duration_seconds_:
self.has_sampling_duration_seconds_ = 0
self.sampling_duration_seconds_ = 0.0
def has_sampling_duration_seconds(self): return self.has_sampling_duration_seconds_
def requests_in_flight(self): return self.requests_in_flight_
def set_requests_in_flight(self, x):
self.has_requests_in_flight_ = 1
self.requests_in_flight_ = x
def clear_requests_in_flight(self):
if self.has_requests_in_flight_:
self.has_requests_in_flight_ = 0
self.requests_in_flight_ = 0
def has_requests_in_flight(self): return self.has_requests_in_flight_
def enforced_rate(self): return self.enforced_rate_
def set_enforced_rate(self, x):
self.has_enforced_rate_ = 1
self.enforced_rate_ = x
def clear_enforced_rate(self):
if self.has_enforced_rate_:
self.has_enforced_rate_ = 0
self.enforced_rate_ = 0.0
def has_enforced_rate(self): return self.has_enforced_rate_
def MergeFrom(self, x):
assert x is not self
if (x.has_executed_last_minute()): self.set_executed_last_minute(x.executed_last_minute())
if (x.has_executed_last_hour()): self.set_executed_last_hour(x.executed_last_hour())
if (x.has_sampling_duration_seconds()): self.set_sampling_duration_seconds(x.sampling_duration_seconds())
if (x.has_requests_in_flight()): self.set_requests_in_flight(x.requests_in_flight())
if (x.has_enforced_rate()): self.set_enforced_rate(x.enforced_rate())
def Equals(self, x):
if x is self: return 1
if self.has_executed_last_minute_ != x.has_executed_last_minute_: return 0
if self.has_executed_last_minute_ and self.executed_last_minute_ != x.executed_last_minute_: return 0
if self.has_executed_last_hour_ != x.has_executed_last_hour_: return 0
if self.has_executed_last_hour_ and self.executed_last_hour_ != x.executed_last_hour_: return 0
if self.has_sampling_duration_seconds_ != x.has_sampling_duration_seconds_: return 0
if self.has_sampling_duration_seconds_ and self.sampling_duration_seconds_ != x.sampling_duration_seconds_: return 0
if self.has_requests_in_flight_ != x.has_requests_in_flight_: return 0
if self.has_requests_in_flight_ and self.requests_in_flight_ != x.requests_in_flight_: return 0
if self.has_enforced_rate_ != x.has_enforced_rate_: return 0
if self.has_enforced_rate_ and self.enforced_rate_ != x.enforced_rate_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_executed_last_minute_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: executed_last_minute not set.')
if (not self.has_executed_last_hour_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: executed_last_hour not set.')
if (not self.has_sampling_duration_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: sampling_duration_seconds not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.executed_last_minute_)
n += self.lengthVarInt64(self.executed_last_hour_)
if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
if (self.has_enforced_rate_): n += 9
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_executed_last_minute_):
n += 1
n += self.lengthVarInt64(self.executed_last_minute_)
if (self.has_executed_last_hour_):
n += 1
n += self.lengthVarInt64(self.executed_last_hour_)
if (self.has_sampling_duration_seconds_):
n += 9
if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
if (self.has_enforced_rate_): n += 9
return n
def Clear(self):
self.clear_executed_last_minute()
self.clear_executed_last_hour()
self.clear_sampling_duration_seconds()
self.clear_requests_in_flight()
self.clear_enforced_rate()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.executed_last_minute_)
out.putVarInt32(16)
out.putVarInt64(self.executed_last_hour_)
out.putVarInt32(25)
out.putDouble(self.sampling_duration_seconds_)
if (self.has_requests_in_flight_):
out.putVarInt32(32)
out.putVarInt32(self.requests_in_flight_)
if (self.has_enforced_rate_):
out.putVarInt32(41)
out.putDouble(self.enforced_rate_)
def OutputPartial(self, out):
if (self.has_executed_last_minute_):
out.putVarInt32(8)
out.putVarInt64(self.executed_last_minute_)
if (self.has_executed_last_hour_):
out.putVarInt32(16)
out.putVarInt64(self.executed_last_hour_)
if (self.has_sampling_duration_seconds_):
out.putVarInt32(25)
out.putDouble(self.sampling_duration_seconds_)
if (self.has_requests_in_flight_):
out.putVarInt32(32)
out.putVarInt32(self.requests_in_flight_)
if (self.has_enforced_rate_):
out.putVarInt32(41)
out.putDouble(self.enforced_rate_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_executed_last_minute(d.getVarInt64())
continue
if tt == 16:
self.set_executed_last_hour(d.getVarInt64())
continue
if tt == 25:
self.set_sampling_duration_seconds(d.getDouble())
continue
if tt == 32:
self.set_requests_in_flight(d.getVarInt32())
continue
if tt == 41:
self.set_enforced_rate(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_executed_last_minute_: res+=prefix+("executed_last_minute: %s\n" % self.DebugFormatInt64(self.executed_last_minute_))
if self.has_executed_last_hour_: res+=prefix+("executed_last_hour: %s\n" % self.DebugFormatInt64(self.executed_last_hour_))
if self.has_sampling_duration_seconds_: res+=prefix+("sampling_duration_seconds: %s\n" % self.DebugFormat(self.sampling_duration_seconds_))
if self.has_requests_in_flight_: res+=prefix+("requests_in_flight: %s\n" % self.DebugFormatInt32(self.requests_in_flight_))
if self.has_enforced_rate_: res+=prefix+("enforced_rate: %s\n" % self.DebugFormat(self.enforced_rate_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kexecuted_last_minute = 1
kexecuted_last_hour = 2
ksampling_duration_seconds = 3
krequests_in_flight = 4
kenforced_rate = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "executed_last_minute",
2: "executed_last_hour",
3: "sampling_duration_seconds",
4: "requests_in_flight",
5: "enforced_rate",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.DOUBLE,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueScannerQueueInfo'
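# TaskQueueFetchQueueStatsResponse_QueueStats is encoded as a protobuf *group*
# nested inside TaskQueueFetchQueueStatsResponse: the container writes a
# STARTGROUP tag (11) before each entry and an ENDGROUP tag (12) after it, which
# is why TryMerge() below loops until it reads tag 12 instead of draining the
# buffer.  num_tasks and oldest_eta_usec (microseconds) are required;
# scanner_info is optional and is built lazily under lazy_init_lock_.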
class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
has_num_tasks_ = 0
num_tasks_ = 0
has_oldest_eta_usec_ = 0
oldest_eta_usec_ = 0
has_scanner_info_ = 0
scanner_info_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = _Lock()
if contents is not None: self.MergeFromString(contents)
def num_tasks(self): return self.num_tasks_
def set_num_tasks(self, x):
self.has_num_tasks_ = 1
self.num_tasks_ = x
def clear_num_tasks(self):
if self.has_num_tasks_:
self.has_num_tasks_ = 0
self.num_tasks_ = 0
def has_num_tasks(self): return self.has_num_tasks_
def oldest_eta_usec(self): return self.oldest_eta_usec_
def set_oldest_eta_usec(self, x):
self.has_oldest_eta_usec_ = 1
self.oldest_eta_usec_ = x
def clear_oldest_eta_usec(self):
if self.has_oldest_eta_usec_:
self.has_oldest_eta_usec_ = 0
self.oldest_eta_usec_ = 0
def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_
def scanner_info(self):
if self.scanner_info_ is None:
self.lazy_init_lock_.acquire()
try:
if self.scanner_info_ is None: self.scanner_info_ = TaskQueueScannerQueueInfo()
finally:
self.lazy_init_lock_.release()
return self.scanner_info_
def mutable_scanner_info(self): self.has_scanner_info_ = 1; return self.scanner_info()
def clear_scanner_info(self):
if self.has_scanner_info_:
self.has_scanner_info_ = 0;
if self.scanner_info_ is not None: self.scanner_info_.Clear()
def has_scanner_info(self): return self.has_scanner_info_
def MergeFrom(self, x):
assert x is not self
if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
if (x.has_scanner_info()): self.mutable_scanner_info().MergeFrom(x.scanner_info())
def Equals(self, x):
if x is self: return 1
if self.has_num_tasks_ != x.has_num_tasks_: return 0
if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
if self.has_scanner_info_ != x.has_scanner_info_: return 0
if self.has_scanner_info_ and self.scanner_info_ != x.scanner_info_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_num_tasks_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: num_tasks not set.')
if (not self.has_oldest_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: oldest_eta_usec not set.')
if (self.has_scanner_info_ and not self.scanner_info_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.num_tasks_)
n += self.lengthVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_num_tasks_):
n += 1
n += self.lengthVarInt64(self.num_tasks_)
if (self.has_oldest_eta_usec_):
n += 1
n += self.lengthVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSizePartial())
return n
def Clear(self):
self.clear_num_tasks()
self.clear_oldest_eta_usec()
self.clear_scanner_info()
def OutputUnchecked(self, out):
out.putVarInt32(16)
out.putVarInt32(self.num_tasks_)
out.putVarInt32(24)
out.putVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_):
out.putVarInt32(34)
out.putVarInt32(self.scanner_info_.ByteSize())
self.scanner_info_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_num_tasks_):
out.putVarInt32(16)
out.putVarInt32(self.num_tasks_)
if (self.has_oldest_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_):
out.putVarInt32(34)
out.putVarInt32(self.scanner_info_.ByteSizePartial())
self.scanner_info_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 16:
self.set_num_tasks(d.getVarInt32())
continue
if tt == 24:
self.set_oldest_eta_usec(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_scanner_info().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
if self.has_scanner_info_:
res+=prefix+"scanner_info <\n"
res+=self.scanner_info_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
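# TaskQueueFetchQueueStatsResponse holds one repeated QueueStats group, one entry
# per queue named in the corresponding TaskQueueFetchQueueStatsRequest (matching
# order is assumed; it is not stated in this file).  A read-side sketch using
# only accessors defined in this module:
#
#   resp = TaskQueueFetchQueueStatsResponse(serialized_bytes)  # bytes from the stub
#   for stats in resp.queuestats_list():
#       print('%d tasks, oldest eta %d usec'
#             % (stats.num_tasks(), stats.oldest_eta_usec()))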
class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.queuestats_ = []
if contents is not None: self.MergeFromString(contents)
def queuestats_size(self): return len(self.queuestats_)
def queuestats_list(self): return self.queuestats_
def queuestats(self, i):
return self.queuestats_[i]
def mutable_queuestats(self, i):
return self.queuestats_[i]
def add_queuestats(self):
x = TaskQueueFetchQueueStatsResponse_QueueStats()
self.queuestats_.append(x)
return x
def clear_queuestats(self):
self.queuestats_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.queuestats_size()): self.add_queuestats().CopyFrom(x.queuestats(i))
def Equals(self, x):
if x is self: return 1
if len(self.queuestats_) != len(x.queuestats_): return 0
for e1, e2 in zip(self.queuestats_, x.queuestats_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.queuestats_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.queuestats_)
for i in range(len(self.queuestats_)): n += self.queuestats_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.queuestats_)
for i in range(len(self.queuestats_)): n += self.queuestats_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_queuestats()
def OutputUnchecked(self, out):
for i in range(len(self.queuestats_)):
out.putVarInt32(11)
self.queuestats_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in range(len(self.queuestats_)):
out.putVarInt32(11)
self.queuestats_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_queuestats().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.queuestats_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("QueueStats%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kQueueStatsGroup = 1
kQueueStatsnum_tasks = 2
kQueueStatsoldest_eta_usec = 3
kQueueStatsscanner_info = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "QueueStats",
2: "num_tasks",
3: "oldest_eta_usec",
4: "scanner_info",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsResponse'
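# TaskQueuePauseQueueRequest pauses or resumes a single queue.  app_id,
# queue_name and pause are all required (see IsInitialized); pause is written
# with putBoolean under tag 3, so set_pause(True) pauses and set_pause(False)
# presumably resumes (the resume reading is inferred from the field name).  A
# minimal round-trip sketch, assuming Encode() from the ProtocolMessage base
# class:
#
#   req = TaskQueuePauseQueueRequest()
#   req.set_app_id('example-app')      # placeholder id, not a real application
#   req.set_queue_name('default')
#   req.set_pause(True)
#   data = req.Encode()
#   assert TaskQueuePauseQueueRequest(data).pause()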
class TaskQueuePauseQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_pause_ = 0
pause_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def pause(self): return self.pause_
def set_pause(self, x):
self.has_pause_ = 1
self.pause_ = x
def clear_pause(self):
if self.has_pause_:
self.has_pause_ = 0
self.pause_ = 0
def has_pause(self): return self.has_pause_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_pause()): self.set_pause(x.pause())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_pause_ != x.has_pause_: return 0
if self.has_pause_ and self.pause_ != x.pause_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_pause_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: pause not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_pause_):
n += 2
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_pause()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(24)
out.putBoolean(self.pause_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_pause_):
out.putVarInt32(24)
out.putBoolean(self.pause_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 24:
self.set_pause(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_pause_: res+=prefix+("pause: %s\n" % self.DebugFormatBool(self.pause_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kpause = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "pause",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueRequest'
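# TaskQueuePauseQueueResponse carries no fields.  The same empty-message pattern
# repeats below for the purge, delete-queue and delete-group responses,
# presumably because success or failure is reported through the RPC status
# rather than through the response payload.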
class TaskQueuePauseQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueResponse'
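# TaskQueuePurgeQueueRequest identifies the queue whose tasks should be purged.
# Unlike the pause request above, app_id is optional here and only queue_name is
# required (see IsInitialized and the ByteSize constant of +1 for the single
# required string tag).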
class TaskQueuePurgeQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueRequest'
class TaskQueuePurgeQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueResponse'
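# TaskQueueDeleteQueueRequest deletes a whole queue; both app_id and queue_name
# are required, mirroring the pause request rather than the purge request above.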
class TaskQueueDeleteQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueRequest'
class TaskQueueDeleteQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueResponse'
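# TaskQueueDeleteGroupRequest names an app whose queues should be deleted as a
# whole ("group" is read here as the app's entire queue group; that reading is
# inferred from the message name only).  app_id is its single, required field.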
class TaskQueueDeleteGroupRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupRequest'
class TaskQueueDeleteGroupResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupResponse'
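# TaskQueueQueryTasksRequest pages through the tasks of one queue.  queue_name is
# the only required field; start_task_name, start_eta_usec and start_tag position
# the scan (reading them as resume cursors from a previous page is an inference
# from the names), and max_rows caps the page size with a default of 1.  A paging
# sketch built only on this module's setters:
#
#   req = TaskQueueQueryTasksRequest()
#   req.set_queue_name('default')
#   req.set_max_rows(100)
#   req.set_start_task_name(last_task_name_from_previous_page)  # hypothetical variable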
class TaskQueueQueryTasksRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_start_task_name_ = 0
start_task_name_ = ""
has_start_eta_usec_ = 0
start_eta_usec_ = 0
has_start_tag_ = 0
start_tag_ = ""
has_max_rows_ = 0
max_rows_ = 1
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def start_task_name(self): return self.start_task_name_
def set_start_task_name(self, x):
self.has_start_task_name_ = 1
self.start_task_name_ = x
def clear_start_task_name(self):
if self.has_start_task_name_:
self.has_start_task_name_ = 0
self.start_task_name_ = ""
def has_start_task_name(self): return self.has_start_task_name_
def start_eta_usec(self): return self.start_eta_usec_
def set_start_eta_usec(self, x):
self.has_start_eta_usec_ = 1
self.start_eta_usec_ = x
def clear_start_eta_usec(self):
if self.has_start_eta_usec_:
self.has_start_eta_usec_ = 0
self.start_eta_usec_ = 0
def has_start_eta_usec(self): return self.has_start_eta_usec_
def start_tag(self): return self.start_tag_
def set_start_tag(self, x):
self.has_start_tag_ = 1
self.start_tag_ = x
def clear_start_tag(self):
if self.has_start_tag_:
self.has_start_tag_ = 0
self.start_tag_ = ""
def has_start_tag(self): return self.has_start_tag_
def max_rows(self): return self.max_rows_
def set_max_rows(self, x):
self.has_max_rows_ = 1
self.max_rows_ = x
def clear_max_rows(self):
if self.has_max_rows_:
self.has_max_rows_ = 0
self.max_rows_ = 1
def has_max_rows(self): return self.has_max_rows_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_start_task_name()): self.set_start_task_name(x.start_task_name())
if (x.has_start_eta_usec()): self.set_start_eta_usec(x.start_eta_usec())
if (x.has_start_tag()): self.set_start_tag(x.start_tag())
if (x.has_max_rows()): self.set_max_rows(x.max_rows())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_start_task_name_ != x.has_start_task_name_: return 0
if self.has_start_task_name_ and self.start_task_name_ != x.start_task_name_: return 0
if self.has_start_eta_usec_ != x.has_start_eta_usec_: return 0
if self.has_start_eta_usec_ and self.start_eta_usec_ != x.start_eta_usec_: return 0
if self.has_start_tag_ != x.has_start_tag_: return 0
if self.has_start_tag_ and self.start_tag_ != x.start_tag_: return 0
if self.has_max_rows_ != x.has_max_rows_: return 0
if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_start_task_name()
self.clear_start_eta_usec()
self.clear_start_tag()
self.clear_max_rows()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_start_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.start_task_name_)
if (self.has_start_eta_usec_):
out.putVarInt32(32)
out.putVarInt64(self.start_eta_usec_)
if (self.has_max_rows_):
out.putVarInt32(40)
out.putVarInt32(self.max_rows_)
if (self.has_start_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.start_tag_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_start_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.start_task_name_)
if (self.has_start_eta_usec_):
out.putVarInt32(32)
out.putVarInt64(self.start_eta_usec_)
if (self.has_max_rows_):
out.putVarInt32(40)
out.putVarInt32(self.max_rows_)
if (self.has_start_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.start_tag_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_start_task_name(d.getPrefixedString())
continue
if tt == 32:
self.set_start_eta_usec(d.getVarInt64())
continue
if tt == 40:
self.set_max_rows(d.getVarInt32())
continue
if tt == 50:
self.set_start_tag(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_start_task_name_: res+=prefix+("start_task_name: %s\n" % self.DebugFormatString(self.start_task_name_))
if self.has_start_eta_usec_: res+=prefix+("start_eta_usec: %s\n" % self.DebugFormatInt64(self.start_eta_usec_))
if self.has_start_tag_: res+=prefix+("start_tag: %s\n" % self.DebugFormatString(self.start_tag_))
if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kstart_task_name = 3
kstart_eta_usec = 4
kstart_tag = 6
kmax_rows = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "start_task_name",
4: "start_eta_usec",
5: "max_rows",
6: "start_tag",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksRequest'
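# TaskQueueQueryTasksResponse_TaskHeader is a nested group holding one key/value
# pair of a task's HTTP request headers.  Its TryMerge() terminates when it reads
# the ENDGROUP tag 60 rather than at end of buffer.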
class TaskQueueQueryTasksResponse_TaskHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(66)
out.putPrefixedString(self.key_)
out.putVarInt32(74)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(66)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(74)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 60: break
if tt == 66:
self.set_key(d.getPrefixedString())
continue
if tt == 74:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
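# TaskQueueQueryTasksResponse_TaskCronTimetable is a nested group describing the
# cron schedule attached to a task: a schedule string plus its timezone, both
# required.  TryMerge() stops when it reads the ENDGROUP tag 108.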
class TaskQueueQueryTasksResponse_TaskCronTimetable(ProtocolBuffer.ProtocolMessage):
has_schedule_ = 0
schedule_ = ""
has_timezone_ = 0
timezone_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def schedule(self): return self.schedule_
def set_schedule(self, x):
self.has_schedule_ = 1
self.schedule_ = x
def clear_schedule(self):
if self.has_schedule_:
self.has_schedule_ = 0
self.schedule_ = ""
def has_schedule(self): return self.has_schedule_
def timezone(self): return self.timezone_
def set_timezone(self, x):
self.has_timezone_ = 1
self.timezone_ = x
def clear_timezone(self):
if self.has_timezone_:
self.has_timezone_ = 0
self.timezone_ = ""
def has_timezone(self): return self.has_timezone_
def MergeFrom(self, x):
assert x is not self
if (x.has_schedule()): self.set_schedule(x.schedule())
if (x.has_timezone()): self.set_timezone(x.timezone())
def Equals(self, x):
if x is self: return 1
if self.has_schedule_ != x.has_schedule_: return 0
if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
if self.has_timezone_ != x.has_timezone_: return 0
if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_schedule_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: schedule not set.')
if (not self.has_timezone_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: timezone not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.schedule_))
n += self.lengthString(len(self.timezone_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_schedule_):
n += 1
n += self.lengthString(len(self.schedule_))
if (self.has_timezone_):
n += 1
n += self.lengthString(len(self.timezone_))
return n
def Clear(self):
self.clear_schedule()
self.clear_timezone()
def OutputUnchecked(self, out):
out.putVarInt32(114)
out.putPrefixedString(self.schedule_)
out.putVarInt32(122)
out.putPrefixedString(self.timezone_)
def OutputPartial(self, out):
if (self.has_schedule_):
out.putVarInt32(114)
out.putPrefixedString(self.schedule_)
if (self.has_timezone_):
out.putVarInt32(122)
out.putPrefixedString(self.timezone_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 108: break
if tt == 114:
self.set_schedule(d.getPrefixedString())
continue
if tt == 122:
self.set_timezone(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
return res
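# TaskQueueQueryTasksResponse_TaskRunLog is a nested group recording a task
# execution attempt (interpreted from the field names): dispatch time, queueing
# lag and elapsed time in microseconds are all required, with an optional HTTP
# response_code and retry_reason.  TryMerge() stops at the ENDGROUP tag 132.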
class TaskQueueQueryTasksResponse_TaskRunLog(ProtocolBuffer.ProtocolMessage):
has_dispatched_usec_ = 0
dispatched_usec_ = 0
has_lag_usec_ = 0
lag_usec_ = 0
has_elapsed_usec_ = 0
elapsed_usec_ = 0
has_response_code_ = 0
response_code_ = 0
has_retry_reason_ = 0
retry_reason_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def dispatched_usec(self): return self.dispatched_usec_
def set_dispatched_usec(self, x):
self.has_dispatched_usec_ = 1
self.dispatched_usec_ = x
def clear_dispatched_usec(self):
if self.has_dispatched_usec_:
self.has_dispatched_usec_ = 0
self.dispatched_usec_ = 0
def has_dispatched_usec(self): return self.has_dispatched_usec_
def lag_usec(self): return self.lag_usec_
def set_lag_usec(self, x):
self.has_lag_usec_ = 1
self.lag_usec_ = x
def clear_lag_usec(self):
if self.has_lag_usec_:
self.has_lag_usec_ = 0
self.lag_usec_ = 0
def has_lag_usec(self): return self.has_lag_usec_
def elapsed_usec(self): return self.elapsed_usec_
def set_elapsed_usec(self, x):
self.has_elapsed_usec_ = 1
self.elapsed_usec_ = x
def clear_elapsed_usec(self):
if self.has_elapsed_usec_:
self.has_elapsed_usec_ = 0
self.elapsed_usec_ = 0
def has_elapsed_usec(self): return self.has_elapsed_usec_
def response_code(self): return self.response_code_
def set_response_code(self, x):
self.has_response_code_ = 1
self.response_code_ = x
def clear_response_code(self):
if self.has_response_code_:
self.has_response_code_ = 0
self.response_code_ = 0
def has_response_code(self): return self.has_response_code_
def retry_reason(self): return self.retry_reason_
def set_retry_reason(self, x):
self.has_retry_reason_ = 1
self.retry_reason_ = x
def clear_retry_reason(self):
if self.has_retry_reason_:
self.has_retry_reason_ = 0
self.retry_reason_ = ""
def has_retry_reason(self): return self.has_retry_reason_
def MergeFrom(self, x):
assert x is not self
if (x.has_dispatched_usec()): self.set_dispatched_usec(x.dispatched_usec())
if (x.has_lag_usec()): self.set_lag_usec(x.lag_usec())
if (x.has_elapsed_usec()): self.set_elapsed_usec(x.elapsed_usec())
if (x.has_response_code()): self.set_response_code(x.response_code())
if (x.has_retry_reason()): self.set_retry_reason(x.retry_reason())
def Equals(self, x):
if x is self: return 1
if self.has_dispatched_usec_ != x.has_dispatched_usec_: return 0
if self.has_dispatched_usec_ and self.dispatched_usec_ != x.dispatched_usec_: return 0
if self.has_lag_usec_ != x.has_lag_usec_: return 0
if self.has_lag_usec_ and self.lag_usec_ != x.lag_usec_: return 0
if self.has_elapsed_usec_ != x.has_elapsed_usec_: return 0
if self.has_elapsed_usec_ and self.elapsed_usec_ != x.elapsed_usec_: return 0
if self.has_response_code_ != x.has_response_code_: return 0
if self.has_response_code_ and self.response_code_ != x.response_code_: return 0
if self.has_retry_reason_ != x.has_retry_reason_: return 0
if self.has_retry_reason_ and self.retry_reason_ != x.retry_reason_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_dispatched_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: dispatched_usec not set.')
if (not self.has_lag_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lag_usec not set.')
if (not self.has_elapsed_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: elapsed_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.dispatched_usec_)
n += self.lengthVarInt64(self.lag_usec_)
n += self.lengthVarInt64(self.elapsed_usec_)
if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
return n + 6
def ByteSizePartial(self):
n = 0
if (self.has_dispatched_usec_):
n += 2
n += self.lengthVarInt64(self.dispatched_usec_)
if (self.has_lag_usec_):
n += 2
n += self.lengthVarInt64(self.lag_usec_)
if (self.has_elapsed_usec_):
n += 2
n += self.lengthVarInt64(self.elapsed_usec_)
if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
return n
def Clear(self):
self.clear_dispatched_usec()
self.clear_lag_usec()
self.clear_elapsed_usec()
self.clear_response_code()
self.clear_retry_reason()
def OutputUnchecked(self, out):
out.putVarInt32(136)
out.putVarInt64(self.dispatched_usec_)
out.putVarInt32(144)
out.putVarInt64(self.lag_usec_)
out.putVarInt32(152)
out.putVarInt64(self.elapsed_usec_)
if (self.has_response_code_):
out.putVarInt32(160)
out.putVarInt64(self.response_code_)
if (self.has_retry_reason_):
out.putVarInt32(218)
out.putPrefixedString(self.retry_reason_)
def OutputPartial(self, out):
if (self.has_dispatched_usec_):
out.putVarInt32(136)
out.putVarInt64(self.dispatched_usec_)
if (self.has_lag_usec_):
out.putVarInt32(144)
out.putVarInt64(self.lag_usec_)
if (self.has_elapsed_usec_):
out.putVarInt32(152)
out.putVarInt64(self.elapsed_usec_)
if (self.has_response_code_):
out.putVarInt32(160)
out.putVarInt64(self.response_code_)
if (self.has_retry_reason_):
out.putVarInt32(218)
out.putPrefixedString(self.retry_reason_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 132: break
if tt == 136:
self.set_dispatched_usec(d.getVarInt64())
continue
if tt == 144:
self.set_lag_usec(d.getVarInt64())
continue
if tt == 152:
self.set_elapsed_usec(d.getVarInt64())
continue
if tt == 160:
self.set_response_code(d.getVarInt64())
continue
if tt == 218:
self.set_retry_reason(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_dispatched_usec_: res+=prefix+("dispatched_usec: %s\n" % self.DebugFormatInt64(self.dispatched_usec_))
if self.has_lag_usec_: res+=prefix+("lag_usec: %s\n" % self.DebugFormatInt64(self.lag_usec_))
if self.has_elapsed_usec_: res+=prefix+("elapsed_usec: %s\n" % self.DebugFormatInt64(self.elapsed_usec_))
if self.has_response_code_: res+=prefix+("response_code: %s\n" % self.DebugFormatInt64(self.response_code_))
if self.has_retry_reason_: res+=prefix+("retry_reason: %s\n" % self.DebugFormatString(self.retry_reason_))
return res
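# TaskQueueQueryTasksResponse_Task is the per-task group returned by QueryTasks.
# RequestMethod enumerates the HTTP verb used to dispatch the task; the header,
# crontimetable and runlog groups defined above hang off it, and the
# message-typed members (crontimetable_, runlog_, payload_, retry_parameters_)
# are constructed lazily under lazy_init_lock_ so an untouched optional field
# costs nothing.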
class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5
_RequestMethod_NAMES = {
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
}
def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
RequestMethod_Name = classmethod(RequestMethod_Name)
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_url_ = 0
url_ = ""
has_method_ = 0
method_ = 0
has_retry_count_ = 0
retry_count_ = 0
has_body_size_ = 0
body_size_ = 0
has_body_ = 0
body_ = ""
has_creation_time_usec_ = 0
creation_time_usec_ = 0
has_crontimetable_ = 0
crontimetable_ = None
has_runlog_ = 0
runlog_ = None
has_description_ = 0
description_ = ""
has_payload_ = 0
payload_ = None
has_retry_parameters_ = 0
retry_parameters_ = None
has_first_try_usec_ = 0
first_try_usec_ = 0
has_tag_ = 0
tag_ = ""
has_execution_count_ = 0
execution_count_ = 0
def __init__(self, contents=None):
self.header_ = []
self.lazy_init_lock_ = _Lock()
if contents is not None: self.MergeFromString(contents)
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = 0
def has_method(self): return self.has_method_
def retry_count(self): return self.retry_count_
def set_retry_count(self, x):
self.has_retry_count_ = 1
self.retry_count_ = x
def clear_retry_count(self):
if self.has_retry_count_:
self.has_retry_count_ = 0
self.retry_count_ = 0
def has_retry_count(self): return self.has_retry_count_
def header_size(self): return len(self.header_)
def header_list(self): return self.header_
def header(self, i):
return self.header_[i]
def mutable_header(self, i):
return self.header_[i]
def add_header(self):
x = TaskQueueQueryTasksResponse_TaskHeader()
self.header_.append(x)
return x
def clear_header(self):
self.header_ = []
def body_size(self): return self.body_size_
def set_body_size(self, x):
self.has_body_size_ = 1
self.body_size_ = x
def clear_body_size(self):
if self.has_body_size_:
self.has_body_size_ = 0
self.body_size_ = 0
def has_body_size(self): return self.has_body_size_
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def creation_time_usec(self): return self.creation_time_usec_
def set_creation_time_usec(self, x):
self.has_creation_time_usec_ = 1
self.creation_time_usec_ = x
def clear_creation_time_usec(self):
if self.has_creation_time_usec_:
self.has_creation_time_usec_ = 0
self.creation_time_usec_ = 0
def has_creation_time_usec(self): return self.has_creation_time_usec_
def crontimetable(self):
if self.crontimetable_ is None:
self.lazy_init_lock_.acquire()
try:
if self.crontimetable_ is None: self.crontimetable_ = TaskQueueQueryTasksResponse_TaskCronTimetable()
finally:
self.lazy_init_lock_.release()
return self.crontimetable_
def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
def clear_crontimetable(self):
if self.has_crontimetable_:
self.has_crontimetable_ = 0;
if self.crontimetable_ is not None: self.crontimetable_.Clear()
def has_crontimetable(self): return self.has_crontimetable_
def runlog(self):
if self.runlog_ is None:
self.lazy_init_lock_.acquire()
try:
if self.runlog_ is None: self.runlog_ = TaskQueueQueryTasksResponse_TaskRunLog()
finally:
self.lazy_init_lock_.release()
return self.runlog_
def mutable_runlog(self): self.has_runlog_ = 1; return self.runlog()
def clear_runlog(self):
if self.has_runlog_:
self.has_runlog_ = 0;
if self.runlog_ is not None: self.runlog_.Clear()
def has_runlog(self): return self.has_runlog_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def payload(self):
if self.payload_ is None:
self.lazy_init_lock_.acquire()
try:
if self.payload_ is None: self.payload_ = MessageSet()
finally:
self.lazy_init_lock_.release()
return self.payload_
def mutable_payload(self): self.has_payload_ = 1; return self.payload()
def clear_payload(self):
if self.has_payload_:
self.has_payload_ = 0;
if self.payload_ is not None: self.payload_.Clear()
def has_payload(self): return self.has_payload_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
if self.has_retry_parameters_:
self.has_retry_parameters_ = 0;
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def first_try_usec(self): return self.first_try_usec_
def set_first_try_usec(self, x):
self.has_first_try_usec_ = 1
self.first_try_usec_ = x
def clear_first_try_usec(self):
if self.has_first_try_usec_:
self.has_first_try_usec_ = 0
self.first_try_usec_ = 0
def has_first_try_usec(self): return self.has_first_try_usec_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def execution_count(self): return self.execution_count_
def set_execution_count(self, x):
self.has_execution_count_ = 1
self.execution_count_ = x
def clear_execution_count(self):
if self.has_execution_count_:
self.has_execution_count_ = 0
self.execution_count_ = 0
def has_execution_count(self): return self.has_execution_count_
def MergeFrom(self, x):
assert x is not self
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_url()): self.set_url(x.url())
if (x.has_method()): self.set_method(x.method())
if (x.has_retry_count()): self.set_retry_count(x.retry_count())
for i in range(x.header_size()): self.add_header().CopyFrom(x.header(i))
if (x.has_body_size()): self.set_body_size(x.body_size())
if (x.has_body()): self.set_body(x.body())
if (x.has_creation_time_usec()): self.set_creation_time_usec(x.creation_time_usec())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_runlog()): self.mutable_runlog().MergeFrom(x.runlog())
if (x.has_description()): self.set_description(x.description())
if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_first_try_usec()): self.set_first_try_usec(x.first_try_usec())
if (x.has_tag()): self.set_tag(x.tag())
if (x.has_execution_count()): self.set_execution_count(x.execution_count())
def Equals(self, x):
if x is self: return 1
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_retry_count_ != x.has_retry_count_: return 0
if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
if len(self.header_) != len(x.header_): return 0
for e1, e2 in zip(self.header_, x.header_):
if e1 != e2: return 0
if self.has_body_size_ != x.has_body_size_: return 0
if self.has_body_size_ and self.body_size_ != x.body_size_: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_creation_time_usec_ != x.has_creation_time_usec_: return 0
if self.has_creation_time_usec_ and self.creation_time_usec_ != x.creation_time_usec_: return 0
if self.has_crontimetable_ != x.has_crontimetable_: return 0
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_runlog_ != x.has_runlog_: return 0
if self.has_runlog_ and self.runlog_ != x.runlog_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
if self.has_payload_ != x.has_payload_: return 0
if self.has_payload_ and self.payload_ != x.payload_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_first_try_usec_ != x.has_first_try_usec_: return 0
if self.has_first_try_usec_ and self.first_try_usec_ != x.first_try_usec_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
if self.has_execution_count_ != x.has_execution_count_: return 0
if self.has_execution_count_ and self.execution_count_ != x.execution_count_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
for p in self.header_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_creation_time_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: creation_time_usec not set.')
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
if (self.has_runlog_ and not self.runlog_.IsInitialized(debug_strs)): initialized = 0
if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
n += 2 * len(self.header_)
for i in range(len(self.header_)): n += self.header_[i].ByteSize()
if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
n += self.lengthVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_runlog_): n += 4 + self.runlog_.ByteSize()
if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
n += 2 * len(self.header_)
for i in range(len(self.header_)): n += self.header_[i].ByteSizePartial()
if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_creation_time_usec_):
n += 1
n += self.lengthVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
if (self.has_runlog_): n += 4 + self.runlog_.ByteSizePartial()
if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
return n
def Clear(self):
self.clear_task_name()
self.clear_eta_usec()
self.clear_url()
self.clear_method()
self.clear_retry_count()
self.clear_header()
self.clear_body_size()
self.clear_body()
self.clear_creation_time_usec()
self.clear_crontimetable()
self.clear_runlog()
self.clear_description()
self.clear_payload()
self.clear_retry_parameters()
self.clear_first_try_usec()
self.clear_tag()
self.clear_execution_count()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
if (self.has_retry_count_):
out.putVarInt32(48)
out.putVarInt32(self.retry_count_)
for i in range(len(self.header_)):
out.putVarInt32(59)
self.header_[i].OutputUnchecked(out)
out.putVarInt32(60)
if (self.has_body_size_):
out.putVarInt32(80)
out.putVarInt32(self.body_size_)
if (self.has_body_):
out.putVarInt32(90)
out.putPrefixedString(self.body_)
out.putVarInt32(96)
out.putVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_):
out.putVarInt32(107)
self.crontimetable_.OutputUnchecked(out)
out.putVarInt32(108)
if (self.has_runlog_):
out.putVarInt32(131)
self.runlog_.OutputUnchecked(out)
out.putVarInt32(132)
if (self.has_description_):
out.putVarInt32(170)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(178)
out.putVarInt32(self.payload_.ByteSize())
self.payload_.OutputUnchecked(out)
if (self.has_retry_parameters_):
out.putVarInt32(186)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_first_try_usec_):
out.putVarInt32(192)
out.putVarInt64(self.first_try_usec_)
if (self.has_tag_):
out.putVarInt32(202)
out.putPrefixedString(self.tag_)
if (self.has_execution_count_):
out.putVarInt32(208)
out.putVarInt32(self.execution_count_)
def OutputPartial(self, out):
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
if (self.has_retry_count_):
out.putVarInt32(48)
out.putVarInt32(self.retry_count_)
for i in range(len(self.header_)):
out.putVarInt32(59)
self.header_[i].OutputPartial(out)
out.putVarInt32(60)
if (self.has_body_size_):
out.putVarInt32(80)
out.putVarInt32(self.body_size_)
if (self.has_body_):
out.putVarInt32(90)
out.putPrefixedString(self.body_)
if (self.has_creation_time_usec_):
out.putVarInt32(96)
out.putVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_):
out.putVarInt32(107)
self.crontimetable_.OutputPartial(out)
out.putVarInt32(108)
if (self.has_runlog_):
out.putVarInt32(131)
self.runlog_.OutputPartial(out)
out.putVarInt32(132)
if (self.has_description_):
out.putVarInt32(170)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(178)
out.putVarInt32(self.payload_.ByteSizePartial())
self.payload_.OutputPartial(out)
if (self.has_retry_parameters_):
out.putVarInt32(186)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_first_try_usec_):
out.putVarInt32(192)
out.putVarInt64(self.first_try_usec_)
if (self.has_tag_):
out.putVarInt32(202)
out.putPrefixedString(self.tag_)
if (self.has_execution_count_):
out.putVarInt32(208)
out.putVarInt32(self.execution_count_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 34:
self.set_url(d.getPrefixedString())
continue
if tt == 40:
self.set_method(d.getVarInt32())
continue
if tt == 48:
self.set_retry_count(d.getVarInt32())
continue
if tt == 59:
self.add_header().TryMerge(d)
continue
if tt == 80:
self.set_body_size(d.getVarInt32())
continue
if tt == 90:
self.set_body(d.getPrefixedString())
continue
if tt == 96:
self.set_creation_time_usec(d.getVarInt64())
continue
if tt == 107:
self.mutable_crontimetable().TryMerge(d)
continue
if tt == 131:
self.mutable_runlog().TryMerge(d)
continue
if tt == 170:
self.set_description(d.getPrefixedString())
continue
if tt == 178:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_payload().TryMerge(tmp)
continue
if tt == 186:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 192:
self.set_first_try_usec(d.getVarInt64())
continue
if tt == 202:
self.set_tag(d.getPrefixedString())
continue
if tt == 208:
self.set_execution_count(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
cnt=0
for e in self.header_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Header%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_body_size_: res+=prefix+("body_size: %s\n" % self.DebugFormatInt32(self.body_size_))
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_creation_time_usec_: res+=prefix+("creation_time_usec: %s\n" % self.DebugFormatInt64(self.creation_time_usec_))
if self.has_crontimetable_:
res+=prefix+"CronTimetable {\n"
res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_runlog_:
res+=prefix+"RunLog {\n"
res+=self.runlog_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
if self.has_payload_:
res+=prefix+"payload <\n"
res+=self.payload_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_first_try_usec_: res+=prefix+("first_try_usec: %s\n" % self.DebugFormatInt64(self.first_try_usec_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
if self.has_execution_count_: res+=prefix+("execution_count: %s\n" % self.DebugFormatInt32(self.execution_count_))
return res
class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.task_ = []
if contents is not None: self.MergeFromString(contents)
def task_size(self): return len(self.task_)
def task_list(self): return self.task_
def task(self, i):
return self.task_[i]
def mutable_task(self, i):
return self.task_[i]
def add_task(self):
x = TaskQueueQueryTasksResponse_Task()
self.task_.append(x)
return x
def clear_task(self):
self.task_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.task_size()): self.add_task().CopyFrom(x.task(i))
def Equals(self, x):
if x is self: return 1
if len(self.task_) != len(x.task_): return 0
for e1, e2 in zip(self.task_, x.task_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.task_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.task_)
for i in range(len(self.task_)): n += self.task_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.task_)
for i in range(len(self.task_)): n += self.task_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
for i in range(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in range(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_task().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.task_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Task%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kTaskGroup = 1
kTasktask_name = 2
kTasketa_usec = 3
kTaskurl = 4
kTaskmethod = 5
kTaskretry_count = 6
kTaskHeaderGroup = 7
kTaskHeaderkey = 8
kTaskHeadervalue = 9
kTaskbody_size = 10
kTaskbody = 11
kTaskcreation_time_usec = 12
kTaskCronTimetableGroup = 13
kTaskCronTimetableschedule = 14
kTaskCronTimetabletimezone = 15
kTaskRunLogGroup = 16
kTaskRunLogdispatched_usec = 17
kTaskRunLoglag_usec = 18
kTaskRunLogelapsed_usec = 19
kTaskRunLogresponse_code = 20
kTaskRunLogretry_reason = 27
kTaskdescription = 21
kTaskpayload = 22
kTaskretry_parameters = 23
kTaskfirst_try_usec = 24
kTasktag = 25
kTaskexecution_count = 26
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Task",
2: "task_name",
3: "eta_usec",
4: "url",
5: "method",
6: "retry_count",
7: "Header",
8: "key",
9: "value",
10: "body_size",
11: "body",
12: "creation_time_usec",
13: "CronTimetable",
14: "schedule",
15: "timezone",
16: "RunLog",
17: "dispatched_usec",
18: "lag_usec",
19: "elapsed_usec",
20: "response_code",
21: "description",
22: "payload",
23: "retry_parameters",
24: "first_try_usec",
25: "tag",
26: "execution_count",
27: "retry_reason",
}, 27)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STARTGROUP,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.NUMERIC,
13: ProtocolBuffer.Encoder.STARTGROUP,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.STARTGROUP,
17: ProtocolBuffer.Encoder.NUMERIC,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.NUMERIC,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.STRING,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.STRING,
24: ProtocolBuffer.Encoder.NUMERIC,
25: ProtocolBuffer.Encoder.STRING,
26: ProtocolBuffer.Encoder.NUMERIC,
27: ProtocolBuffer.Encoder.STRING,
}, 27, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksResponse'
class TaskQueueFetchTaskRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_task_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_task_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
ktask_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskRequest'
class TaskQueueFetchTaskResponse(ProtocolBuffer.ProtocolMessage):
has_task_ = 0
def __init__(self, contents=None):
self.task_ = TaskQueueQueryTasksResponse()
if contents is not None: self.MergeFromString(contents)
def task(self): return self.task_
def mutable_task(self): self.has_task_ = 1; return self.task_
def clear_task(self):self.has_task_ = 0; self.task_.Clear()
def has_task(self): return self.has_task_
def MergeFrom(self, x):
assert x is not self
if (x.has_task()): self.mutable_task().MergeFrom(x.task())
def Equals(self, x):
if x is self: return 1
if self.has_task_ != x.has_task_: return 0
if self.has_task_ and self.task_ != x.task_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task not set.')
elif not self.task_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(self.task_.ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_task_):
n += 1
n += self.lengthString(self.task_.ByteSizePartial())
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.task_.ByteSize())
self.task_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_task_):
out.putVarInt32(10)
out.putVarInt32(self.task_.ByteSizePartial())
self.task_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_task().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_:
res+=prefix+"task <\n"
res+=self.task_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
ktask = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "task",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskResponse'
class TaskQueueUpdateStorageLimitRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_limit_ = 0
limit_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_limit()): self.set_limit(x.limit())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_limit_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: limit not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthVarInt64(self.limit_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_limit_):
n += 1
n += self.lengthVarInt64(self.limit_)
return n
def Clear(self):
self.clear_app_id()
self.clear_limit()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(16)
out.putVarInt64(self.limit_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_limit_):
out.putVarInt32(16)
out.putVarInt64(self.limit_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 16:
self.set_limit(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt64(self.limit_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kapp_id = 1
klimit = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "limit",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitRequest'
class TaskQueueUpdateStorageLimitResponse(ProtocolBuffer.ProtocolMessage):
has_new_limit_ = 0
new_limit_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def new_limit(self): return self.new_limit_
def set_new_limit(self, x):
self.has_new_limit_ = 1
self.new_limit_ = x
def clear_new_limit(self):
if self.has_new_limit_:
self.has_new_limit_ = 0
self.new_limit_ = 0
def has_new_limit(self): return self.has_new_limit_
def MergeFrom(self, x):
assert x is not self
if (x.has_new_limit()): self.set_new_limit(x.new_limit())
def Equals(self, x):
if x is self: return 1
if self.has_new_limit_ != x.has_new_limit_: return 0
if self.has_new_limit_ and self.new_limit_ != x.new_limit_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_new_limit_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: new_limit not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.new_limit_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_new_limit_):
n += 1
n += self.lengthVarInt64(self.new_limit_)
return n
def Clear(self):
self.clear_new_limit()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.new_limit_)
def OutputPartial(self, out):
if (self.has_new_limit_):
out.putVarInt32(8)
out.putVarInt64(self.new_limit_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_new_limit(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_new_limit_: res+=prefix+("new_limit: %s\n" % self.DebugFormatInt64(self.new_limit_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
knew_limit = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "new_limit",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitResponse'
class TaskQueueQueryAndOwnTasksRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_lease_seconds_ = 0
lease_seconds_ = 0.0
has_max_tasks_ = 0
max_tasks_ = 0
has_group_by_tag_ = 0
group_by_tag_ = 0
has_tag_ = 0
tag_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def lease_seconds(self): return self.lease_seconds_
def set_lease_seconds(self, x):
self.has_lease_seconds_ = 1
self.lease_seconds_ = x
def clear_lease_seconds(self):
if self.has_lease_seconds_:
self.has_lease_seconds_ = 0
self.lease_seconds_ = 0.0
def has_lease_seconds(self): return self.has_lease_seconds_
def max_tasks(self): return self.max_tasks_
def set_max_tasks(self, x):
self.has_max_tasks_ = 1
self.max_tasks_ = x
def clear_max_tasks(self):
if self.has_max_tasks_:
self.has_max_tasks_ = 0
self.max_tasks_ = 0
def has_max_tasks(self): return self.has_max_tasks_
def group_by_tag(self): return self.group_by_tag_
def set_group_by_tag(self, x):
self.has_group_by_tag_ = 1
self.group_by_tag_ = x
def clear_group_by_tag(self):
if self.has_group_by_tag_:
self.has_group_by_tag_ = 0
self.group_by_tag_ = 0
def has_group_by_tag(self): return self.has_group_by_tag_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
if (x.has_max_tasks()): self.set_max_tasks(x.max_tasks())
if (x.has_group_by_tag()): self.set_group_by_tag(x.group_by_tag())
if (x.has_tag()): self.set_tag(x.tag())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
if self.has_max_tasks_ != x.has_max_tasks_: return 0
if self.has_max_tasks_ and self.max_tasks_ != x.max_tasks_: return 0
if self.has_group_by_tag_ != x.has_group_by_tag_: return 0
if self.has_group_by_tag_ and self.group_by_tag_ != x.group_by_tag_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_lease_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_seconds not set.')
if (not self.has_max_tasks_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_tasks not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthVarInt64(self.max_tasks_)
if (self.has_group_by_tag_): n += 2
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_lease_seconds_):
n += 9
if (self.has_max_tasks_):
n += 1
n += self.lengthVarInt64(self.max_tasks_)
if (self.has_group_by_tag_): n += 2
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_lease_seconds()
self.clear_max_tasks()
self.clear_group_by_tag()
self.clear_tag()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(17)
out.putDouble(self.lease_seconds_)
out.putVarInt32(24)
out.putVarInt64(self.max_tasks_)
if (self.has_group_by_tag_):
out.putVarInt32(32)
out.putBoolean(self.group_by_tag_)
if (self.has_tag_):
out.putVarInt32(42)
out.putPrefixedString(self.tag_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_lease_seconds_):
out.putVarInt32(17)
out.putDouble(self.lease_seconds_)
if (self.has_max_tasks_):
out.putVarInt32(24)
out.putVarInt64(self.max_tasks_)
if (self.has_group_by_tag_):
out.putVarInt32(32)
out.putBoolean(self.group_by_tag_)
if (self.has_tag_):
out.putVarInt32(42)
out.putPrefixedString(self.tag_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 17:
self.set_lease_seconds(d.getDouble())
continue
if tt == 24:
self.set_max_tasks(d.getVarInt64())
continue
if tt == 32:
self.set_group_by_tag(d.getBoolean())
continue
if tt == 42:
self.set_tag(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
if self.has_max_tasks_: res+=prefix+("max_tasks: %s\n" % self.DebugFormatInt64(self.max_tasks_))
if self.has_group_by_tag_: res+=prefix+("group_by_tag: %s\n" % self.DebugFormatBool(self.group_by_tag_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kqueue_name = 1
klease_seconds = 2
kmax_tasks = 3
kgroup_by_tag = 4
ktag = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "lease_seconds",
3: "max_tasks",
4: "group_by_tag",
5: "tag",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.DOUBLE,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksRequest'
class TaskQueueQueryAndOwnTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_retry_count_ = 0
retry_count_ = 0
has_body_ = 0
body_ = ""
has_tag_ = 0
tag_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def retry_count(self): return self.retry_count_
def set_retry_count(self, x):
self.has_retry_count_ = 1
self.retry_count_ = x
def clear_retry_count(self):
if self.has_retry_count_:
self.has_retry_count_ = 0
self.retry_count_ = 0
def has_retry_count(self): return self.has_retry_count_
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def MergeFrom(self, x):
assert x is not self
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_retry_count()): self.set_retry_count(x.retry_count())
if (x.has_body()): self.set_body(x.body())
if (x.has_tag()): self.set_tag(x.tag())
def Equals(self, x):
if x is self: return 1
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_retry_count_ != x.has_retry_count_: return 0
if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n
def Clear(self):
self.clear_task_name()
self.clear_eta_usec()
self.clear_retry_count()
self.clear_body()
self.clear_tag()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_retry_count_):
out.putVarInt32(32)
out.putVarInt32(self.retry_count_)
if (self.has_body_):
out.putVarInt32(42)
out.putPrefixedString(self.body_)
if (self.has_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.tag_)
def OutputPartial(self, out):
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_retry_count_):
out.putVarInt32(32)
out.putVarInt32(self.retry_count_)
if (self.has_body_):
out.putVarInt32(42)
out.putPrefixedString(self.body_)
if (self.has_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.tag_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 32:
self.set_retry_count(d.getVarInt32())
continue
if tt == 42:
self.set_body(d.getPrefixedString())
continue
if tt == 50:
self.set_tag(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
return res
class TaskQueueQueryAndOwnTasksResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.task_ = []
if contents is not None: self.MergeFromString(contents)
def task_size(self): return len(self.task_)
def task_list(self): return self.task_
def task(self, i):
return self.task_[i]
def mutable_task(self, i):
return self.task_[i]
def add_task(self):
x = TaskQueueQueryAndOwnTasksResponse_Task()
self.task_.append(x)
return x
def clear_task(self):
self.task_ = []
def MergeFrom(self, x):
assert x is not self
for i in range(x.task_size()): self.add_task().CopyFrom(x.task(i))
def Equals(self, x):
if x is self: return 1
if len(self.task_) != len(x.task_): return 0
for e1, e2 in zip(self.task_, x.task_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.task_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.task_)
for i in range(len(self.task_)): n += self.task_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.task_)
for i in range(len(self.task_)): n += self.task_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
for i in range(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in range(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_task().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.task_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Task%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kTaskGroup = 1
kTasktask_name = 2
kTasketa_usec = 3
kTaskretry_count = 4
kTaskbody = 5
kTasktag = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Task",
2: "task_name",
3: "eta_usec",
4: "retry_count",
5: "body",
6: "tag",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksResponse'
class TaskQueueModifyTaskLeaseRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_lease_seconds_ = 0
lease_seconds_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def lease_seconds(self): return self.lease_seconds_
def set_lease_seconds(self, x):
self.has_lease_seconds_ = 1
self.lease_seconds_ = x
def clear_lease_seconds(self):
if self.has_lease_seconds_:
self.has_lease_seconds_ = 0
self.lease_seconds_ = 0.0
def has_lease_seconds(self): return self.has_lease_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
if (not self.has_lease_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_seconds not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
return n + 12
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
n += 9
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_eta_usec()
self.clear_lease_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 33:
self.set_lease_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
keta_usec = 3
klease_seconds = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "eta_usec",
4: "lease_seconds",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.DOUBLE,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseRequest'
class TaskQueueModifyTaskLeaseResponse(ProtocolBuffer.ProtocolMessage):
has_updated_eta_usec_ = 0
updated_eta_usec_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def updated_eta_usec(self): return self.updated_eta_usec_
def set_updated_eta_usec(self, x):
self.has_updated_eta_usec_ = 1
self.updated_eta_usec_ = x
def clear_updated_eta_usec(self):
if self.has_updated_eta_usec_:
self.has_updated_eta_usec_ = 0
self.updated_eta_usec_ = 0
def has_updated_eta_usec(self): return self.has_updated_eta_usec_
def MergeFrom(self, x):
assert x is not self
if (x.has_updated_eta_usec()): self.set_updated_eta_usec(x.updated_eta_usec())
def Equals(self, x):
if x is self: return 1
if self.has_updated_eta_usec_ != x.has_updated_eta_usec_: return 0
if self.has_updated_eta_usec_ and self.updated_eta_usec_ != x.updated_eta_usec_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_updated_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: updated_eta_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.updated_eta_usec_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_updated_eta_usec_):
n += 1
n += self.lengthVarInt64(self.updated_eta_usec_)
return n
def Clear(self):
self.clear_updated_eta_usec()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.updated_eta_usec_)
def OutputPartial(self, out):
if (self.has_updated_eta_usec_):
out.putVarInt32(8)
out.putVarInt64(self.updated_eta_usec_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_updated_eta_usec(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError()
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_updated_eta_usec_: res+=prefix+("updated_eta_usec: %s\n" % self.DebugFormatInt64(self.updated_eta_usec_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in range(0, 1+maxtag)])
kupdated_eta_usec = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "updated_eta_usec",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseResponse'
if _extension_runtime:
pass
__all__ = ['TaskQueueServiceError','TaskQueueRetryParameters','TaskQueueAcl','TaskQueueHttpHeader','TaskQueueMode','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueForceRunRequest','TaskQueueForceRunResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePauseQueueRequest','TaskQueuePauseQueueResponse','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueDeleteGroupRequest','TaskQueueDeleteGroupResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task','TaskQueueFetchTaskRequest','TaskQueueFetchTaskResponse','TaskQueueUpdateStorageLimitRequest','TaskQueueUpdateStorageLimitResponse','TaskQueueQueryAndOwnTasksRequest','TaskQueueQueryAndOwnTasksResponse','TaskQueueQueryAndOwnTasksResponse_Task','TaskQueueModifyTaskLeaseRequest','TaskQueueModifyTaskLeaseResponse']
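The request/response classes above are generated accessors over the taskqueue wire format; nothing in this excerpt shows the RPC transport or the serialization plumbing of the ProtocolBuffer.ProtocolMessage base class. As a hedged illustration only, the sketch below exercises the setters, required-field checks and size accounting defined in the classes above; the queue and task names are made-up values, not part of the original module.
# Illustrative sketch: uses only methods defined in the generated classes above.
# Queue/task names here are hypothetical examples, not values from the source.
def build_fetch_task_request(queue_name, task_name, app_id=None):
    req = TaskQueueFetchTaskRequest()
    if app_id is not None:
        req.set_app_id(app_id)           # optional field (tag 1)
    req.set_queue_name(queue_name)       # required field (tag 2)
    req.set_task_name(task_name)         # required field (tag 3)
    errors = []
    if not req.IsInitialized(errors):    # reports any missing required fields
        raise ValueError("incomplete request: %s" % errors)
    return req
def describe_lease_update(response):
    # TaskQueueModifyTaskLeaseResponse carries a single required field.
    if response.has_updated_eta_usec():
        return "lease now expires at eta_usec=%d" % response.updated_eta_usec()
    return "response carried no updated eta"
req = build_fetch_task_request("default", "task-0001")
print(req)             # __str__ renders the populated fields, one per line
print(req.ByteSize())  # encoded size derived from the field lengths above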
avg_line_length: 29.936153 | max_line_length: 1501 | alphanum_fraction: 0.687096

hexsha: e7b873a2aa86a6288e062e6f88d37b12e199cf47 | size: 114 | ext: py | lang: Python
max_stars_repo_path: 1002.py | max_stars_repo_name: victorcaram/URI | max_stars_repo_head_hexsha: df11f17033744a5c42eba718bdbe225b0f03a3af | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | stars_event_min_datetime: null | stars_event_max_datetime: null
max_issues_repo_path: 1002.py | max_issues_repo_name: victorcaram/URI | max_issues_repo_head_hexsha: df11f17033744a5c42eba718bdbe225b0f03a3af | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | issues_event_min_datetime: null | issues_event_max_datetime: null
max_forks_repo_path: 1002.py | max_forks_repo_name: victorcaram/URI | max_forks_repo_head_hexsha: df11f17033744a5c42eba718bdbe225b0f03a3af | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | forks_event_min_datetime: null | forks_event_max_datetime: null
content:
# URI Online Judge 1002: read a radius and print the circle area with four decimals.
pi = 3.14159
raio = float(input())  # input() returns a string, so convert to float before multiplying
area = raio * raio * pi
print("A=" + "{0:.4f}".format(round(area, 4)))
avg_line_length: 12.666667 | max_line_length: 45 | alphanum_fraction: 0.570175

hexsha: f5907776a9b2af8d524ce25b51d9360dcd92664a | size: 5047 | ext: py | lang: Python
max_stars_repo_path: python/GafferUITest/TextWidgetTest.py | max_stars_repo_name: goddardl/gaffer | max_stars_repo_head_hexsha: be7fca3a70d3cafab3c1ae800b4548cd248f80a1 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | stars_event_min_datetime: null | stars_event_max_datetime: null
max_issues_repo_path: python/GafferUITest/TextWidgetTest.py | max_issues_repo_name: goddardl/gaffer | max_issues_repo_head_hexsha: be7fca3a70d3cafab3c1ae800b4548cd248f80a1 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | issues_event_min_datetime: null | issues_event_max_datetime: null
max_forks_repo_path: python/GafferUITest/TextWidgetTest.py | max_forks_repo_name: goddardl/gaffer | max_forks_repo_head_hexsha: be7fca3a70d3cafab3c1ae800b4548cd248f80a1 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | forks_event_min_datetime: null | forks_event_max_datetime: null
content:
##########################################################################
#
# Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import weakref
import gc
import GafferUI
import GafferTest
import GafferUITest
class TextWidgetTest( GafferUITest.TestCase ) :
def testLifespan( self ) :
w = GafferUI.TextWidget()
r = weakref.ref( w )
self.failUnless( r() is w )
del w
self.failUnless( r() is None )
def testTextChangedSignal( self ) :
self.emissions = 0
def f( w ) :
self.emissions += 1
w = GafferUI.TextWidget()
c = w.textChangedSignal().connect( f )
w.setText( "hello" )
self.assertEqual( w.getText(), "hello" )
self.assertEqual( self.emissions, 1 )
def testDisplayMode( self ) :
w = GafferUI.TextWidget()
self.assertEqual( w.getDisplayMode(), w.DisplayMode.Normal )
w = GafferUI.TextWidget( displayMode = GafferUI.TextWidget.DisplayMode.Password )
self.assertEqual( w.getDisplayMode(), w.DisplayMode.Password )
w.setDisplayMode( GafferUI.TextWidget.DisplayMode.Normal )
self.assertEqual( w.getDisplayMode(), w.DisplayMode.Normal )
def testSelection( self ) :
w = GafferUI.TextWidget()
self.assertEqual( w.getSelection(), ( 0, 0 ) )
w.setText( "hello" )
w.setSelection( 1, 4 )
self.assertEqual( w.getText()[slice( *w.getSelection() )], "hello"[1:4] )
w.setSelection( 0, -2 )
self.assertEqual( w.getText()[slice( *w.getSelection() )], "hello"[0:-2] )
w.setSelection( 0, None )
self.assertEqual( w.getText()[slice( *w.getSelection() )], "hello"[0:] )
w.setSelection( None, -2 )
self.assertEqual( w.getText()[slice( *w.getSelection() )], "hello"[:-2] )
w.setSelection( 0, 0 )
self.assertEqual( w.getText()[slice( *w.getSelection() )], "" )
self.assertEqual( w.getSelection(), ( 0, 0 ) )
c = GafferTest.CapturingSlot( w.selectionChangedSignal() )
w.setSelection( 0, 2 )
self.assertEqual( len( c ), 1 )
self.failUnless( c[0][0] is w )
def testCharacterWidth( self ) :
w = GafferUI.TextWidget()
self.assertEqual( w.getPreferredCharacterWidth(), 20 )
self.assertEqual( w.getFixedCharacterWidth(), None )
w.setFixedCharacterWidth( 4 )
self.assertEqual( w.getPreferredCharacterWidth(), 20 )
self.assertEqual( w.getFixedCharacterWidth(), 4 )
w.setPreferredCharacterWidth( 10 )
self.assertEqual( w.getPreferredCharacterWidth(), 10 )
self.assertEqual( w.getFixedCharacterWidth(), 4 )
w.setFixedCharacterWidth( None )
self.assertEqual( w.getPreferredCharacterWidth(), 10 )
self.assertEqual( w.getFixedCharacterWidth(), None )
def testErrored( self ) :
w = GafferUI.TextWidget()
self.assertEqual( w.getErrored(), False )
w.setErrored( True )
self.assertEqual( w.getErrored(), True )
w.setErrored( False )
self.assertEqual( w.getErrored(), False )
def testFixedCharacterWidth( self ) :
window = GafferUI.Window()
textWidget = GafferUI.TextWidget()
window.addChild( textWidget )
window.setVisible( True )
# initial value
textWidget.setFixedCharacterWidth( 5 )
oldWidth = textWidget.size().x
# changing the initial value
textWidget.setFixedCharacterWidth( 2 )
self.waitForIdle()
newWidth = textWidget.size().x
# checking if the geometry has been updated for the new character width
self.assertEqual( newWidth == oldWidth, False )
if __name__ == "__main__":
unittest.main()
| 30.77439
| 83
| 0.690311
|
abb249d7a620e5ed58487043500dd3f011740956
| 41,651
|
py
|
Python
|
flintrock/flintrock.py
|
tvision-insights/flintrock
|
1ac1dba5019a40f4e79c24ac0992cdd6130aceae
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
flintrock/flintrock.py
|
tvision-insights/flintrock
|
1ac1dba5019a40f4e79c24ac0992cdd6130aceae
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
flintrock/flintrock.py
|
tvision-insights/flintrock
|
1ac1dba5019a40f4e79c24ac0992cdd6130aceae
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import os
import posixpath
import errno
import json
import resource
import sys
import shutil
import textwrap
import urllib.parse
import urllib.request
import warnings
import logging
# External modules
import click
import yaml
# We import botocore here so we can catch when the user tries to
# access AWS without having their credentials configured and provide
# a friendly error message. Apart from that, flintrock.py should
# not really know anything about EC2 or boto since that is delegated
# to ec2.py.
import botocore
# Flintrock modules
from . import ec2
from .exceptions import (
UsageError,
UnsupportedProviderError,
NothingToDo,
Error)
from flintrock import __version__
from .services import HDFS, Spark # TODO: Remove this dependency.
FROZEN = getattr(sys, 'frozen', False)
if FROZEN:
THIS_DIR = sys._MEIPASS
else:
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('flintrock.flintrock')
def format_message(*, message: str, indent: int=4, wrap: int=70):
"""
Format a lengthy message for printing to screen.
"""
return textwrap.indent(
textwrap.fill(
textwrap.dedent(text=message),
width=wrap),
prefix=' ' * indent)
def option_name_to_variable_name(option: str):
"""
Convert an option name like `--ec2-user` to the Python name it gets mapped to,
like `ec2_user`.
"""
return option.replace('--', '', 1).replace('-', '_')
def variable_name_to_option_name(variable: str):
"""
Convert a variable name like `ec2_user` to the Click option name it gets mapped to,
like `--ec2-user`.
"""
return '--' + variable.replace('_', '-')
def option_requires(
*,
option: str,
conditional_value=None,
requires_all: list=[],
requires_any: list=[],
scope: dict):
"""
Raise an exception if an option's requirements are not met.
The option's requirements are checked only if the option has a "truthy" value
(i.e. it's not a "falsy" value like '', None, or False), and if its value is
equal to conditional_value, if conditional_value is not None.
requires_all: Every option in this list must be defined.
requires_any: At least one option in this list must be defined.
This function looks for values by converting the option names to their
corresponding variable names (e.g. --option-a becomes option_a) and looking them
up in the provided scope.
"""
option_value = scope[option_name_to_variable_name(option)]
if option_value and \
(conditional_value is None or option_value == conditional_value):
if requires_all:
for required_option in requires_all:
required_name = option_name_to_variable_name(required_option)
if required_name not in scope or not scope[required_name]:
raise UsageError(
"Error: Missing option \"{missing_option}\" is required by "
"\"{option}{space}{conditional_value}\"."
.format(
missing_option=required_option,
option=option,
space=' ' if conditional_value is not None else '',
conditional_value=conditional_value if conditional_value is not None else ''))
if requires_any:
for required_option in requires_any:
required_name = option_name_to_variable_name(required_option)
if required_name in scope and scope[required_name] is not None:
break
else:
raise UsageError(
"Error: \"{option}{space}{conditional_value}\" requires at least "
"one of the following options to be set: {at_least}"
.format(
option=option,
space=' ' if conditional_value is not None else '',
conditional_value=conditional_value if conditional_value is not None else '',
at_least=', '.join(['"' + ra + '"' for ra in requires_any])))
def mutually_exclusive(*, options: list, scope: dict):
"""
Raise an exception if more than one of the provided options is specified.
This function looks for values by converting the option names to their
corresponding variable names (e.g. --option-a becomes option_a) and looking them
up in the provided scope.
"""
mutually_exclusive_names = [option_name_to_variable_name(o) for o in options]
used_options = set()
for name, value in scope.items():
if name in mutually_exclusive_names and scope[name]: # is not None:
used_options.add(name)
if len(used_options) > 1:
bad_option1 = used_options.pop()
bad_option2 = used_options.pop()
raise UsageError(
"Error: \"{option1}\" and \"{option2}\" are mutually exclusive.\n"
" {option1}: {value1}\n"
" {option2}: {value2}"
.format(
option1=variable_name_to_option_name(bad_option1),
value1=scope[bad_option1],
option2=variable_name_to_option_name(bad_option2),
value2=scope[bad_option2]))
def get_config_file() -> str:
"""
Get the path to Flintrock's default configuration file.
"""
config_dir = click.get_app_dir(app_name='Flintrock')
config_file = os.path.join(config_dir, 'config.yaml')
return config_file
def configure_log(debug: bool):
root_logger = logging.getLogger('flintrock')
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
if debug:
root_logger.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter('%(asctime)s - flintrock.%(module)-9s - %(levelname)-5s - %(message)s'))
else:
root_logger.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('%(message)s'))
root_logger.addHandler(handler)
def build_hdfs_download_url(ctx, param, value):
hdfs_download_url = value.format(v=ctx.params['hdfs_version'])
return hdfs_download_url
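# Illustrative expansion: with the default --hdfs-version of 2.8.5, the default
# --hdfs-download-source template resolves to
# https://www.apache.org/dyn/closer.lua?action=download&filename=hadoop/common/hadoop-2.8.5/hadoop-2.8.5.tar.gz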
def build_spark_download_url(ctx, param, value):
spark_download_url = value.format(v=ctx.params['spark_version'])
return spark_download_url
def validate_download_source(url):
if 'spark' in url:
software = 'Spark'
elif 'hadoop' in url:
software = 'Hadoop'
else:
software = 'software'
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc == 'www.apache.org' and parsed_url.path == '/dyn/closer.lua':
logger.warning(
"Warning: "
"Downloading {software} from an Apache mirror. Apache mirrors are "
"often slow and unreliable, and typically only serve the most recent releases. "
"We strongly recommend you specify a custom download source. "
"For more background on this issue, please see: https://github.com/nchammas/flintrock/issues/238"
.format(
software=software,
)
)
try:
urllib.request.urlopen(url)
except urllib.error.HTTPError as e:
raise Error(
"Error: Could not access {software} download. Maybe try a more recent release?\n"
" - Automatically redirected to: {url}\n"
" - HTTP error: {code}"
.format(
software=software,
url=e.url,
code=e.code,
)
)
@click.group()
@click.option(
'--config',
help="Path to a Flintrock configuration file.",
default=get_config_file())
@click.option('--provider', default='ec2', type=click.Choice(['ec2']))
@click.version_option(version=__version__)
# TODO: implement some solution like in https://github.com/pallets/click/issues/108
@click.option('--debug/--no-debug', default=False, help="Show debug information.")
@click.pass_context
def cli(cli_context, config, provider, debug):
"""
Flintrock
A command-line tool for launching Apache Spark clusters.
"""
cli_context.obj['provider'] = provider
if os.path.isfile(config):
with open(config) as f:
config_raw = yaml.safe_load(f)
debug = config_raw.get('debug') or debug
config_map = config_to_click(normalize_keys(config_raw))
cli_context.default_map = config_map
else:
if config != get_config_file():
raise FileNotFoundError(errno.ENOENT, 'No such file', config)
configure_log(debug=debug)
@cli.command()
@click.argument('cluster-name')
@click.option('--num-slaves', type=click.IntRange(min=1), required=True)
@click.option('--install-hdfs/--no-install-hdfs', default=False)
@click.option('--hdfs-version', default='2.8.5')
@click.option('--hdfs-download-source',
help="URL to download Hadoop from.",
default='https://www.apache.org/dyn/closer.lua?action=download&filename=hadoop/common/hadoop-{v}/hadoop-{v}.tar.gz',
show_default=True,
callback=build_hdfs_download_url)
@click.option('--install-spark/--no-install-spark', default=True)
@click.option('--spark-executor-instances', default=1,
help="How many executor instances per worker.")
@click.option('--spark-version',
# Don't set a default here because it will conflict with
# the config file if the git commit is set.
# See: https://github.com/nchammas/flintrock/issues/190
# default=,
help="Spark release version to install.")
@click.option('--spark-download-source',
help="URL to download a release of Spark from.",
default='https://www.apache.org/dyn/closer.lua?action=download&filename=spark/spark-{v}/spark-{v}-bin-hadoop2.7.tgz',
show_default=True,
callback=build_spark_download_url)
@click.option('--spark-git-commit',
help="Git commit to build Spark from. "
"Set to 'latest' to build Spark from the latest commit on the "
"repository's default branch.")
@click.option('--spark-git-repository',
help="Git repository to clone Spark from.",
default='https://github.com/apache/spark',
show_default=True)
@click.option('--assume-yes/--no-assume-yes', default=False)
@click.option('--ec2-key-name')
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-instance-type', default='m3.medium', show_default=True)
@click.option('--ec2-region', default='us-east-1', show_default=True)
# We set some of these defaults to empty strings because of boto3's parameter validation.
# See: https://github.com/boto/boto3/issues/400
@click.option('--ec2-availability-zone', default='')
@click.option('--ec2-ami')
@click.option('--ec2-user')
@click.option('--ec2-security-group', 'ec2_security_groups',
multiple=True,
help="Additional security groups names to assign to the instances. "
"You can specify this option multiple times.")
@click.option('--ec2-spot-price', type=float)
@click.option('--ec2-min-root-ebs-size-gb', type=int, default=30)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--ec2-subnet-id', default='')
@click.option('--ec2-instance-profile-name', default='')
@click.option('--ec2-placement-group', default='')
@click.option('--ec2-tenancy', default='default')
@click.option('--ec2-ebs-optimized/--no-ec2-ebs-optimized', default=False)
@click.option('--ec2-instance-initiated-shutdown-behavior', default='stop',
type=click.Choice(['stop', 'terminate']))
@click.option('--ec2-user-data',
type=click.File(mode='r', encoding='utf-8'),
help="Path to EC2 user data script that will run on instance launch.")
@click.option('--ec2-tag', 'ec2_tags',
callback=ec2.cli_validate_tags,
multiple=True,
help="Additional tags (e.g. 'Key,Value') to assign to the instances. "
"You can specify this option multiple times.")
@click.pass_context
def launch(
cli_context,
cluster_name,
num_slaves,
install_hdfs,
hdfs_version,
hdfs_download_source,
install_spark,
spark_executor_instances,
spark_version,
spark_git_commit,
spark_git_repository,
spark_download_source,
assume_yes,
ec2_key_name,
ec2_identity_file,
ec2_instance_type,
ec2_region,
ec2_availability_zone,
ec2_ami,
ec2_user,
ec2_security_groups,
ec2_spot_price,
ec2_min_root_ebs_size_gb,
ec2_vpc_id,
ec2_subnet_id,
ec2_instance_profile_name,
ec2_placement_group,
ec2_tenancy,
ec2_ebs_optimized,
ec2_instance_initiated_shutdown_behavior,
ec2_user_data,
ec2_tags):
"""
Launch a new cluster.
"""
provider = cli_context.obj['provider']
services = []
option_requires(
option='--install-hdfs',
requires_all=['--hdfs-version'],
scope=locals())
option_requires(
option='--install-spark',
requires_any=[
'--spark-version',
'--spark-git-commit'],
scope=locals())
mutually_exclusive(
options=[
'--spark-version',
'--spark-git-commit'],
scope=locals())
option_requires(
option='--install-spark',
requires_all=[
'--hdfs-version'],
scope=locals())
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-key-name',
'--ec2-identity-file',
'--ec2-instance-type',
'--ec2-region',
'--ec2-ami',
'--ec2-user'],
scope=locals())
# The subnet is required for non-default VPCs because EC2 does not
# support user-defined default subnets.
# See: https://forums.aws.amazon.com/thread.jspa?messageID=707417
# https://github.com/mitchellh/packer/issues/1935#issuecomment-111235752
option_requires(
option='--ec2-vpc-id',
requires_all=['--ec2-subnet-id'],
scope=locals())
check_external_dependency('ssh-keygen')
if install_hdfs:
validate_download_source(hdfs_download_source)
hdfs = HDFS(
version=hdfs_version,
download_source=hdfs_download_source,
)
services += [hdfs]
if install_spark:
if spark_version:
validate_download_source(spark_download_source)
spark = Spark(
spark_executor_instances=spark_executor_instances,
version=spark_version,
hadoop_version=hdfs_version,
download_source=spark_download_source,
)
elif spark_git_commit:
logger.warning(
"Warning: Building Spark takes a long time. "
"e.g. 15-20 minutes on an m3.xlarge instance on EC2.")
if spark_git_commit == 'latest':
spark_git_commit = get_latest_commit(spark_git_repository)
logger.info("Building Spark at latest commit: {c}".format(c=spark_git_commit))
spark = Spark(
spark_executor_instances=spark_executor_instances,
git_commit=spark_git_commit,
git_repository=spark_git_repository,
hadoop_version=hdfs_version,
)
services += [spark]
if provider == 'ec2':
cluster = ec2.launch(
cluster_name=cluster_name,
num_slaves=num_slaves,
services=services,
assume_yes=assume_yes,
key_name=ec2_key_name,
identity_file=ec2_identity_file,
instance_type=ec2_instance_type,
region=ec2_region,
availability_zone=ec2_availability_zone,
ami=ec2_ami,
user=ec2_user,
security_groups=ec2_security_groups,
spot_price=ec2_spot_price,
min_root_ebs_size_gb=ec2_min_root_ebs_size_gb,
vpc_id=ec2_vpc_id,
subnet_id=ec2_subnet_id,
instance_profile_name=ec2_instance_profile_name,
placement_group=ec2_placement_group,
tenancy=ec2_tenancy,
ebs_optimized=ec2_ebs_optimized,
instance_initiated_shutdown_behavior=ec2_instance_initiated_shutdown_behavior,
user_data=ec2_user_data,
tags=ec2_tags)
else:
raise UnsupportedProviderError(provider)
print("Cluster master: {}".format(cluster.master_host))
print("Login with: flintrock login {}".format(cluster.name))
def get_latest_commit(github_repository: str):
"""
Get the latest commit on the default branch of a repository hosted on GitHub.
"""
parsed_url = urllib.parse.urlparse(github_repository)
repo_domain, repo_path = parsed_url.netloc, parsed_url.path.strip('/')
if repo_domain != 'github.com':
raise UsageError(
"Error: Getting the latest commit is only supported "
"for repositories hosted on GitHub. "
"Provided repository domain was: {d}".format(d=repo_domain))
url = "https://api.github.com/repos/{rp}/commits".format(rp=repo_path)
try:
with urllib.request.urlopen(url) as response:
result = json.loads(response.read().decode('utf-8'))
return result[0]['sha']
except Exception as e:
raise Exception(
"Could not get latest commit for repository: {r}"
.format(r=repo_path)) from e
@cli.command()
@click.argument('cluster-name')
@click.option('--assume-yes/--no-assume-yes', default=False)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.pass_context
def destroy(cli_context, cluster_name, assume_yes, ec2_region, ec2_vpc_id):
"""
Destroy a cluster.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=['--ec2-region'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
else:
raise UnsupportedProviderError(provider)
if not assume_yes:
cluster.print()
click.confirm(
text="Are you sure you want to destroy this cluster?",
abort=True)
logger.info("Destroying {c}...".format(c=cluster.name))
cluster.destroy()
@cli.command()
@click.argument('cluster-name', required=False)
@click.option('--master-hostname-only', is_flag=True, default=False)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.pass_context
def describe(
cli_context,
cluster_name,
master_hostname_only,
ec2_region,
ec2_vpc_id):
"""
Describe an existing cluster.
Leave out the cluster name to find all Flintrock-managed clusters.
The output of this command is both human- and machine-friendly. Full cluster
descriptions are output in YAML.
"""
provider = cli_context.obj['provider']
search_area = ""
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=['--ec2-region'],
scope=locals())
if cluster_name:
cluster_names = [cluster_name]
else:
cluster_names = []
if provider == 'ec2':
search_area = "in region {r}".format(r=ec2_region)
clusters = ec2.get_clusters(
cluster_names=cluster_names,
region=ec2_region,
vpc_id=ec2_vpc_id)
else:
raise UnsupportedProviderError(provider)
if cluster_name:
cluster = clusters[0]
if master_hostname_only:
logger.info(cluster.master_host)
else:
cluster.print()
else:
if master_hostname_only:
for cluster in sorted(clusters, key=lambda x: x.name):
logger.info("{}: {}".format(cluster.name, cluster.master_host))
else:
logger.info("Found {n} cluster{s}{space}{search_area}.".format(
n=len(clusters),
s='' if len(clusters) == 1 else 's',
space=' ' if search_area else '',
search_area=search_area))
if clusters:
logger.info('---')
for cluster in sorted(clusters, key=lambda x: x.name):
cluster.print()
# TODO: Provide different command or option for going straight to Spark Shell. (?)
@cli.command()
@click.argument('cluster-name')
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
# TODO: Move identity-file to global, non-provider-specific option. (?)
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-user')
@click.pass_context
def login(cli_context, cluster_name, ec2_region, ec2_vpc_id, ec2_identity_file, ec2_user):
"""
Login to the master of an existing cluster.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-identity-file',
'--ec2-user'],
scope=locals())
check_external_dependency('ssh')
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
else:
raise UnsupportedProviderError(provider)
# TODO: Check that master up first and error out cleanly if not
# via ClusterInvalidState.
cluster.login(user=user, identity_file=identity_file)
@cli.command()
@click.argument('cluster-name')
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
# TODO: Move identity-file to global, non-provider-specific option. (?)
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-user')
@click.pass_context
def start(cli_context, cluster_name, ec2_region, ec2_vpc_id, ec2_identity_file, ec2_user):
"""
Start an existing, stopped cluster.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-identity-file',
'--ec2-user'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
else:
raise UnsupportedProviderError(provider)
cluster.start_check()
logger.info("Starting {c}...".format(c=cluster_name))
cluster.start(user=user, identity_file=identity_file)
@cli.command()
@click.argument('cluster-name')
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--assume-yes/--no-assume-yes', default=False)
@click.pass_context
def stop(cli_context, cluster_name, ec2_region, ec2_vpc_id, assume_yes):
"""
Stop an existing, running cluster.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=['--ec2-region'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
else:
raise UnsupportedProviderError(provider)
cluster.stop_check()
if not assume_yes:
cluster.print()
click.confirm(
text="Are you sure you want to stop this cluster?",
abort=True)
logger.info("Stopping {c}...".format(c=cluster_name))
cluster.stop()
logger.info("{c} is now stopped.".format(c=cluster_name))
@cli.command(name='add-slaves')
@click.argument('cluster-name')
@click.option('--num-slaves', type=click.IntRange(min=1), required=True)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-user')
@click.option('--ec2-spot-price', type=float)
@click.option('--ec2-min-root-ebs-size-gb', type=int, default=30)
@click.option('--assume-yes/--no-assume-yes', default=False)
@click.option('--ec2-tag', 'ec2_tags',
callback=ec2.cli_validate_tags,
multiple=True,
help="Additional tags (e.g. 'Key,Value') to assign to the instances. "
"You can specify this option multiple times.")
@click.pass_context
def add_slaves(
cli_context,
cluster_name,
num_slaves,
ec2_region,
ec2_vpc_id,
ec2_identity_file,
ec2_user,
ec2_spot_price,
ec2_min_root_ebs_size_gb,
ec2_tags,
assume_yes):
"""
Add slaves to an existing cluster.
Flintrock will configure new slaves based on information queried
automatically from the master.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-identity-file',
'--ec2-user'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
provider_options = {
'min_root_ebs_size_gb': ec2_min_root_ebs_size_gb,
'spot_price': ec2_spot_price,
'tags': ec2_tags
}
else:
raise UnsupportedProviderError(provider)
if cluster.num_masters == 0:
raise Error(
"Cannot add slaves to cluster '{c}' since it does not "
"appear to have a master."
.format(
c=cluster_name))
cluster.load_manifest(
user=user,
identity_file=identity_file)
cluster.add_slaves_check()
if provider == 'ec2':
cluster.add_slaves(
user=user,
identity_file=identity_file,
num_slaves=num_slaves,
assume_yes=assume_yes,
**provider_options)
@cli.command(name='remove-slaves')
@click.argument('cluster-name')
@click.option('--num-slaves', type=click.IntRange(min=1), required=True)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--ec2-user')
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--assume-yes/--no-assume-yes', default=False)
@click.pass_context
def remove_slaves(
cli_context,
cluster_name,
num_slaves,
ec2_region,
ec2_vpc_id,
ec2_user,
ec2_identity_file,
assume_yes):
"""
Remove slaves from an existing cluster.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-user',
'--ec2-identity-file'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
else:
raise UnsupportedProviderError(provider)
if num_slaves > cluster.num_slaves:
logger.warning(
"Warning: Cluster has {c} slave{cs}. "
"You asked to remove {n} slave{ns}."
.format(
c=cluster.num_slaves,
cs='' if cluster.num_slaves == 1 else 's',
n=num_slaves,
ns='' if num_slaves == 1 else 's'))
num_slaves = cluster.num_slaves
if not assume_yes:
cluster.print()
click.confirm(
text=("Are you sure you want to remove {n} slave{s} from this cluster?"
.format(
n=num_slaves,
s='' if num_slaves == 1 else 's')),
abort=True)
logger.info("Removing {n} slave{s}..."
.format(
n=num_slaves,
s='' if num_slaves == 1 else 's'))
cluster.remove_slaves(
user=user,
identity_file=identity_file,
num_slaves=num_slaves)
@cli.command(name='run-command')
@click.argument('cluster-name')
@click.argument('command', nargs=-1)
@click.option('--master-only', help="Run on the master only.", is_flag=True)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-user')
@click.pass_context
def run_command(
cli_context,
cluster_name,
command,
master_only,
ec2_region,
ec2_vpc_id,
ec2_identity_file,
ec2_user):
"""
Run a shell command on a cluster.
Examples:
flintrock run-command my-cluster 'touch /tmp/flintrock'
flintrock run-command my-cluster -- yum install -y package
Flintrock will return a non-zero code if any of the cluster nodes raises an error
while running the command.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-identity-file',
'--ec2-user'],
scope=locals())
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
else:
raise UnsupportedProviderError(provider)
cluster.run_command_check()
logger.info("Running command on {target}...".format(
target="master only" if master_only else "cluster"))
cluster.run_command(
command=command,
master_only=master_only,
user=user,
identity_file=identity_file)
@cli.command(name='copy-file')
@click.argument('cluster-name')
@click.argument('local_path', type=click.Path(exists=True, dir_okay=False))
@click.argument('remote_path', type=click.Path())
@click.option('--master-only', help="Copy to the master only.", is_flag=True)
@click.option('--ec2-region', default='us-east-1', show_default=True)
@click.option('--ec2-vpc-id', default='', help="Leave empty for default VPC.")
@click.option('--ec2-identity-file',
type=click.Path(exists=True, dir_okay=False),
help="Path to SSH .pem file for accessing nodes.")
@click.option('--ec2-user')
@click.option('--assume-yes/--no-assume-yes', default=False, help="Prompt before large uploads.")
@click.pass_context
def copy_file(
cli_context,
cluster_name,
local_path,
remote_path,
master_only,
ec2_region,
ec2_vpc_id,
ec2_identity_file,
ec2_user,
assume_yes):
"""
Copy a local file up to a cluster.
This will copy the file to the same path on each node of the cluster.
Examples:
flintrock copy-file my-cluster /tmp/file.102.txt /tmp/file.txt
flintrock copy-file my-cluster /tmp/spark-defaults.conf /tmp/
Flintrock will return a non-zero code if any of the cluster nodes raises an error.
"""
provider = cli_context.obj['provider']
option_requires(
option='--provider',
conditional_value='ec2',
requires_all=[
'--ec2-region',
'--ec2-identity-file',
'--ec2-user'],
scope=locals())
# We assume POSIX for the remote path since Flintrock
# only supports clusters running CentOS / Amazon Linux.
if not posixpath.basename(remote_path):
remote_path = posixpath.join(remote_path, os.path.basename(local_path))
if provider == 'ec2':
cluster = ec2.get_cluster(
cluster_name=cluster_name,
region=ec2_region,
vpc_id=ec2_vpc_id)
user = ec2_user
identity_file = ec2_identity_file
else:
raise UnsupportedProviderError(provider)
cluster.copy_file_check()
if not assume_yes and not master_only:
file_size_bytes = os.path.getsize(local_path)
num_nodes = len(cluster.slave_ips) + 1 # TODO: cluster.num_nodes
total_size_bytes = file_size_bytes * num_nodes
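            # Warn before copying when the combined upload across all nodes exceeds
            # 10**6 bytes (roughly 1 MB).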
if total_size_bytes > 10 ** 6:
logger.warning("WARNING:")
logger.warning(
format_message(
message="""\
You are trying to upload {total_size} bytes ({size} bytes x {count}
nodes in {cluster}). Depending on your upload bandwidth, this may take
a long time.
You may be better off uploading this file to a storage service like
Amazon S3 and downloading it from there to the cluster using
`flintrock run-command ...`.
""".format(
size=file_size_bytes,
count=num_nodes,
cluster=cluster_name,
total_size=total_size_bytes),
wrap=60))
click.confirm(
text="Are you sure you want to continue?",
default=True,
abort=True)
logger.info("Copying file to {target}...".format(
target="master only" if master_only else "cluster"))
cluster.copy_file(
local_path=local_path,
remote_path=remote_path,
master_only=master_only,
user=user,
identity_file=identity_file)
def normalize_keys(obj):
"""
Used to map keys from config files to Python parameter names.
"""
if type(obj) != dict:
return obj
else:
return {k.replace('-', '_'): normalize_keys(v) for k, v in obj.items()}
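# Illustrative: normalize_keys({'ec2-region': 'us-east-1', 'launch': {'num-slaves': 1}})
# returns {'ec2_region': 'us-east-1', 'launch': {'num_slaves': 1}} (applied recursively).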
def config_to_click(config: dict) -> dict:
"""
Convert a dictionary of configurations loaded from a Flintrock config file
to a dictionary that Click can use to set default options.
"""
service_configs = {}
if 'services' in config:
for service in config['services']:
if config['services'][service]:
service_configs.update(
{service + '_' + k: v for (k, v) in config['services'][service].items()})
ec2_configs = {
'ec2_' + k: v for (k, v) in config['providers']['ec2'].items()}
click_map = {
'launch': dict(
list(config['launch'].items()) +
list(ec2_configs.items()) +
list(service_configs.items())),
'describe': ec2_configs,
'destroy': ec2_configs,
'login': ec2_configs,
'start': ec2_configs,
'stop': ec2_configs,
'add-slaves': ec2_configs,
'remove-slaves': ec2_configs,
'run-command': ec2_configs,
'copy-file': ec2_configs,
}
return click_map
@cli.command()
@click.option('--locate', is_flag=True, default=False,
help="Don't open an editor. "
"Just open the folder containing the configuration file.")
@click.pass_context
def configure(cli_context, locate):
"""
Configure Flintrock's defaults.
This will open Flintrock's configuration file in your default YAML editor so
you can set your defaults.
"""
config_file = get_config_file()
if not os.path.isfile(config_file):
logger.info("Initializing config file from template...")
os.makedirs(os.path.dirname(config_file), exist_ok=True)
shutil.copyfile(
src=os.path.join(THIS_DIR, 'config.yaml.template'),
dst=config_file)
os.chmod(config_file, mode=0o644)
ret = click.launch(config_file, locate=locate)
if ret != 0:
raise Error(
"Flintrock could not launch an application to {action} "
"the config file at '{location}'. You may want to manually "
"find and edit this file."
.format(
action="locate" if locate else "edit",
location=config_file
)
)
def flintrock_is_in_development_mode() -> bool:
"""
Check if Flintrock was installed in development mode.
Use this function to toggle behavior that only Flintrock developers should
see.
"""
# This esoteric technique was pulled from pip.
# See: https://github.com/pypa/pip/pull/3258/files#diff-ab583908279e865537dec218246edcfcR310
for path_item in sys.path:
egg_link = os.path.join(path_item, 'Flintrock.egg-link')
if os.path.isfile(egg_link):
return True
else:
return False
def set_open_files_limit(desired_limit):
"""
On POSIX systems, set the open files limit to the desired number, unless
it is already equal to or higher than that.
Setting a high limit enables Flintrock to launch or interact with really
large clusters.
Background discussion: https://github.com/nchammas/flintrock/issues/81
"""
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
if soft_limit < desired_limit:
if desired_limit > hard_limit:
warnings.warn(
"Flintrock cannot set the open files limit to {desired} "
"because the OS hard limit is {hard}. Going with {hard}. "
"You may have problems launching or interacting with "
"really large clusters."
.format(
desired=desired_limit,
hard=hard_limit),
category=RuntimeWarning,
stacklevel=2)
resource.setrlimit(
resource.RLIMIT_NOFILE,
(min(desired_limit, hard_limit), hard_limit))
def check_external_dependency(executable_name: str):
if shutil.which(executable_name) is None:
raise Error(
"Error: Flintrock could not find '{executable}' on your PATH. "
"Flintrock needs this executable to carry out the operation you "
"requested. Please install it and try again."
.format(
executable=executable_name
)
)
def main() -> int:
# Starting in Python 3.7, deprecation warnings are shown by default. We
# don't want to show these to end-users.
# See: https://docs.python.org/3/library/warnings.html#default-warning-filter
if not flintrock_is_in_development_mode():
warnings.simplefilter(action='ignore', category=DeprecationWarning)
set_open_files_limit(4096)
try:
try:
# We pass in obj so we can add attributes to it, like provider, which
# get shared by all commands.
# See: http://click.pocoo.org/6/api/#click.Context
cli(obj={})
except botocore.exceptions.NoCredentialsError:
raise Error(
"Flintrock could not find your AWS credentials. "
"You can fix this is by providing your credentials "
"via environment variables or by creating a shared "
"credentials file.\n"
"For more information see:\n"
" * https://boto3.readthedocs.io/en/latest/guide/configuration.html#environment-variables\n"
" * https://boto3.readthedocs.io/en/latest/guide/configuration.html#shared-credentials-file"
)
except NothingToDo as e:
print(e)
return 0
except UsageError as e:
print(e, file=sys.stderr)
return 2
except Error as e:
print(e, file=sys.stderr)
return 1
| 34.507871
| 131
| 0.613503
|
ef58a83ec9c2a4b49671347c2a86b6c41f5fd08b
| 11,014
|
py
|
Python
|
scalyr_agent/tests/scalyr_monitor_test.py
|
slomo/scalyr-agent-2
|
8726d6c2483c9ceb19a37697f266261c31e00b0b
|
[
"Apache-2.0"
] | null | null | null |
scalyr_agent/tests/scalyr_monitor_test.py
|
slomo/scalyr-agent-2
|
8726d6c2483c9ceb19a37697f266261c31e00b0b
|
[
"Apache-2.0"
] | null | null | null |
scalyr_agent/tests/scalyr_monitor_test.py
|
slomo/scalyr-agent-2
|
8726d6c2483c9ceb19a37697f266261c31e00b0b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Steven Czerwinski <czerwin@scalyr.com>
__author__ = "czerwin@scalyr.com"
from scalyr_agent.json_lib import serialize
from scalyr_agent.json_lib import parse as json_lib_parse
from scalyr_agent.json_lib.objects import JsonArray, JsonObject, ArrayOfStrings
from scalyr_agent.scalyr_monitor import (
MonitorConfig,
BadMonitorConfiguration,
define_config_option,
)
from scalyr_agent.test_base import ScalyrTestCase
class MonitorConfigTest(ScalyrTestCase):
def test_base(self):
test_array = ["a", 1, False]
test_obj = {"a": 100, "b": 200}
config = MonitorConfig(
{
"int": 1,
"bool": True,
"string": "hi",
"unicode": u"bye",
"float": 1.4,
"long": 1L,
"JsonArray": JsonArray(*test_array),
"JsonObject": JsonObject(**test_obj),
}
)
self.assertEquals(len(config), 8)
self.assertTrue("int" in config)
self.assertFalse("foo" in config)
self.assertEquals(config["int"], 1)
self.assertEquals(config["bool"], True)
self.assertEquals(config["string"], "hi")
self.assertEquals(config["unicode"], u"bye")
self.assertEquals(config["float"], 1.4)
self.assertEquals(config["long"], 1L)
self.assertEquals(config["JsonArray"], JsonArray(*test_array))
self.assertEquals(config["JsonObject"], JsonObject(**test_obj))
count = 0
for _ in config:
count += 1
self.assertEquals(count, 8)
def test_int_conversion(self):
self.assertEquals(self.get(1, convert_to=int), 1)
self.assertEquals(self.get("12", convert_to=int), 12)
self.assertEquals(self.get(u"13", convert_to=int), 13)
self.assertRaises(BadMonitorConfiguration, self.get, 2.0, convert_to=int)
self.assertRaises(BadMonitorConfiguration, self.get, True, convert_to=int)
self.assertRaises(BadMonitorConfiguration, self.get, "12a", convert_to=int)
self.assertRaises(BadMonitorConfiguration, self.get, 4L, convert_to=int)
def test_str_conversion(self):
self.assertEquals(self.get(1, convert_to=str), "1")
self.assertEquals(self.get("ah", convert_to=str), "ah")
self.assertEquals(self.get(False, convert_to=str), "False")
self.assertEquals(self.get(1.3, convert_to=str), "1.3")
self.assertEquals(self.get(1L, convert_to=str), "1")
test_array = ["a", "b", "c"]
# str -> ArrayOfStrings (must support different variations)
arr = ArrayOfStrings(test_array)
self.assertEquals(self.get("a,b,c", convert_to=ArrayOfStrings), arr)
self.assertEquals(self.get("a,b, c", convert_to=ArrayOfStrings), arr)
self.assertEquals(self.get('"a", "b", "c"', convert_to=ArrayOfStrings), arr)
self.assertEquals(self.get("'a', 'b', 'c'", convert_to=ArrayOfStrings), arr)
self.assertEquals(self.get("[a, b, c]", convert_to=ArrayOfStrings), arr)
self.assertEquals(self.get("['a', \"b\", c]", convert_to=ArrayOfStrings), arr)
# str -> JsonArray
self.assertEquals(
self.get(serialize(test_array), convert_to=JsonArray),
JsonArray(*test_array),
)
self.assertRaises(
BadMonitorConfiguration,
# single quotes are invalid JSON
lambda: self.assertEquals(
self.get(str(test_array), convert_to=JsonArray), JsonArray(*test_array)
),
)
# str -> JsonObject
test_obj = {"a": 1, "b": "two", "c": [1, 2, 3]}
self.assertEquals(
self.get(serialize(test_obj), convert_to=JsonObject),
json_lib_parse(serialize(test_obj)),
)
def test_unicode_conversion(self):
self.assertEquals(self.get(1, convert_to=unicode), u"1")
self.assertEquals(self.get("ah", convert_to=unicode), u"ah")
self.assertEquals(self.get(False, convert_to=unicode), u"False")
self.assertEquals(self.get(1.3, convert_to=unicode), u"1.3")
self.assertEquals(self.get(1L, convert_to=unicode), u"1")
def test_long_conversion(self):
self.assertEquals(self.get(2, convert_to=long), 2L)
self.assertEquals(self.get("3", convert_to=long), 3L)
self.assertEquals(self.get(1L, convert_to=long), 1L)
self.assertRaises(BadMonitorConfiguration, self.get, True, convert_to=long)
self.assertRaises(BadMonitorConfiguration, self.get, "12a", convert_to=long)
def test_float_conversion(self):
self.assertEquals(self.get(2, convert_to=float), 2.0)
self.assertEquals(self.get("3.2", convert_to=float), 3.2)
self.assertEquals(self.get(1L, convert_to=float), 1.0)
self.assertRaises(BadMonitorConfiguration, self.get, True, convert_to=float)
self.assertRaises(BadMonitorConfiguration, self.get, "12a", convert_to=float)
def test_bool_conversion(self):
self.assertEquals(self.get(True, convert_to=bool), True)
self.assertEquals(self.get(False, convert_to=bool), False)
self.assertEquals(self.get("true", convert_to=bool), True)
self.assertEquals(self.get("false", convert_to=bool), False)
self.assertRaises(BadMonitorConfiguration, self.get, 1, convert_to=bool)
self.assertRaises(BadMonitorConfiguration, self.get, 2.1, convert_to=bool)
self.assertRaises(BadMonitorConfiguration, self.get, 3L, convert_to=bool)
def test_list_conversion(self):
# list -> JsonArray supported
test_array = ["a", 1, False]
json_arr = JsonArray(*test_array)
self.assertEquals(
self.get(list(json_arr), convert_to=JsonArray), JsonArray(*test_array),
)
# list -> ArrayOfStrings not supported
test_array = ["a", "b", "c"]
self.assertEquals(
self.get(test_array, convert_to=ArrayOfStrings), ArrayOfStrings(test_array)
)
def test_jsonarray_conversion(self):
# JsonArray -> list not supported
test_array = ["a", "b", "c"]
json_arr = JsonArray(*test_array)
self.assertRaises(
BadMonitorConfiguration, lambda: self.get(json_arr, convert_to=list)
)
# JsonArray -> ArrayOfStrings supported
test_array = ["a", "b", "c"]
json_arr = JsonArray(*test_array)
self.assertEquals(
self.get(json_arr, convert_to=ArrayOfStrings), ArrayOfStrings(test_array)
)
# JsonArray -> invalid ArrayOfStrings
test_array = ["a", "b", 3]
json_arr = JsonArray(*test_array)
self.assertRaises(
BadMonitorConfiguration,
lambda: self.get(json_arr, convert_to=ArrayOfStrings),
)
def test_arrayofstrings_conversion(self):
# JsonArray -> list not supported
test_array = ["a", "b", "c"]
json_arr = ArrayOfStrings(test_array)
self.assertRaises(
BadMonitorConfiguration, lambda: self.get(json_arr, convert_to=list)
)
# ArrayOfStrings -> JsonArray supported
test_array = ["a", "b", "c"]
json_arr = JsonArray(*test_array)
self.assertEquals(
self.get(json_arr, convert_to=JsonArray), JsonArray(*test_array)
)
def test_required_field(self):
config = MonitorConfig({"foo": 10})
self.assertEquals(config.get("foo", required_field=True), 10)
self.assertRaises(
BadMonitorConfiguration, config.get, "fo", required_field=True
)
def test_max_value(self):
self.assertRaises(BadMonitorConfiguration, self.get, 5, max_value=4)
self.assertEquals(self.get(2, max_value=3), 2)
def test_min_value(self):
self.assertRaises(BadMonitorConfiguration, self.get, 5, min_value=6)
self.assertEquals(self.get(4, min_value=3), 4)
def test_default_value(self):
config = MonitorConfig({"foo": 10})
self.assertEquals(config.get("foo", default=20), 10)
self.assertEquals(config.get("fee", default=20), 20)
def test_define_config_option(self):
define_config_option(
"foo", "a", "Description", required_option=True, convert_to=int
)
self.assertRaises(
BadMonitorConfiguration, MonitorConfig, {"b": 1}, monitor_module="foo"
)
config = MonitorConfig({"a": "5"}, monitor_module="foo")
self.assertEquals(config.get("a"), 5)
define_config_option(
"foo",
"b",
"Description",
min_value=5,
max_value=10,
default=7,
convert_to=int,
)
config = MonitorConfig({"a": 5}, monitor_module="foo")
self.assertEquals(config.get("b"), 7)
self.assertRaises(
BadMonitorConfiguration,
MonitorConfig,
{"a": 5, "b": 1},
monitor_module="foo",
)
self.assertRaises(
BadMonitorConfiguration,
MonitorConfig,
{"a": 5, "b": 11},
monitor_module="foo",
)
# Test case where no value in config for option with no default value should result in no value in
# MonitorConfig object
define_config_option(
"foo", "c", "Description", min_value=5, max_value=10, convert_to=int
)
config = MonitorConfig({"a": 5}, monitor_module="foo")
self.assertTrue("c" not in config)
def test_list_of_strings(self):
define_config_option(
"foo", "some_param", "A list of strings", default=["a", "b", "c", "d"]
)
self.assertEquals(self.get(["x", "y", "z"], convert_to=None), ["x", "y", "z"])
self.assertRaises(
Exception,
lambda: self.get("['x', 'y', 'z']", convert_to=list),
["x", "y", "z"],
)
def get(
self,
original_value,
convert_to=None,
required_field=False,
max_value=None,
min_value=None,
):
config = MonitorConfig({"foo": original_value})
return config.get(
"foo",
convert_to=convert_to,
required_field=required_field,
max_value=max_value,
min_value=min_value,
)
| 37.719178
| 106
| 0.614309
|
6482c7a8679e06892962b658a517e9d877e869ca
| 10,919
|
py
|
Python
|
padpo.py
|
awecx/padpo
|
82604c068cf57b398dec048c8e37484081b2086c
|
[
"BSD-3-Clause"
] | null | null | null |
padpo.py
|
awecx/padpo
|
82604c068cf57b398dec048c8e37484081b2086c
|
[
"BSD-3-Clause"
] | null | null | null |
padpo.py
|
awecx/padpo
|
82604c068cf57b398dec048c8e37484081b2086c
|
[
"BSD-3-Clause"
] | null | null | null |
import argparse
import json
import re
import subprocess
import sys
import tempfile
from abc import ABC, abstractmethod
from pathlib import Path
import simplelogging
# log = simplelogging.get_logger(console_level=simplelogging.DEBUG)
log = simplelogging.get_logger()
class PoItem:
def __init__(self, path, lineno):
self.path = path[3:]
self.lineno = lineno
self.parsing_msgid = None
self.msgid = []
self.msgstr = []
self.fuzzy = False
self.warnings = []
def append_line(self, line):
if line.startswith("msgid"):
self.parsing_msgid = True
self.msgid.append(line[7:-2])
elif line.startswith("msgstr"):
self.parsing_msgid = False
self.msgstr.append(line[8:-2])
elif line.startswith("#, fuzzy"):
self.fuzzy = True
elif line.startswith('"'):
if self.parsing_msgid:
self.msgid.append(line[1:-2])
            elif self.parsing_msgid is not None:
self.msgstr.append(line[1:-2])
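    # Illustrative .po fragment fed line by line to append_line (hypothetical entry):
    #   msgid "Hello"
    #   msgstr ""
    #   "Bonjour"
    # leaves self.msgid == ["Hello"] and self.msgstr == ["", "Bonjour"]; bare quoted
    # lines continue whichever field is currently being parsed.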
def __str__(self):
return (
f" - {self.msgid_full_content}\n"
f" => {self.msgstr_full_content}\n"
f" => {self.msgstr_rst2txt}\n"
)
@property
def msgid_full_content(self):
return "".join(self.msgid)
@property
def msgstr_full_content(self):
return "".join(self.msgstr)
@property
def msgid_rst2txt(self):
return self.rst2txt(self.msgid_full_content)
@property
def msgstr_rst2txt(self):
return self.rst2txt(self.msgstr_full_content)
@staticmethod
def rst2txt(text):
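        # Illustrative conversions applied below: "``len()``" -> "« len() »",
        # ":pep:`8`" -> "PEP 8", "**gras**" -> "« gras »" (reST markup flattened to
        # plain text with French quotation marks).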
text = re.sub(r"::", r":", text)
text = re.sub(r"``(.*?)``", r"« \1 »", text)
text = re.sub(r":pep:`(.*?)`", r"PEP \1", text)
for term in (
"attr",
"class",
"const",
"data",
"dfn",
"exc",
"file",
"func",
"keyword",
"meth",
"mod",
"ref",
"source",
"term",
):
text = re.sub(rf":{term}:`(.*?)`", r"« \1 »", text)
text = re.sub(r"\*\*(.*?)\*\*", r"« \1 »", text)
text = re.sub(
r"\*(.*?)\*", r"« \1 »", text
        )  # TODO: unless already between « »
return text
def add_warning(self, checker_name: str, text: str) -> None:
self.warnings.append(Warning(checker_name, text))
def add_error(self, checker_name: str, text: str) -> None:
self.warnings.append(Error(checker_name, text))
class PoFile:
def __init__(self, path=None):
self.content = []
self.path = path
if path:
self.parse_file(path)
def parse_file(self, path):
# TODO assert path is a file, not a dir
item = None
with open(path, encoding="utf8") as f:
for lineno, line in enumerate(f):
if line.startswith("#: "):
if item:
self.content.append(item)
item = PoItem(line, lineno + 1)
elif item:
item.append_line(line)
if item:
self.content.append(item)
def __str__(self):
ret = f"Po file: {self.path}\n"
ret += "\n".join(str(item) for item in self.content)
return ret
def rst2txt(self):
return "\n\n".join(item.msgstr_rst2txt for item in self.content)
def display_warnings(self):
any_error = False
for item in self.content:
prefix = f"{self.path}:{item.lineno:-4} %s"
log.debug(prefix, "")
for message in item.warnings:
if isinstance(message, Error):
log.error(prefix, message)
any_error = True
elif isinstance(message, Warning):
log.warning(prefix, message)
return any_error
class Message:
def __init__(self, checker_name: str, text: str):
self.checker_name = checker_name
self.text = text
def __str__(self):
return f"[{self.checker_name:^14}] {self.text}"
class Warning(Message):
pass
class Error(Message):
pass
class Checker(ABC):
def __init__(self, name):
self.name = name
def check_file(self, pofile: PoFile):
if not isinstance(pofile, PoFile):
log.error("%s is not an instance of PoFile", str(pofile))
for item in pofile.content:
self.check_item(item)
@abstractmethod
def check_item(self, item: PoItem):
pass
class DoubleSpaceChecker(Checker):
def __init__(self):
super().__init__(name="Double space")
def check_item(self, item: PoItem):
for match in re.finditer(
r"(.{0,30})\s\s(.{0,30})", item.msgstr_full_content
):
item.add_warning(
self.name,
f"Double spaces detected between ###{match.group(1)}### and ###{match.group(2)}###",
)
class LineLengthChecker(Checker):
def __init__(self):
super().__init__(name="Line length")
def check_item(self, item: PoItem):
for line in item.msgstr:
if len(line) > 77: # 77 + 2 ("")
item.add_error(
self.name, f"Line too long ({len(line) + 2} > 79): {line}"
)
class FuzzyChecker(Checker):
def __init__(self):
super().__init__(name="Fuzzy")
def check_item(self, item: PoItem):
if item.fuzzy:
item.add_warning(self.name, "This entry is tagged as fuzzy.")
class EmptyChecker(Checker):
def __init__(self):
super().__init__(name="Empty")
def check_item(self, item: PoItem):
if not item.msgstr_full_content and item.msgid_full_content:
item.add_warning(self.name, "This entry is not translated yet.")
def replace_quotes(match):
length = len(match.group(0)) - 4
return "« " + length * "x" + " »"
class NonBreakableSpaceChecker(Checker):
def __init__(self):
super().__init__(name="NBSP")
def check_item(self, item: PoItem):
text = item.msgstr_rst2txt
for match in re.finditer(r"(.{0,30})(«[^ ])(.{0,30})", text):
self.__add_message(item, *match.groups())
for match in re.finditer(r"(.{0,30})([^ ][»])(.{0,30})", text):
self.__add_message(item, *match.groups())
text = re.sub(r"«\s(.*?)\s»", replace_quotes, text)
text = re.sub(r"http://", "http-//", text)
text = re.sub(r"https://", "https-//", text)
for sign in "?!:;":
regex = r"(.{0,30})([^ ][" + sign + r"])(.{0,30})"
for match in re.finditer(regex, text):
prefix = item.msgstr_rst2txt[match.start(1) : match.end(1)]
suffix = item.msgstr_rst2txt[match.start(3) : match.end(3)]
match = item.msgstr_rst2txt[match.start(2) : match.end(2)]
self.__add_message_space_before(item, prefix, match, suffix)
def __add_message(self, item, prefix, match, suffix):
item.add_error(
self.name,
f'Space should be replaced with a non-breakable space in "{match}": between ###{prefix}### and ###{suffix}###',
)
def __add_message_space_before(self, item, prefix, match, suffix):
item.add_error(
self.name,
f'There should be a non-breakable space before "{match[1:]}": between ###{prefix}{match[0]}### and ###{match[1:]}{suffix}###',
)
class GrammalecteChecker(Checker):
def __init__(self):
super().__init__(name="Grammalecte")
self.dir = None
def check_file(self, pofile: PoFile):
if not isinstance(pofile, PoFile):
log.error("%s is not an instance of PoFile", str(pofile))
fd, name = tempfile.mkstemp(suffix=".txt", prefix="padpo_", text=True)
with open(name, "w", encoding="utf8") as f:
text = pofile.rst2txt()
text = re.sub(r"«\s(.*?)\s»", replace_quotes, text)
f.write(text)
result = subprocess.run(
[
"grammalecte-cli.py",
"-f",
name,
"-off",
"apos",
"--json",
"--only_when_errors",
],
capture_output=True,
text=True,
)
if result.stdout:
warnings = json.loads(result.stdout)
for warning in warnings["data"]:
for error in warning["lGrammarErrors"]:
if self.filter_out(error):
continue
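                    # Entries are joined with blank lines in PoFile.rst2txt(), so each
                    # entry appears to account for two Grammalecte paragraphs; hence
                    # the // 2 below.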
item_index = int(warning["iParagraph"]) // 2
item = pofile.content[item_index]
start = max(0, int(error["nStart"]) - 40)
end = max(0, int(error["nEnd"]) + 10)
item.add_warning(
self.name,
# self.name + " " + error["sRuleId"], # TODO
error["sMessage"]
+ " => ###"
+ item.msgstr_rst2txt[start:end]
+ "###",
)
Path(name).unlink()
def check_item(self, item: PoItem):
pass
def filter_out(self, error):
msg = error["sRuleId"]
if msg == "esp_milieu_ligne":
return True # double space
if msg == "nbsp_avant_deux_points":
return True
if msg == "nbsp_avant_double_ponctuation":
return True
return False
checkers = [
DoubleSpaceChecker(),
LineLengthChecker(),
FuzzyChecker(),
EmptyChecker(),
NonBreakableSpaceChecker(),
GrammalecteChecker(),
]
def check_file(path):
file = PoFile(path)
for checker in checkers:
checker.check_file(file)
return file.display_warnings()
def check_directory(path):
path = Path(path)
any_error = False
for file in path.rglob("*.po"):
any_error = check_file(file) or any_error
return any_error
def check_path(path):
path = Path(path)
if path.is_dir():
return check_directory(path)
else:
return check_file(path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Linter for *.po files.")
parser.add_argument("-v", "--verbose", action="count", default=0)
parser.add_argument(
"path",
metavar="PATH",
type=str,
help="path of the file or directory to check",
)
args = parser.parse_args()
if args.verbose < 1:
log.reduced_logging()
elif args.verbose < 2:
log.normal_logging()
else:
log.full_logging()
any_error = check_path(args.path)
if any_error:
sys.exit(1)
| 29.273458
| 138
| 0.53384
|
60024e33ac262c8b9a3060f731193c8285445ab9
| 1,004
|
py
|
Python
|
youtubemanager/users/admin.py
|
Nihilistik/YoutubeManager
|
3d227038f20cc36559122e9b2984323e14eb109b
|
[
"Apache-2.0"
] | null | null | null |
youtubemanager/users/admin.py
|
Nihilistik/YoutubeManager
|
3d227038f20cc36559122e9b2984323e14eb109b
|
[
"Apache-2.0"
] | null | null | null |
youtubemanager/users/admin.py
|
Nihilistik/YoutubeManager
|
3d227038f20cc36559122e9b2984323e14eb109b
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.contrib.auth import get_user_model
from django.utils.translation import gettext_lazy as _
from youtubemanager.users.forms import UserChangeForm, UserCreationForm
User = get_user_model()
@admin.register(User)
class UserAdmin(auth_admin.UserAdmin):
form = UserChangeForm
add_form = UserCreationForm
fieldsets = (
(None, {"fields": ("username", "password")}),
(_("Personal info"), {"fields": ("name", "email")}),
(
_("Permissions"),
{
"fields": (
"is_active",
"is_staff",
"is_superuser",
"groups",
"user_permissions",
),
},
),
(_("Important dates"), {"fields": ("last_login", "date_joined")}),
)
list_display = ["username", "name", "is_superuser"]
search_fields = ["name"]
| 28.685714
| 74
| 0.558765
|
6bf5f0fb49729d4893a96459e3351306a4fd999c
| 631
|
py
|
Python
|
backend/manage.py
|
crowdbotics-apps/kylee-33250
|
3efa5ef5259c883c6f4c8057ba92dd54dd024e0c
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/kylee-33250
|
3efa5ef5259c883c6f4c8057ba92dd54dd024e0c
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/kylee-33250
|
3efa5ef5259c883c6f4c8057ba92dd54dd024e0c
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'kylee_33250.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.681818
| 75
| 0.684628
|
16da57a93c10920c68398ae9dd1a5da2766ad4f4
| 267
|
py
|
Python
|
tests/artificial/transf_BoxCox/trend_LinearTrend/cycle_5/ar_12/test_artificial_32_BoxCox_LinearTrend_5_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/artificial/transf_BoxCox/trend_LinearTrend/cycle_5/ar_12/test_artificial_32_BoxCox_LinearTrend_5_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/artificial/transf_BoxCox/trend_LinearTrend/cycle_5/ar_12/test_artificial_32_BoxCox_LinearTrend_5_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 5, transform = "BoxCox", sigma = 0.0, exog_count = 0, ar_order = 12);
| 38.142857
| 162
| 0.730337
|
744cb870b0f85f2a4a9ff9749f880262af39a3e7
| 10,141
|
py
|
Python
|
trixi/logger/message/slackmessagelogger.py
|
justusschock/trixi
|
73886338888645f46594e4d9907f4d1cfc1cddac
|
[
"MIT"
] | 1
|
2018-11-30T21:53:36.000Z
|
2018-11-30T21:53:36.000Z
|
trixi/logger/message/slackmessagelogger.py
|
justusschock/trixi
|
73886338888645f46594e4d9907f4d1cfc1cddac
|
[
"MIT"
] | null | null | null |
trixi/logger/message/slackmessagelogger.py
|
justusschock/trixi
|
73886338888645f46594e4d9907f4d1cfc1cddac
|
[
"MIT"
] | null | null | null |
import io
import matplotlib.pyplot as plt
import numpy as np
import torch
import torchvision
from PIL import Image
from slackclient import SlackClient
from trixi.util.util import figure_to_image, get_image_as_buffered_file
from trixi.logger.plt.numpyseabornimageplotlogger import NumpySeabornImagePlotLogger
class SlackMessageLogger(NumpySeabornImagePlotLogger):
"""
Slack logger, inherits the NumpySeabornImagePlotLogger and sends plots/logs to a chat via a Slack bot.
"""
@staticmethod
def find_uid_for_email(slack_client, email):
"""
Returns the slack user id for a given email
Args:
slack_client: Slack client (already authorized)
email: Workspace email address to get the user id for
Returns:
Slack workspace user id
"""
user_list = slack_client.api_call("users.list").get('members', [])
user_id = ""
for user in user_list:
if user.get('profile', {}).get('email', '') == email:
user_id = user['id']
break
return user_id
@staticmethod
def find_cid_for_user(slack_client, uid):
"""
Returns the chat/channel id for a direct message of the bot with the given User
Args:
slack_client: Slack client (already authorized)
uid: User id of the user
Returns:
chat/channel id for a direct message
"""
direct_conv = slack_client.api_call(
"conversations.open",
users=[uid]
)
c_id = direct_conv.get('channel', {}).get('id', None)
return c_id
def __init__(self, token, user_email, exp_name=None, *args, **kwargs):
"""
Creates a new NumpySeabornImagePlotLogger object.
Args:
token (str): The Bot User OAuth Access Token of the slack bot used.
user_email (str): The user email in the workspace for the chat between the user and the slack bot.
exp_name: name of the experiment, always used as message prefix
"""
super(SlackMessageLogger, self).__init__(**kwargs)
self.token = token
self.user_email = user_email
self.slack_client = SlackClient(token)
self.uid = self.find_uid_for_email(self.slack_client, self.user_email)
self.cid = self.find_cid_for_user(self.slack_client, self.uid)
self.exp_name = exp_name
def process_params(self, f, *args, **kwargs):
"""
Inherited "decorator": convert PyTorch variables and Tensors to numpy arrays
"""
# convert args
args = (a.detach().cpu().numpy() if torch.is_tensor(a) else a for a in args)
# convert kwargs
for key, data in kwargs.items():
if torch.is_tensor(data):
kwargs[key] = data.detach().cpu().numpy()
return f(self, *args, **kwargs)
def send_message(self, message="", file=None):
if file is None:
ret_val = self.slack_client.api_call(
"chat.postMessage",
channel=self.cid,
text=message
)
return ret_val.get('ts', '')
else:
ret_val = self.slack_client.api_call(
"files.upload",
channels=self.cid,
file=file,
title=message
)
return ret_val.get('ts', '')
def show_text(self, text, *args, **kwargs):
"""
Sends a text to a chat using an existing slack bot.
Args:
text (str): Text message to be sent to the bot.
"""
if self.exp_name is not None:
text = self.exp_name + ":\n" + text
try:
self.send_message(message=text)
except:
print("Could not send text to slack")
def show_image(self, image, *args, **kwargs):
"""
Sends an image file to a chat using an existing slack bot.
Args:
image (str or np array): Path to the image file to be sent to the chat.
"""
try:
if isinstance(image, str):
with open(image, 'rb') as img_file:
self.send_message(message=self.exp_name, file=img_file)
elif isinstance(image, np.ndarray):
buf = get_image_as_buffered_file(image)
self.send_message(message=self.exp_name, file=buf)
except:
print("Could not send image to slack")
def show_image_grid(self, image_array, name=None, nrow=8, padding=2,
normalize=False, range=None, scale_each=False, pad_value=0, *args, **kwargs):
"""
Sends an array of images to a chat using an existing Slack bot. (Requires torch and torchvision)
Args:
image_array (np.narray / torch.tensor): Image array/ tensor which will be sent as an image grid
make_grid kwargs: keyword arguments for the torchvision make_grid method
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
if isinstance(image_array, np.ndarray):
image_array = torch.from_numpy(image_array)
image_array = image_array.cpu()
grid = torchvision.utils.make_grid(image_array, nrow=nrow, padding=padding, pad_value=pad_value,
normalize=normalize, range=range, scale_each=scale_each)
ndarr = grid.mul(255).clamp(0, 255).byte().permute(1, 2, 0).numpy()
buf = get_image_as_buffered_file(ndarr)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send image_grid to slack")
def show_value(self, value, name, counter=None, tag=None, *args, **kwargs):
"""
Sends a value to a chat using an existing slack bot.
Args:
value: Value to be plotted and sent to the chat.
name: Name for the plot.
counter: Optional counter to be sent in conjunction with the value.
tag: Tag to be used as a label for the plot.
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
figure = super().show_value(value=value, name=name, counter=counter, tag=tag)
buf = get_image_as_buffered_file(figure)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send plot to slack")
def show_barplot(self, array, name="barplot", *args, **kwargs):
"""
Sends a barplot to a chat using an existing slack bot.
Args:
array: array of shape NxM where N is the number of rows and M is the number of elements in the row.
name: The name of the figure
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
figure = super().show_barplot(array, name, *args, **kwargs)
buf = get_image_as_buffered_file(figure)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send plot to slack")
def show_lineplot(self, y_vals, x_vals=None, name="lineplot", *args, **kwargs):
"""
Sends a lineplot to a chat using an existing slack bot.
Args:
y_vals: Array of shape MxN, where M is the number of points and N is the number of different lines
x_vals: Has to have the same shape as Y: MxN. For each point in Y it gives the corresponding X value (if
not set the points are assumed to be equally distributed in the interval [0, 1] )
name: The name of the figure
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
figure = super().show_lineplot(y_vals, x_vals, name, *args, **kwargs)
buf = get_image_as_buffered_file(figure)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send plot to slack")
def show_scatterplot(self, array, name="scatterplot", *args, **kwargs):
"""
Sends a scatterplot to a chat using an existing slack bot.
Args:
array: A 2d array with size N x dim, where each element i in N at X[i] results in a 2d (if dim = 2)/
3d (if dim = 3) point.
name: The name of the figure
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
figure = super().show_scatterplot(array, name, *args, **kwargs)
buf = get_image_as_buffered_file(figure)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send plot to slack")
def show_piechart(self, array, name="piechart", *args, **kwargs):
"""
Sends a piechart to a chat using an existing slack bot.
Args:
array: Array of positive integers. Each integer will be presented as a part of the pie (with the total
as the sum of all integers)
name: The name of the figure
"""
caption = ""
if self.exp_name is not None:
caption += self.exp_name + " "
if name is not None:
caption += name + " "
figure = super().show_piechart(array, name, *args, **kwargs)
buf = get_image_as_buffered_file(figure)
try:
self.send_message(message=caption, file=buf)
except:
print("Could not send plot to slack")
def print(self, text, *args, **kwargs):
"""Just calls show_text()"""
self.show_text(text, *args, **kwargs)
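# Editor's sketch (not part of the original module): typical setup of the logger.
# The token and email are placeholders; a real Bot User OAuth Access Token is required.
def _usage_sketch():
    logger = SlackMessageLogger(
        token="xoxb-your-bot-token",
        user_email="you@example.com",
        exp_name="experiment-42",
    )
    logger.show_text("training started")
    logger.show_value(0.93, name="accuracy", counter=1)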
| 33.468647
| 116
| 0.57805
|
14a490f6fc59e451f1df582b0cf7075e7ded7ad8
| 1,046
|
py
|
Python
|
tests/modules/test_utils.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 6
|
2021-04-06T19:50:52.000Z
|
2022-01-19T17:42:33.000Z
|
tests/modules/test_utils.py
|
WildMeOrg/houston
|
8102229421388e44234c07ee6cb73bf705b6fba0
|
[
"Apache-2.0"
] | 491
|
2021-01-20T01:10:00.000Z
|
2022-03-31T19:30:48.000Z
|
tests/modules/test_utils.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 2
|
2021-03-12T02:33:55.000Z
|
2021-03-16T20:18:43.000Z
|
# -*- coding: utf-8 -*-
from unittest.mock import Mock
import pytest
from app.modules.utils import fail_on_missing_static_folder
class TestFailOnMissingStaticFolder:
def test_passing(self, tmp_path):
app = Mock()
app.static_folder = tmp_path
fail_on_missing_static_folder(app)
def test_missing_static_folder(self):
app = Mock()
static_folder = '/tmp/foo/bar/baz'
app.static_folder = static_folder
with pytest.raises(RuntimeError, match=static_folder):
fail_on_missing_static_folder(app)
def test_passing_specific_file(self, tmp_path):
app = Mock()
app.static_folder = tmp_path
with (tmp_path / 'foo.txt').open('w') as fb:
fb.write('bar')
fail_on_missing_static_folder(app, 'foo.txt')
def test_missing_specific_file(self, tmp_path):
app = Mock()
app.static_folder = tmp_path
with pytest.raises(RuntimeError, match=str(tmp_path)):
fail_on_missing_static_folder(app, 'bar.txt')
| 30.764706
| 62
| 0.66826
|
bf60dff5c7c03f5c7a10c0a9afbcc99fb4d24f98
| 2,339
|
py
|
Python
|
umlayer/model/project.py
|
selforthis/umlayer
|
8679af2ec46d346bc1b4adbcb70e3ebc6a52604a
|
[
"MIT"
] | null | null | null |
umlayer/model/project.py
|
selforthis/umlayer
|
8679af2ec46d346bc1b4adbcb70e3ebc6a52604a
|
[
"MIT"
] | null | null | null |
umlayer/model/project.py
|
selforthis/umlayer
|
8679af2ec46d346bc1b4adbcb70e3ebc6a52604a
|
[
"MIT"
] | 1
|
2021-11-28T17:26:00.000Z
|
2021-11-28T17:26:00.000Z
|
from uuid import UUID
from . import BaseItem
class Project:
"""Contains project data"""
def __init__(self):
self.project_items = {} # bad design
self._root = None
self._is_dirty = False
@property
def root(self):
return self._root
def setRoot(self, root: BaseItem):
self._add(root)
self._root = root
def __str__(self):
return f"<Project root={self._root}>"
def dirty(self):
return self._is_dirty
def setProjectDirty(self, dirty): # bad design
if self._is_dirty == dirty:
return
self._is_dirty = dirty
def remove(self, project_item_id: UUID = None):
if project_item_id not in self.project_items.keys():
raise AttributeError("element_id")
self._remove(project_item_id)
self.setProjectDirty(True)
def _remove(self, project_item_id: UUID = None):
for child in self.children(project_item_id):
self._remove(child.id)
del self.project_items[project_item_id]
def add(self, project_item: BaseItem, parent_id: UUID):
if parent_id not in self.project_items:
print(f"Add: item {parent_id} does not belong to {self}")
self.printProjectItems()
raise AttributeError("parent_id")
self._add(project_item, parent_id)
self.setProjectDirty(True)
def _add(self, project_item: BaseItem, parent_id: UUID = None):
project_item.parent_id = parent_id
self.project_items[project_item.id] = project_item
def get(self, project_item_id: UUID) -> BaseItem:
return self.project_items.get(project_item_id)
def children(self, parent_id: UUID) -> set[BaseItem]:
return set(
project_item
for project_item in self.project_items.values()
if project_item.parent_id == parent_id
)
def count(self):
return len(self.project_items)
def printProjectItems(self):
"""Debugging feature"""
print(self)
for item_id, project_item in self.project_items.items():
print(
f"{project_item.name():15s} id={project_item.id} par={project_item.parent_id}"
)
if item_id != project_item.id:
print("Bad item")
print()
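# Editor's sketch (not part of the original module): assembling a small item tree.
# The two arguments stand in for BaseItem instances; their construction is out of
# scope here, only the id/parent_id contract used above is relied on.
def _usage_sketch(root_item, child_item):
    project = Project()
    project.setRoot(root_item)             # root is stored with parent_id=None
    project.add(child_item, root_item.id)  # marks the project dirty
    assert child_item in project.children(root_item.id)
    return project.count()                 # 2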
| 29.2375
| 94
| 0.61864
|
12e9f8f6f4f862cbe84b24cb325b7888df8517b1
| 22,080
|
py
|
Python
|
fast_bert/data_cls.py
|
thinker-94/fast-bert
|
337bb1331fa8aab83d34c895a9d6b64d18a4efc5
|
[
"Apache-2.0"
] | 1
|
2020-10-19T06:14:10.000Z
|
2020-10-19T06:14:10.000Z
|
fast_bert/data_cls.py
|
ak3ra/fast-bert
|
f32e12290c2c4e9fc6a104dba117bcf435492f80
|
[
"Apache-2.0"
] | null | null | null |
fast_bert/data_cls.py
|
ak3ra/fast-bert
|
f32e12290c2c4e9fc6a104dba117bcf435492f80
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
import os
import torch
from pathlib import Path
import pickle
import logging
import shutil
from torch.utils.data import (
Dataset,
TensorDataset,
DataLoader,
RandomSampler,
SequentialSampler,
)
from torch.utils.data.distributed import DistributedSampler
from transformers import AutoTokenizer
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string or [string]. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
if isinstance(label, list):
self.label = label
elif label:
self.label = str(label)
else:
self.label = None
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids, input_mask, segment_ids, label_id):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_id = label_id
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal percent
# of tokens from each, since if one sequence is very short then each token
# that's truncated likely contains more information than a longer sequence.
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
def convert_examples_to_features(
examples,
label_list,
max_seq_length,
tokenizer,
output_mode="classification",
cls_token_at_end=False,
pad_on_left=False,
cls_token="[CLS]",
sep_token="[SEP]",
pad_token=0,
sequence_a_segment_id=0,
sequence_b_segment_id=1,
cls_token_segment_id=1,
pad_token_segment_id=0,
mask_padding_with_zero=True,
logger=None,
):
""" Loads a data file into a list of `InputBatch`s
`cls_token_at_end` defines the location of the CLS token:
- False (Default, BERT/XLM pattern): [CLS] + A + [SEP] + B + [SEP]
- True (XLNet/GPT pattern): A + [SEP] + B + [SEP] + [CLS]
`cls_token_segment_id` defines the segment id associated with the CLS token (0 for BERT, 2 for XLNet)
"""
label_map = {label: i for i, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
if logger:
logger.info("Writing example %d of %d" % (ex_index, len(examples)))
tokens_a = tokenizer.tokenize(str(example.text_a))
tokens_b = None
if example.text_b:
tokens_b = tokenizer.tokenize(str(example.text_b))
# Modifies `tokens_a` and `tokens_b` in place so that the total
# length is less than the specified length.
# Account for [CLS], [SEP], [SEP] with "- 3"
_truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[: (max_seq_length - 2)]
# The convention in BERT is:
# (a) For sequence pairs:
# tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
# type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
# (b) For single sequences:
# tokens: [CLS] the dog is hairy . [SEP]
# type_ids: 0 0 0 0 0 0 0
#
# Where "type_ids" are used to indicate whether this is the first
# sequence or the second sequence. The embedding vectors for `type=0` and
# `type=1` were learned during pre-training and are added to the wordpiece
# embedding vector (and position vector). This is not *strictly* necessary
# since the [SEP] token unambiguously separates the sequences, but it makes
# it easier for the model to learn the concept of sequences.
#
# For classification tasks, the first vector (corresponding to [CLS]) is
# used as the "sentence vector". Note that this only makes sense because
# the entire model is fine-tuned.
tokens = tokens_a + [sep_token]
segment_ids = [sequence_a_segment_id] * len(tokens)
if tokens_b:
tokens += tokens_b + [sep_token]
segment_ids += [sequence_b_segment_id] * (len(tokens_b) + 1)
if cls_token_at_end:
tokens = tokens + [cls_token]
segment_ids = segment_ids + [cls_token_segment_id]
else:
tokens = [cls_token] + tokens
segment_ids = [cls_token_segment_id] + segment_ids
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
# Zero-pad up to the sequence length.
padding_length = max_seq_length - len(input_ids)
if pad_on_left:
input_ids = ([pad_token] * padding_length) + input_ids
input_mask = (
[0 if mask_padding_with_zero else 1] * padding_length
) + input_mask
segment_ids = ([pad_token_segment_id] * padding_length) + segment_ids
else:
input_ids = input_ids + ([pad_token] * padding_length)
input_mask = input_mask + (
[0 if mask_padding_with_zero else 1] * padding_length
)
segment_ids = segment_ids + ([pad_token_segment_id] * padding_length)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
if isinstance(example.label, list):
label_id = []
for label in example.label:
label_id.append(float(label))
else:
if example.label is not None:
label_id = label_map[example.label]
else:
label_id = ""
features.append(
InputFeatures(
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
label_id=label_id,
)
)
return features
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, filename, size=-1):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, filename, size=-1):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, filename, size=-1):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
class TextProcessor(DataProcessor):
def __init__(self, data_dir, label_dir):
self.data_dir = data_dir
self.label_dir = label_dir
self.labels = None
def get_train_examples(
self, filename="train.csv", text_col="text", label_col="label", size=-1
):
if size == -1:
data_df = pd.read_csv(os.path.join(self.data_dir, filename))
return self._create_examples(
data_df, "train", text_col=text_col, label_col=label_col
)
else:
data_df = pd.read_csv(os.path.join(self.data_dir, filename))
# data_df['comment_text'] = data_df['comment_text'].apply(cleanHtml)
return self._create_examples(
data_df.sample(size), "train", text_col=text_col, label_col=label_col
)
def get_dev_examples(
self, filename="val.csv", text_col="text", label_col="label", size=-1
):
if size == -1:
data_df = pd.read_csv(os.path.join(self.data_dir, filename))
return self._create_examples(
data_df, "dev", text_col=text_col, label_col=label_col
)
else:
data_df = pd.read_csv(os.path.join(self.data_dir, filename))
return self._create_examples(
data_df.sample(size), "dev", text_col=text_col, label_col=label_col
)
def get_test_examples(
self, filename="val.csv", text_col="text", label_col="label", size=-1
):
data_df = pd.read_csv(os.path.join(self.data_dir, filename))
# data_df['comment_text'] = data_df['comment_text'].apply(cleanHtml)
if size == -1:
return self._create_examples(
data_df, "test", text_col=text_col, label_col=None
)
else:
return self._create_examples(
data_df.sample(size), "test", text_col=text_col, label_col=None
)
def get_labels(self, filename="labels.csv"):
"""See base class."""
if self.labels is None:
self.labels = list(
pd.read_csv(os.path.join(self.label_dir, filename), header=None)[0]
.astype("str")
.values
)
return self.labels
def _create_examples(self, df, set_type, text_col, label_col):
"""Creates examples for the training and dev sets."""
if label_col is None:
return list(
df.apply(
lambda row: InputExample(
guid=row.index, text_a=str(row[text_col]), label=None
),
axis=1,
)
)
else:
return list(
df.apply(
lambda row: InputExample(
guid=row.index,
text_a=str(row[text_col]),
label=str(row[label_col]),
),
axis=1,
)
)
class MultiLabelTextProcessor(TextProcessor):
def _create_examples(self, df, set_type, text_col, label_col):
def _get_labels(row, label_col):
if isinstance(label_col, list):
return list(row[label_col])
else:
# create one hot vector of labels
label_list = self.get_labels()
labels = [0] * len(label_list)
# cast with string in case labels are integers
labels[label_list.index(str(row[label_col]))] = 1
return labels
"""Creates examples for the training and dev sets."""
if label_col is None:
return list(
df.apply(
lambda row: InputExample(
guid=row.index, text_a=row[text_col], label=[]
),
axis=1,
)
)
else:
return list(
df.apply(
lambda row: InputExample(
guid=row.index,
text_a=row[text_col],
label=_get_labels(row, label_col),
),
axis=1,
)
)
class LRFinderDataset(Dataset):
def __init__(self, data_dir, filename, text_col, label_col):
super().__init__()
self.text_col = text_col
self.label_col = label_col
self.data = pd.read_csv(os.path.join(data_dir, filename))
def __getitem__(self, idx):
return self.data.loc[idx, self.text_col], self.data.loc[idx, self.label_col]
def __len__(self):
return self.data.shape[0]
class BertDataBunch(object):
def __init__(
self,
data_dir,
label_dir,
tokenizer,
train_file="train.csv",
val_file="val.csv",
test_data=None,
label_file="labels.csv",
text_col="text",
label_col="label",
batch_size_per_gpu=16,
max_seq_length=512,
multi_gpu=True,
multi_label=False,
backend="nccl",
model_type="bert",
logger=None,
clear_cache=False,
no_cache=False,
custom_sampler=None,
pos_weight=None,
weight=None
):
# just in case someone passes string instead of Path
if isinstance(data_dir, str):
data_dir = Path(data_dir)
if isinstance(label_dir, str):
label_dir = Path(label_dir)
if isinstance(tokenizer, str):
# instantiate the new tokeniser object using the tokeniser name
tokenizer = AutoTokenizer.from_pretrained(tokenizer, use_fast=True)
self.tokenizer = tokenizer
self.data_dir = data_dir
self.train_file = train_file
self.val_file = val_file
self.test_data = test_data
self.cache_dir = data_dir / "cache"
self.max_seq_length = max_seq_length
self.batch_size_per_gpu = batch_size_per_gpu
self.train_dl = None
self.val_dl = None
self.test_dl = None
self.multi_label = multi_label
self.n_gpu = 1
self.no_cache = no_cache
self.model_type = model_type
self.output_mode = "classification"
self.custom_sampler = custom_sampler
self.pos_weight = pos_weight
self.weight = weight
if logger is None:
logger = logging.getLogger()
self.logger = logger
if multi_gpu:
self.n_gpu = torch.cuda.device_count()
if clear_cache:
shutil.rmtree(self.cache_dir, ignore_errors=True)
if multi_label:
processor = MultiLabelTextProcessor(data_dir, label_dir)
else:
processor = TextProcessor(data_dir, label_dir)
self.labels = processor.get_labels(label_file)
if train_file:
# Train DataLoader
train_examples = None
cached_features_file = os.path.join(
self.cache_dir,
"cached_{}_{}_{}_{}_{}".format(
self.model_type.replace("/", "-"),
"train",
"multi_label" if self.multi_label else "multi_class",
str(self.max_seq_length),
os.path.basename(train_file),
),
)
if os.path.exists(cached_features_file) is False or self.no_cache is True:
train_examples = processor.get_train_examples(
train_file, text_col=text_col, label_col=label_col
)
train_dataset = self.get_dataset_from_examples(
train_examples, "train", no_cache=self.no_cache
)
self.train_batch_size = self.batch_size_per_gpu * max(1, self.n_gpu)
if self.custom_sampler is not None:
train_sampler = self.custom_sampler
else:
train_sampler = RandomSampler(train_dataset)
self.train_dl = DataLoader(
train_dataset, sampler=train_sampler, batch_size=self.train_batch_size
)
if val_file:
# Validation DataLoader
val_examples = None
cached_features_file = os.path.join(
self.cache_dir,
"cached_{}_{}_{}_{}_{}".format(
self.model_type.replace("/", "-"),
"dev",
"multi_label" if self.multi_label else "multi_class",
str(self.max_seq_length),
os.path.basename(val_file),
),
)
if os.path.exists(cached_features_file) is False:
val_examples = processor.get_dev_examples(
val_file, text_col=text_col, label_col=label_col
)
val_dataset = self.get_dataset_from_examples(
val_examples, "dev", no_cache=self.no_cache
)
# no grads necessary, hence double val batch size
self.val_batch_size = self.batch_size_per_gpu * 2 * max(1, self.n_gpu)
val_sampler = SequentialSampler(val_dataset)
self.val_dl = DataLoader(
val_dataset, sampler=val_sampler, batch_size=self.val_batch_size
)
if test_data:
# Test set loader for predictions
test_examples = []
input_data = []
for index, text in enumerate(test_data):
test_examples.append(InputExample(index, text))
input_data.append({"id": index, "text": text})
test_dataset = self.get_dataset_from_examples(
test_examples, "test", is_test=True, no_cache=self.no_cache
)
self.test_batch_size = self.batch_size_per_gpu * max(1, self.n_gpu)
test_sampler = SequentialSampler(test_dataset)
self.test_dl = DataLoader(
test_dataset, sampler=test_sampler, batch_size=self.test_batch_size
)
def get_dl_from_texts(self, texts):
test_examples = []
input_data = []
for index, text in enumerate(texts):
test_examples.append(InputExample(index, text, label=None))
input_data.append({"id": index, "text": text})
test_dataset = self.get_dataset_from_examples(
test_examples, "test", is_test=True, no_cache=True
)
test_sampler = SequentialSampler(test_dataset)
return DataLoader(
test_dataset, sampler=test_sampler, batch_size=self.batch_size_per_gpu
)
def save(self, filename="databunch.pkl"):
tmp_path = self.data_dir / "tmp"
tmp_path.mkdir(exist_ok=True)
with open(str(tmp_path / filename), "wb") as f:
pickle.dump(self, f)
def get_dataset_from_examples(
self, examples, set_type="train", is_test=False, no_cache=False
):
if set_type == "train":
file_name = self.train_file
elif set_type == "dev":
file_name = self.val_file
elif set_type == "test":
file_name = (
"test" # test is not supposed to be a file - just a list of texts
)
cached_features_file = os.path.join(
self.cache_dir,
"cached_{}_{}_{}_{}_{}".format(
self.model_type.replace("/", "-"),
set_type,
"multi_label" if self.multi_label else "multi_class",
str(self.max_seq_length),
os.path.basename(file_name),
),
)
if os.path.exists(cached_features_file) and no_cache is False:
self.logger.info(
"Loading features from cached file %s", cached_features_file
)
features = torch.load(cached_features_file)
else:
# Create tokenized and numericalized features
features = convert_examples_to_features(
examples,
label_list=self.labels,
max_seq_length=self.max_seq_length,
tokenizer=self.tokenizer,
output_mode=self.output_mode,
# xlnet has a cls token at the end
cls_token_at_end=bool(self.model_type in ["xlnet"]),
cls_token=self.tokenizer.cls_token,
sep_token=self.tokenizer.sep_token,
cls_token_segment_id=2 if self.model_type in ["xlnet"] else 0,
# pad on the left for xlnet
pad_on_left=bool(self.model_type in ["xlnet"]),
pad_token_segment_id=4 if self.model_type in ["xlnet"] else 0,
logger=self.logger,
)
# Create folder if it doesn't exist
if no_cache is False:
self.cache_dir.mkdir(exist_ok=True)
self.logger.info(
"Saving features into cached file %s", cached_features_file
)
torch.save(features, cached_features_file)
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_input_mask = torch.tensor(
[f.input_mask for f in features], dtype=torch.long
)
all_segment_ids = torch.tensor(
[f.segment_ids for f in features], dtype=torch.long
)
if is_test is False: # labels not available for test set
if self.multi_label:
all_label_ids = torch.tensor(
[f.label_id for f in features], dtype=torch.float
)
else:
all_label_ids = torch.tensor(
[f.label_id for f in features], dtype=torch.long
)
dataset = TensorDataset(
all_input_ids, all_input_mask, all_segment_ids, all_label_ids
)
else:
all_label_ids = []
dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids)
return dataset
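# Editor's sketch (not part of the original module): building a databunch for a
# multi-class CSV dataset. Paths, column names and the tokenizer name are placeholders.
def _usage_sketch():
    databunch = BertDataBunch(
        data_dir="./data",              # contains train.csv and val.csv
        label_dir="./data",             # contains labels.csv
        tokenizer="bert-base-uncased",  # resolved via AutoTokenizer.from_pretrained
        text_col="text",
        label_col="label",
        batch_size_per_gpu=16,
        max_seq_length=128,
        multi_label=False,
        model_type="bert",
    )
    return databunch.train_dl, databunch.val_dl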
| 35.670436
| 106
| 0.573007
|
cfa413b95c04a222e85983e7b0753f386328ca1e
| 1,465
|
py
|
Python
|
src/python/nimbusml/ensemble/output_combiner/classifierstacking.py
|
michaelgsharp/NimbusML
|
50031157265f49eec85d27fe67582d9ddaf01ef9
|
[
"MIT"
] | 134
|
2018-11-01T22:15:24.000Z
|
2019-05-04T11:30:08.000Z
|
src/python/nimbusml/ensemble/output_combiner/classifierstacking.py
|
michaelgsharp/NimbusML
|
50031157265f49eec85d27fe67582d9ddaf01ef9
|
[
"MIT"
] | 226
|
2019-05-07T19:00:44.000Z
|
2021-01-06T07:59:48.000Z
|
src/python/nimbusml/ensemble/output_combiner/classifierstacking.py
|
michaelgsharp/NimbusML
|
50031157265f49eec85d27fe67582d9ddaf01ef9
|
[
"MIT"
] | 43
|
2019-05-15T20:19:42.000Z
|
2022-03-30T10:26:07.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------------------------
# - Generated by tools/entrypoint_compiler.py: do not edit by hand
"""
ClassifierStacking
"""
__all__ = ["ClassifierStacking"]
from ...internal.core.ensemble.output_combiner.classifierstacking import \
ClassifierStacking as core
from ...internal.utils.utils import trace
class ClassifierStacking(core):
"""
**Description**
Computes the output by training a model on a training set where each instance is a vector containing the outputs of the different models on a training instance, and the instance's label
:param validation_dataset_proportion: The proportion of instances to be
selected to test the individual base learner. If it is 0, it uses the
training set.
:param params: Additional arguments sent to compute engine.
"""
@trace
def __init__(
self,
validation_dataset_proportion=0.3,
**params):
core.__init__(
self,
validation_dataset_proportion=validation_dataset_proportion,
**params)
def get_params(self, deep=False):
"""
Get the parameters for this operator.
"""
return core.get_params(self)
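# Editor's sketch (not part of the original module): constructing the combiner on its
# own. Wiring it into an ensemble learner happens elsewhere in nimbusml and is not
# shown here.
def _usage_sketch():
    combiner = ClassifierStacking(validation_dataset_proportion=0.3)
    return combiner.get_params()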
| 31.847826
| 193
| 0.593174
|
5bf3c1268a2bad898575c87a48dbd637dfcc1779
| 2,441
|
py
|
Python
|
hs_composite_resource/page_processors.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
hs_composite_resource/page_processors.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
hs_composite_resource/page_processors.py
|
tommac7/hydroshare
|
87c4543a55f98103d2614bf4c47f7904c3f9c029
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import messages
from django.contrib.messages import get_messages
from django.http import HttpResponseRedirect
from mezzanine.pages.page_processors import processor_for
from hs_core import page_processors
from hs_core.views import add_generic_context
from .models import CompositeResource
@processor_for(CompositeResource)
def landing_page(request, page):
"""
A typical Mezzanine page processor.
"""
content_model = page.get_content_model()
netcdf_logical_files = content_model.get_logical_files('NetCDFLogicalFile')
for lf in netcdf_logical_files:
if lf.metadata.is_dirty:
msg = "One or more NetCDF files are out of sync with metadata changes."
# prevent same message being displayed more than once
msg_exists = False
storage = get_messages(request)
for message in storage:
if message.message == msg:
msg_exists = True
break
if not msg_exists:
messages.info(request, msg)
break
timeseries_logical_files = content_model.get_logical_files('TimeSeriesLogicalFile')
for lf in timeseries_logical_files:
if lf.metadata.is_dirty:
msg = "One or more SQLite files are out of sync with metadata changes."
# prevent same message being displayed more than once
msg_exists = False
storage = get_messages(request)
for message in storage:
if message.message == msg:
msg_exists = True
break
if not msg_exists:
messages.info(request, msg)
break
edit_resource = page_processors.check_resource_mode(request)
context = page_processors.get_page_context(page, request.user, resource_edit=edit_resource,
extended_metadata_layout=None, request=request)
if isinstance(context, HttpResponseRedirect):
# sending user to login page
return context
file_type_missing_metadata = {'file_type_missing_metadata':
content_model.get_missing_file_type_metadata_info()}
context.update(file_type_missing_metadata)
hs_core_context = add_generic_context(request, page)
context.update(hs_core_context)
# sending user to resource landing page
return context
| 38.140625
| 95
| 0.661204
|
d70e0e5292e3c28ab1c74cffd972822ee009cf5d
| 391
|
py
|
Python
|
myparts/myparts/asgi.py
|
edcodes/Django-Parts-Inventory
|
49e41666d854bc79f0a65debb2fedb534f2e2d51
|
[
"Apache-2.0"
] | null | null | null |
myparts/myparts/asgi.py
|
edcodes/Django-Parts-Inventory
|
49e41666d854bc79f0a65debb2fedb534f2e2d51
|
[
"Apache-2.0"
] | null | null | null |
myparts/myparts/asgi.py
|
edcodes/Django-Parts-Inventory
|
49e41666d854bc79f0a65debb2fedb534f2e2d51
|
[
"Apache-2.0"
] | 2
|
2022-02-10T21:14:02.000Z
|
2022-03-31T00:51:38.000Z
|
"""
ASGI config for myparts project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myparts.settings')
application = get_asgi_application()
| 23
| 78
| 0.785166
|
fda0d162f80a83dfb83f4f12fde4088ec6db52f8
| 2,929
|
py
|
Python
|
qiskit/aqua/algorithms/classical/exact_ls_solver/exact_ls_solver.py
|
Nick-Singstock/qiskit-aqua
|
8c2bc57b78dec447faec3adbc966471a3206c2ef
|
[
"Apache-2.0"
] | 1
|
2020-11-06T01:09:28.000Z
|
2020-11-06T01:09:28.000Z
|
qiskit/aqua/algorithms/classical/exact_ls_solver/exact_ls_solver.py
|
Nick-Singstock/qiskit-aqua
|
8c2bc57b78dec447faec3adbc966471a3206c2ef
|
[
"Apache-2.0"
] | null | null | null |
qiskit/aqua/algorithms/classical/exact_ls_solver/exact_ls_solver.py
|
Nick-Singstock/qiskit-aqua
|
8c2bc57b78dec447faec3adbc966471a3206c2ef
|
[
"Apache-2.0"
] | 1
|
2020-11-06T01:09:43.000Z
|
2020-11-06T01:09:43.000Z
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM Corp. 2017 and later.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""The Exact LinearSystem algorithm."""
import logging
import numpy as np
from qiskit.aqua.algorithms import QuantumAlgorithm
from qiskit.aqua import AquaError
logger = logging.getLogger(__name__)
class ExactLSsolver(QuantumAlgorithm):
"""The Exact LinearSystem algorithm."""
CONFIGURATION = {
'name': 'ExactLSsolver',
'description': 'ExactLSsolver Algorithm',
'classical': True,
'input_schema': {
'$schema': 'http://json-schema.org/schema#',
'id': 'ExactLSsolver_schema',
'type': 'object',
'properties': {
},
'additionalProperties': False
},
'problems': ['linear_system']
}
def __init__(self, matrix=None, vector=None):
"""Constructor.
Args:
matrix (array): the input matrix of the linear system of equations
vector (array): the input vector of the linear system of equations
"""
self.validate(locals())
super().__init__()
self._matrix = matrix
self._vector = vector
self._ret = {}
@classmethod
def init_params(cls, params, algo_input):
"""
Initialize via parameters dictionary and algorithm input instance
Args:
params: parameters dictionary
algo_input: LinearSystemInput instance
"""
if algo_input is None:
raise AquaError("LinearSystemInput instance is required.")
matrix = algo_input.matrix
vector = algo_input.vector
if not isinstance(matrix, np.ndarray):
matrix = np.asarray(matrix)
if not isinstance(vector, np.ndarray):
vector = np.asarray(vector)
if matrix.shape[0] != len(vector):
raise ValueError("Input vector dimension does not match input "
"matrix dimension!")
if matrix.shape[0] != matrix.shape[1]:
raise ValueError("Input matrix must be square!")
return cls(matrix, vector)
def _solve(self):
self._ret['eigvals'] = np.linalg.eig(self._matrix)[0]
self._ret['solution'] = list(np.linalg.solve(self._matrix, self._vector))
def _run(self):
"""
Run the algorithm to compute eigenvalues and solution.
Returns:
Dictionary of results
"""
self._solve()
return self._ret
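# Editor's sketch (not part of the original module): solving a small 2x2 system.
# run() on the QuantumAlgorithm base class dispatches to _run(); for this classical
# algorithm no quantum backend is assumed to be needed.
def _usage_sketch():
    matrix = np.array([[1.0, 2.0], [2.0, 1.0]])
    vector = np.array([1.0, 0.0])
    solver = ExactLSsolver(matrix, vector)
    result = solver.run()  # {'eigvals': ..., 'solution': ...}
    return result["solution"]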
| 30.831579
| 81
| 0.616251
|
3592638e4b25908a0ee99d07580467244a7eab90
| 2,066
|
py
|
Python
|
tests/test_codec.py
|
Chrisa142857/CompressAI
|
75760096b9700a58d346351251d544050f3418fb
|
[
"Apache-2.0"
] | 62
|
2021-08-10T01:25:44.000Z
|
2022-03-31T07:53:35.000Z
|
tests/test_codec.py
|
Chrisa142857/CompressAI
|
75760096b9700a58d346351251d544050f3418fb
|
[
"Apache-2.0"
] | 6
|
2021-08-12T06:40:55.000Z
|
2022-03-09T15:34:50.000Z
|
tests/test_codec.py
|
Chrisa142857/CompressAI
|
75760096b9700a58d346351251d544050f3418fb
|
[
"Apache-2.0"
] | 8
|
2021-08-06T02:03:12.000Z
|
2022-03-22T19:33:08.000Z
|
# Copyright 2020 InterDigital Communications, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import pytest
import torch
from compressai.zoo import models
archs = [
"bmshj2018-factorized",
"bmshj2018-hyperprior",
"mbt2018-mean",
"mbt2018",
]
class TestCompressDecompress:
@pytest.mark.parametrize("arch,N", itertools.product(archs, [1, 2, 3]))
def test_codec(self, arch: str, N: int):
x = torch.zeros(N, 3, 256, 256)
h, w = x.size()[-2:]
x[:, :, h // 4 : -h // 4, w // 4 : -w // 4].fill_(1)
model = models[arch]
net = model(quality=1, metric="mse", pretrained=True).eval()
with torch.no_grad():
rv = net.compress(x)
assert "shape" in rv
shape = rv["shape"]
ds = net.downsampling_factor
assert shape == torch.Size([x.size(2) // ds, x.size(3) // ds])
assert "strings" in rv
strings_list = rv["strings"]
# y_strings (+ optional z_strings)
assert len(strings_list) == 1 or len(strings_list) == 2
for strings in strings_list:
assert len(strings) == N
for string in strings:
assert isinstance(string, bytes)
assert len(string) > 0
with torch.no_grad():
rv = net.decompress(strings_list, shape)
assert "x_hat" in rv
x_hat = rv["x_hat"]
assert x_hat.size() == x.size()
mse = torch.mean((x - x_hat) ** 2)
psnr = -10 * torch.log10(mse).item()
assert 35 < psnr < 41
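# Editor's sketch (not part of the original tests): the same compress/decompress round
# trip outside pytest, using the keys the assertions above rely on.
def _roundtrip_sketch():
    net = models["bmshj2018-factorized"](quality=1, metric="mse", pretrained=True).eval()
    x = torch.rand(1, 3, 256, 256)
    with torch.no_grad():
        out = net.compress(x)
        rec = net.decompress(out["strings"], out["shape"])
    return rec["x_hat"].shape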
| 31.30303
| 75
| 0.612294
|
60ca8977a15642dc37560e5e885d4c6ee6213fc4
| 6,578
|
py
|
Python
|
controllers/dc.py
|
rommelsotto/eden
|
0e7326c559aa8d72f2934803b040321f597f45c4
|
[
"MIT"
] | 27
|
2015-01-13T23:52:49.000Z
|
2016-01-11T08:08:45.000Z
|
controllers/dc.py
|
rommelsotto/eden
|
0e7326c559aa8d72f2934803b040321f597f45c4
|
[
"MIT"
] | 140
|
2015-01-01T07:38:10.000Z
|
2016-03-01T10:51:54.000Z
|
controllers/dc.py
|
rommelsotto/eden
|
0e7326c559aa8d72f2934803b040321f597f45c4
|
[
"MIT"
] | 109
|
2015-01-02T17:36:17.000Z
|
2016-02-07T14:49:28.000Z
|
# -*- coding: utf-8 -*-
"""
Data Collection:
- a front-end UI to manage Assessments which uses the Dynamic Tables back-end
"""
module = request.controller
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
module_name = settings.modules[module].name_nice
response.title = module_name
return {"module_name": module_name,
}
# -----------------------------------------------------------------------------
def template():
""" RESTful CRUD controller """
from s3 import FS
s3.filter = FS("master") == "dc_response"
def prep(r):
if r.record and r.component_name == "question":
# If the template has responses then we should make the Questions read-only
# @ToDo: Allow Editing unanswered questions?
rtable = s3db.dc_response
query = (rtable.template_id == r.id) & \
(rtable.deleted == False)
if db(query).select(rtable.id,
limitby=(0, 1)
):
s3db.configure("dc_question",
deletable = False,
editable = False,
)
# Add JS
scripts_append = s3.scripts.append
if s3.debug:
scripts_append("/%s/static/scripts/tag-it.js" % appname)
scripts_append("/%s/static/scripts/S3/s3.dc_question.js" % appname)
else:
scripts_append("/%s/static/scripts/tag-it.min.js" % appname)
scripts_append("/%s/static/scripts/S3/s3.dc_question.min.js" % appname)
# Add CSS
s3.stylesheets.append("plugins/jquery.tagit.css")
# Open in native controller to access Translations tabs
s3db.configure("dc_question",
linkto = lambda record_id: URL(f="question", args=[record_id, "read"]),
linkto_update = lambda record_id: URL(f="question", args=[record_id, "update"]),
)
return True
s3.prep = prep
return s3_rest_controller(rheader = s3db.dc_rheader)
# -----------------------------------------------------------------------------
def question():
"""
RESTful CRUD controller
- used for imports & to manage translations
"""
return s3_rest_controller(rheader = s3db.dc_rheader,
)
# -----------------------------------------------------------------------------
def question_l10n():
"""
RESTful CRUD controller
- used for imports
"""
return s3_rest_controller(#rheader = s3db.dc_rheader,
)
# -----------------------------------------------------------------------------
def target():
""" RESTful CRUD controller """
# Pre-process
def prep(r):
if r.interactive:
if r.component_name == "response":
# Default component values from master record
record = r.record
table = s3db.dc_response
table.language.default = record.language
table.location_id.default = record.location_id
f = table.template_id
f.default = record.template_id
f.writable = False
f.comment = None
# Open in native controller (cannot just set native as can't call this 'response')
s3db.configure("dc_response",
linkto = lambda record_id: URL(f="respnse", args=[record_id, "read"]),
linkto_update = lambda record_id: URL(f="respnse", args=[record_id, "update"]),
)
return True
s3.prep = prep
return s3_rest_controller(rheader = s3db.dc_rheader,
)
# -----------------------------------------------------------------------------
def respnse(): # Cannot call this 'response' or it will clobber the global
""" RESTful CRUD controller """
# All templates use the same component name for answers so need to add the right component manually
try:
response_id = int(request.args(0))
except:
# Multiple record method
pass
else:
dtable = s3db.s3_table
rtable = s3db.dc_response
ttable = s3db.dc_template
query = (rtable.id == response_id) & \
(rtable.template_id == ttable.id) & \
(ttable.table_id == dtable.id)
template = db(query).select(dtable.name,
limitby=(0, 1),
).first()
try:
dtablename = template.name
except:
# Old URL?
pass
else:
components = {dtablename: {"name": "answer",
"joinby": "response_id",
"multiple": False,
}
}
s3db.add_components("dc_response", **components)
# Pre-process
def prep(r):
if r.interactive:
if r.component_name == "answer":
# CRUD Strings
tablename = r.component.tablename
s3.crud_strings[tablename] = Storage(
label_create = T("Create Responses"),
title_display = T("Response Details"),
title_list = T("Responses"),
title_update = T("Edit Response"),
label_list_button = T("List Responses"),
label_delete_button = T("Clear Response"),
msg_record_created = T("Response created"),
msg_record_modified = T("Response updated"),
msg_record_deleted = T("Response deleted"),
msg_list_empty = T("No Responses currently defined"),
)
# Custom Form with Questions & Subheadings sorted correctly
s3db.dc_answer_form(r, tablename)
return True
s3.prep = prep
return s3_rest_controller("dc", "response",
rheader = s3db.dc_rheader,
)
# END =========================================================================
| 36.342541
| 110
| 0.467923
|
92f119992f8dd0a6aa8d915b607484b2bfa3e39e
| 2,383
|
py
|
Python
|
redis/commands/json/__init__.py
|
davidylee/redis-py
|
8924504f9875d3c80733504d6f6923f5611ebf17
|
[
"MIT"
] | null | null | null |
redis/commands/json/__init__.py
|
davidylee/redis-py
|
8924504f9875d3c80733504d6f6923f5611ebf17
|
[
"MIT"
] | null | null | null |
redis/commands/json/__init__.py
|
davidylee/redis-py
|
8924504f9875d3c80733504d6f6923f5611ebf17
|
[
"MIT"
] | null | null | null |
# from typing import Optional
from json import JSONDecoder, JSONEncoder
# # from redis.client import Redis
from .helpers import bulk_of_jsons
from ..helpers import nativestr, delist
from .commands import JSONCommands
# from ..feature import AbstractFeature
class JSON(JSONCommands):
"""
Create a client for talking to json.
:param decoder:
:type json.JSONDecoder: An instance of json.JSONDecoder
:param encoder:
:type json.JSONEncoder: An instance of json.JSONEncoder
"""
def __init__(
self,
client,
decoder=JSONDecoder(),
encoder=JSONEncoder(),
):
"""
Create a client for talking to json.
:param decoder:
:type json.JSONDecoder: An instance of json.JSONDecoder
:param encoder:
:type json.JSONEncoder: An instance of json.JSONEncoder
"""
# Set the module commands' callbacks
self.MODULE_CALLBACKS = {
"JSON.CLEAR": int,
"JSON.DEL": int,
"JSON.FORGET": int,
"JSON.GET": self._decode,
"JSON.MGET": bulk_of_jsons(self._decode),
"JSON.SET": lambda r: r and nativestr(r) == "OK",
"JSON.NUMINCRBY": self._decode,
"JSON.NUMMULTBY": self._decode,
"JSON.TOGGLE": lambda b: b == b"true",
"JSON.STRAPPEND": int,
"JSON.STRLEN": int,
"JSON.ARRAPPEND": int,
"JSON.ARRINDEX": int,
"JSON.ARRINSERT": int,
"JSON.ARRLEN": int,
"JSON.ARRPOP": self._decode,
"JSON.ARRTRIM": int,
"JSON.OBJLEN": int,
"JSON.OBJKEYS": delist,
# "JSON.RESP": delist,
"JSON.DEBUG": int,
}
self.client = client
self.execute_command = client.execute_command
for key, value in self.MODULE_CALLBACKS.items():
self.client.set_response_callback(key, value)
self.__encoder__ = encoder
self.__decoder__ = decoder
def _decode(self, obj):
"""Get the decoder."""
if obj is None:
return obj
try:
return self.__decoder__.decode(obj)
except TypeError:
return self.__decoder__.decode(obj.decode())
def _encode(self, obj):
"""Get the encoder."""
return self.__encoder__.encode(obj)
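# Editor's sketch (not part of the original module): wrapping an existing Redis client.
# Assumes a server with the RedisJSON module loaded; using "$" as the root path is a
# RedisJSON v2 convention and is an assumption here.
def _usage_sketch(redis_client):
    j = JSON(redis_client)
    j.set("doc", "$", {"answer": 42})  # routed through the JSON.SET callback above
    return j.get("doc")                # decoded by self._decode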
| 28.035294
| 63
| 0.572388
|
f2a51fcaf7b1df1860738170e6f3358ddcdce4de
| 2,740
|
py
|
Python
|
playwithmpv/server.py
|
playallinmpv/playinmpv
|
49f2c8d2b04645e808d8780b0f2c9dd359d4f4ce
|
[
"MIT"
] | 9
|
2022-01-28T05:02:12.000Z
|
2022-03-26T06:54:53.000Z
|
playwithmpv/server.py
|
playallinmpv/playinmpv
|
49f2c8d2b04645e808d8780b0f2c9dd359d4f4ce
|
[
"MIT"
] | 4
|
2022-02-13T14:28:16.000Z
|
2022-03-14T19:18:00.000Z
|
playwithmpv/server.py
|
playallinmpv/playinmpv
|
49f2c8d2b04645e808d8780b0f2c9dd359d4f4ce
|
[
"MIT"
] | 1
|
2022-01-01T20:19:45.000Z
|
2022-01-01T20:19:45.000Z
|
# coding: utf-8
import os, sys
from flask import Flask
from flask_cors import CORS
from webargs import fields
from webargs.flaskparser import use_args
# --http-header-fields=
DEFAULT_USER_AGENT = (
'"referer:https://www.bilibili.com/","user-agent: Mozil'
"la/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (K"
"HTML, like Gecko) Chrome/90.0.4430.72 Safari/537.36 Edg/90"
'.0.818.39"'
)
video_args = {
"dir": fields.Str(required=True),
"referer": fields.Str(required=True),
"user-agent": fields.Str(),
"urls": fields.List(fields.Str()),
"titles": fields.List(fields.Str()),
}
app = Flask(__name__)
CORS(app, resources=r"/*") # To fix ajax CORS problem
def create_valid_url(url: str) -> str:
"""
Generate a video direct play url.
"""
return url
@app.route("/", methods=["GET", "POST"])
@use_args(video_args)
def play_with_mpv(args):
"""
1. Handle requests and parse json file.
2. Generate mpv args and mpv command.
3. Execute mpv command.
"""
if args["user-agent"] is None:
args["user-agent"] = DEFAULT_USER_AGENT
args["urls"] = map(create_valid_url, args["urls"])
playlist_file = os.path.join(os.environ["HOME"], "_tmp_playlist.m3u")
with open(playlist_file, "w", encoding="utf-8") as f:
# TODO: decide later whether the temporary playlist file should be deleted.
f.write("#EXTM3U\n")
f.write("#Generated by Wullic-videoanywhere\n")
for title, url in zip(args["titles"], args["urls"]):
f.write("#EXTINF:-1," + title + "\n")
f.write(url + "\n")
mpv_httpd_header = '--http-header-fields="referer:{0}","user-agent:{1}"'.format(
args["referer"], args["user-agent"]
)
mpv_cmd = " ".join([args["dir"], mpv_httpd_header, "--playlist=" + playlist_file])
print(mpv_cmd)
print("\n".join(args["titles"]))
ret = os.system(mpv_cmd)
if ret != 0:
return {
"success": False,
"message": "MPV: command not found. Ensure your mpv path",
}
else:
return {"success": True, "message": ""}
def main():
"""
Command-line entry point: parse arguments and run the server.
"""
if sys.version_info < (3, 8):
raise ValueError("This script is only for use with Python 3.8 or later")
else:
import argparse
parser = argparse.ArgumentParser(
description="Start a server to support " "playing video urls with MPV."
)
parser.add_argument(
"-p",
"--port",
default=50000,
dest="port",
help="choose the port that server " "listens to (default: 50000)",
)
args = parser.parse_args()
app.run(port=args.port)
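# Editor's sketch (not part of the original module): what a client would POST to this
# server. Field names follow `video_args` above; sending them as a JSON body assumes
# the default webargs location, and the URL/port match the defaults in main().
def _client_sketch():
    import requests  # hypothetical client-side dependency
    payload = {
        "dir": "mpv",  # path to the mpv binary
        "referer": "https://www.bilibili.com/",
        "urls": ["https://example.com/video.m3u8"],
        "titles": ["demo clip"],
    }
    return requests.post("http://127.0.0.1:50000/", json=payload).json()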
| 29.148936
| 86
| 0.593796
|
56f25bc60969373cf93c21f91e309918ab7e6548
| 13,194
|
py
|
Python
|
tools/check_format.py
|
mju/envoy
|
93d96b52b47ac2f296379c15b03946cb33c58252
|
[
"Apache-2.0"
] | null | null | null |
tools/check_format.py
|
mju/envoy
|
93d96b52b47ac2f296379c15b03946cb33c58252
|
[
"Apache-2.0"
] | 1
|
2018-07-23T15:14:27.000Z
|
2018-07-24T21:53:29.000Z
|
tools/check_format.py
|
mju/envoy
|
93d96b52b47ac2f296379c15b03946cb33c58252
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import argparse
import common
import fileinput
import multiprocessing
import os
import os.path
import re
import subprocess
import sys
import traceback
EXCLUDED_PREFIXES = ("./generated/", "./thirdparty/", "./build", "./.git/",
"./bazel-", "./bazel/external", "./.cache",
"./tools/testdata/check_format/")
SUFFIXES = (".cc", ".h", "BUILD", ".md", ".rst", ".proto")
DOCS_SUFFIX = (".md", ".rst")
PROTO_SUFFIX = (".proto")
# Files in these paths can make reference to protobuf stuff directly
GOOGLE_PROTOBUF_WHITELIST = ('ci/prebuilt', 'source/common/protobuf', 'api/test')
CLANG_FORMAT_PATH = os.getenv("CLANG_FORMAT", "clang-format-5.0")
BUILDIFIER_PATH = os.getenv("BUILDIFIER_BIN", "$GOPATH/bin/buildifier")
ENVOY_BUILD_FIXER_PATH = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])), "envoy_build_fixer.py")
HEADER_ORDER_PATH = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])), "header_order.py")
SUBDIR_SET = set(common.includeDirOrder())
INCLUDE_ANGLE = "#include <"
INCLUDE_ANGLE_LEN = len(INCLUDE_ANGLE)
PROTOBUF_TYPE_ERRORS = {
# Well-known types should be referenced from the ProtobufWkt namespace.
"Protobuf::Any": "ProtobufWkt::Any",
"Protobuf::Empty": "ProtobufWkt::Empty",
"Protobuf::ListValue": "ProtobufWkt::ListValue",
"Protobuf::NULL_VALUE": "ProtobufWkt::NULL_VALUE",
"Protobuf::StringValue": "ProtobufWkt::StringValue",
"Protobuf::Struct": "ProtobufWkt::Struct",
"Protobuf::Value": "ProtobufWkt::Value",
# Maps including strings should use the protobuf string types.
"Protobuf::MapPair<std::string": "Protobuf::MapPair<Envoy::ProtobufTypes::String",
# Other common mis-namespacing of protobuf types.
"ProtobufWkt::Map": "Protobuf::Map",
"ProtobufWkt::MapPair": "Protobuf::MapPair",
"ProtobufUtil::MessageDifferencer": "Protobuf::util::MessageDifferencer"
}
def checkNamespace(file_path):
with open(file_path) as f:
text = f.read()
if not re.search(r'^\s*namespace\s+Envoy\s*{', text, re.MULTILINE) and \
not 'NOLINT(namespace-envoy)' in text:
return ["Unable to find Envoy namespace or NOLINT(namespace-envoy) for file: %s" % file_path]
return []
# To avoid breaking the Lyft import, we just check for path inclusion here.
def whitelistedForProtobufDeps(file_path):
return (file_path.endswith(PROTO_SUFFIX) or
any(path_segment in file_path for path_segment in GOOGLE_PROTOBUF_WHITELIST))
def findSubstringAndReturnError(pattern, file_path, error_message):
with open(file_path) as f:
text = f.read()
if pattern in text:
error_messages = [file_path + ': ' + error_message]
for i, line in enumerate(text.splitlines()):
if pattern in line:
error_messages.append(" %s:%s" % (file_path, i + 1))
return error_messages
return []
def isApiFile(file_path):
return file_path.startswith(args.api_prefix)
def isBuildFile(file_path):
basename = os.path.basename(file_path)
if basename in {"BUILD", "BUILD.bazel"} or basename.endswith(".BUILD"):
return True
return False
def hasInvalidAngleBracketDirectory(line):
if not line.startswith(INCLUDE_ANGLE):
return False
path = line[INCLUDE_ANGLE_LEN:]
slash = path.find("/")
if slash == -1:
return False
subdir = path[0:slash]
return subdir in SUBDIR_SET
def checkFileContents(file_path, checker):
error_messages = []
for line_number, line in enumerate(fileinput.input(file_path)):
def reportError(message):
error_messages.append("%s:%d: %s" % (file_path, line_number + 1, message))
checker(line, file_path, reportError)
return error_messages
DOT_MULTI_SPACE_REGEX = re.compile('\\. +')
def fixSourceLine(line):
# Strip double space after '.' This may prove overenthusiastic and need to
# be restricted to comments and metadata files but works for now.
line = re.sub(DOT_MULTI_SPACE_REGEX, '. ', line)
if hasInvalidAngleBracketDirectory(line):
line = line.replace('<', '"').replace(">", '"')
# Fix incorrect protobuf namespace references.
for invalid_construct, valid_construct in PROTOBUF_TYPE_ERRORS.items():
line = line.replace(invalid_construct, valid_construct)
return line
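# Sketch of the rewrites performed above (assuming "common" is in SUBDIR_SET):
#   '#include <common/foo.h>'  becomes  '#include "common/foo.h"'
#   'Protobuf::Any a;'         becomes  'ProtobufWkt::Any a;'
# and any run of spaces following a '.' is collapsed to a single space.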
def checkSourceLine(line, file_path, reportError):
# Check fixable errors. These may have been fixed already.
  if line.find(".  ") != -1:
reportError("over-enthusiastic spaces")
if hasInvalidAngleBracketDirectory(line):
reportError("envoy includes should not have angle brackets")
for invalid_construct, valid_construct in PROTOBUF_TYPE_ERRORS.items():
if invalid_construct in line:
reportError("incorrect protobuf type reference %s; "
"should be %s" % (invalid_construct, valid_construct))
# Some errors cannot be fixed automatically, and actionable, consistent,
# navigable messages should be emitted to make it easy to find and fix
# the errors by hand.
if not whitelistedForProtobufDeps(file_path):
if '"google/protobuf' in line or "google::protobuf" in line:
reportError("unexpected direct dependency on google.protobuf, use "
"the definitions in common/protobuf/protobuf.h instead.")
if line.startswith('#include <mutex>') or line.startswith('#include <condition_variable'):
# We don't check here for std::mutex because that may legitimately show up in
# comments, for example this one.
reportError("Don't use <mutex> or <condition_variable*>, switch to "
"Thread::MutexBasicLockable in source/common/common/thread.h")
def checkBuildLine(line, file_path, reportError):
if not whitelistedForProtobufDeps(file_path) and '"protobuf"' in line:
reportError("unexpected direct external dependency on protobuf, use "
"//source/common/protobuf instead.")
if envoy_build_rule_check and '@envoy//' in line:
reportError("Superfluous '@envoy//' prefix")
def fixBuildLine(line):
if envoy_build_rule_check:
line = line.replace('@envoy//', '//')
return line
def fixBuildPath(file_path):
for line in fileinput.input(file_path, inplace=True):
sys.stdout.write(fixBuildLine(line))
error_messages = []
# TODO(htuch): Add API specific BUILD fixer script.
if not isApiFile(file_path):
if os.system(
"%s %s %s" % (ENVOY_BUILD_FIXER_PATH, file_path, file_path)) != 0:
error_messages += ["envoy_build_fixer rewrite failed for file: %s" % file_path]
if os.system("%s -mode=fix %s" % (BUILDIFIER_PATH, file_path)) != 0:
error_messages += ["buildifier rewrite failed for file: %s" % file_path]
return error_messages
def checkBuildPath(file_path):
error_messages = []
if not isApiFile(file_path):
command = "%s %s | diff %s -" % (ENVOY_BUILD_FIXER_PATH, file_path, file_path)
error_messages += executeCommand(
command, "envoy_build_fixer check failed", file_path)
command = "cat %s | %s -mode=fix | diff %s -" % (file_path, BUILDIFIER_PATH, file_path)
error_messages += executeCommand(command, "buildifier check failed", file_path)
error_messages += checkFileContents(file_path, checkBuildLine)
return error_messages
def fixSourcePath(file_path):
for line in fileinput.input(file_path, inplace=True):
sys.stdout.write(fixSourceLine(line))
error_messages = []
if not file_path.endswith(DOCS_SUFFIX) and not file_path.endswith(PROTO_SUFFIX):
error_messages += fixHeaderOrder(file_path)
error_messages += clangFormat(file_path)
return error_messages
def checkSourcePath(file_path):
error_messages = checkFileContents(file_path, checkSourceLine)
if not file_path.endswith(DOCS_SUFFIX) and not file_path.endswith(PROTO_SUFFIX):
error_messages += checkNamespace(file_path)
command = ("%s %s | diff %s -" % (HEADER_ORDER_PATH, file_path, file_path))
error_messages += executeCommand(command, "header_order.py check failed", file_path)
command = ("%s %s | diff %s -" % (CLANG_FORMAT_PATH, file_path, file_path))
error_messages += executeCommand(command, "clang-format check failed", file_path)
return error_messages
# Example target outputs are:
# - "26,27c26"
# - "12,13d13"
# - "7a8,9"
def executeCommand(command, error_message, file_path,
regex=re.compile(r"^(\d+)[a|c|d]?\d*(?:,\d+[a|c|d]?\d*)?$")):
try:
output = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT).strip()
if output:
return output.split("\n")
return []
except subprocess.CalledProcessError as e:
if (e.returncode != 0 and e.returncode != 1):
return ["ERROR: something went wrong while executing: %s" % e.cmd]
# In case we can't find any line numbers, record an error message first.
error_messages = ["%s for file: %s" % (error_message, file_path)]
for line in e.output.splitlines():
for num in regex.findall(line):
error_messages.append(" %s:%s" % (file_path, num))
return error_messages
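# Note on the line-number extraction above (based on diff's normal output
# format): hunk headers such as "26,27c26" or "7a8,9" begin with the affected
# line number(s) in the first file passed to diff, and that leading number is
# what the regex captures and reports alongside file_path.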
def fixHeaderOrder(file_path):
command = "%s --rewrite %s" % (HEADER_ORDER_PATH, file_path)
if os.system(command) != 0:
return ["header_order.py rewrite error: %s" % (file_path)]
return []
def clangFormat(file_path):
command = "%s -i %s" % (CLANG_FORMAT_PATH, file_path)
if os.system(command) != 0:
return ["clang-format rewrite error: %s" % (file_path)]
return []
def checkFormat(file_path):
if file_path.startswith(EXCLUDED_PREFIXES):
return []
if not file_path.endswith(SUFFIXES):
return []
error_messages = []
# Apply fixes first, if asked, and then run checks. If we wind up attempting to fix
# an issue, but there's still an error, that's a problem.
try_to_fix = operation_type == "fix"
if isBuildFile(file_path):
if try_to_fix:
error_messages += fixBuildPath(file_path)
error_messages += checkBuildPath(file_path)
else:
if try_to_fix:
error_messages += fixSourcePath(file_path)
error_messages += checkSourcePath(file_path)
if error_messages:
return ["From %s" % file_path] + error_messages
return error_messages
def checkFormatReturnTraceOnError(file_path):
"""Run checkFormat and return the traceback of any exception."""
try:
return checkFormat(file_path)
except:
return traceback.format_exc().split("\n")
def checkFormatVisitor(arg, dir_name, names):
"""Run checkFormat in parallel for the given files.
Args:
arg: a tuple (pool, result_list) for starting tasks asynchronously.
dir_name: the parent directory of the given files.
names: a list of file names.
"""
# Unpack the multiprocessing.Pool process pool and list of results. Since
# python lists are passed as references, this is used to collect the list of
# async results (futures) from running checkFormat and passing them back to
# the caller.
pool, result_list = arg
for file_name in names:
result = pool.apply_async(checkFormatReturnTraceOnError, args=(dir_name + "/" + file_name,))
result_list.append(result)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Check or fix file format.')
parser.add_argument('operation_type', type=str, choices=['check', 'fix'],
help="specify if the run should 'check' or 'fix' format.")
parser.add_argument('target_path', type=str, nargs="?", default=".",
help="specify the root directory for the script to recurse over. Default '.'.")
parser.add_argument('--add-excluded-prefixes', type=str, nargs="+",
help="exclude additional prefixes.")
parser.add_argument('-j', '--num-workers', type=int, default=multiprocessing.cpu_count(),
help="number of worker processes to use; defaults to one per core.")
parser.add_argument('--api-prefix', type=str, default='./api/', help="path of the API tree")
parser.add_argument('--skip_envoy_build_rule_check', action='store_true',
help="Skip checking for '@envoy//' prefix in build rules.")
args = parser.parse_args()
operation_type = args.operation_type
target_path = args.target_path
envoy_build_rule_check = not args.skip_envoy_build_rule_check
if args.add_excluded_prefixes:
EXCLUDED_PREFIXES += tuple(args.add_excluded_prefixes)
if os.path.isfile(target_path):
error_messages = checkFormat("./" + target_path)
else:
pool = multiprocessing.Pool(processes=args.num_workers)
results = []
# For each file in target_path, start a new task in the pool and collect the
# results (results is passed by reference, and is used as an output).
os.path.walk(target_path, checkFormatVisitor, (pool, results))
# Close the pool to new tasks, wait for all of the running tasks to finish,
# then collect the error messages.
pool.close()
pool.join()
error_messages = sum((r.get() for r in results), [])
if error_messages:
for e in error_messages:
print "ERROR: %s" % e
print "ERROR: check format failed. run 'tools/check_format.py fix'"
sys.exit(1)
if operation_type == "check":
print "PASS"
| 40.103343
| 101
| 0.695771
|
dfa9cf2de9bd9719b08e3fd434d2263678660af8
| 19,331
|
py
|
Python
|
train_student.py
|
EricZsy/BalancedKnowledgeDistillation
|
88a2de840a3fc6eb2ee881c729f293b8e78714aa
|
[
"MIT"
] | 22
|
2021-04-23T02:52:33.000Z
|
2022-03-23T03:28:50.000Z
|
train_student.py
|
EricZsy/BalancedKnowledgeDistillation
|
88a2de840a3fc6eb2ee881c729f293b8e78714aa
|
[
"MIT"
] | null | null | null |
train_student.py
|
EricZsy/BalancedKnowledgeDistillation
|
88a2de840a3fc6eb2ee881c729f293b8e78714aa
|
[
"MIT"
] | 3
|
2021-06-10T11:46:43.000Z
|
2022-03-31T11:30:08.000Z
|
import argparse
import os
import random
import time
import warnings
import sys
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.multiprocessing as mp
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import models as teacher_models
import models as student_models
from tensorboardX import SummaryWriter
from sklearn.metrics import confusion_matrix
from utils import *
from imbalance_cifar import IMBALANCECIFAR10, IMBALANCECIFAR100
from losses import *
model_names = sorted(name for name in teacher_models.__dict__
if name.islower() and not name.startswith("__")
and callable(teacher_models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch Cifar Training')
parser.add_argument('--dataset', default='cifar10', help='dataset setting')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet32',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet32)')
parser.add_argument('--loss_type', default="BKD", type=str, help='loss type')
parser.add_argument('--imb_type', default="exp", type=str, help='imbalance type')
parser.add_argument('--imb_factor', default=0.01, type=float, help='imbalance factor')
parser.add_argument('--train_rule', default='None', type=str, help='data sampling strategy for train loader')
parser.add_argument('--rand_number', default=0, type=int, help='fix random number for data sampling')
parser.add_argument('--exp_str', default='0', type=str, help='number to indicate which experiment it is')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=200, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=128, type=int,
metavar='N',
help='mini-batch size')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate', dest='lr')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--wd', '--weight-decay', default=2e-4, type=float,
                    metavar='W', help='weight decay (default: 2e-4)',
dest='weight_decay')
parser.add_argument('-p', '--print-freq', default=10, type=int,
metavar='N', help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('--seed', default=None, type=int,
help='seed for initializing training. ')
parser.add_argument('--gpu', default=0, type=int,
help='GPU id to use.')
parser.add_argument('--root_log', type=str, default='student_log')
parser.add_argument('--root_model', type=str, default='student_checkpoint')
parser.add_argument('--T', '--temperature', default=2.0, type=float,
metavar='N',
help='distillation temperature')
parser.add_argument('--alpha', default=1.0, type=float, metavar='M',
help='alpha')
parser.add_argument('--model_dir', type=str, default=None)
best_acc1 = 0
def main():
args = parser.parse_args()
if not os.path.exists(args.root_log):
os.mkdir(args.root_log)
args.root_log = os.path.join(args.root_log, args.dataset)
if not os.path.exists(args.root_log):
os.mkdir(args.root_log)
args.store_name = '_'.join(
[args.dataset, args.arch, args.loss_type, args.train_rule, args.imb_type,
str(args.imb_factor), 'T' + str(args.T), args.exp_str])
prepare_folders(args)
if args.seed is not None:
random.seed(args.seed)
torch.manual_seed(args.seed)
cudnn.deterministic = True
warnings.warn('You have chosen to seed training. '
'This will turn on the CUDNN deterministic setting, '
'which can slow down your training considerably! '
'You may see unexpected behavior when restarting '
'from checkpoints.')
os.environ["CUDA_VISIBLE_DEVICES"] = str(args.gpu)
args.gpu = 0
if args.gpu is not None:
warnings.warn('You have chosen a specific GPU. This will completely '
'disable data parallelism.')
ngpus_per_node = torch.cuda.device_count()
main_worker(args.gpu, ngpus_per_node, args)
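# Note added for clarity (mirrors the per-class weighting code in main_worker
# below, in the spirit of the "effective number of samples" reweighting): for
# a class with n_c training examples the weight is
#     w_c = (1 - beta) / (1 - beta ** n_c),   with beta = 0.9999,
# and the weights are then rescaled so they sum to the number of classes.
# 'Reweight' applies this from the first epoch, while 'DRW' keeps uniform
# weights until epoch 160 and only then switches to the weighted form.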
def main_worker(gpu, ngpus_per_node, args):
global best_acc1
args.gpu = gpu
if args.gpu is not None:
print("Use GPU: {} for training".format(args.gpu))
# create model
print("=> creating model '{}'".format(args.arch))
num_classes = 100 if args.dataset == 'cifar100' else 10
use_norm = True if args.loss_type == 'LDAM' else False
if args.dataset == 'cifar10':
args.train_rule = 'DRW'
else:
args.train_rule = 'Reweight'
teacher_model = teacher_models.__dict__[args.arch](num_classes=num_classes, use_norm=use_norm)
student_model = student_models.__dict__[args.arch](num_classes=num_classes, use_norm=use_norm)
teacher_model = load_network(teacher_model, args)
args.num_classes = num_classes
if args.gpu is not None:
torch.cuda.set_device(args.gpu)
teacher_model = teacher_model.to(args.gpu)
student_model = student_model.to(args.gpu)
else:
# DataParallel will divide and allocate batch_size to all available GPUs
teacher_model = torch.nn.DataParallel(teacher_model).cuda()
student_model = torch.nn.DataParallel(student_model).cuda()
optimizer = torch.optim.SGD(student_model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
# optionally resume from a checkpoint
if args.resume:
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume, map_location='cuda:0')
args.start_epoch = checkpoint['epoch']
best_acc1 = checkpoint['best_acc1']
if args.gpu is not None:
# best_acc1 may be from a checkpoint from a different GPU
best_acc1 = best_acc1.to(args.gpu)
student_model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume, checkpoint['epoch']))
else:
print("=> no checkpoint found at '{}'".format(args.resume))
cudnn.benchmark = True
# Data loading code
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_val = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
if args.dataset == 'cifar10':
train_dataset = IMBALANCECIFAR10(root='./data', imb_type=args.imb_type, imb_factor=args.imb_factor,
rand_number=args.rand_number, train=True, download=True,
transform=transform_train)
val_dataset = datasets.CIFAR10(root='./data', train=False, download=True, transform=transform_val)
elif args.dataset == 'cifar100':
train_dataset = IMBALANCECIFAR100(root='./data', imb_type=args.imb_type, imb_factor=args.imb_factor,
rand_number=args.rand_number, train=True, download=True,
transform=transform_train)
val_dataset = datasets.CIFAR100(root='./data', train=False, download=True, transform=transform_val)
else:
warnings.warn('Dataset is not listed')
return
cls_num_list = train_dataset.get_cls_num_list()
print('cls num list:')
print(cls_num_list)
args.cls_num_list = cls_num_list
train_sampler = None
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=(train_sampler is None),
num_workers=args.workers, pin_memory=True, sampler=train_sampler)
val_loader = torch.utils.data.DataLoader(
val_dataset, batch_size=100, shuffle=False,
num_workers=args.workers, pin_memory=True)
# init log for training
log_training = open(os.path.join(args.root_log, args.store_name, 'log_train.csv'), 'w')
log_testing = open(os.path.join(args.root_log, args.store_name, 'log_test.csv'), 'w')
with open(os.path.join(args.root_log, args.store_name, 'args.txt'), 'w') as f:
f.write(str(args))
tf_writer = SummaryWriter(log_dir=os.path.join(args.root_log, args.store_name))
for epoch in range(args.start_epoch, args.epochs):
adjust_learning_rate(optimizer, epoch, args)
if args.train_rule == 'None':
train_sampler = None
per_cls_weights = None
elif args.train_rule == 'Resample':
train_sampler = ImbalancedDatasetSampler(train_dataset)
per_cls_weights = None
elif args.train_rule == 'Reweight':
train_sampler = None
beta = 0.9999
effective_num = 1.0 - np.power(beta, cls_num_list)
per_cls_weights = (1.0 - beta) / np.array(effective_num)
per_cls_weights = per_cls_weights / np.sum(per_cls_weights) * len(cls_num_list)
per_cls_weights = torch.FloatTensor(per_cls_weights).cuda(args.gpu)
elif args.train_rule == 'DRW':
train_sampler = None
idx = epoch // 160
betas = [0, 0.9999]
effective_num = 1.0 - np.power(betas[idx], cls_num_list)
per_cls_weights = (1.0 - betas[idx]) / np.array(effective_num)
per_cls_weights = per_cls_weights / np.sum(per_cls_weights) * len(cls_num_list)
per_cls_weights = torch.FloatTensor(per_cls_weights).cuda(args.gpu)
else:
warnings.warn('Sample rule is not listed')
if args.loss_type == 'CE':
criterion = nn.CrossEntropyLoss(weight=per_cls_weights).cuda(args.gpu)
elif args.loss_type == 'KD':
criterion = KDLoss(cls_num_list=cls_num_list, T=args.T, weight=per_cls_weights).cuda(args.gpu)
elif args.loss_type == 'BKD':
criterion = BKDLoss(cls_num_list=cls_num_list, T=args.T, weight=per_cls_weights).cuda(args.gpu)
elif args.loss_type == 'LDAM':
criterion = LDAMLoss(cls_num_list=cls_num_list, max_m=0.5, s=30, weight=per_cls_weights).cuda(args.gpu)
elif args.loss_type == 'Focal':
criterion = FocalLoss(weight=per_cls_weights, gamma=1).cuda(args.gpu)
else:
warnings.warn('Loss type is not listed')
return
# train for one epoch
train(train_loader, teacher_model, student_model, criterion, optimizer, epoch, args, log_training, tf_writer)
# evaluate on validation set
acc1 = validate(val_loader, teacher_model, student_model, criterion, epoch, args, log_testing, tf_writer)
# remember best acc@1 and save checkpoint
is_best = acc1 > best_acc1
best_acc1 = max(acc1, best_acc1)
tf_writer.add_scalar('acc/test_top1_best', best_acc1, epoch)
output_best = 'Best Prec@1: %.3f\n' % (best_acc1)
print(output_best)
log_testing.write(output_best + '\n')
log_testing.flush()
save_checkpoint(args, {
'epoch': epoch + 1,
'arch': args.arch,
'state_dict': student_model.state_dict(),
'best_acc1': best_acc1,
'optimizer': optimizer.state_dict(),
}, is_best)
def train(train_loader, teacher_model, student_model, criterion, optimizer, epoch, args, log, tf_writer):
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
# switch to train mode
student_model.train()
teacher_model.eval()
end = time.time()
for i, (input, target) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
if args.gpu is not None:
input = input.cuda(args.gpu, non_blocking=True)
target = target.cuda(args.gpu, non_blocking=True)
# compute output
with torch.no_grad():
teacher_output = teacher_model(input)
output = student_model(input)
alpha = args.alpha
if 'KD' in args.loss_type:
loss, kd = criterion(output, teacher_output, target, alpha)
else:
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(acc1[0], input.size(0))
top5.update(acc5[0], input.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
output = ('Epoch: [{0}][{1}/{2}], lr: {lr:.5f}\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
'Prec@5 {top5.val:.3f} ({top5.avg:.3f})\t'.format(
# 'KDLoss {kd_loss.val:.4f} ({kd_loss.avg:.4f})\t'.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses, top1=top1, top5=top5, #kd_loss=kd_loss,
lr=optimizer.param_groups[-1]['lr'] * 0.1)) # TODO
print(output)
log.write(output + '\n')
log.flush()
tf_writer.add_scalar('loss/train', losses.avg, epoch)
tf_writer.add_scalar('acc/train_top1', top1.avg, epoch)
tf_writer.add_scalar('acc/train_top5', top5.avg, epoch)
tf_writer.add_scalar('lr', optimizer.param_groups[-1]['lr'], epoch)
def validate(val_loader, teacher_model, student_model, criterion, epoch, args, log=None, tf_writer=None, flag='val'):
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
# switch to evaluate mode
teacher_model.eval()
student_model.eval()
all_preds = []
all_targets = []
with torch.no_grad():
end = time.time()
for i, (input, target) in enumerate(val_loader):
if args.gpu is not None:
input = input.cuda(args.gpu, non_blocking=True)
target = target.cuda(args.gpu, non_blocking=True)
# compute output
with torch.no_grad():
teacher_output = teacher_model(input)
output = student_model(input)
alpha = 0
if 'KD' in args.loss_type:
loss, kd = criterion(output, teacher_output, target, alpha)
else:
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(acc1[0], input.size(0))
top5.update(acc5[0], input.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
_, pred = torch.max(output, 1)
all_preds.extend(pred.cpu().numpy())
all_targets.extend(target.cpu().numpy())
if i % args.print_freq == 0:
output = ('Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
i, len(val_loader), batch_time=batch_time, loss=losses,
top1=top1, top5=top5))
print(output)
cf = confusion_matrix(all_targets, all_preds).astype(float)
cls_cnt = cf.sum(axis=1)
cls_hit = np.diag(cf)
cls_acc = cls_hit / cls_cnt
output = ('{flag} Results: Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f} Loss {loss.avg:.5f}'
.format(flag=flag, top1=top1, top5=top5, loss=losses))
out_cls_acc = '%s Class Accuracy: %s' % (
flag, (np.array2string(cls_acc, separator=',', formatter={'float_kind': lambda x: "%.3f" % x})))
print(output)
print(out_cls_acc)
if log is not None:
log.write(output + '\n')
log.write(out_cls_acc + '\n')
log.flush()
tf_writer.add_scalar('loss/test_' + flag, losses.avg, epoch)
tf_writer.add_scalar('acc/test_' + flag + '_top1', top1.avg, epoch)
tf_writer.add_scalar('acc/test_' + flag + '_top5', top5.avg, epoch)
return top1.avg
def adjust_learning_rate(optimizer, epoch, args):
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
epoch = epoch + 1
if epoch <= 5:
lr = args.lr * epoch / 5
elif epoch > 180:
lr = args.lr * 0.0001
elif epoch > 160:
lr = args.lr * 0.01
else:
lr = args.lr
for param_group in optimizer.param_groups:
param_group['lr'] = lr
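# Worked example of the schedule above with the default --lr 0.1:
#   epochs 1-5    : linear warm-up 0.02, 0.04, 0.06, 0.08, 0.1
#   epochs 6-160  : 0.1
#   epochs 161-180: 0.001
#   epochs 181+   : 0.00001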
def load_network(network, args):
if args.dataset == 'cifar10':
save_path = 'teacher_checkpoint/cifar10_resnet32_CE_None_exp_0.01_0/ckpt.pth.tar'
else:
save_path = 'teacher_checkpoint/cifar100_resnet32_CE_None_exp_0.01_0/ckpt.pth.tar'
if args.model_dir:
save_path = os.path.join(args.model_dir, 'ckpt.pth.tar')
print(save_path)
# network = nn.DataParallel(network)
network.load_state_dict(torch.load(save_path, map_location='cuda:0')['state_dict'])
return network
if __name__ == '__main__':
main()
| 42.485714
| 117
| 0.614971
|
512159974e3c492a1a46938cc73cd8f6afdb7960
| 1,491
|
py
|
Python
|
tests/ut/python/nn/test_clip_by_norm.py
|
GuoSuiming/mindspore
|
48afc4cfa53d970c0b20eedfb46e039db2a133d5
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/ut/python/nn/test_clip_by_norm.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/ut/python/nn/test_clip_by_norm.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test clip_by_norm """
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from ..ut_filter import non_graph_engine
@non_graph_engine
def test_clip_by_norm():
clip_by_norm = nn.ClipByNorm()
x = Tensor(np.array([[-2, 0, 0], [0, 3, 4]]).astype(np.float32))
clip_norm = Tensor(np.array([1]).astype(np.float32))
clip_by_norm(x, clip_norm)
@non_graph_engine
def test_clip_by_norm_const():
class Network(nn.Cell):
def __init__(self):
super(Network, self).__init__()
self.norm_value = Tensor(np.array([1]).astype(np.float32))
self.clip = nn.ClipByNorm()
def construct(self, x):
return self.clip(x, self.norm_value)
net = Network()
x = Tensor(np.array([[-2, 0, 0], [0, 3, 4]]).astype(np.float32))
net(x)
| 33.133333
| 78
| 0.654594
|
72db131eb7bd4c2e4f925b590fb6a39beb9bb23c
| 3,028
|
py
|
Python
|
src/img_manipulation.py
|
gruunday/CURART
|
6e4265be2450c575e2a421aa6e0c167de993e784
|
[
"Apache-2.0"
] | null | null | null |
src/img_manipulation.py
|
gruunday/CURART
|
6e4265be2450c575e2a421aa6e0c167de993e784
|
[
"Apache-2.0"
] | 2
|
2019-05-26T11:08:47.000Z
|
2019-05-26T11:09:13.000Z
|
src/img_manipulation.py
|
gruunday/CURART
|
6e4265be2450c575e2a421aa6e0c167de993e784
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import cv2
import time
import glob, os
def get_keypoints(img):
'''
Takes an image and returns its keypoints and descriptors
img: cv2.imread object
returns: (matrix of key points, matrix of descriptors)
'''
# Load the sift algorithm
sift = cv2.xfeatures2d.SIFT_create()
# Find keypoints and descriptors of org image and image to compare
key_points, desc = sift.detectAndCompute(img, None)
return (key_points, desc)
def get_match(desc1, desc2):
'''
    Takes two matrices of image descriptors and uses a FLANN-based matcher
    to find correspondences between them
    desc1: matrix of descriptors
    desc2: matrix of descriptors
    returns: list of good matches (cv2.DMatch objects)
'''
# Load FlannBasedMatcher method used to find the matches between descriptors and 2 images
search_params = dict()
index_params = dict(algorithm=0, trees=5)
flann = cv2.FlannBasedMatcher(index_params, search_params)
# Find matches between 2 images and store in array
matches = flann.knnMatch(np.asarray(desc1,np.float32), np.asarray(desc2,np.float32), k=2)
good_points = []
ratio = 0.6
for m, n in matches:
if m.distance < ratio * n.distance:
good_points.append(m)
return good_points
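# The 0.6 threshold above is a ratio test (Lowe-style): a candidate match is
# kept only when its best-match distance is clearly smaller than the distance
# to the second-best candidate, which filters out ambiguous correspondences.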
def rotate_img(image):
'''
Will rotate a matrix corresponding to an image
image: Matrix corresponding to an image
returns: Rotated matrix corresponding to an image
'''
rows,cols,u = image.shape
rot_image = cv2.getRotationMatrix2D((cols/2,rows/2),90,1)
return cv2.warpAffine(image,rot_image,(cols,rows))
def load_img(img):
'''
Takes a file name and loads that image into an opencv matrix
img: String
returns: opencv matrix
'''
return cv2.imread(img)
def match_images(org_img, comp_img):
'''
Will attempt to match two images
    org_img: opencv matrix corresponding to the original image
    comp_img: opencv matrix corresponding to the image to compare
    returns: a string if the images are exactly identical, otherwise a float relevance score
'''
# Check images are exactly equal to each other
if org_img.shape == comp_img.shape:
diff = cv2.subtract(org_img, comp_img)
b, g, r = cv2.split(diff)
if cv2.countNonZero(b) == 0 and cv2.countNonZero(g) == 0 and cv2.countNonZero(r) == 0:
return "Images Exactly Identical"
# Will get the matching points of two images
# for all orientations and return a percentage
max_points = 0
    for i in range(0, 4):
org_keypoints = get_keypoints(org_img)
comp_keypoints = get_keypoints(comp_img)
key_points1, desc1 = org_keypoints
key_points2, desc2 = comp_keypoints
good_points = get_match(desc1, desc2)
max_points += min([len(desc1), len(desc2)])
relevence = (len(good_points) / max_points) * 4
comp_img = rotate_img(comp_img)
return relevence
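# Minimal usage sketch (assumes 'default.jpg' and 'upside.jpg' exist on disk;
# see the __main__ block below):
#   org = load_img('default.jpg')
#   comp = load_img('upside.jpg')
#   score = match_images(org, comp)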
if __name__ == '__main__':
    print(match_images(load_img('default.jpg'), load_img('upside.jpg')))
| 28.838095
| 94
| 0.667768
|
13243c662eaf1e83e96d19b7279580e0ac2b5b2d
| 9,545
|
py
|
Python
|
utest/model/test_statistics.py
|
MaciejWiczk/robotframework
|
33468e7090f9a925715076905992f5abf965d8f4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-10-26T07:31:30.000Z
|
2020-10-26T07:31:30.000Z
|
utest/model/test_statistics.py
|
MaciejWiczk/robotframework
|
33468e7090f9a925715076905992f5abf965d8f4
|
[
"ECL-2.0",
"Apache-2.0"
] | 40
|
2020-11-06T08:30:26.000Z
|
2022-03-02T10:06:51.000Z
|
utest/model/test_statistics.py
|
MaciejWiczk/robotframework
|
33468e7090f9a925715076905992f5abf965d8f4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-11-23T03:44:31.000Z
|
2020-11-23T03:44:31.000Z
|
import unittest
from robot.utils.asserts import assert_equal
from robot.model.statistics import Statistics
from robot.result import TestCase, TestSuite
def verify_stat(stat, name, passed, failed, combined=None,
id=None, elapsed=0):
assert_equal(stat.name, name, 'stat.name')
assert_equal(stat.passed, passed)
assert_equal(stat.failed, failed)
assert_equal(stat.total, passed + failed)
if hasattr(stat, 'combined'):
assert_equal(stat.combined, combined)
if hasattr(stat, 'id'):
assert_equal(stat.id, id)
assert_equal(stat.elapsed, elapsed)
def verify_suite(suite, name, id, passed, failed):
verify_stat(suite.stat, name, passed, failed, id=id)
def generate_suite():
suite = TestSuite(name='Root Suite')
s1 = suite.suites.create(name='First Sub Suite')
s2 = suite.suites.create(name='Second Sub Suite')
s11 = s1.suites.create(name='Sub Suite 1_1')
s12 = s1.suites.create(name='Sub Suite 1_2')
s13 = s1.suites.create(name='Sub Suite 1_3')
s21 = s2.suites.create(name='Sub Suite 2_1')
s11.tests = [TestCase(status='PASS'), TestCase(status='FAIL', tags=['t1'])]
s12.tests = [TestCase(status='PASS', tags=['t_1','t2',]),
TestCase(status='PASS', tags=['t1','smoke']),
TestCase(status='FAIL', tags=['t1','t2','t3','smoke'])]
s13.tests = [TestCase(status='PASS', tags=['t1','t 2','smoke'])]
s21.tests = [TestCase(status='FAIL', tags=['t3','Smoke'])]
return suite
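# For reference: the suite generated above contains 7 tests in total,
# 4 passing and 3 failing, which is what the statistics assertions in the
# test classes below expect.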
class TestStatisticsSimple(unittest.TestCase):
def setUp(self):
suite = TestSuite(name='Hello')
suite.tests = [TestCase(status='PASS'), TestCase(status='PASS'),
TestCase(status='FAIL')]
self.statistics = Statistics(suite)
def test_total(self):
verify_stat(self.statistics.total._stat, 'All Tests', 2, 1)
def test_suite(self):
verify_suite(self.statistics.suite, 'Hello', 's1', 2, 1)
def test_tags(self):
assert_equal(list(self.statistics.tags), [])
class TestStatisticsNotSoSimple(unittest.TestCase):
def setUp(self):
suite = generate_suite()
self.statistics = Statistics(suite, 2, ['t*','smoke'], ['t3'],
[('t? & smoke', ''), ('none NOT t1', 'a title')])
def test_total(self):
verify_stat(self.statistics.total._stat, 'All Tests', 4, 3)
def test_suite(self):
suite = self.statistics.suite
verify_suite(suite, 'Root Suite', 's1', 4, 3)
[s1, s2] = suite.suites
verify_suite(s1, 'Root Suite.First Sub Suite', 's1-s1', 4, 2)
verify_suite(s2, 'Root Suite.Second Sub Suite', 's1-s2', 0, 1)
assert_equal(len(s1.suites), 0)
assert_equal(len(s2.suites), 0)
def test_tags(self):
# Tag stats are tested more thoroughly in test_tagstatistics.py
tags = self.statistics.tags
verify_stat(tags.tags['smoke'], 'smoke', 2, 2)
verify_stat(tags.tags['t1'], 't1', 3, 2)
verify_stat(tags.tags['t2'], 't2', 2, 1)
expected = [('a title', 0, 0, 'none NOT t1'),
('t? & smoke', 2, 2, 't? & smoke'),
('smoke', 2, 2),
('t1', 3, 2),
('t2', 2, 1)]
assert_equal(len(list(tags)), len(expected))
for t, e in zip(tags, expected):
verify_stat(t, *e)
class TestSuiteStatistics(unittest.TestCase):
def test_all_levels(self):
suite = Statistics(generate_suite()).suite
verify_suite(suite, 'Root Suite', 's1', 4, 3)
[s1, s2] = suite.suites
verify_suite(s1, 'Root Suite.First Sub Suite', 's1-s1', 4, 2)
verify_suite(s2, 'Root Suite.Second Sub Suite', 's1-s2', 0, 1)
[s11, s12, s13] = s1.suites
verify_suite(s11, 'Root Suite.First Sub Suite.Sub Suite 1_1', 's1-s1-s1', 1, 1)
verify_suite(s12, 'Root Suite.First Sub Suite.Sub Suite 1_2', 's1-s1-s2', 2, 1)
verify_suite(s13, 'Root Suite.First Sub Suite.Sub Suite 1_3', 's1-s1-s3', 1, 0)
[s21] = s2.suites
verify_suite(s21, 'Root Suite.Second Sub Suite.Sub Suite 2_1', 's1-s2-s1', 0, 1)
def test_only_root_level(self):
suite = Statistics(generate_suite(), suite_stat_level=1).suite
verify_suite(suite, 'Root Suite', 's1', 4, 3)
assert_equal(len(suite.suites), 0)
def test_deeper_level(self):
PASS = TestCase(status='PASS')
FAIL = TestCase(status='FAIL')
suite = TestSuite(name='1')
suite.suites = [TestSuite(name='1'), TestSuite(name='2'), TestSuite(name='3')]
suite.suites[0].suites = [TestSuite(name='1')]
suite.suites[1].suites = [TestSuite(name='1'), TestSuite(name='2')]
suite.suites[2].tests = [PASS, FAIL]
suite.suites[0].suites[0].suites = [TestSuite(name='1')]
suite.suites[1].suites[0].tests = [PASS, PASS, PASS, FAIL]
suite.suites[1].suites[1].tests = [PASS, PASS, FAIL, FAIL]
suite.suites[0].suites[0].suites[0].tests = [FAIL, FAIL, FAIL]
s1 = Statistics(suite, suite_stat_level=3).suite
verify_suite(s1, '1', 's1', 6, 7)
[s11, s12, s13] = s1.suites
verify_suite(s11, '1.1', 's1-s1', 0, 3)
verify_suite(s12, '1.2', 's1-s2', 5, 3)
verify_suite(s13, '1.3', 's1-s3', 1, 1)
[s111] = s11.suites
verify_suite(s111, '1.1.1', 's1-s1-s1', 0, 3)
[s121, s122] = s12.suites
verify_suite(s121, '1.2.1', 's1-s2-s1', 3, 1)
verify_suite(s122, '1.2.2', 's1-s2-s2', 2, 2)
assert_equal(len(s111.suites), 0)
def test_iter_only_one_level(self):
[stat] = list(Statistics(generate_suite(), suite_stat_level=1).suite)
verify_stat(stat, 'Root Suite', 4, 3, id='s1')
def test_iter_also_sub_suites(self):
stats = list(Statistics(generate_suite()).suite)
verify_stat(stats[0], 'Root Suite', 4, 3, id='s1')
verify_stat(stats[1], 'Root Suite.First Sub Suite', 4, 2, id='s1-s1')
verify_stat(stats[2], 'Root Suite.First Sub Suite.Sub Suite 1_1', 1, 1, id='s1-s1-s1')
verify_stat(stats[3], 'Root Suite.First Sub Suite.Sub Suite 1_2', 2, 1, id='s1-s1-s2')
verify_stat(stats[4], 'Root Suite.First Sub Suite.Sub Suite 1_3', 1, 0, id='s1-s1-s3')
verify_stat(stats[5], 'Root Suite.Second Sub Suite', 0, 1, id='s1-s2')
verify_stat(stats[6], 'Root Suite.Second Sub Suite.Sub Suite 2_1', 0, 1, id='s1-s2-s1')
class TestElapsedTime(unittest.TestCase):
def setUp(self):
ts = '20120816 00:00:'
suite = TestSuite(starttime=ts+'00.000', endtime=ts+'59.999')
suite.suites = [
TestSuite(starttime=ts+'00.000', endtime=ts+'30.000'),
TestSuite(starttime=ts+'30.000', endtime=ts+'42.042')
]
suite.suites[0].tests = [
TestCase(starttime=ts+'00.000', endtime=ts+'00.001', tags=['t1']),
TestCase(starttime=ts+'00.001', endtime=ts+'01.001', tags=['t1', 't2'])
]
suite.suites[1].tests = [
TestCase(starttime=ts+'30.000', endtime=ts+'40.000', tags=['t1', 't2', 't3'])
]
self.stats = Statistics(suite, tag_stat_combine=[('?2', 'combined')])
def test_total_stats(self):
assert_equal(self.stats.total._stat.elapsed, 11001)
def test_tag_stats(self):
t1, t2, t3 = self.stats.tags.tags.values()
verify_stat(t1, 't1', 0, 3, elapsed=11001)
verify_stat(t2, 't2', 0, 2, elapsed=11000)
verify_stat(t3, 't3', 0, 1, elapsed=10000)
def test_combined_tag_stats(self):
combined = self.stats.tags.combined[0]
verify_stat(combined, 'combined', 0, 2, combined='?2', elapsed=11000)
def test_suite_stats(self):
assert_equal(self.stats.suite.stat.elapsed, 59999)
assert_equal(self.stats.suite.suites[0].stat.elapsed, 30000)
assert_equal(self.stats.suite.suites[1].stat.elapsed, 12042)
def test_suite_stats_when_suite_has_no_times(self):
suite = TestSuite()
assert_equal(Statistics(suite).suite.stat.elapsed, 0)
ts = '20120816 00:00:'
suite.tests = [TestCase(starttime=ts+'00.000', endtime=ts+'00.001'),
TestCase(starttime=ts+'00.001', endtime=ts+'01.001')]
assert_equal(Statistics(suite).suite.stat.elapsed, 1001)
suite.suites = [TestSuite(starttime=ts+'02.000', endtime=ts+'12.000'),
TestSuite()]
assert_equal(Statistics(suite).suite.stat.elapsed, 11001)
def test_elapsed_from_get_attributes(self):
for time, expected in [('00:00:00.000', '00:00:00'),
('00:00:00.001', '00:00:00'),
('00:00:00.500', '00:00:01'),
('00:00:00.999', '00:00:01'),
('00:00:01.000', '00:00:01'),
('00:00:01.001', '00:00:01'),
('00:00:01.499', '00:00:01'),
('00:00:01.500', '00:00:02'),
('01:59:59:499', '01:59:59'),
('01:59:59:500', '02:00:00')]:
suite = TestSuite(starttime='20120817 00:00:00.000',
endtime='20120817 ' + time)
stat = Statistics(suite).suite.stat
elapsed = stat.get_attributes(include_elapsed=True)['elapsed']
assert_equal(elapsed, expected, time)
if __name__ == "__main__":
unittest.main()
| 42.995495
| 95
| 0.581037
|
838daf1bd86266e3dcc5bfa3fdbf979e1e4138cb
| 2,965
|
py
|
Python
|
tests/test_updatable.py
|
am610/firecrown
|
b5b73cd9f23bc64840ec47fd48d302f19dd533f9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_updatable.py
|
am610/firecrown
|
b5b73cd9f23bc64840ec47fd48d302f19dd533f9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_updatable.py
|
am610/firecrown
|
b5b73cd9f23bc64840ec47fd48d302f19dd533f9
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from firecrown.updatable import Updatable, UpdatableCollection
from firecrown.parameters import RequiredParameters, ParamsMap
class Missing_update(Updatable):
"""A type that is abstract because it does not implement _update."""
def required_parameters(self): # pragma: no cover
return super().required_parameters()
class Missing_required_parameters(Updatable):
"""A type that is abstract because it does not implement required_parameters."""
def _update(self, params): # pragma: no cover
super()._update(params)
class MinimalUpdatable(Updatable):
"""A concrete time that implements Updatable."""
def __init__(self):
"""Initialize object with defaulted value."""
self.a = 1.0
def _update(self, params):
self.a = params.get_from_prefix_param(None, "a")
def required_parameters(self):
return RequiredParameters(["a"])
class SimpleUpdatable(Updatable):
"""A concrete type that implements Updatable."""
def __init__(self):
"""Initialize object with defaulted values."""
self.x = 0.0
self.y = 2.5
def _update(self, params):
self.x = params.get_from_prefix_param(None, "x")
self.y = params.get_from_prefix_param(None, "y")
def required_parameters(self):
return RequiredParameters(["x", "y"])
def test_verify_abstract_interface():
with pytest.raises(TypeError):
x = Missing_update()
with pytest.raises(TypeError):
x = Missing_required_parameters()
def test_simple_updatable():
obj = SimpleUpdatable()
expected_params = RequiredParameters(["x", "y"])
assert obj.required_parameters() == expected_params
assert obj.x == 0.0
assert obj.y == 2.5
new_params = ParamsMap({"x": -1.0, "y": 5.5})
obj.update(new_params)
assert obj.x == -1.0
assert obj.y == 5.5
def test_updatable_collection_appends():
coll = UpdatableCollection()
assert len(coll) == 0
coll.append(SimpleUpdatable())
assert len(coll) == 1
assert coll[0].x == 0.0
assert coll[0].y == 2.5
assert coll.required_parameters() == RequiredParameters(["x", "y"])
coll.append(MinimalUpdatable())
assert len(coll) == 2
assert coll[1].a == 1.0
assert coll.required_parameters() == RequiredParameters(["x", "y", "a"])
def test_updateable_collection_updates():
coll = UpdatableCollection()
assert len(coll) == 0
coll.append(SimpleUpdatable())
assert len(coll) == 1
assert coll[0].x == 0.0
assert coll[0].y == 2.5
new_params = {"x": -1.0, "y": 5.5}
coll.update(ParamsMap(new_params))
assert len(coll) == 1
assert coll[0].x == -1.0
assert coll[0].y == 5.5
def test_updatable_collection_rejects_nonupdatables():
coll = UpdatableCollection()
assert len(coll) == 0
with pytest.raises(TypeError):
coll.append(3) # int is not a subtype of Updatable
assert len(coll) == 0
| 27.453704
| 84
| 0.659359
|
242cdecd8fbc3444c5cbe872c293f34697544136
| 743
|
py
|
Python
|
localization_service/venv/bin/rst2html4.py
|
vladbragoi/indoor_localization_system
|
b98d278cbd6a2ff8dcc093631eed0605e9e3a35f
|
[
"Apache-2.0"
] | null | null | null |
localization_service/venv/bin/rst2html4.py
|
vladbragoi/indoor_localization_system
|
b98d278cbd6a2ff8dcc093631eed0605e9e3a35f
|
[
"Apache-2.0"
] | null | null | null |
localization_service/venv/bin/rst2html4.py
|
vladbragoi/indoor_localization_system
|
b98d278cbd6a2ff8dcc093631eed0605e9e3a35f
|
[
"Apache-2.0"
] | null | null | null |
#!/home/vlad/tesi/python server/venv/bin/python
# $Id: rst2html4.py 7994 2016-12-10 17:41:45Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing (X)HTML.
The output conforms to XHTML 1.0 transitional
and almost to HTML 4.01 transitional (except for closing empty tags).
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
publish_cmdline(writer_name='html4', description=description)
| 27.518519
| 78
| 0.74428
|
f5cb63dcad28ea7b8a7dce7f35dc482ab7956ab5
| 2,765
|
py
|
Python
|
client/tests/logging_utils_test.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 74
|
2015-04-01T02:35:15.000Z
|
2021-12-17T22:10:56.000Z
|
client/tests/logging_utils_test.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 123
|
2015-04-01T04:02:57.000Z
|
2022-03-02T12:49:55.000Z
|
client/tests/logging_utils_test.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 32
|
2015-04-03T01:40:47.000Z
|
2021-11-13T15:20:13.000Z
|
#!/usr/bin/env vpython3
# Copyright 2015 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import logging
import os
import subprocess
import sys
import tempfile
import unittest
import re
# Mutates sys.path.
import test_env
from utils import file_path
from utils import logging_utils
# PID YYYY-MM-DD HH:MM:SS.MMM
_LOG_HEADER = r'^%d \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d' % os.getpid()
_LOG_HEADER_PID = r'^\d+ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d'
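# For illustration, a header matched by the patterns above looks like
# "12345 2015-04-01 02:35:15.123" (hypothetical PID and timestamp).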
class Test(unittest.TestCase):
# This test fails when running via test runner
# Need to run in sequential_test_runner.py as an executable
no_run = 1
def setUp(self):
super(Test, self).setUp()
self.tmp = tempfile.mkdtemp(prefix='logging_utils')
def tearDown(self):
try:
file_path.rmtree(self.tmp)
finally:
super(Test, self).tearDown()
def test_capture(self):
root = logging.RootLogger(logging.DEBUG)
with logging_utils.CaptureLogs('foo', root) as log:
root.debug('foo')
result = log.read()
expected = _LOG_HEADER + ': DEBUG foo\n$'
if sys.platform == 'win32':
expected = expected.replace('\n', '\r\n')
self.assertTrue(
re.match(expected.encode('utf-8'), result), (expected, result))
def test_prepare_logging(self):
root = logging.RootLogger(logging.DEBUG)
filepath = os.path.join(self.tmp, 'test.log')
logging_utils.prepare_logging(filepath, root)
root.debug('foo')
with open(filepath, 'rb') as f:
result = f.read()
# It'd be nice to figure out a way to ensure it's properly in UTC but it's
# tricky to do reliably.
expected = _LOG_HEADER + ' D: foo\n$'
self.assertTrue(
re.match(expected.encode('utf-8'), result), (expected, result))
def test_rotating(self):
# Create a rotating log. Create a subprocess then delete the file. Make sure
# nothing blows up.
# Everything is done in a child process because the called functions mutate
# the global state.
r = subprocess.call(
[sys.executable, '-u', 'phase1.py', self.tmp],
cwd=os.path.join(test_env.TESTS_DIR, 'logging_utils'))
self.assertEqual(0, r)
self.assertEqual({'shared.1.log'}, set(os.listdir(self.tmp)))
with open(os.path.join(self.tmp, 'shared.1.log'), 'rb') as f:
lines = f.read().splitlines()
expected = [
r' I: Parent1',
r' I: Child1',
r' I: Child2',
r' I: Parent2',
]
for e, l in zip(expected, lines):
ex = _LOG_HEADER_PID + e + '$'
self.assertTrue(re.match(ex.encode('utf-8'), l), (ex, l))
self.assertEqual(len(expected), len(lines))
if __name__ == '__main__':
test_env.main()
| 30.384615
| 80
| 0.651718
|
72d28cb8f5258ee592d2706a4bf9322adfa5e65e
| 6,209
|
py
|
Python
|
python/profilo/model/ttypes.py
|
amanjeetsingh150/profilo
|
a0d626d1c3889bdbaaa5c12ca2d2606f0fee45da
|
[
"Apache-2.0"
] | 1
|
2020-03-24T23:51:43.000Z
|
2020-03-24T23:51:43.000Z
|
python/profilo/model/ttypes.py
|
amanjeetsingh150/profilo
|
a0d626d1c3889bdbaaa5c12ca2d2606f0fee45da
|
[
"Apache-2.0"
] | 4
|
2021-03-11T04:11:14.000Z
|
2022-02-27T09:35:19.000Z
|
python/profilo/model/ttypes.py
|
amanjeetsingh150/profilo
|
a0d626d1c3889bdbaaa5c12ca2d2606f0fee45da
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2018-present, Facebook, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# This is an import of generated code, stripped for usability here.
#
# @partially-generated
class CounterUnit(object):
BYTES = 0
SECONDS = 1
ITEMS = 2
RATIO = 3
_VALUES_TO_NAMES = {
0: "BYTES",
1: "SECONDS",
2: "ITEMS",
3: "RATIO",
}
_NAMES_TO_VALUES = {
"BYTES": 0,
"SECONDS": 1,
"ITEMS": 2,
"RATIO": 3,
}
class StackTrace(object):
"""
Attributes:
- frames
"""
__init__ = None
class StackFrame(object):
"""
Attributes:
- identifier
- symbol
- codeLocation
- lineNumber
"""
__init__ = None
class PathDefinition(object):
"""
Attributes:
- begin
- end
- name
"""
__init__ = None
class CriticalPathData(object):
"""
Attributes:
- points
- totalCost
- name
"""
__init__ = None
class Event(object):
"""
Attributes:
- traceID
- type
- id1
- id2
- timestamp
- annotations
- sequenceNumber
"""
__init__ = None
class Properties(object):
"""
Attributes:
- coreProps
- customProps
- counterProps
- errors
- sets
- stacktraces
- stacktraceHashes
- stacktraceHashResolvers
"""
__init__ = None
class Point(object):
"""
Attributes:
- id
- timestamp: Timestamps are in units of microseconds. This is the timestamp after clocks have been aligned
- unalignedTimestamp: Timestamps are in units of microseconds. This is the original timestamp before any correction has been applied
- properties
- sequenceNumber
"""
__init__ = None
class Block(object):
"""
Attributes:
- id
- begin
- end
- otherPoints
- properties
"""
__init__ = None
class Edge(object):
"""
    Edges express a causal relationship between points: sourcePoint is the cause and targetPoint the effect.
Attributes:
- sourcePoint
- targetPoint
- properties
"""
__init__ = None
class Trace(object):
"""
IDs for a single object type are unique within a trace.
Attributes:
- id
- executionUnits
- blocks
- points
- version
- edges
- properties
"""
__init__ = None
class ExecutionUnit(object):
"""
Timestamps within a single Execution Unit are considered to be in sync.
Attributes:
- id
- blocks
- properties
"""
__init__ = None
def StackTrace__init__(
self,
frames=None,
):
self.frames = frames
StackTrace.__init__ = StackTrace__init__
def StackFrame__init__(
self,
identifier=None,
symbol=None,
codeLocation=None,
lineNumber=None,
):
self.identifier = identifier
self.symbol = symbol
self.codeLocation = codeLocation
self.lineNumber = lineNumber
StackFrame.__init__ = StackFrame__init__
def PathDefinition__init__(
self,
begin=None,
end=None,
name=None,
):
self.begin = begin
self.end = end
self.name = name
PathDefinition.__init__ = PathDefinition__init__
def CriticalPathData__init__(
self,
points=None,
totalCost=None,
name=None,
):
self.points = points
self.totalCost = totalCost
self.name = name
CriticalPathData.__init__ = CriticalPathData__init__
def Event__init__(
self,
traceID=None,
type=None,
id1=None,
id2=None,
timestamp=None,
annotations=None,
sequenceNumber=None,
):
self.traceID = traceID
self.type = type
self.id1 = id1
self.id2 = id2
self.timestamp = timestamp
self.annotations = annotations
self.sequenceNumber = sequenceNumber
Event.__init__ = Event__init__
def Properties__init__(
self,
coreProps=None,
customProps=None,
counterProps=None,
errors=None,
sets=None,
stacktraces=None,
stacktraceHashes=None,
stacktraceHashResolvers=None,
):
self.coreProps = coreProps
self.customProps = customProps
self.counterProps = counterProps
self.errors = errors
self.sets = sets
self.stacktraces = stacktraces
self.stacktraceHashes = stacktraceHashes
self.stacktraceHashResolvers = stacktraceHashResolvers
Properties.__init__ = Properties__init__
def Block__init__(
self,
id=None,
begin=None,
end=None,
otherPoints=None,
properties=None,
):
self.id = id
self.begin = begin
self.end = end
self.otherPoints = otherPoints
self.properties = properties
Block.__init__ = Block__init__
def Point__init__(
self,
id=None,
timestamp=None,
unalignedTimestamp=None,
properties=None,
sequenceNumber=None,
):
self.id = id
self.timestamp = timestamp
self.unalignedTimestamp = unalignedTimestamp
self.properties = properties
self.sequenceNumber = sequenceNumber
Point.__init__ = Point__init__
def Edge__init__(
self,
sourcePoint=None,
targetPoint=None,
properties=None,
):
self.sourcePoint = sourcePoint
self.targetPoint = targetPoint
self.properties = properties
Edge.__init__ = Edge__init__
def Trace__init__(
self,
id=None,
executionUnits=None,
blocks=None,
points=None,
version=None,
edges=None,
properties=None,
):
self.id = id
self.executionUnits = executionUnits
self.blocks = blocks
self.points = points
self.version = version
self.edges = edges
self.properties = properties
Trace.__init__ = Trace__init__
def ExecutionUnit__init__(
self,
id=None,
blocks=None,
properties=None,
):
self.id = id
self.blocks = blocks
self.properties = properties
ExecutionUnit.__init__ = ExecutionUnit__init__
| 16.735849
| 136
| 0.661298
|
44c4fb1098dbeac79f8fcc626471ada0947a32ea
| 1,288
|
py
|
Python
|
setup.py
|
korvyashka/pykongregate
|
8f6874c8674fd00d7408a4e31ff7386d301a1d01
|
[
"MIT"
] | null | null | null |
setup.py
|
korvyashka/pykongregate
|
8f6874c8674fd00d7408a4e31ff7386d301a1d01
|
[
"MIT"
] | null | null | null |
setup.py
|
korvyashka/pykongregate
|
8f6874c8674fd00d7408a4e31ff7386d301a1d01
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
import os
import sys
py_version = sys.version_info
try:
version = ".".join(
[str(i) for i in __import__('pykongregate').__VERSION__]
)
except:
# Warning: used only for tox
version = "uknown"
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
CLASSIFIERS = [
'Development Status :: 1 - Planning',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Natural Language :: English',
]
install_requires = [
'simplejson',
'requests'
]
setup(
name='pykongregate',
author='Korvyashka <korvyashka666@gmail.com>',
version=version,
author_email='korvyashka666@gmail.com',
download_url='https://github.com/korvyashka/pykongregate.git',
description='python client for kongregate REST API',
long_description=open(readme).read(),
license='MIT license',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=install_requires,
packages=find_packages(exclude=['tests', 'docs']),
test_suite='tests',
include_package_data=True,
zip_safe=False
)
| 24.301887
| 66
| 0.680901
|
67048705c88c65812a46cbcb6ea304de6b02a9b6
| 12,693
|
py
|
Python
|
tests/unit/test_tf_layers.py
|
ita9naiwa/NVTabular
|
69fd3f56fc10cf20d02f868578bcd7c736248604
|
[
"Apache-2.0"
] | 1
|
2021-08-31T08:21:09.000Z
|
2021-08-31T08:21:09.000Z
|
tests/unit/test_tf_layers.py
|
beingaryan/NVTabular
|
126c8e38ffe77ce36a228079776410d97580f992
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_tf_layers.py
|
beingaryan/NVTabular
|
126c8e38ffe77ce36a228079776410d97580f992
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import pytest
tf = pytest.importorskip("tensorflow")
layers = pytest.importorskip("nvtabular.framework_utils.tensorflow.layers")
def get_good_feature_columns():
return (
tf.feature_column.numeric_column("scalar_continuous", (1,)),
tf.feature_column.numeric_column("vector_continuous", (128,)),
tf.feature_column.categorical_column_with_identity("one_hot", 100),
tf.feature_column.categorical_column_with_identity("multi_hot", 5),
)
def get_bad_feature_columns():
return (
tf.feature_column.numeric_column("a", (5, 10)),
tf.feature_column.categorical_column_with_hash_bucket("b", 100),
)
def _compute_expected_multi_hot(table, values, nnzs, combiner):
rows = []
start_idx = 0
for nnz in nnzs:
vals = values[start_idx : start_idx + nnz]
vectors = table[vals]
if combiner == "sum":
rows.append(vectors.sum(axis=0))
else:
rows.append(vectors.mean(axis=0))
start_idx += nnz
return np.array(rows)
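# Note on the multi-hot layout used in the tests below: a multi-hot feature is
# fed as two flat tensors, "<name>__values" holding all category ids back to
# back and "<name>__nnzs" holding how many of those ids belong to each row, so
# values [0, 2, 1, 4, 1, 3, 1] with nnzs [1, 2, 4] describe the ragged rows
# [0], [2, 1] and [4, 1, 3, 1].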
@pytest.mark.parametrize("aggregation", ["stack", "concat"])
@pytest.mark.parametrize("combiner", ["sum", "mean"]) # TODO: add sqrtn
def test_dense_embedding_layer(aggregation, combiner):
raw_good_columns = get_good_feature_columns()
scalar_numeric, vector_numeric, one_hot, multi_hot = raw_good_columns
one_hot_embedding = tf.feature_column.indicator_column(one_hot)
multi_hot_embedding = tf.feature_column.embedding_column(multi_hot, 8, combiner=combiner)
# should raise ValueError if passed categorical columns
with pytest.raises(ValueError):
embedding_layer = layers.DenseFeatures(raw_good_columns, aggregation=aggregation)
if aggregation == "stack":
# can't pass numeric to stack aggregation unless dims are 1
with pytest.raises(ValueError):
embedding_layer = layers.DenseFeatures(
[
scalar_numeric,
vector_numeric,
one_hot_embedding,
multi_hot_embedding,
],
aggregation=aggregation,
)
# can't have mismatched dims with stack aggregation
with pytest.raises(ValueError):
embedding_layer = layers.DenseFeatures(
[one_hot_embedding, multi_hot_embedding], aggregation=aggregation
)
# reset b embedding to have matching dims
multi_hot_embedding = tf.feature_column.embedding_column(multi_hot, 100, combiner=combiner)
cols = [one_hot_embedding, multi_hot_embedding]
else:
cols = [scalar_numeric, vector_numeric, one_hot_embedding, multi_hot_embedding]
embedding_layer = layers.DenseFeatures(cols, aggregation=aggregation)
inputs = {
"scalar_continuous": tf.keras.Input(name="scalar_continuous", shape=(1,), dtype=tf.float32),
"vector_continuous__values": tf.keras.Input(
name="vector_continuous__values", shape=(1,), dtype=tf.float32
),
"one_hot": tf.keras.Input(name="one_hot", shape=(1,), dtype=tf.int64),
"multi_hot__values": tf.keras.Input(name="multi_hot__values", shape=(1,), dtype=tf.int64),
"multi_hot__nnzs": tf.keras.Input(name="multi_hot__nnzs", shape=(1,), dtype=tf.int64),
}
if aggregation == "stack":
inputs.pop("scalar_continuous")
inputs.pop("vector_continuous__values")
output = embedding_layer(inputs)
model = tf.keras.Model(inputs=inputs, outputs=output)
model.compile("sgd", "mse")
# TODO: check for out-of-range categorical behavior
scalar = np.array([0.1, -0.2, 0.3], dtype=np.float32)
vector = np.random.randn(3, 128).astype("float32")
one_hot = np.array([44, 21, 32])
multi_hot_values = np.array([0, 2, 1, 4, 1, 3, 1])
multi_hot_nnzs = np.array([1, 2, 4])
x = {
"scalar_continuous": scalar[:, None],
"vector_continuous__values": vector.flatten()[:, None],
"one_hot": one_hot[:, None],
"multi_hot__values": multi_hot_values[:, None],
"multi_hot__nnzs": multi_hot_nnzs[:, None],
}
if aggregation == "stack":
x.pop("scalar_continuous")
x.pop("vector_continuous__values")
multi_hot_embedding_table = embedding_layer.embedding_tables["multi_hot"].numpy()
multi_hot_embedding_rows = _compute_expected_multi_hot(
multi_hot_embedding_table, multi_hot_values, multi_hot_nnzs, combiner
)
# check that shape and values match up
y_hat = model(x).numpy()
assert y_hat.shape[0] == 3
if aggregation == "stack":
assert len(y_hat.shape) == 3
assert y_hat.shape[1] == (len(x) - 1)
assert y_hat.shape[2] == 100
assert (y_hat[:, 0] == multi_hot_embedding_rows).all()
y_c = y_hat[:, 1]
elif aggregation == "concat":
assert len(y_hat.shape) == 2
assert y_hat.shape[1] == 1 + 100 + 8 + 128
assert (y_hat[:, 108] == scalar).all()
assert (y_hat[:, 109:] == vector).all()
assert (y_hat[:, :8] == multi_hot_embedding_rows).all()
y_c = y_hat[:, 8:108]
nonzero_c_idx = np.where(y_c != 0)
assert (nonzero_c_idx[1] == one_hot).all()
assert (y_c[nonzero_c_idx] == 1).all()
# make sure unusable columns get flagged
bad_col_a, bad_col_b = get_bad_feature_columns()
bad_col_b_embedding = tf.feature_column.indicator_column(bad_col_b)
with pytest.raises(ValueError):
# matrix numeric should raise, even though dims match
embedding_layer = layers.DenseFeatures(
[bad_col_a, one_hot_embedding, multi_hot_embedding], aggregation=aggregation
)
with pytest.raises(ValueError):
embedding_layer = layers.DenseFeatures(
[bad_col_b_embedding, multi_hot_embedding], aggregation=aggregation
)
def test_linear_embedding_layer():
raw_good_columns = get_good_feature_columns()
scalar_numeric, vector_numeric, one_hot_col, multi_hot_col = raw_good_columns
one_hot_embedding = tf.feature_column.indicator_column(one_hot_col)
with pytest.raises(ValueError):
embedding_layer = layers.LinearFeatures([scalar_numeric, one_hot_embedding])
embedding_layer = layers.LinearFeatures(raw_good_columns)
inputs = {
"scalar_continuous": tf.keras.Input(name="scalar_continuous", shape=(1,), dtype=tf.float32),
"vector_continuous__values": tf.keras.Input(
name="vector_continuous__values", shape=(1,), dtype=tf.float32
),
"one_hot": tf.keras.Input(name="one_hot", shape=(1,), dtype=tf.int64),
"multi_hot__values": tf.keras.Input(name="multi_hot__values", shape=(1,), dtype=tf.int64),
"multi_hot__nnzs": tf.keras.Input(name="multi_hot__nnzs", shape=(1,), dtype=tf.int64),
}
output = embedding_layer(inputs)
model = tf.keras.Model(inputs=inputs, outputs=output)
model.compile("sgd", "mse")
# TODO: check for out-of-range categorical behavior
scalar = np.array([0.1, -0.2, 0.3], dtype=np.float32)
vector = np.random.randn(3, 128).astype("float32")
one_hot = np.array([44, 21, 32])
multi_hot_values = np.array([0, 2, 1, 4, 1, 3, 1])
multi_hot_nnzs = np.array([1, 2, 4])
x = {
"scalar_continuous": scalar[:, None],
"vector_continuous__values": vector.flatten()[:, None],
"one_hot": one_hot[:, None],
"multi_hot__values": multi_hot_values[:, None],
"multi_hot__nnzs": multi_hot_nnzs[:, None],
}
y_hat = model(x).numpy()
assert (y_hat == 0).all()
# do one iteration of training to make sure values
# match up, since all initialized to zero
y = np.array([-0.5, 1.2, 3.4])[:, None]
with tf.GradientTape() as tape:
y_hat = model(x)
loss = tf.reduce_mean((y_hat - y) ** 2)
grads = tape.gradient(loss, model.trainable_weights)
model.optimizer.apply_gradients(zip(grads, model.trainable_weights))
y_hat = model(x).numpy()[:, 0]
numeric_weight = embedding_layer.embedding_tables["numeric"].numpy()
one_hot_weights = embedding_layer.embedding_tables["one_hot"].numpy()[one_hot][:, 0]
multi_hot_weights = _compute_expected_multi_hot(
embedding_layer.embedding_tables["multi_hot"].numpy(),
multi_hot_values,
multi_hot_nnzs,
"sum",
)[:, 0]
bias = embedding_layer.bias.numpy()[0]
rtol = 1e-5
numeric = np.concatenate([scalar[:, None], vector], axis=1)
expected_y_hat = (numeric @ numeric_weight)[:, 0] + one_hot_weights + multi_hot_weights + bias
assert np.isclose(y_hat, expected_y_hat, rtol=rtol).all()
# make sure unusable columns get flagged
bad_col_a, bad_col_b = get_bad_feature_columns()
with pytest.raises(ValueError):
embedding_layer = layers.LinearFeatures([bad_col_a, one_hot_col, multi_hot_col])
with pytest.raises(ValueError):
embedding_layer = layers.LinearFeatures([scalar_numeric, bad_col_b, multi_hot_col])
@pytest.mark.parametrize("embedding_dim", [1, 4, 16])
@pytest.mark.parametrize("num_features", [1, 16, 64])
@pytest.mark.parametrize("interaction_type", [None, "field_all", "field_each", "field_interaction"])
@pytest.mark.parametrize("self_interaction", [True, False])
def test_dot_product_interaction_layer(
embedding_dim, num_features, interaction_type, self_interaction
):
if num_features == 1 and not self_interaction:
return
input_ = tf.keras.Input(name="x", shape=(num_features, embedding_dim), dtype=tf.float32)
interaction_layer = layers.DotProductInteraction(interaction_type, self_interaction)
output = interaction_layer(input_)
model = tf.keras.Model(inputs=input_, outputs=output)
model.compile("sgd", "mse")
x = np.random.randn(8, num_features, embedding_dim).astype(np.float32)
y_hat = model.predict(x)
if self_interaction:
expected_dim = num_features * (num_features + 1) // 2
else:
expected_dim = num_features * (num_features - 1) // 2
assert y_hat.shape[1] == expected_dim
if interaction_type is not None:
W = interaction_layer.kernel.numpy()
expected_outputs = []
for i in range(num_features):
j_start = i if self_interaction else i + 1
for j in range(j_start, num_features):
x_i = x[:, i]
x_j = x[:, j]
if interaction_type == "field_all":
W_ij = W
elif interaction_type == "field_each":
W_ij = W[i].T
elif interaction_type == "field_interaction":
W_ij = W[i, j]
if interaction_type is not None:
x_i = x_i @ W_ij
expected_outputs.append((x_i * x_j).sum(axis=1))
expected_output = np.stack(expected_outputs).T
rtol = 1e-1
atol = 1e-2
match = np.isclose(expected_output, y_hat, rtol=rtol, atol=atol)
assert match.all()
def test_multihot_empty_rows():
multi_hot = tf.feature_column.categorical_column_with_identity("multihot", 5)
multi_hot_embedding = tf.feature_column.embedding_column(multi_hot, 8, combiner="sum")
embedding_layer = layers.DenseFeatures([multi_hot_embedding])
inputs = {
"multihot__values": tf.keras.Input(name="multihot__values", shape=(1,), dtype=tf.int64),
"multihot__nnzs": tf.keras.Input(name="multihot__nnzs", shape=(1,), dtype=tf.int64),
}
output = embedding_layer(inputs)
model = tf.keras.Model(inputs=inputs, outputs=output)
model.compile("sgd", "binary_crossentropy")
multi_hot_values = np.array([0, 2, 1, 4, 1, 3, 1])
multi_hot_nnzs = np.array([1, 0, 2, 4, 0])
x = {
"multihot__values": multi_hot_values[:, None],
"multihot__nnzs": multi_hot_nnzs[:, None],
}
multi_hot_embedding_table = embedding_layer.embedding_tables["multihot"].numpy()
multi_hot_embedding_rows = _compute_expected_multi_hot(
multi_hot_embedding_table, multi_hot_values, multi_hot_nnzs, "sum"
)
y_hat = model(x).numpy()
np.testing.assert_allclose(y_hat, multi_hot_embedding_rows)
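# --- Added note (hedged): because of the pytest.importorskip calls at the top,
# this whole module is skipped when TensorFlow or nvtabular is not installed.
# A typical invocation for just the dense-embedding cases would be:
#     pytest tests/unit/test_tf_layers.py -k "dense_embedding" -v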
| 39.542056
| 100
| 0.667455
|
26dae0186243c37fa5d8270d946d32a60a8a9100
| 28,537
|
py
|
Python
|
hydragnn/postprocess/visualizer.py
|
pzhanggit/HydraGNN
|
0af9f4e567338d0104e631d69988ab062f425fe4
|
[
"BSD-3-Clause"
] | null | null | null |
hydragnn/postprocess/visualizer.py
|
pzhanggit/HydraGNN
|
0af9f4e567338d0104e631d69988ab062f425fe4
|
[
"BSD-3-Clause"
] | null | null | null |
hydragnn/postprocess/visualizer.py
|
pzhanggit/HydraGNN
|
0af9f4e567338d0104e631d69988ab062f425fe4
|
[
"BSD-3-Clause"
] | null | null | null |
##############################################################################
# Copyright (c) 2021, Oak Ridge National Laboratory #
# All rights reserved. #
# #
# This file is part of HydraGNN and is distributed under a BSD 3-clause #
# license. For the licensing terms see the LICENSE file in the top-level #
# directory. #
# #
# SPDX-License-Identifier: BSD-3-Clause #
##############################################################################
import matplotlib as mpl
mpl.use("Agg")
import matplotlib.pyplot as plt
from itertools import chain
import time, pickle
import numpy as np
from math import sqrt, floor, ceil
plt.rcParams.update({"font.size": 18})
class Visualizer:
"""A class used for visualizing results of GCNN outputs.
Methods
-------
__init__(self, model_with_config_name: str, node_feature=[], num_nodes_list=[], num_heads=1, head_dims=[1])
Create a Visualizer object with model name (also the location to save the plots), node feature list,
list of graph sizes, number of heads, and head dimensions list.
__hist2d_contour(self, data1, data2)
Calculate joint histogram of data1 and data2 values.
__err_condmean(self, data1, data2, weight=1.0)
Calculate conditional mean values of the weighted difference between data1 and data2 on data1, i.e., <weight*|data1-data2|>_data1.
__scatter_impl(self,ax,x,y,s=None,c=None,marker=None,title=None,x_label=None,y_label=None,xylim_equal=False,)
Create scatter plot of x and y values.
##create plots for a single variable with name varname
create_plot_global_analysis(self, varname, true_values, predicted_values, save_plot=True)
Creates scatter/conditional mean/error pdf plots from the true and predicted values of variable varname.
For node level output, statistics across all nodes, l2 length and sum, are also considered for analysis.
(file: varname +"_scatter_condm_err.png")
create_parity_plot_and_error_histogram_scalar(self, varname, true_values, predicted_values, iepoch=None, save_plot=True)
Creates scatter plots for true_values and predicted values and error histogram.
(file: varname + ".png")
create_parity_plot_vector(self, varname, true_values, predicted_values, head_dim, iepoch=None, save_plot=True)
Creates scatter plots for true_values and predicted values and error histogram.
(file: varname + ".png")
create_error_histogram_per_node(self, varname, true_values, predicted_values, iepoch=None, save_plot=True)
Creates error histogram plot for the true and predicted values per node.
(file: varname+"_error_hist1d_"+ str(iepoch).zfill(4)+ ".png" or varname+"_error_hist1d.png" )
create_parity_plot_per_node_vector(self, varname, true_values, predicted_values, iepoch=None, save_plot=True):
Creates scatter plots per node for true and predicted values of length 3 vector variable varname.
(file: varname+"_"+ str(iepoch).zfill(4) + ".png" or varname+".png" )
#create plots for all heads
plot_history(self, total_loss_train, total_loss_val, total_loss_test, task_loss_train, task_loss_val, task_loss_test, task_weights, task_names):
Save history of losses to file and plot.
create_scatter_plots(self, true_values, predicted_values, output_names=None, iepoch=None)
Creates scatter plots for all head predictions. One plot for each head
create_plot_global(self, true_values, predicted_values, output_names=None)
Creates global analysis for all head predictions, e.g., scatter/condmean/error pdf plot. One plot for each head
"""
def __init__(
self,
model_with_config_name: str,
node_feature: list,
num_nodes_list: list,
num_heads=1,
head_dims=[1],
):
self.true_values = []
self.predicted_values = []
self.model_with_config_name = model_with_config_name
self.node_feature = node_feature
self.num_nodes = len(node_feature[0])
self.num_heads = num_heads
self.head_dims = head_dims
self.num_nodes_list = num_nodes_list
def __hist2d_contour(self, data1, data2):
hist2d_pasr, xedge_pasr, yedge_pasr = np.histogram2d(
np.hstack(data1), np.hstack(data2), bins=50
)
xcen_pasr = 0.5 * (xedge_pasr[0:-1] + xedge_pasr[1:])
ycen_pasr = 0.5 * (yedge_pasr[0:-1] + yedge_pasr[1:])
BCTY_pasr, BCTX_pasr = np.meshgrid(ycen_pasr, xcen_pasr)
hist2d_pasr = hist2d_pasr / np.amax(hist2d_pasr)
return BCTX_pasr, BCTY_pasr, hist2d_pasr
def __err_condmean(self, data1, data2, weight=1.0):
errabs = np.abs(np.hstack(data1) - np.hstack(data2)) * weight
hist2d_pasr, xedge_pasr, yedge_pasr = np.histogram2d(
np.hstack(data1), errabs, bins=50
)
xcen_pasr = 0.5 * (xedge_pasr[0:-1] + xedge_pasr[1:])
ycen_pasr = 0.5 * (yedge_pasr[0:-1] + yedge_pasr[1:])
hist2d_pasr = hist2d_pasr / np.amax(hist2d_pasr)
mean1d_cond = np.dot(hist2d_pasr, ycen_pasr) / (
np.sum(hist2d_pasr, axis=1) + 1e-12
)
return xcen_pasr, mean1d_cond
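    # Added note (illustrative): the method above bins data1 into 50 intervals and,
    # for each bin center, returns the histogram-weighted average of the binned error
    # values, i.e. an estimate of <weight * |data1 - data2|> conditioned on data1.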
def __scatter_impl(
self,
ax,
x,
y,
s=None,
c=None,
marker=None,
title=None,
x_label=None,
y_label=None,
xylim_equal=False,
):
ax.scatter(x, y, s=s, edgecolor="b", marker=marker, facecolor="none")
ax.set_title(title)
ax.set_xlabel(x_label)
ax.set_ylabel(y_label)
if xylim_equal:
ax.set_aspect("equal")
minimum = np.min((ax.get_xlim(), ax.get_ylim()))
maximum = np.max((ax.get_xlim(), ax.get_ylim()))
ax.set_xlim(minimum, maximum)
ax.set_ylim(minimum, maximum)
self.add_identity(ax, color="r", ls="--")
def create_plot_global_analysis(
self, varname, true_values, predicted_values, save_plot=True
):
"""Creates scatter/condmean/error pdf plots for the true and predicted values of variable varname."""
nshape = np.asarray(predicted_values).shape
if nshape[1] == 1:
fig, axs = plt.subplots(1, 3, figsize=(15, 4.5))
plt.subplots_adjust(
left=0.08, bottom=0.15, right=0.95, top=0.925, wspace=0.35, hspace=0.1
)
ax = axs[0]
self.__scatter_impl(
ax,
true_values,
predicted_values,
title="Scalar output",
x_label="True",
y_label="Predicted",
xylim_equal=True,
)
ax = axs[1]
xtrue, error = self.__err_condmean(
true_values, predicted_values, weight=1.0
)
ax.plot(xtrue, error, "ro")
ax.set_title("Conditional mean abs. error")
ax.set_xlabel("True")
ax.set_ylabel("abs. error")
ax = axs[2]
hist1d, bin_edges = np.histogram(
np.array(predicted_values) - np.array(true_values),
bins=40,
density=True,
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_title("Scalar output: error PDF")
ax.set_xlabel("Error")
ax.set_ylabel("PDF")
else:
fig, axs = plt.subplots(3, 3, figsize=(18, 16))
vlen_true = []
vlen_pred = []
vsum_true = []
vsum_pred = []
for isamp in range(nshape[0]):
vlen_true.append(
sqrt(sum([comp ** 2 for comp in true_values[isamp][:]]))
)
vlen_pred.append(
sqrt(sum([comp ** 2 for comp in predicted_values[isamp][:]]))
)
vsum_true.append(sum(true_values[isamp][:]))
vsum_pred.append(sum(predicted_values[isamp][:]))
ax = axs[0, 0]
self.__scatter_impl(
ax,
vlen_true,
vlen_pred,
title="Vector output: length",
x_label="True",
y_label="Predicted",
xylim_equal=True,
)
ax = axs[1, 0]
xtrue, error = self.__err_condmean(
vlen_true, vlen_pred, weight=1.0 / sqrt(nshape[1])
)
ax.plot(xtrue, error, "ro")
ax.set_ylabel("Conditional mean abs error")
ax.set_xlabel("True")
ax = axs[2, 0]
hist1d, bin_edges = np.histogram(
np.array(vlen_pred) - np.array(vlen_true), bins=40, density=True
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_ylabel("Error PDF")
ax.set_xlabel("Error")
ax = axs[0, 1]
self.__scatter_impl(
ax,
vsum_true,
vsum_pred,
title="Vector output: sum",
x_label="True",
y_label="Predicted",
xylim_equal=True,
)
ax = axs[1, 1]
xtrue, error = self.__err_condmean(
vsum_true, vsum_pred, weight=1.0 / nshape[1]
)
ax.plot(xtrue, error, "ro")
ax = axs[2, 1]
hist1d, bin_edges = np.histogram(
np.array(vsum_pred) - np.array(vsum_true), bins=40, density=True
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
truecomp = []
predcomp = []
for icomp in range(nshape[1]):
truecomp.append(true_values[:][icomp])
predcomp.append(predicted_values[:][icomp])
ax = axs[0, 2]
self.__scatter_impl(
ax,
truecomp,
predcomp,
title="Vector output: components",
x_label="True",
y_label="Predicted",
xylim_equal=True,
)
ax = axs[1, 2]
xtrue, error = self.__err_condmean(truecomp, predcomp, weight=1.0)
ax.plot(xtrue, error, "ro")
ax = axs[2, 2]
hist1d, bin_edges = np.histogram(
np.array(predcomp) - np.array(truecomp), bins=40, density=True
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
plt.subplots_adjust(
left=0.075, bottom=0.1, right=0.98, top=0.95, wspace=0.2, hspace=0.25
)
if save_plot:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_scatter_condm_err.png"
)
plt.close()
else:
plt.show()
def create_parity_plot_and_error_histogram_scalar(
self, varname, true_values, predicted_values, iepoch=None, save_plot=True
):
"""Creates scatter plots for true_values and predicted values of variable varname at iepoch."""
nshape = np.asarray(predicted_values).shape
if nshape[1] == 1:
fig, axs = plt.subplots(1, 2, figsize=(12, 6))
ax = axs[0]
self.__scatter_impl(
ax,
true_values,
predicted_values,
title=varname,
x_label="True",
y_label="Predicted",
xylim_equal=True,
)
ax = axs[1]
hist1d, bin_edges = np.histogram(
np.array(predicted_values) - np.array(true_values),
bins=40,
density=True,
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_title(varname + ": error PDF")
plt.subplots_adjust(
left=0.075, bottom=0.1, right=0.98, top=0.9, wspace=0.2, hspace=0.25
)
else:
nrow = floor(sqrt((nshape[1] + 2)))
ncol = ceil((nshape[1] + 2) / nrow)
fig, axs = plt.subplots(nrow, ncol, figsize=(ncol * 3, nrow * 3))
axs = axs.flatten()
for inode in range(nshape[1]):
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(self.node_feature[isamp][inode])
truecomp.append(true_values[isamp][inode])
predcomp.append(predicted_values[isamp][inode])
ax = axs[inode]
self.__scatter_impl(
ax,
truecomp,
predcomp,
s=6,
c=xfeature,
title="node:" + str(inode),
xylim_equal=True,
)
ax = axs[nshape[1]] # summation over all the nodes
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(sum(self.node_feature[isamp][:]))
truecomp.append(sum(true_values[isamp][:]))
predcomp.append(sum(predicted_values[isamp][:]))
self.__scatter_impl(
ax, truecomp, predcomp, s=40, c=xfeature, title="SUM", xylim_equal=True
)
# summation over all the samples for each node
ax = axs[nshape[1] + 1]
xfeature = []
truecomp = []
predcomp = []
for inode in range(nshape[1]):
xfeature.append(sum(self.node_feature[:][inode]))
truecomp.append(sum(true_values[:][inode]))
predcomp.append(sum(predicted_values[:][inode]))
self.__scatter_impl(
ax,
truecomp,
predcomp,
s=40,
c=xfeature,
title="SMP_Mean4sites:0-" + str(nshape[1]),
xylim_equal=True,
)
for iext in range(nshape[1] + 2, axs.size):
axs[iext].axis("off")
plt.subplots_adjust(
left=0.05, bottom=0.05, right=0.98, top=0.95, wspace=0.1, hspace=0.25
)
if save_plot:
if iepoch:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_"
+ str(iepoch).zfill(4)
+ ".png"
)
else:
fig.savefig(f"./logs/{self.model_with_config_name}/" + varname + ".png")
plt.close()
return
def create_error_histogram_per_node(
self, varname, true_values, predicted_values, iepoch=None, save_plot=True
):
"""Creates error histogram plot for true and predicted values of variable varname at iepoch.[node level]"""
nshape = np.asarray(predicted_values).shape
if nshape[1] == 1:
return
else:
nrow = floor(sqrt((nshape[1] + 2)))
ncol = ceil((nshape[1] + 2) / nrow)
# error plots
fig, axs = plt.subplots(nrow, ncol, figsize=(ncol * 3.5, nrow * 3.2))
axs = axs.flatten()
for inode in range(nshape[1]):
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(self.node_feature[isamp][inode])
truecomp.append(true_values[isamp][inode])
predcomp.append(predicted_values[isamp][inode])
hist1d, bin_edges = np.histogram(
np.array(predcomp) - np.array(truecomp), bins=40, density=True
)
ax = axs[inode]
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_title("node:" + str(inode))
ax = axs[nshape[1]]
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(sum(self.node_feature[isamp][:]))
truecomp.append(sum(true_values[isamp][:]))
predcomp.append(sum(predicted_values[isamp][:]))
hist1d, bin_edges = np.histogram(
np.array(predcomp) - np.array(truecomp), bins=40, density=True
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_title("SUM")
# summation over all the samples for each node
ax = axs[nshape[1] + 1]
xfeature = []
truecomp = []
predcomp = []
for inode in range(nshape[1]):
xfeature.append(sum(self.node_feature[:][inode]))
truecomp.append(sum(true_values[:][inode]))
predcomp.append(sum(predicted_values[:][inode]))
hist1d, bin_edges = np.histogram(
np.array(predcomp) - np.array(truecomp), bins=40, density=True
)
ax.plot(0.5 * (bin_edges[:-1] + bin_edges[1:]), hist1d, "ro")
ax.set_title("SMP_Mean4sites:0-" + str(nshape[1]))
for iext in range(nshape[1] + 2, axs.size):
axs[iext].axis("off")
plt.subplots_adjust(
left=0.075, bottom=0.1, right=0.98, top=0.9, wspace=0.2, hspace=0.35
)
if save_plot:
if iepoch:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_error_hist1d_"
+ str(iepoch).zfill(4)
+ ".png"
)
else:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_error_hist1d.png"
)
plt.close()
def create_parity_plot_vector(
self,
varname,
true_values,
predicted_values,
head_dim,
iepoch=None,
save_plot=True,
):
"""Creates scatter plots for true and predicted values of vector variable varname."""
predicted_vec = np.reshape(np.asarray(predicted_values), (-1, head_dim))
true_vec = np.reshape(np.asarray(true_values), (-1, head_dim))
num_samples = true_vec.shape[0]
markers_vec = ["o", "s", "d"] # different markers for three vector components
nrow = floor(sqrt(head_dim))
ncol = ceil(head_dim / nrow)
fig, axs = plt.subplots(nrow, ncol, figsize=(ncol * 4, nrow * 4))
axs = axs.flatten()
for icomp in range(head_dim):
ax = axs[icomp]
self.__scatter_impl(
ax,
true_vec[:, icomp],
predicted_vec[:, icomp],
s=6,
c="b",
marker=markers_vec[icomp],
title="comp:" + str(icomp),
xylim_equal=True,
)
for iext in range(head_dim, axs.size):
axs[iext].axis("off")
plt.subplots_adjust(
left=0.1, bottom=0.12, right=0.98, top=0.9, wspace=0.2, hspace=0.25
)
if save_plot:
if iepoch:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_"
+ str(iepoch).zfill(4)
+ ".png"
)
else:
fig.savefig(f"./logs/{self.model_with_config_name}/" + varname + ".png")
plt.close()
# FIXME: this function is currently unused and is explicitly written for 3d vectors.
def create_parity_plot_per_node_vector(
self, varname, true_values, predicted_values, iepoch=None, save_plot=True
):
"""Creates scatter plots for true and predicted values of vector variable varname[nodel level]."""
nshape = np.asarray(predicted_values).shape
predicted_vec = np.reshape(np.asarray(predicted_values), (nshape[0], -1, 3))
true_vec = np.reshape(np.asarray(true_values), (nshape[0], -1, 3))
num_nodes = true_vec.shape[1]
markers_vec = ["o", "s", "d"] # different markers for three vector components
nrow = floor(sqrt((num_nodes + 2)))
ncol = ceil((num_nodes + 2) / nrow)
fig, axs = plt.subplots(nrow, ncol, figsize=(ncol * 3, nrow * 3))
axs = axs.flatten()
for inode in range(num_nodes):
for icomp in range(3):
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(self.node_feature[isamp][inode])
truecomp.append(true_vec[isamp, inode, icomp])
predcomp.append(predicted_vec[isamp, inode, icomp])
ax = axs[inode]
self.__scatter_impl(
ax,
truecomp,
predcomp,
s=6,
c=xfeature,
marker=markers_vec[icomp],
title="node:" + str(inode),
xylim_equal=True,
)
ax = axs[num_nodes] # summation over all the nodes
for icomp in range(3):
xfeature = []
truecomp = []
predcomp = []
for isamp in range(nshape[0]):
xfeature.append(sum(self.node_feature[isamp][:]))
truecomp.append(sum(true_vec[isamp, :, icomp]))
predcomp.append(sum(predicted_vec[isamp, :, icomp]))
self.__scatter_impl(
ax,
truecomp,
predcomp,
s=40,
c=xfeature,
marker=markers_vec[icomp],
title="SUM",
xylim_equal=True,
)
ax = axs[num_nodes + 1] # summation over all the samples for each node
for icomp in range(3):
xfeature = []
truecomp = []
predcomp = []
for inode in range(num_nodes):
xfeature.append(sum(self.node_feature[:][inode]))
truecomp.append(sum(true_vec[:, inode, icomp]))
predcomp.append(sum(predicted_vec[:, inode, icomp]))
self.__scatter_impl(
ax,
truecomp,
predcomp,
s=40,
c=xfeature,
marker=markers_vec[icomp],
title="SMP_Mean4sites:0-" + str(num_nodes),
xylim_equal=True,
)
for iext in range(num_nodes + 2, axs.size):
axs[iext].axis("off")
plt.subplots_adjust(
left=0.05, bottom=0.05, right=0.98, top=0.95, wspace=0.1, hspace=0.25
)
if save_plot:
if iepoch:
fig.savefig(
f"./logs/{self.model_with_config_name}/"
+ varname
+ "_"
+ str(iepoch).zfill(4)
+ ".png"
)
else:
fig.savefig(f"./logs/{self.model_with_config_name}/" + varname + ".png")
plt.close()
def add_identity(self, axes, *line_args, **line_kwargs):
(identity,) = axes.plot([], [], *line_args, **line_kwargs)
def callback(axes):
low_x, high_x = axes.get_xlim()
low_y, high_y = axes.get_ylim()
low = max(low_x, low_y)
high = min(high_x, high_y)
identity.set_data([low, high], [low, high])
callback(axes)
axes.callbacks.connect("xlim_changed", callback)
axes.callbacks.connect("ylim_changed", callback)
return axes
def plot_history(
self,
total_loss_train,
total_loss_val,
total_loss_test,
task_loss_train,
task_loss_val,
task_loss_test,
task_weights,
task_names,
):
nrow = 1
fhist = open(f"./logs/{self.model_with_config_name}/history_loss.pckl", "wb")
pickle.dump(
[
total_loss_train,
total_loss_val,
total_loss_test,
task_loss_train,
task_loss_val,
task_loss_test,
task_weights,
task_names,
],
fhist,
)
fhist.close()
num_tasks = len(task_loss_train[0])
if num_tasks > 0:
task_loss_train = np.array(task_loss_train)
task_loss_val = np.array(task_loss_val)
task_loss_test = np.array(task_loss_test)
nrow = 2
fig, axs = plt.subplots(nrow, num_tasks, figsize=(16, 6 * nrow))
axs = axs.flatten()
ax = axs[0]
ax.plot(total_loss_train, "-", label="train")
ax.plot(total_loss_val, ":", label="validation")
ax.plot(total_loss_test, "--", label="test")
ax.set_title("total loss")
ax.set_xlabel("Epochs")
ax.set_yscale("log")
ax.legend()
for iext in range(1, num_tasks):
axs[iext].axis("off")
for ivar in range(task_loss_train.shape[1]):
ax = axs[num_tasks + ivar]
ax.plot(task_loss_train[:, ivar], label="train")
ax.plot(task_loss_val[:, ivar], label="validation")
ax.plot(task_loss_test[:, ivar], "--", label="test")
ax.set_title(task_names[ivar] + ", {:.4f}".format(task_weights[ivar]))
ax.set_xlabel("Epochs")
ax.set_yscale("log")
if ivar == 0:
ax.legend()
for iext in range(num_tasks + task_loss_train.shape[1], axs.size):
axs[iext].axis("off")
plt.subplots_adjust(
left=0.1, bottom=0.08, right=0.98, top=0.9, wspace=0.25, hspace=0.3
)
fig.savefig(f"./logs/{self.model_with_config_name}/history_loss.png")
plt.close()
def create_scatter_plots(
self, true_values, predicted_values, output_names=None, iepoch=None
):
"""Creates scatter plots for all head predictions."""
for ihead in range(self.num_heads):
if self.head_dims[ihead] > 1:
# vector output
self.create_parity_plot_vector(
output_names[ihead],
true_values[ihead],
predicted_values[ihead],
self.head_dims[ihead],
iepoch,
)
else:
self.create_parity_plot_and_error_histogram_scalar(
output_names[ihead],
true_values[ihead],
predicted_values[ihead],
iepoch,
)
self.create_error_histogram_per_node(
output_names[ihead],
true_values[ihead],
predicted_values[ihead],
iepoch,
)
def create_plot_global(self, true_values, predicted_values, output_names=None):
"""Creates global analysis for all head predictons, e.g., scatter/condmean/error pdf plot."""
for ihead in range(self.num_heads):
self.create_plot_global_analysis(
output_names[ihead],
true_values[ihead],
predicted_values[ihead],
save_plot=True,
)
def num_nodes_plot(
self,
):
fig, ax = plt.subplots(1, 1, figsize=(8, 8))
ax.hist(self.num_nodes_list)
ax.set_title("Histogram of graph size in test set")
ax.set_xlabel("number of nodes")
fig.savefig(f"./logs/{self.model_with_config_name}/num_nodes.png")
plt.close()
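# --- Added usage sketch (hedged; not part of HydraGNN). All names, shapes, and the
# "energy" head below are illustrative assumptions, chosen only to show how the
# plotting entry points of the class above are meant to be called for one scalar head.
if __name__ == "__main__":
    import os

    name = "example_model"
    os.makedirs(f"./logs/{name}", exist_ok=True)  # the class saves figures under ./logs/<name>/

    rng = np.random.default_rng(0)
    num_samples, num_nodes = 16, 4
    node_feature = rng.random((num_samples, num_nodes))
    true_vals = [rng.random((num_samples, 1))]  # one head, scalar output per graph
    pred_vals = [true_vals[0] + 0.05 * rng.standard_normal((num_samples, 1))]

    viz = Visualizer(
        name,
        node_feature=node_feature,
        num_nodes_list=[num_nodes] * num_samples,
        num_heads=1,
        head_dims=[1],
    )
    viz.create_scatter_plots(true_vals, pred_vals, output_names=["energy"])
    viz.create_plot_global(true_vals, pred_vals, output_names=["energy"])
    viz.num_nodes_plot()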
| 38.773098
| 199
| 0.51428
|
fa706d8a2d41fde3ba277a1c2697224fcd405ebc
| 2,321
|
py
|
Python
|
examples/BeerDistributionProblem.py
|
betterhours/pulp
|
0618ec018bb33b1b77e5ff868162f7ffde9f1566
|
[
"MIT"
] | 4
|
2019-08-13T20:26:15.000Z
|
2021-07-05T15:52:45.000Z
|
examples/BeerDistributionProblem.py
|
betterhours/pulp
|
0618ec018bb33b1b77e5ff868162f7ffde9f1566
|
[
"MIT"
] | null | null | null |
examples/BeerDistributionProblem.py
|
betterhours/pulp
|
0618ec018bb33b1b77e5ff868162f7ffde9f1566
|
[
"MIT"
] | 1
|
2019-12-03T17:11:35.000Z
|
2019-12-03T17:11:35.000Z
|
"""
The Beer Distribution Problem for the PuLP Modeller
Authors: Antony Phillips, Dr Stuart Mitchell 2007
"""
# Import PuLP modeler functions
from pulp import *
# Creates a list of all the supply nodes
Warehouses = ["A", "B"]
# Creates a dictionary for the number of units of supply for each supply node
supply = {"A": 1000,
"B": 4000}
# Creates a list of all demand nodes
Bars = ["1", "2", "3", "4", "5"]
# Creates a dictionary for the number of units of demand for each demand node
demand = {"1":500,
"2":900,
"3":1800,
"4":200,
"5":700,}
# Creates a list of costs of each transportation path
costs = [ #Bars
#1 2 3 4 5
[2,4,5,2,1],#A Warehouses
[3,1,3,2,3] #B
]
# The cost data is made into a dictionary
costs = makeDict([Warehouses,Bars],costs,0)
# Creates the 'prob' variable to contain the problem data
prob = LpProblem("Beer Distribution Problem",LpMinimize)
# Creates a list of tuples containing all the possible routes for transport
Routes = [(w,b) for w in Warehouses for b in Bars]
# A dictionary called 'vars' is created to contain the referenced variables (the routes)
vars = LpVariable.dicts("Route",(Warehouses,Bars),0,None,LpInteger)
# The objective function is added to 'prob' first
prob += lpSum([vars[w][b]*costs[w][b] for (w,b) in Routes]), "Sum_of_Transporting_Costs"
# The supply maximum constraints are added to prob for each supply node (warehouse)
for w in Warehouses:
prob += lpSum([vars[w][b] for b in Bars])<=supply[w], "Sum_of_Products_out_of_Warehouse_%s"%w
# The demand minimum constraints are added to prob for each demand node (bar)
for b in Bars:
prob += lpSum([vars[w][b] for w in Warehouses])>=demand[b], "Sum_of_Products_into_Bar%s"%b
# The problem data is written to an .lp file
prob.writeLP("BeerDistributionProblem.lp")
# The problem is solved using PuLP's choice of Solver
prob.solve()
# The status of the solution is printed to the screen
print("Status:", LpStatus[prob.status])
# Each of the variables is printed with its resolved optimum value
for v in prob.variables():
print(v.name, "=", v.varValue)
# The optimised objective function value is printed to the screen
print("Total Cost of Transportation = ", value(prob.objective))
| 32.236111
| 97
| 0.686342
|
7f5ff4231f4b0d9373600e812fc0898e661da928
| 37,153
|
py
|
Python
|
pybind/nos/v7_1_0/interface/tengigabitethernet/ipv6/vrrpv3_group/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/nos/v7_1_0/interface/tengigabitethernet/ipv6/vrrpv3_group/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/nos/v7_1_0/interface/tengigabitethernet/ipv6/vrrpv3_group/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import virtual_ip
import track
class vrrpv3_group(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/tengigabitethernet/ipv6/vrrpv3-group. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Start vrrpv3 configuration
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__vrid','__use_v2_checksum','__virtual_ip','__track','__advertisement_interval','__enable','__hold_time','__preempt_mode','__priority','__description',)
_yang_name = 'vrrpv3-group'
_rest_name = 'vrrp-group'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__use_v2_checksum = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-v2-checksum", rest_name="use-v2-checksum", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use v2 checksum'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'callpoint': u'vrrpv3TrackPortPhyIntf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
self.__vrid = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
self.__priority = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
self.__advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'100..40900']}), is_leaf=True, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__virtual_ip = YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
self.__hold_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__preempt_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'tengigabitethernet', u'ipv6', u'vrrpv3-group']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'TenGigabitEthernet', u'ipv6', u'vrrp-group']
def _get_vrid(self):
"""
Getter method for vrid, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/vrid (vrid-type)
"""
return self.__vrid
def _set_vrid(self, v, load=False):
"""
Setter method for vrid, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/vrid (vrid-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrid is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrid() directly.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vrid must be of a type compatible with vrid-type""",
'defined-type': "brocade-vrrpv3:vrid-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)""",
})
self.__vrid = t
if hasattr(self, '_set'):
self._set()
def _unset_vrid(self):
self.__vrid = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
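  # --- Added usage sketch (hedged; not part of the generated bindings). pyangbind
  # containers are normally populated through these private setters, e.g.:
  #     grp = vrrpv3_group()
  #     grp._set_vrid(10, load=True)   # key leaf; load=True skips the instantiated-list guard
  #     grp._set_hold_time(60)
  #     grp._set_enable(True)          # YANG 'empty' leaf, modelled as a boolean presence flag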
def _get_use_v2_checksum(self):
"""
Getter method for use_v2_checksum, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/use_v2_checksum (empty)
YANG Description: Use v2 checksum
"""
return self.__use_v2_checksum
def _set_use_v2_checksum(self, v, load=False):
"""
Setter method for use_v2_checksum, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/use_v2_checksum (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_use_v2_checksum is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_use_v2_checksum() directly.
YANG Description: Use v2 checksum
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="use-v2-checksum", rest_name="use-v2-checksum", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use v2 checksum'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """use_v2_checksum must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-v2-checksum", rest_name="use-v2-checksum", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use v2 checksum'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__use_v2_checksum = t
if hasattr(self, '_set'):
self._set()
def _unset_use_v2_checksum(self):
self.__use_v2_checksum = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-v2-checksum", rest_name="use-v2-checksum", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Use v2 checksum'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_virtual_ip(self):
"""
Getter method for virtual_ip, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/virtual_ip (list)
"""
return self.__virtual_ip
def _set_virtual_ip(self, v, load=False):
"""
Setter method for virtual_ip, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/virtual_ip (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_virtual_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_virtual_ip() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """virtual_ip must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)""",
})
self.__virtual_ip = t
if hasattr(self, '_set'):
self._set()
def _unset_virtual_ip(self):
self.__virtual_ip = YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3VirtualIPPhyIntf'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
def _get_track(self):
"""
Getter method for track, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/track (container)
YANG Description: Interface to be tracked
"""
return self.__track
def _set_track(self, v, load=False):
"""
Setter method for track, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/track (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_track is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_track() directly.
YANG Description: Interface to be tracked
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'callpoint': u'vrrpv3TrackPortPhyIntf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """track must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'callpoint': u'vrrpv3TrackPortPhyIntf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)""",
})
self.__track = t
if hasattr(self, '_set'):
self._set()
def _unset_track(self):
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'callpoint': u'vrrpv3TrackPortPhyIntf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
def _get_advertisement_interval(self):
"""
Getter method for advertisement_interval, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/advertisement_interval (uint32)
YANG Description: Set advertisement interval
"""
return self.__advertisement_interval
def _set_advertisement_interval(self, v, load=False):
"""
Setter method for advertisement_interval, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/advertisement_interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertisement_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertisement_interval() directly.
YANG Description: Set advertisement interval
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'100..40900']}), is_leaf=True, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """advertisement_interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'100..40900']}), is_leaf=True, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__advertisement_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_advertisement_interval(self):
self.__advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'100..40900']}), is_leaf=True, yang_name="advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_enable(self):
"""
Getter method for enable, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/enable (empty)
YANG Description: Enable Session
"""
return self.__enable
def _set_enable(self, v, load=False):
"""
Setter method for enable, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/enable (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable() directly.
YANG Description: Enable Session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__enable = t
if hasattr(self, '_set'):
self._set()
def _unset_enable(self):
self.__enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_hold_time(self):
"""
Getter method for hold_time, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/hold_time (uint32)
YANG Description: Configure hold time for this session
"""
return self.__hold_time
def _set_hold_time(self, v, load=False):
"""
Setter method for hold_time, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/hold_time (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_hold_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_hold_time() directly.
YANG Description: Configure hold time for this session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hold_time must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__hold_time = t
if hasattr(self, '_set'):
self._set()
def _unset_hold_time(self):
self.__hold_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_preempt_mode(self):
"""
Getter method for preempt_mode, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/preempt_mode (empty)
YANG Description: Set preempt mode for the session
"""
return self.__preempt_mode
def _set_preempt_mode(self, v, load=False):
"""
Setter method for preempt_mode, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/preempt_mode (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_preempt_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_preempt_mode() directly.
YANG Description: Set preempt mode for the session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """preempt_mode must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__preempt_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_preempt_mode(self):
self.__preempt_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_priority(self):
"""
Getter method for priority, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/priority (uint8)
YANG Description: Set router priority within virtual router
"""
return self.__priority
def _set_priority(self, v, load=False):
"""
Setter method for priority, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/priority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_priority() directly.
YANG Description: Set router priority within virtual router
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """priority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)""",
})
self.__priority = t
if hasattr(self, '_set'):
self._set()
def _unset_priority(self):
self.__priority = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
def _get_description(self):
"""
Getter method for description, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/description (string)
YANG Description: Interface specific description
"""
return self.__description
def _set_description(self, v, load=False):
"""
Setter method for description, mapped from YANG variable /interface/tengigabitethernet/ipv6/vrrpv3_group/description (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_description is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_description() directly.
YANG Description: Interface specific description
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
def _unset_description(self):
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
vrid = __builtin__.property(_get_vrid, _set_vrid)
use_v2_checksum = __builtin__.property(_get_use_v2_checksum, _set_use_v2_checksum)
virtual_ip = __builtin__.property(_get_virtual_ip, _set_virtual_ip)
track = __builtin__.property(_get_track, _set_track)
advertisement_interval = __builtin__.property(_get_advertisement_interval, _set_advertisement_interval)
enable = __builtin__.property(_get_enable, _set_enable)
hold_time = __builtin__.property(_get_hold_time, _set_hold_time)
preempt_mode = __builtin__.property(_get_preempt_mode, _set_preempt_mode)
priority = __builtin__.property(_get_priority, _set_priority)
description = __builtin__.property(_get_description, _set_description)
_pyangbind_elements = {'vrid': vrid, 'use_v2_checksum': use_v2_checksum, 'virtual_ip': virtual_ip, 'track': track, 'advertisement_interval': advertisement_interval, 'enable': enable, 'hold_time': hold_time, 'preempt_mode': preempt_mode, 'priority': priority, 'description': description, }
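# The generated class above exposes each YANG leaf through a plain Python property
# whose setter validates values against the YANG-derived restrictions. A minimal,
# self-contained sketch of that pattern follows; the class name, the stored
# attribute and the error message are illustrative assumptions, not part of the
# generated bindings.
class Vrrpv3GroupSketch:
    """Illustrative stand-in for the generated getter/setter/property pattern."""

    def __init__(self):
        self.__priority = None

    def _get_priority(self):
        return self.__priority

    def _set_priority(self, v, load=False):
        # Mirror the YANG restriction 'range 1..254' that YANGDynClass enforces.
        if not isinstance(v, int) or not (1 <= v <= 254):
            raise ValueError("priority must be of a type compatible with uint8")
        self.__priority = v

    priority = property(_get_priority, _set_priority)


g = Vrrpv3GroupSketch()
g.priority = 100      # accepted: within the restricted range 1..254
try:
    g.priority = 300  # rejected: outside the restricted range
except ValueError as err:
    print("rejected:", err)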
avg_line_length: 77.563674 | max_line_length: 999 | alphanum_fraction: 0.738675

hexsha: e258d2536b8f324ea16aa8e818ddb322ef30922e | size: 1,374 | ext: py | lang: Python
max_stars_repo_path: scene/util.py | max_stars_repo_name: jyf588/pytorch-rl-bullet | max_stars_repo_head_hexsha: 3ac1835d01e658b2078126895ffa0eb11304abb4 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: scene/util.py | max_issues_repo_name: jyf588/pytorch-rl-bullet | max_issues_repo_head_hexsha: 3ac1835d01e658b2078126895ffa0eb11304abb4 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scene/util.py | max_forks_repo_name: jyf588/pytorch-rl-bullet | max_forks_repo_head_hexsha: 3ac1835d01e658b2078126895ffa0eb11304abb4 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import copy
from typing import List, Tuple
def convert_scene_for_placing(opt, scene: List) -> Tuple:
"""Converts the scene into a modified scene for placing, with the following steps:
1. Denote the (x, y) location of object index 0 as the placing destination (x, y).
2. Remove object index 0 from the scene list.
Args:
scene: The original scene.
Returns:
new_scene: The scene modified for placing.
place_dst_xy: The (x, y) placing destination for the placing task.
place_dest_object: A copy of the source object placed at the destination, used to visualize the placing target.
"""
# Make sure that removing the destination object from the scene won't modify the
# source object index.
assert opt.scene_place_src_idx < opt.scene_place_dst_idx
# Remove the destination object from the scene.
new_scene = copy.deepcopy(
scene[: opt.scene_place_dst_idx] + scene[opt.scene_place_dst_idx + 1 :]
)
# Use the location of the destination object as the (x, y) placing destination.
place_dst_xy = scene[opt.scene_place_dst_idx]["position"][:2]
# Construct an imaginary object with the same shape attribute as the source object
# to visualize the placing destination.
place_dest_object = copy.deepcopy(scene[opt.scene_place_src_idx])
place_dest_object["position"] = place_dst_xy + [0.0]
place_dest_object["height"] = 0.005
return new_scene, place_dst_xy, place_dest_object
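# Hypothetical usage of convert_scene_for_placing. The option object and the toy
# scene below are illustrative assumptions, not values from the original repo;
# they only demonstrate the removal/destination behaviour documented above.
if __name__ == "__main__":
    from types import SimpleNamespace

    opt = SimpleNamespace(scene_place_src_idx=0, scene_place_dst_idx=1)
    scene = [
        {"shape": "box", "position": [0.10, 0.20, 0.05], "height": 0.10},
        {"shape": "cylinder", "position": [0.30, -0.10, 0.05], "height": 0.10},
    ]
    new_scene, place_dst_xy, place_dest_object = convert_scene_for_placing(opt, scene)
    print(len(new_scene))     # 1 -> the destination object was removed
    print(place_dst_xy)       # [0.3, -0.1] -> (x, y) of the removed destination object
    print(place_dest_object)  # copy of the source object moved to the destination, height 0.005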
avg_line_length: 39.257143 | max_line_length: 90 | alphanum_fraction: 0.705968

hexsha: 2dfd548e03b5375d2580ca5731e5cbe1e6805e82 | size: 478 | ext: py | lang: Python
max_stars_repo_path: math_operation.py | max_stars_repo_name: Te020370/python_lesson_2 | max_stars_repo_head_hexsha: 8f91f3784497091a5c618731fbbb7fea2c5ee6b9 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2020-03-30T16:26:46.000Z | max_stars_repo_stars_event_max_datetime: 2020-03-30T16:26:46.000Z
max_issues_repo_path: math_operation.py | max_issues_repo_name: Te020370/python_lesson_2 | max_issues_repo_head_hexsha: 8f91f3784497091a5c618731fbbb7fea2c5ee6b9 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: math_operation.py | max_forks_repo_name: Te020370/python_lesson_2 | max_forks_repo_head_hexsha: 8f91f3784497091a5c618731fbbb7fea2c5ee6b9 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
a = input('First number: ')
b = input('Second number: ')
a = int(a)
b = int(b)
print(a+b)
print(a-b)
print(a*b)
result = a/b
print(type(result))
print(result)
print(a**2)
# Logical operations
a_1 = True
b_2 = False
# Logical negation (NOT)
print('Answer')
print(not a_1)
# Logical AND
print('Answer')
print(a_1 and b_2)
# Logical OR
print(a_1 or b_2)
a_2 = 10
print(a_2 > 100)
print(a_2 < 100)
print(a_2 >= 100)
print(a_2 <= 100)
print(a_2 == 100)
print(a_2 != 100)
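# Illustrative addition, not part of the original lesson file: '/' above always
# returns a float in Python 3, while '//' performs floor division; a zero divisor
# should also be guarded to avoid ZeroDivisionError.
if b != 0:
    print(a // b)  # floor division (integer result for int operands)
    print(a % b)   # remainder
else:
    print('Second number must not be zero')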
avg_line_length: 11.95 | max_line_length: 28 | alphanum_fraction: 0.650628

hexsha: 673b661beac523be480b7c7316b53b919dde60a9 | size: 4,193 | ext: py | lang: Python
max_stars_repo_path: test/functional/feature_notifications.py | max_stars_repo_name: foxdproject/foxdcoin | max_stars_repo_head_hexsha: 9db505f6f32bd3e51bd2b2da533744c98cee23af | max_stars_repo_licenses: ["MIT"] | max_stars_count: 7 | max_stars_repo_stars_event_min_datetime: 2020-06-19T20:49:02.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-31T09:12:18.000Z
max_issues_repo_path: test/functional/feature_notifications.py | max_issues_repo_name: foxdproject/foxdcoin | max_issues_repo_head_hexsha: 9db505f6f32bd3e51bd2b2da533744c98cee23af | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2021-02-26T19:14:11.000Z | max_issues_repo_issues_event_max_datetime: 2021-02-26T19:14:11.000Z
max_forks_repo_path: test/functional/feature_notifications.py | max_forks_repo_name: foxdproject/foxdcoin | max_forks_repo_head_hexsha: 9db505f6f32bd3e51bd2b2da533744c98cee23af | max_forks_repo_licenses: ["MIT"] | max_forks_count: 12 | max_forks_repo_forks_event_min_datetime: 2020-05-02T20:01:44.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-03T11:02:13.000Z
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2017-2019 The Raven Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test the -alertnotify, -blocknotify and -walletnotify options.
"""
import os
from test_framework.test_framework import FoxdcoinTestFramework
from test_framework.util import assert_equal, wait_until, connect_nodes_bi
class NotificationsTest(FoxdcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def setup_network(self):
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
self.block_filename = os.path.join(self.options.tmpdir, "blocks.txt")
self.tx_filename = os.path.join(self.options.tmpdir, "transactions.txt")
# -alertnotify and -blocknotify on node0, walletnotify on node1
self.extra_args = [["-blockversion=536870912",
"-alertnotify=echo %%s >> %s" % self.alert_filename,
"-blocknotify=echo %%s >> %s" % self.block_filename],
["-blockversion=1610612736",
"-rescan",
"-walletnotify=echo %%s >> %s" % self.tx_filename]]
super().setup_network()
def run_test(self):
self.log.info("test -blocknotify")
block_count = 10
blocks = self.nodes[1].generate(block_count)
# wait at most 10 seconds for expected file size before reading the content
wait_until(lambda: os.path.isfile(self.block_filename) and os.stat(self.block_filename).st_size >= (block_count * 65), err_msg="Wait for FileSize", timeout=10)
# file content should equal the generated blocks hashes
with open(self.block_filename, 'r') as f:
assert_equal(sorted(blocks), sorted(f.read().splitlines()))
self.log.info("test -walletnotify")
# wait at most 10 seconds for expected file size before reading the content
wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), err_msg="Wait for FileSize", timeout=10)
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
with open(self.tx_filename, 'r') as f:
assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
os.remove(self.tx_filename)
self.log.info("test -walletnotify after rescan")
# restart node to rescan to force wallet notifications
self.restart_node(1)
connect_nodes_bi(self.nodes, 0, 1)
wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), err_msg="Wait for FileSize", timeout=10)
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
with open(self.tx_filename, 'r') as f:
assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
# Mine another 41 up-version blocks. -alertnotify should trigger on the 51st.
self.log.info("test -alertnotify")
self.nodes[1].generate(41)
self.sync_all()
# Give foxdcoind 10 seconds to write the alert notification
wait_until(lambda: os.path.isfile(self.alert_filename) and os.path.getsize(self.alert_filename), err_msg="Wait for FileSize", timeout=10)
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text = f.read()
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].generate(2)
self.sync_all()
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text2 = f.read()
self.log.info("-alertnotify should not continue notifying for more unknown version blocks")
assert_equal(alert_text, alert_text2)
if __name__ == '__main__':
NotificationsTest().main()
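# The assertions above rely on the framework's wait_until helper to poll each
# notification file until it reaches the expected size. The helper below is a
# minimal, self-contained sketch of that polling pattern, included only for
# illustration; it is an assumption, not the test framework's implementation.
import time


def wait_until_sketch(predicate, timeout=10, poll_interval=0.5):
    """Poll predicate() until it returns True, or raise after `timeout` seconds."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return True
        time.sleep(poll_interval)
    raise AssertionError("condition not met within %s seconds" % timeout)


# Example: wait_until_sketch(lambda: os.path.isfile(path) and os.stat(path).st_size >= expected)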
avg_line_length: 46.076923 | max_line_length: 167 | alphanum_fraction: 0.663487

hexsha: d465025ae1279331eef7d3c008bd68144efa9088 | size: 4,646 | ext: py | lang: Python
max_stars_repo_path: optimization/rgbd/step2_sparse_fusion.py | max_stars_repo_name: AvatarWorld/hifi3dface | max_stars_repo_head_hexsha: fc2a7294f916e5a097247a5dffeac73ae5d0898e | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2021-09-10T10:04:01.000Z | max_stars_repo_stars_event_max_datetime: 2021-09-10T10:05:12.000Z
max_issues_repo_path: optimization/rgbd/step2_sparse_fusion.py | max_issues_repo_name: xieydd/hifi3dface | max_issues_repo_head_hexsha: 39b7fef458458d95cb4f8c6679a1ce941cb191e4 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: optimization/rgbd/step2_sparse_fusion.py | max_forks_repo_name: xieydd/hifi3dface | max_forks_repo_head_hexsha: 39b7fef458458d95cb4f8c6679a1ce941cb191e4 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2021-09-10T10:03:55.000Z | max_forks_repo_forks_event_max_datetime: 2021-09-10T10:03:55.000Z
# -*- coding:utf8 -*-
"""
This file is part of the repo: https://github.com/tencent-ailab/hifi3dface
If you find the code useful, please cite our paper:
"High-Fidelity 3D Digital Human Creation from RGB-D Selfies."
Xiangkai Lin*, Yajing Chen*, Linchao Bao*, Haoxian Zhang, Sheng Wang, Xuefei Zhe, Xinwei Jiang, Jue Wang, Dong Yu, and Zhengyou Zhang.
arXiv: https://arxiv.org/abs/2010.05562
Copyright (c) [2020] [Tencent AI Lab]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import os
from absl import app, flags, logging
import scipy.io as sio
import numpy as np
import sys
sys.path.append("../..")
from RGBD_utils.SparseFusion import (
find_3d_keypoints_from_landmark_and_depth_86,
get_trans_base_to_camera,
)
from RGBD_utils.CropMask import crop_depth_for_fusion
from RGBD_utils.PoseTools import PoseTools
from utils.ply import write_ply, write_obj
import cv2
def load_from_npz(base_dir):
data = np.load(os.path.join(base_dir, "step1_data_for_fusion.npz"))
K = data["K"]
lmk3d_select = data["lmk3d_select"]
img_select = data["img_select"]
depth_select = data["depth_select"]
pt3d_select = data["pt3d_select"]
first_trans_select = data["first_trans_select"]
return img_select, depth_select, lmk3d_select, pt3d_select, first_trans_select, K
def run(prefit_dir):
print("---- step2 start -----")
print("running base:", prefit_dir)
(
img_select,
depth_select,
lmk3d_select,
pt3d_select,
first_trans_select,
K,
) = load_from_npz(prefit_dir)
# 1. get mask depth
_, _, depth_ori_select = crop_depth_for_fusion(
img_select, depth_select, lmk3d_select
)
# 2. fusion 3d points
pt3d_remove_outliers_camera = find_3d_keypoints_from_landmark_and_depth_86(
first_trans_select, pt3d_select, lmk3d_select, depth_ori_select, img_select, K
)
# get trans
trans_base_2_camera = get_trans_base_to_camera(
pt3d_remove_outliers_camera.transpose(), FLAGS.is_bfm
)
if FLAGS.is_bfm:
thr1 = 20.0
thr2 = 20.0
else:
thr1 = 2.0
thr2 = 2.0
pt3d_remove_outliers = PoseTools.backtrans(
pt3d_remove_outliers_camera, trans_base_2_camera
)
bad_idx = pt3d_remove_outliers_camera[2, :] < 1
pt3d_remove_outliers[:, bad_idx] = -10000
# use the y and z coordinates of symmetric contour points
s1 = pt3d_remove_outliers[1:, 0:8]
s2 = pt3d_remove_outliers[1:, np.array(range(16, 8, -1))]
loss = np.sum(np.array(abs(abs(s1) - abs(s2))), axis=0)
mid = np.median(loss[loss < thr1])
error = np.array(loss > min(mid + 0.5, thr2)).astype(np.int32)
error_countour = np.concatenate(
(np.concatenate((error, [0])), error[np.array(range(7, -1, -1))])
)
bad_idx = np.where(error_countour[:] > 0)
pt3d_remove_outliers[:, bad_idx] = -10000
# save
np.savez(
os.path.join(prefit_dir, "step2_fusion.npz"),
pt3d_remove_outliers=pt3d_remove_outliers,
trans_base_2_camera=trans_base_2_camera,
first_trans_select=first_trans_select,
depth_ori_select=depth_ori_select,
)
temp = pt3d_remove_outliers[:, pt3d_remove_outliers[2, :] > -100] # 3 * n
write_ply(
os.path.join(prefit_dir, "vis_pt3d_remove_outliers.ply"),
temp.transpose(),
None,
None,
True,
)
print("---- step2 succeed -----")
def main(_):
prefit_dir = FLAGS.prefit
run(prefit_dir)
if __name__ == "__main__":
FLAGS = flags.FLAGS
flags.DEFINE_string("prefit", "prefit_bfm/", "output data directory")
flags.DEFINE_boolean("is_bfm", False, "default: False")
app.run(main)
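# Illustration only: the contour-symmetry outlier check performed in run() above,
# applied to toy data. The array shape, random values and thresholds here are
# assumptions for demonstration and are not taken from the original pipeline.
def _symmetry_outlier_demo():
    rng = np.random.RandomState(0)
    pt3d = rng.uniform(-5.0, 5.0, size=(3, 86))  # 3 x n fused keypoints (toy values)
    thr1 = thr2 = 2.0

    # Compare |y|,|z| of the left contour points (indices 0..7) with their mirrored
    # right-side counterparts (indices 16..9); a large mismatch flags outliers.
    s1 = pt3d[1:, 0:8]
    s2 = pt3d[1:, np.arange(16, 8, -1)]
    loss = np.sum(np.abs(np.abs(s1) - np.abs(s2)), axis=0)
    inliers = loss[loss < thr1]
    mid = np.median(inliers) if inliers.size else 0.0
    error = (loss > min(mid + 0.5, thr2)).astype(np.int32)
    error_contour = np.concatenate((error, [0], error[::-1]))
    print("rejected contour indices:", np.where(error_contour > 0)[0])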
avg_line_length: 32.263889 | max_line_length: 135 | alphanum_fraction: 0.702109