hexsha
stringlengths 40
40
| size
int64 4
1.02M
| ext
stringclasses 8
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
209
| max_stars_repo_name
stringlengths 5
121
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
209
| max_issues_repo_name
stringlengths 5
121
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
209
| max_forks_repo_name
stringlengths 5
121
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 4
1.02M
| avg_line_length
float64 1.07
66.1k
| max_line_length
int64 4
266k
| alphanum_fraction
float64 0.01
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
38b4efdc66b2eccdd02a4324cf9beeee9bd3f6bc
| 2,801
|
py
|
Python
|
expense_record/apps/api/migrations/0002_auto_20210915_1906.py
|
vieirafrancisco/InvoiceRegister
|
e98baf8db85c1ed17a69c0930c4e07e23814972b
|
[
"MIT"
] | null | null | null |
expense_record/apps/api/migrations/0002_auto_20210915_1906.py
|
vieirafrancisco/InvoiceRegister
|
e98baf8db85c1ed17a69c0930c4e07e23814972b
|
[
"MIT"
] | 5
|
2020-07-23T22:54:45.000Z
|
2021-09-16T19:58:47.000Z
|
expense_record/apps/api/migrations/0002_auto_20210915_1906.py
|
vieirafrancisco/InvoiceRegister
|
e98baf8db85c1ed17a69c0930c4e07e23814972b
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.7 on 2021-09-15 22:06
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.2.7). It (a) adds Portuguese
    # verbose names to the CreditCard model and its fields, (b) creates the
    # Bill model, and (c) adds a check constraint so a Bill cannot be flagged
    # as both a service and an installment purchase at the same time.
    # NOTE: auto-generated migrations should not be edited by hand beyond
    # comments; a new migration should be generated for schema changes.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('api', '0001_initial'),
    ]
    operations = [
        # Human-readable (pt-BR) names for the CreditCard model in the admin.
        migrations.AlterModelOptions(
            name='creditcard',
            options={'verbose_name': 'Cartão de Crédito', 'verbose_name_plural': 'Cartões de Crédito'},
        ),
        migrations.AlterField(
            model_name='creditcard',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, verbose_name='Data de Criação'),
        ),
        migrations.AlterField(
            model_name='creditcard',
            name='invoice_close_day',
            field=models.IntegerField(verbose_name='Dia de fechamento da Fatura'),
        ),
        migrations.AlterField(
            model_name='creditcard',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Nome'),
        ),
        migrations.AlterField(
            model_name='creditcard',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Usuário'),
        ),
        # New Bill model: an expense tied to a credit card, optionally a
        # recurring service or an installment purchase (mutually exclusive,
        # enforced by the constraint below).
        migrations.CreateModel(
            name='Bill',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='Nome')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('date', models.DateTimeField(verbose_name='Data')),
                ('value', models.DecimalField(decimal_places=2, max_digits=19, verbose_name='Valor')),
                ('is_service', models.BooleanField(default=False, verbose_name='É um serviço?')),
                ('is_installment', models.BooleanField(default=False, verbose_name='É parcelado?')),
                ('end_date', models.DateTimeField(blank=True, null=True)),
                ('has_promotion', models.BooleanField(default=False, verbose_name='Tem promoção?')),
                ('promotion_num_months', models.SmallIntegerField(default=0, verbose_name='Número de mêses com a promoção')),
                ('credit_card', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.creditcard', verbose_name='Cartão de Crédito')),
            ],
        ),
        # DB-level guard: NOT (is_service AND is_installment).
        migrations.AddConstraint(
            model_name='bill',
            constraint=models.CheckConstraint(check=models.Q(('is_service', True), ('is_installment', True), _negated=True), name='service_and_installment_not_both_true'),
        ),
    ]
| 45.918033
| 171
| 0.626205
|
7768152fd42f25bd67798e0907e4431ff6554c61
| 3,500
|
py
|
Python
|
Algorithm.Python/MarginCallEventsAlgorithm.py
|
BlackBoxAM/Lean
|
5ea9f04b104d27f0fcfe3a383a3a60ca12206d99
|
[
"Apache-2.0"
] | 6,580
|
2015-01-12T16:48:44.000Z
|
2022-03-31T22:05:09.000Z
|
Algorithm.Python/MarginCallEventsAlgorithm.py
|
BlackBoxAM/Lean
|
5ea9f04b104d27f0fcfe3a383a3a60ca12206d99
|
[
"Apache-2.0"
] | 3,392
|
2015-01-12T17:44:07.000Z
|
2022-03-30T20:34:03.000Z
|
Algorithm.Python/MarginCallEventsAlgorithm.py
|
BlackBoxAM/Lean
|
5ea9f04b104d27f0fcfe3a383a3a60ca12206d99
|
[
"Apache-2.0"
] | 3,354
|
2015-01-12T16:58:31.000Z
|
2022-03-31T00:56:03.000Z
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from AlgorithmImports import *
### <summary>
### This algorithm showcases two margin related event handlers.
### OnMarginCallWarning: Fired when a portfolio's remaining margin dips below 5% of the total portfolio value
### OnMarginCall: Fired immediately before margin call orders are executed, this gives the algorithm a chance to regain margin on its own through liquidation
### </summary>
### <meta name="tag" content="securities and portfolio" />
### <meta name="tag" content="margin models" />
class MarginCallEventsAlgorithm(QCAlgorithm):
    """
    This algorithm showcases two margin related event handlers.
    OnMarginCallWarning: Fired when a portfolio's remaining margin dips below 5% of the total portfolio value
    OnMarginCall: Fired immediately before margin call orders are executed, this gives the algorithm a chance to regain margin on its own through liquidation
    """

    def Initialize(self):
        """Set up cash, date range, the SPY subscription, and leverage."""
        self.SetCash(100000)
        self.SetStartDate(2013,10,1)
        self.SetEndDate(2013,12,11)
        self.AddEquity("SPY", Resolution.Second)
        # cranking up the leverage increases the odds of a margin call
        # when the security falls in value
        self.Securities["SPY"].SetLeverage(100)

    def OnData(self, data):
        """Go fully into SPY on the first bar we see (leveraged 100x)."""
        if not self.Portfolio.Invested:
            self.SetHoldings("SPY",100)

    def OnMarginCall(self, requests):
        # Margin call event handler. This method is called right before the margin call orders are placed in the market.
        # <param name="requests">The orders to be executed to bring this algorithm within margin limits</param>
        # this code gets called BEFORE the orders are placed, so we can try to liquidate some of our positions
        # before we get the margin call orders executed. We could also modify these orders by changing their quantities
        #
        # BUGFIX: iterate over a snapshot of `requests`. The previous code
        # removed from and appended to `requests` while iterating it, which
        # skips every other original order and can re-process the replacement
        # orders it just appended.
        for order in list(requests):
            # liquidate an extra 10% each time we get a margin call to give us more padding
            # NOTE(review): np.sign(q) * q == abs(q), so newQuantity is always
            # positive — this mirrors the upstream C# sample, but verify the
            # sign convention is correct for short positions.
            newQuantity = int(np.sign(order.Quantity) * order.Quantity * 1.1)
            requests.remove(order)
            requests.append(SubmitOrderRequest(order.OrderType, order.SecurityType, order.Symbol, newQuantity, order.StopPrice, order.LimitPrice, self.Time, "OnMarginCall"))
        return requests

    def OnMarginCallWarning(self):
        # Margin call warning event handler.
        # This method is called when Portfolio.MarginRemaining is under 5% of your Portfolio.TotalPortfolioValue
        # a chance to prevent a margin call from occurring
        spyHoldings = self.Securities["SPY"].Holdings.Quantity
        # sell 0.5% of the position (negative quantity = sell for a long)
        shares = int(-spyHoldings * 0.005)
        self.Error("{0} - OnMarginCallWarning(): Liquidating {1} shares of SPY to avoid margin call.".format(self.Time, shares))
        self.MarketOrder("SPY", shares)
| 50.724638
| 173
| 0.724286
|
6ef1a4f9e1afd2418cdb813a6b5b028f267f82f6
| 11,923
|
py
|
Python
|
discord/components.py
|
rldnyt/discord.pyc
|
e9d190b79ce6df798f144d1abea19f863e3fdbb9
|
[
"MIT"
] | 23
|
2021-08-28T10:14:19.000Z
|
2021-12-24T15:10:58.000Z
|
discord/components.py
|
Sengolda/discord.py
|
5cd31dd6c2fe1a7b5d9538ff949fc371e92ec26b
|
[
"MIT"
] | 2
|
2021-08-31T08:16:17.000Z
|
2021-08-31T15:21:40.000Z
|
discord/components.py
|
Sengolda/discord.py
|
5cd31dd6c2fe1a7b5d9538ff949fc371e92ec26b
|
[
"MIT"
] | 3
|
2021-08-31T07:37:40.000Z
|
2021-09-14T11:59:47.000Z
|
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import (
Any,
ClassVar,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
TypeVar,
Union,
)
from .enums import try_enum, ComponentType, ButtonStyle
from .utils import get_slots, MISSING
from .partial_emoji import PartialEmoji, _EmojiTag
if TYPE_CHECKING:
from .types.components import (
Component as ComponentPayload,
ButtonComponent as ButtonComponentPayload,
SelectMenu as SelectMenuPayload,
SelectOption as SelectOptionPayload,
ActionRow as ActionRowPayload,
)
from .emoji import Emoji
# Public API of this module.
__all__ = (
    "Component",
    "ActionRow",
    "Button",
    "SelectMenu",
    "SelectOption",
)
# TypeVar bound to Component so _raw_construct can return the subclass type.
C = TypeVar("C", bound="Component")
class Component:
    """Represents a Discord Bot UI Kit Component.
    Currently, the only components supported by Discord are:
    - :class:`ActionRow`
    - :class:`Button`
    - :class:`SelectMenu`
    This class is abstract and cannot be instantiated.
    .. versionadded:: 2.0
    Attributes
    ------------
    type: :class:`ComponentType`
        The type of component.
    """

    __slots__: Tuple[str, ...] = ("type",)
    __repr_info__: ClassVar[Tuple[str, ...]]
    type: ComponentType

    def __repr__(self) -> str:
        described = (f"{name}={getattr(self, name)!r}" for name in self.__repr_info__)
        return f"<{self.__class__.__name__} {' '.join(described)}>"

    @classmethod
    def _raw_construct(cls: Type[C], **kwargs) -> C:
        # Build an instance without running __init__, populating only the
        # slots the caller actually supplied.
        self: C = cls.__new__(cls)
        for slot in get_slots(cls):
            if slot in kwargs:
                setattr(self, slot, kwargs[slot])
        return self

    def to_dict(self) -> Dict[str, Any]:
        raise NotImplementedError
class ActionRow(Component):
    """Represents a Discord Bot UI Kit Action Row.
    This is a component that holds up to 5 children components in a row.
    This inherits from :class:`Component`.
    .. versionadded:: 2.0
    Attributes
    ------------
    type: :class:`ComponentType`
        The type of component.
    children: List[:class:`Component`]
        The children components that this holds, if any.
    """

    __slots__: Tuple[str, ...] = ("children",)
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: ComponentPayload):
        self.type: ComponentType = try_enum(ComponentType, data["type"])
        raw_children = data.get("components", [])
        self.children: List[Component] = [_component_factory(child) for child in raw_children]

    def to_dict(self) -> ActionRowPayload:
        payload = {
            "type": int(self.type),
            "components": [child.to_dict() for child in self.children],
        }
        return payload  # type: ignore
class Button(Component):
    """Represents a button from the Discord Bot UI Kit.
    This inherits from :class:`Component`.
    .. note::
        The user constructible and usable type to create a button is :class:`discord.ui.Button`
        not this one.
    .. versionadded:: 2.0
    Attributes
    -----------
    style: :class:`.ButtonStyle`
        The style of the button.
    custom_id: Optional[:class:`str`]
        The ID of the button that gets received during an interaction.
        If this button is for a URL, it does not have a custom ID.
    url: Optional[:class:`str`]
        The URL this button sends you to.
    disabled: :class:`bool`
        Whether the button is disabled or not.
    label: Optional[:class:`str`]
        The label of the button, if any.
    emoji: Optional[:class:`PartialEmoji`]
        The emoji of the button, if available.
    """

    __slots__: Tuple[str, ...] = (
        "style",
        "custom_id",
        "url",
        "disabled",
        "label",
        "emoji",
    )
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: ButtonComponentPayload):
        self.type: ComponentType = try_enum(ComponentType, data["type"])
        self.style: ButtonStyle = try_enum(ButtonStyle, data["style"])
        self.custom_id: Optional[str] = data.get("custom_id")
        self.url: Optional[str] = data.get("url")
        self.disabled: bool = data.get("disabled", False)
        self.label: Optional[str] = data.get("label")
        # URL-style buttons carry no emoji key in the payload.
        self.emoji: Optional[PartialEmoji]
        if "emoji" in data:
            self.emoji = PartialEmoji.from_dict(data["emoji"])
        else:
            self.emoji = None

    def to_dict(self) -> ButtonComponentPayload:
        body = {
            "type": 2,
            "style": int(self.style),
            "label": self.label,
            "disabled": self.disabled,
        }
        # Optional keys are emitted only when set, matching what the API expects.
        if self.custom_id:
            body["custom_id"] = self.custom_id
        if self.url:
            body["url"] = self.url
        if self.emoji:
            body["emoji"] = self.emoji.to_dict()
        return body  # type: ignore
class SelectMenu(Component):
    """Represents a select menu from the Discord Bot UI Kit.
    A select menu is functionally the same as a dropdown, however
    on mobile it renders a bit differently.
    .. note::
        The user constructible and usable type to create a select menu is
        :class:`discord.ui.Select` not this one.
    .. versionadded:: 2.0
    Attributes
    ------------
    custom_id: Optional[:class:`str`]
        The ID of the select menu that gets received during an interaction.
    placeholder: Optional[:class:`str`]
        The placeholder text that is shown if nothing is selected, if any.
    min_values: :class:`int`
        The minimum number of items that must be chosen for this select menu.
        Defaults to 1 and must be between 1 and 25.
    max_values: :class:`int`
        The maximum number of items that must be chosen for this select menu.
        Defaults to 1 and must be between 1 and 25.
    options: List[:class:`SelectOption`]
        A list of options that can be selected in this menu.
    disabled: :class:`bool`
        Whether the select is disabled or not.
    """

    __slots__: Tuple[str, ...] = (
        "custom_id",
        "placeholder",
        "min_values",
        "max_values",
        "options",
        "disabled",
    )
    __repr_info__: ClassVar[Tuple[str, ...]] = __slots__

    def __init__(self, data: SelectMenuPayload):
        self.type = ComponentType.select
        self.custom_id: str = data["custom_id"]
        self.placeholder: Optional[str] = data.get("placeholder")
        self.min_values: int = data.get("min_values", 1)
        self.max_values: int = data.get("max_values", 1)
        raw_options = data.get("options", [])
        self.options: List[SelectOption] = [SelectOption.from_dict(raw) for raw in raw_options]
        self.disabled: bool = data.get("disabled", False)

    def to_dict(self) -> SelectMenuPayload:
        body: SelectMenuPayload = {
            "type": self.type.value,
            "custom_id": self.custom_id,
            "min_values": self.min_values,
            "max_values": self.max_values,
            "options": [option.to_dict() for option in self.options],
            "disabled": self.disabled,
        }
        # Only include the placeholder when one is set (truthy, matching API expectations).
        if self.placeholder:
            body["placeholder"] = self.placeholder
        return body
class SelectOption:
    """Represents a select menu's option.
    These can be created by users.
    .. versionadded:: 2.0
    Attributes
    -----------
    label: :class:`str`
        The label of the option. This is displayed to users.
        Can only be up to 100 characters.
    value: :class:`str`
        The value of the option. This is not displayed to users.
        If not provided when constructed then it defaults to the
        label. Can only be up to 100 characters.
    description: Optional[:class:`str`]
        An additional description of the option, if any.
        Can only be up to 100 characters.
    emoji: Optional[Union[:class:`str`, :class:`Emoji`, :class:`PartialEmoji`]]
        The emoji of the option, if available.
    default: :class:`bool`
        Whether this option is selected by default.
    """

    __slots__: Tuple[str, ...] = (
        "label",
        "value",
        "description",
        "emoji",
        "default",
    )

    def __init__(
        self,
        *,
        label: str,
        value: str = MISSING,
        description: Optional[str] = None,
        emoji: Optional[Union[str, Emoji, PartialEmoji]] = None,
        default: bool = False,
    ) -> None:
        self.label = label
        # The value falls back to the label when the caller omits it.
        self.value = label if value is MISSING else value
        self.description = description
        # Normalize whatever emoji form the caller passed into a PartialEmoji.
        if emoji is None:
            self.emoji = None
        elif isinstance(emoji, str):
            self.emoji = PartialEmoji.from_str(emoji)
        elif isinstance(emoji, _EmojiTag):
            self.emoji = emoji._to_partial()
        else:
            raise TypeError(
                f"expected emoji to be str, Emoji, or PartialEmoji not {emoji.__class__}"
            )
        self.default = default

    def __repr__(self) -> str:
        return (
            f"<SelectOption label={self.label!r} value={self.value!r} description={self.description!r} "
            f"emoji={self.emoji!r} default={self.default!r}>"
        )

    def __str__(self) -> str:
        base = f"{self.emoji} {self.label}" if self.emoji else self.label
        return f"{base}\n{self.description}" if self.description else base

    @classmethod
    def from_dict(cls, data: SelectOptionPayload) -> SelectOption:
        if "emoji" in data:
            emoji = PartialEmoji.from_dict(data["emoji"])
        else:
            emoji = None
        return cls(
            label=data["label"],
            value=data["value"],
            description=data.get("description"),
            emoji=emoji,
            default=data.get("default", False),
        )

    def to_dict(self) -> SelectOptionPayload:
        body: SelectOptionPayload = {
            "label": self.label,
            "value": self.value,
            "default": self.default,
        }
        # Optional fields are only serialized when present.
        if self.emoji:
            body["emoji"] = self.emoji.to_dict()  # type: ignore
        if self.description:
            body["description"] = self.description
        return body
def _component_factory(data: ComponentPayload) -> Component:
    """Turn a raw component payload into the matching Component subclass.

    Unknown component types degrade to a bare :class:`Component` carrying
    only the (possibly unrecognised) type value.
    """
    constructors = {1: ActionRow, 2: Button, 3: SelectMenu}
    component_type = data["type"]
    constructor = constructors.get(component_type)
    if constructor is not None:
        return constructor(data)  # type: ignore
    as_enum = try_enum(ComponentType, component_type)
    return Component._raw_construct(type=as_enum)
| 29.733167
| 104
| 0.610249
|
60eb4ee0ef17e0cd9c15fd42ca2b7c15bc3eb40a
| 325
|
py
|
Python
|
ulinalg/tests/test_torch.py
|
MridulS/uarray
|
09d6ad25eebd5547ac30bd3b5c93b10697156413
|
[
"BSD-3-Clause"
] | null | null | null |
ulinalg/tests/test_torch.py
|
MridulS/uarray
|
09d6ad25eebd5547ac30bd3b5c93b10697156413
|
[
"BSD-3-Clause"
] | null | null | null |
ulinalg/tests/test_torch.py
|
MridulS/uarray
|
09d6ad25eebd5547ac30bd3b5c93b10697156413
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
import ulinalg as ula
import ulinalg.torch_backend
torch = pytest.importorskip('torch')
def test_svd():
    """Every array returned by svd on a torch input should be a torch Tensor."""
    identity = torch.eye(5)
    for result in ula.svd(identity):
        assert isinstance(result, torch.Tensor)
def test_svd2():
    """With compute_uv=False, svd on a torch input should return a single torch Tensor."""
    identity = torch.eye(5)
    singular_values = ula.svd(identity, compute_uv=False)
    assert isinstance(singular_values, torch.Tensor)
| 19.117647
| 69
| 0.713846
|
90ad56b779801644c87811afa95f8954b3f6ea39
| 89
|
py
|
Python
|
genius_purchase/__init__.py
|
antoniodavid/addons_genius
|
9838ee523e39b3db22a3c03016100d7696403d48
|
[
"MIT"
] | 1
|
2019-02-10T21:29:14.000Z
|
2019-02-10T21:29:14.000Z
|
genius_purchase/__init__.py
|
antoniodavid/addons_genius
|
9838ee523e39b3db22a3c03016100d7696403d48
|
[
"MIT"
] | null | null | null |
genius_purchase/__init__.py
|
antoniodavid/addons_genius
|
9838ee523e39b3db22a3c03016100d7696403d48
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import api
from . import controllers
from . import models
| 17.8
| 25
| 0.674157
|
5932f469d03f4d1a13f57854e2cf399e357a6797
| 7,755
|
py
|
Python
|
examples/asr/emformer_rnnt/mustc/lightning.py
|
LaudateCorpus1/audio
|
a007e922d34028270197c0549bf452b79499d039
|
[
"BSD-2-Clause"
] | null | null | null |
examples/asr/emformer_rnnt/mustc/lightning.py
|
LaudateCorpus1/audio
|
a007e922d34028270197c0549bf452b79499d039
|
[
"BSD-2-Clause"
] | null | null | null |
examples/asr/emformer_rnnt/mustc/lightning.py
|
LaudateCorpus1/audio
|
a007e922d34028270197c0549bf452b79499d039
|
[
"BSD-2-Clause"
] | null | null | null |
from functools import partial
from typing import List
import sentencepiece as spm
import torch
import torchaudio
from common import (
Batch,
FunctionalModule,
GlobalStatsNormalization,
WarmupLR,
batch_by_token_count,
piecewise_linear_log,
post_process_hypos,
spectrogram_transform,
)
from pytorch_lightning import LightningModule
from torchaudio.models import RNNTBeamSearch, emformer_rnnt_base
from .dataset import MUSTC
class CustomDataset(torch.utils.data.Dataset):
    r"""Sort samples by target length and batch to max token count."""

    def __init__(self, base_dataset, max_token_limit, max_len):
        super().__init__()
        self.base_dataset = base_dataset
        # Drop samples whose target exceeds max_len, then order the survivors
        # by ascending target length before packing them into batches.
        kept = [pair for pair in self.base_dataset.idx_target_lengths if pair[1] <= max_len]
        kept.sort(key=lambda pair: pair[1])
        self.batches = batch_by_token_count(kept, max_token_limit)

    def __getitem__(self, idx):
        # A "sample" here is a whole batch of underlying dataset items.
        return [self.base_dataset[sample_idx] for sample_idx in self.batches[idx]]

    def __len__(self):
        return len(self.batches)
class MuSTCRNNTModule(LightningModule):
    """Emformer RNN-T LightningModule for the MuST-C speech corpus.

    Wires together the Emformer RNN-T base model, the RNN-T loss, Adam with a
    warmup LR schedule, SentencePiece tokenization, feature pipelines (with
    SpecAugment-style masking for training only), and train/dev/test
    dataloaders.
    """
    def __init__(
        self,
        *,
        mustc_path: str,
        sp_model_path: str,
        global_stats_path: str,
    ):
        super().__init__()
        # num_symbols presumably equals the SentencePiece vocab size plus one
        # blank symbol (blank_idx below is set to the vocab size) — verify
        # against the sp model actually used.
        self.model = emformer_rnnt_base(num_symbols=501)
        self.loss = torchaudio.transforms.RNNTLoss(reduction="mean", clamp=1.0)
        self.optimizer = torch.optim.Adam(self.model.parameters(), lr=5e-4, betas=(0.9, 0.999), eps=1e-8)
        # Warmup over the first 10k optimizer steps (WarmupLR from common).
        self.warmup_lr_scheduler = WarmupLR(self.optimizer, 10000)
        # Training features: log-compress, normalize with precomputed global
        # stats, transpose, apply frequency/time masking (training-time
        # augmentation only), right-pad the trailing dim by 4, transpose back.
        self.train_data_pipeline = torch.nn.Sequential(
            FunctionalModule(piecewise_linear_log),
            GlobalStatsNormalization(global_stats_path),
            FunctionalModule(partial(torch.transpose, dim0=1, dim1=2)),
            torchaudio.transforms.FrequencyMasking(27),
            torchaudio.transforms.FrequencyMasking(27),
            torchaudio.transforms.TimeMasking(100, p=0.2),
            torchaudio.transforms.TimeMasking(100, p=0.2),
            FunctionalModule(partial(torch.nn.functional.pad, pad=(0, 4))),
            FunctionalModule(partial(torch.transpose, dim0=1, dim1=2)),
        )
        # Validation features: identical pipeline minus the masking augmentations.
        self.valid_data_pipeline = torch.nn.Sequential(
            FunctionalModule(piecewise_linear_log),
            GlobalStatsNormalization(global_stats_path),
            FunctionalModule(partial(torch.transpose, dim0=1, dim1=2)),
            FunctionalModule(partial(torch.nn.functional.pad, pad=(0, 4))),
            FunctionalModule(partial(torch.transpose, dim0=1, dim1=2)),
        )
        self.mustc_path = mustc_path
        self.sp_model = spm.SentencePieceProcessor(model_file=sp_model_path)
        # First index past the vocabulary serves as the RNN-T blank symbol.
        self.blank_idx = self.sp_model.get_piece_size()
    def _extract_labels(self, samples: List):
        """Convert text transcript into int labels."""
        # sample[1] is the transcript string; encode to token ids.
        targets = [self.sp_model.encode(sample[1]) for sample in samples]
        lengths = torch.tensor([len(elem) for elem in targets]).to(dtype=torch.int32)
        # NOTE(review): padding_value=1.0 on int32 targets pads with token
        # id 1 — confirm this matches the tokenizer's intended pad id.
        targets = torch.nn.utils.rnn.pad_sequence(
            [torch.tensor(elem) for elem in targets],
            batch_first=True,
            padding_value=1.0,
        ).to(dtype=torch.int32)
        return targets, lengths
    def _train_extract_features(self, samples: List):
        # sample[0] is the waveform; compute mel spectrograms, pad to a
        # batch, run the (augmenting) training pipeline.
        mel_features = [spectrogram_transform(sample[0].squeeze()).transpose(1, 0) for sample in samples]
        features = torch.nn.utils.rnn.pad_sequence(mel_features, batch_first=True)
        features = self.train_data_pipeline(features)
        # Lengths are the unpadded frame counts.
        lengths = torch.tensor([elem.shape[0] for elem in mel_features], dtype=torch.int32)
        return features, lengths
    def _valid_extract_features(self, samples: List):
        # Same as _train_extract_features but through the non-augmenting pipeline.
        mel_features = [spectrogram_transform(sample[0].squeeze()).transpose(1, 0) for sample in samples]
        features = torch.nn.utils.rnn.pad_sequence(mel_features, batch_first=True)
        features = self.valid_data_pipeline(features)
        lengths = torch.tensor([elem.shape[0] for elem in mel_features], dtype=torch.int32)
        return features, lengths
    def _train_collate_fn(self, samples: List):
        features, feature_lengths = self._train_extract_features(samples)
        targets, target_lengths = self._extract_labels(samples)
        return Batch(features, feature_lengths, targets, target_lengths)
    def _valid_collate_fn(self, samples: List):
        features, feature_lengths = self._valid_extract_features(samples)
        targets, target_lengths = self._extract_labels(samples)
        return Batch(features, feature_lengths, targets, target_lengths)
    def _test_collate_fn(self, samples: List):
        # Test batches additionally carry the reference transcripts.
        return self._valid_collate_fn(samples), [sample[1] for sample in samples]
    def _step(self, batch, batch_idx, step_type):
        """Shared train/val/test step: forward pass + RNN-T loss + logging."""
        if batch is None:
            return None
        # Prepend blank_idx to each target sequence (the start symbol fed to
        # the RNN-T prediction network).
        prepended_targets = batch.targets.new_empty([batch.targets.size(0), batch.targets.size(1) + 1])
        prepended_targets[:, 1:] = batch.targets
        prepended_targets[:, 0] = self.blank_idx
        prepended_target_lengths = batch.target_lengths + 1
        output, src_lengths, _, _ = self.model(
            batch.features,
            batch.feature_lengths,
            prepended_targets,
            prepended_target_lengths,
        )
        loss = self.loss(output, batch.targets, src_lengths, batch.target_lengths)
        self.log(f"Losses/{step_type}_loss", loss, on_step=True, on_epoch=True)
        return loss
    def configure_optimizers(self):
        # Scheduler stepped per optimizer step ("interval": "step"), not per epoch.
        return (
            [self.optimizer],
            [
                {"scheduler": self.warmup_lr_scheduler, "interval": "step"},
            ],
        )
    def forward(self, batch: Batch):
        """Beam-search decode a batch and return the top hypothesis.

        post_process_hypos(...)[0][0] — presumably the best transcript string;
        verify against post_process_hypos in common.
        """
        decoder = RNNTBeamSearch(self.model, self.blank_idx)
        hypotheses = decoder(batch.features.to(self.device), batch.feature_lengths.to(self.device), 20)
        return post_process_hypos(hypotheses, self.sp_model)[0][0]
    def training_step(self, batch: Batch, batch_idx):
        return self._step(batch, batch_idx, "train")
    def validation_step(self, batch, batch_idx):
        return self._step(batch, batch_idx, "val")
    def test_step(self, batch_tuple, batch_idx):
        # batch_tuple is (Batch, transcripts) from _test_collate_fn.
        return self._step(batch_tuple[0], batch_idx, "test")
    def train_dataloader(self):
        # 100 = max token count per batch, 20 = max target length (see CustomDataset).
        dataset = CustomDataset(MUSTC(self.mustc_path, subset="train"), 100, 20)
        dataloader = torch.utils.data.DataLoader(
            dataset,
            batch_size=None,
            collate_fn=self._train_collate_fn,
            num_workers=10,
            shuffle=True,
        )
        return dataloader
    def val_dataloader(self):
        dataset = CustomDataset(MUSTC(self.mustc_path, subset="dev"), 100, 20)
        dataloader = torch.utils.data.DataLoader(
            dataset,
            batch_size=None,
            collate_fn=self._valid_collate_fn,
            num_workers=10,
        )
        return dataloader
    def test_common_dataloader(self):
        # MuST-C "common" test split, one utterance per batch.
        dataset = MUSTC(self.mustc_path, subset="tst-COMMON")
        dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, collate_fn=self._test_collate_fn)
        return dataloader
    def test_he_dataloader(self):
        # MuST-C "HE" test split, one utterance per batch.
        dataset = MUSTC(self.mustc_path, subset="tst-HE")
        dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, collate_fn=self._test_collate_fn)
        return dataloader
    def dev_dataloader(self):
        # Dev split with test-style collation (includes transcripts).
        dataset = MUSTC(self.mustc_path, subset="dev")
        dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, collate_fn=self._test_collate_fn)
        return dataloader
| 39.974227
| 105
| 0.674404
|
6a62b1244a102629d257be1553abb87512d886eb
| 22,875
|
py
|
Python
|
ckanext/issues/controller/controller.py
|
rhabbachi/ckanext-issues
|
d8c3dde8372e88dd5dc173023df34c90034ca777
|
[
"MIT"
] | null | null | null |
ckanext/issues/controller/controller.py
|
rhabbachi/ckanext-issues
|
d8c3dde8372e88dd5dc173023df34c90034ca777
|
[
"MIT"
] | null | null | null |
ckanext/issues/controller/controller.py
|
rhabbachi/ckanext-issues
|
d8c3dde8372e88dd5dc173023df34c90034ca777
|
[
"MIT"
] | null | null | null |
import collections
from logging import getLogger
import re
from sqlalchemy import func
from pylons.i18n import _
from pylons import request, config, tmpl_context as c
from ckan.lib.base import BaseController, render, abort
import ckan.lib.helpers as h
from ckan.lib import mailer
import ckan.model as model
import ckan.logic as logic
import ckan.plugins as p
from ckan.plugins import toolkit
import ckanext.issues.model as issuemodel
from ckanext.issues.controller import show
from ckanext.issues.exception import ReportAlreadyExists
from ckanext.issues.lib import helpers as issues_helpers
from ckanext.issues.logic import schema
from ckanext.issues.lib.helpers import (Pagination, get_issues_per_page,
get_issue_subject)
log = getLogger(__name__)
# Cap on results returned — presumably used by autocomplete endpoints
# elsewhere in the extension; verify usage before changing.
AUTOCOMPLETE_LIMIT = 10
# Characters permitted in an issue category name (lowercase alnum, '-', '.', '_').
VALID_CATEGORY = re.compile(r"[0-9a-z\-\._]+")
# Page-size choices for issue listings (cf. get_issues_per_page helper import).
ISSUES_PER_PAGE = (15, 30, 50)
class IssueController(BaseController):
    def _before_dataset(self, dataset_id):
        '''Returns the dataset dict and checks issues are enabled for it.

        Side effects: stores the action context on ``self.context`` and the
        package dict on the pylons template context (``c.pkg``/``c.pkg_dict``).
        Aborts with 404 when the dataset is missing or issues are disabled,
        and 401 when the user may not view it.
        '''
        self.context = {'for_view': True}
        try:
            pkg = logic.get_action('package_show')(self.context,
                                                   {'id': dataset_id})
            # need this as some templates in core explicitly reference
            # c.pkg_dict
            c.pkg = pkg
            c.pkg_dict = c.pkg
            # keep the above lines to keep current code working till it's all
            # refactored out, otherwise, we should pass pkg as an extra_var
            # directly that's returned from this function
            if not issues_helpers.issues_enabled(pkg):
                abort(404, _('Issues have not been enabled for this dataset'))
            return pkg
        except logic.NotFound:
            abort(404, _('Dataset not found'))
        except p.toolkit.NotAuthorized:
            p.toolkit.abort(401,
                            _('Unauthorized to view issues for this dataset'))
    def _before_org(self, org_id):
        '''Returns the organization dict and checks issues are enabled for it.

        Sets ``self.context`` as a side effect. Aborts with 404/401 on
        missing organization, disabled issues, or missing authorization.
        '''
        self.context = {'for_view': True}
        try:
            org = logic.get_action('organization_show')(self.context,
                                                        {'id': org_id})
            # we should pass org to the template as an extra_var
            # directly that's returned from this function
            if not issues_helpers.issues_enabled_for_organization(org):
                abort(404, _('Issues have not been enabled for this organization'))
            return org
        except logic.NotFound:
            # NOTE(review): message says 'Dataset not found' for an
            # organization lookup — looks like a copy-paste from
            # _before_dataset; fix in a behavior-changing patch.
            abort(404, _('Dataset not found'))
        except p.toolkit.NotAuthorized:
            p.toolkit.abort(401,
                            _('Unauthorized to view issues for this organization'))
    def new(self, dataset_id, resource_id=None):
        '''Render the "add issue" form (GET) or create a new issue (POST).

        Requires a logged-in user with ``issue_create`` access on the
        dataset; optionally attaches the issue to a specific resource.
        On successful creation, flashes a message and redirects to the
        new issue's show page.
        '''
        dataset_dict = self._before_dataset(dataset_id)
        if not c.user:
            abort(401, _('Please login to add a new issue'))
        data_dict = {
            'dataset_id': dataset_dict['id'],
            'creator_id': c.userobj.id
        }
        try:
            logic.check_access('issue_create', self.context, data_dict)
        except logic.NotAuthorized:
            abort(401, _('Not authorized to add a new issue'))
        resource = model.Resource.get(resource_id) if resource_id else None
        if resource:
            data_dict['resource_id'] = resource.id
        c.errors, c.error_summary = {}, {}
        if request.method == 'POST':
            # TODO: ? use dictization etc
            # data = logic.clean_dict(
            #    df.unflatten(
            #        logic.tuplize_dict(
            #            logic.parse_params(request.params))))
            data_dict.update({
                'title': request.POST.get('title'),
                'description': request.POST.get('description')
            })
            # Minimal inline validation: only the title is mandatory.
            if not data_dict['title']:
                c.error_summary[_('title')] = [_("Please enter a title")]
            c.errors = c.error_summary
            if not c.error_summary:  # save and redirect
                issue_dict = logic.get_action('issue_create')(
                    data_dict=data_dict
                )
                h.flash_success(_('Your issue has been registered, '
                                  'thank you for the feedback'))
                p.toolkit.redirect_to(
                    'issues_show',
                    dataset_id=dataset_dict['name'],
                    issue_number=issue_dict['number'])
        c.data_dict = data_dict
        return render("issues/add.html")
    def show(self, issue_number, dataset_id):
        '''Render a single issue's detail page.

        Delegates data gathering to ``show.show``; converts validation
        errors and missing issues into 404 responses.
        '''
        dataset = self._before_dataset(dataset_id)
        try:
            extra_vars = show.show(issue_number,
                                   dataset_id,
                                   session=model.Session)
        # Python 2 except syntax — this file targets the legacy pylons stack.
        except toolkit.ValidationError, e:
            p.toolkit.abort(
                404, toolkit._(u'Issue not found: {0}').format(e.error_summary))
        except toolkit.ObjectNotFound, e:
            p.toolkit.abort(
                404, toolkit._(u'Issue not found: {0}').format(e))
        extra_vars['dataset'] = dataset
        return p.toolkit.render('issues/show.html', extra_vars=extra_vars)
    def edit(self, dataset_id, issue_number):
        '''Render the edit form (GET) or update an issue (POST).

        On validation failure the form is re-rendered with errors; on
        authorization failure a 401 is raised.
        '''
        self._before_dataset(dataset_id)
        issue = p.toolkit.get_action('issue_show')(
            data_dict={
                'issue_number': issue_number,
                'dataset_id': dataset_id,
            }
        )
        if request.method == 'GET':
            return p.toolkit.render(
                'issues/edit.html',
                extra_vars={
                    'issue': issue,
                    'errors': None,
                },
            )
        elif request.method == 'POST':
            data_dict = dict(request.params)
            data_dict['issue_number'] = issue_number
            data_dict['dataset_id'] = dataset_id
            try:
                p.toolkit.get_action('issue_update')(data_dict=data_dict)
                return p.toolkit.redirect_to('issues_show',
                                             issue_number=issue_number,
                                             dataset_id=dataset_id)
            # Python 2 except syntax — legacy pylons stack.
            except p.toolkit.ValidationError, e:
                errors = e.error_dict
                return p.toolkit.render(
                    'issues/edit.html',
                    extra_vars={
                        'issue': issue,
                        'errors': errors,
                    },
                )
            except p.toolkit.NotAuthorized, e:
                p.toolkit.abort(401, e.message)
    def comments(self, dataset_id, issue_number):
        """Create a comment on an issue, optionally closing/reopening it.

        POST-only endpoint.  A 'close' or 'reopen' field in the POST body
        flips the issue status (authorization is checked by the action)
        before the comment itself is created.  Always redirects back to
        the issue page.
        """
        # POST only
        if request.method != 'POST':
            abort(500, _('Invalid request'))
        dataset = self._before_dataset(dataset_id)
        auth_dict = {
            'dataset_id': c.pkg['id'],
            'issue_number': issue_number
        }
        # Are we not repeating stuff in logic ???
        try:
            logic.check_access('issue_create', self.context, auth_dict)
        except logic.NotAuthorized:
            abort(401, _('Not authorized'))
        next_url = h.url_for('issues_show',
                             dataset_id=c.pkg['name'],
                             issue_number=issue_number)
        # TODO: (?) move validation somewhere better than controller
        comment = request.POST.get('comment')
        if not comment or comment.strip() == '':
            h.flash_error(_('Comment cannot be empty'))
            p.toolkit.redirect_to(next_url)
            return
        # do this first because will error here if not allowed and do not want
        # comment created in that case
        if 'close' in request.POST or 'reopen' in request.POST:
            status = (issuemodel.ISSUE_STATUS.closed if 'close' in request.POST
                      else issuemodel.ISSUE_STATUS.open)
            issue_dict = {
                'issue_number': issue_number,
                'dataset_id': dataset['id'],
                'status': status
            }
            try:
                logic.get_action('issue_update')(self.context, issue_dict)
            except p.toolkit.NotAuthorized as e:
                p.toolkit.abort(401, e.message)
            if 'close' in request.POST:
                h.flash_success(_("Issue closed"))
            else:
                h.flash_success(_("Issue re-opened"))
        # Status change (if any) succeeded; now persist the comment itself.
        data_dict = {
            'author_id': c.userobj.id,
            'comment': comment.strip(),
            'dataset_id': dataset['id'],
            'issue_number': issue_number,
        }
        logic.get_action('issue_comment_create')(self.context, data_dict)
        p.toolkit.redirect_to(next_url)
def dataset(self, dataset_id):
"""
Display a page containing a list of all issues items for a dataset,
sorted by category.
"""
pkg_dict = self._before_dataset(dataset_id)
try:
extra_vars = issues_for_dataset(dataset_id, request.GET)
log.debug(extra_vars);
except toolkit.ValidationError, e:
_dataset_handle_error(dataset_id, e)
extra_vars['pkg_dict'] = pkg_dict
return render("issues/dataset.html", extra_vars=extra_vars)
    def delete(self, dataset_id, issue_number):
        """Confirm (GET) and perform (POST) deletion of an issue.

        A 'cancel' request parameter short-circuits back to the issue
        page.  Unauthorized deletion attempts abort with 401.
        """
        dataset = self._before_dataset(dataset_id)
        if 'cancel' in request.params:
            # User backed out of the confirmation form.
            p.toolkit.redirect_to('issues_show',
                                  dataset_id=dataset_id,
                                  issue_number=issue_number)
        if request.method == 'POST':
            try:
                toolkit.get_action('issue_delete')(
                    data_dict={'issue_number': issue_number,
                               'dataset_id': dataset_id}
                )
            except toolkit.NotAuthorized:
                msg = _(u'Unauthorized to delete issue {0}').format(
                    issue_number)
                toolkit.abort(401, msg)
            h.flash_notice(_(u'Issue has been deleted.'))
            p.toolkit.redirect_to('issues_dataset', dataset_id=dataset_id)
        else:
            # GET: render the confirmation page.
            return render('issues/confirm_delete.html',
                          extra_vars={
                              'issue_number': issue_number,
                              'pkg': dataset,
                          })
def assign(self, dataset_id, issue_number):
dataset = self._before_dataset(dataset_id)
if request.method == 'POST':
try:
assignee_id = request.POST.get('assignee')
assignee = toolkit.get_action('user_show')(
data_dict={'id': assignee_id})
except toolkit.ObjectNotFound:
h.flash_error(_(u'User {0} does not exist').format(assignee_id))
return p.toolkit.redirect_to('issues_show',
issue_number=issue_number,
dataset_id=dataset_id)
try:
issue = toolkit.get_action('issue_update')(
data_dict={
'issue_number': issue_number,
'assignee_id': assignee['id'],
'dataset_id': dataset_id
}
)
notifications = p.toolkit.asbool(
config.get('ckanext.issues.send_email_notifications')
)
if notifications:
subject = get_issue_subject(issue)
msg = toolkit._("Assigned to %s")
body = msg % assignee['display_name']
user_obj = model.User.get(assignee_id)
try:
mailer.mail_user(user_obj, subject, body)
except mailer.MailerException, e:
log.debug(e.message)
except toolkit.NotAuthorized:
msg = _(u'Unauthorized to assign users to issue')
toolkit.abort(401, msg)
except toolkit.ValidationError, e:
toolkit.abort(404)
return p.toolkit.redirect_to('issues_show',
issue_number=issue_number,
dataset_id=dataset_id)
    def report(self, dataset_id, issue_number):
        """Flag an issue as spam/abuse (POST only for the action itself).

        Administrators get back report_info and a detailed flash message;
        ordinary users just get an acknowledgement.  Always redirects to
        the issue page afterwards.
        """
        dataset = self._before_dataset(dataset_id)
        if request.method == 'POST':
            if not c.user:
                msg = _('You must be logged in to report issues')
                toolkit.abort(401, msg)
            try:
                report_info = toolkit.get_action('issue_report')(
                    data_dict={
                        'issue_number': issue_number,
                        'dataset_id': dataset_id
                    }
                )
                if report_info:
                    # we have this info if it is an admin
                    msgs = [_('Report acknowledged.')]
                    if report_info['abuse_status'] == \
                            issuemodel.AbuseStatus.abuse.value:
                        msgs.append(_('Marked as abuse/spam.'))
                    msgs.append(_('Issue is visible.')
                                if report_info['visibility'] == 'visible' else
                                _('Issue is invisible to normal users.'))
                    h.flash_success(' '.join(msgs))
                else:
                    h.flash_success(_('Issue reported to an administrator'))
            except toolkit.ValidationError:
                toolkit.abort(404)
            except toolkit.ObjectNotFound:
                toolkit.abort(404)
            except ReportAlreadyExists, e:
                # Duplicate report from the same user: surface as a flash.
                h.flash_error(e.message)
        p.toolkit.redirect_to('issues_show',
                              dataset_id=dataset_id,
                              issue_number=issue_number)
    def report_comment(self, dataset_id, issue_number, comment_id):
        """Flag a single comment as spam/abuse (POST).

        Mirrors report(): admins receive report_info and a detailed
        message, others a plain acknowledgement; redirects back to the
        issue page in every outcome.
        """
        dataset = self._before_dataset(dataset_id)
        if request.method == 'POST':
            if not c.user:
                msg = _('You must be logged in to report comments')
                toolkit.abort(401, msg)
            try:
                report_info = toolkit.get_action('issue_comment_report')(
                    data_dict={
                        'comment_id': comment_id,
                        'issue_number': issue_number,
                        'dataset_id': dataset_id
                    }
                )
                if report_info:
                    # we have this info if it is an admin
                    msgs = [_('Report acknowledged.')]
                    if report_info['abuse_status'] == \
                            issuemodel.AbuseStatus.abuse.value:
                        msgs.append(_('Marked as abuse/spam.'))
                    msgs.append(_('Comment is visible.')
                                if report_info['visibility'] == 'visible' else
                                _('Comment is invisible to normal users.'))
                    h.flash_success(' '.join(msgs))
                else:
                    h.flash_success(_('Comment has been reported to an administrator'))
                p.toolkit.redirect_to('issues_show',
                                      dataset_id=dataset_id,
                                      issue_number=issue_number)
            except toolkit.ValidationError:
                toolkit.abort(404)
            except toolkit.ObjectNotFound:
                toolkit.abort(404)
            except ReportAlreadyExists, e:
                # Duplicate report: flash and fall through to the redirect.
                h.flash_error(e.message)
        p.toolkit.redirect_to('issues_show', dataset_id=dataset_id,
                              issue_number=issue_number)
def report_clear(self, dataset_id, issue_number):
dataset = self._before_dataset(dataset_id)
if request.method == 'POST':
try:
toolkit.get_action('issue_report_clear')(
data_dict={
'issue_number': issue_number,
'dataset_id': dataset_id
}
)
h.flash_success(_('Issue report cleared'))
p.toolkit.redirect_to('issues_show',
dataset_id=dataset_id,
issue_number=issue_number)
except toolkit.NotAuthorized:
msg = _(u'You must be logged in clear abuse reports')
toolkit.abort(401, msg)
except toolkit.ValidationError:
toolkit.abort(404)
except toolkit.ObjectNotFound:
toolkit.abort(404)
    def comment_report_clear(self, dataset_id, issue_number, comment_id):
        """Clear spam/abuse reports on a single comment (POST only).

        Aborts 401 when not authorized and 404 on invalid identifiers.
        """
        dataset = self._before_dataset(dataset_id)
        if request.method == 'POST':
            try:
                toolkit.get_action('issue_comment_report_clear')(
                    data_dict={'comment_id': comment_id,
                               'issue_number': issue_number,
                               'dataset_id': dataset_id}
                )
                h.flash_success(_('Spam/abuse report cleared'))
                p.toolkit.redirect_to('issues_show',
                                      dataset_id=dataset_id,
                                      issue_number=issue_number)
            except toolkit.NotAuthorized:
                msg = _(u'You must be logged in to clear abuse reports')
                toolkit.abort(401, msg)
            except toolkit.ValidationError:
                toolkit.abort(404)
            except toolkit.ObjectNotFound:
                toolkit.abort(404)
def issues_for_organization(self, org_id):
"""
Display a page containing a list of all issues for a given organization
"""
self._before_org(org_id)
try:
template_params = issues_for_org(org_id, request.GET)
except toolkit.ValidationError, e:
msg = toolkit._(u'Validation error: {0}').format(e.error_summary)
log.warning(msg + u' - Issues for org: %s', org_id)
h.flash(msg, category='alert-error')
return p.toolkit.redirect_to('issues_for_organization',
org_id=org_id)
return render("issues/organization_issues.html",
extra_vars=template_params)
# TO DELETE
c.org = model.Group.get(org_id)
q = """
SELECT table_id
FROM member
WHERE group_id='{gid}'
AND table_name='package'
AND state='active'
""".format(gid=c.org.id)
results = model.Session.execute(q)
dataset_ids = [x['table_id'] for x in results]
issues = model.Session.query(issuemodel.Issue)\
.filter(issuemodel.Issue.dataset_id.in_(dataset_ids))\
.order_by(issuemodel.Issue.created.desc())
c.results = collections.defaultdict(list)
for issue in issues:
c.results[issue.package].append(issue)
c.package_set = sorted(set(c.results.keys()), key=lambda x: x.title)
return render("issues/organization_issues.html")
def all_issues_page(self):
"""
Display a page containing a list of all issues items
"""
template_params = all_issues(request.GET)
return render("issues/all_issues.html", extra_vars=template_params)
def _dataset_handle_error(dataset_id, exc):
    """Flash *exc*'s validation summary and bounce back to the dataset page."""
    message = toolkit._(u'Validation error: {0}').format(exc.error_summary)
    h.flash(message, category='alert-error')
    return p.toolkit.redirect_to('issues_dataset', dataset_id=dataset_id)
def issues_for_dataset(dataset_id, get_query_dict):
    """Validate GET parameters and run an issue search for one dataset.

    Raises toolkit.ValidationError when the query string is invalid.
    """
    validated, errors = toolkit.navl_validate(
        dict(get_query_dict),
        schema.issue_dataset_controller_schema()
    )
    if errors:
        raise toolkit.ValidationError(errors)
    # navl adds an '__extras' bucket that _search_issues does not accept.
    validated.pop('__extras', None)
    return _search_issues(dataset_id=dataset_id, **validated)
def issues_for_org(org_id, get_query_dict):
    """Validate GET parameters and search issues across one organization.

    The returned template dict also carries the organization record
    under the 'org' key.
    """
    validated, errors = toolkit.navl_validate(
        dict(get_query_dict),
        schema.issue_dataset_controller_schema()
    )
    if errors:
        raise toolkit.ValidationError(errors)
    # Drop navl's '__extras' bucket before forwarding as kwargs.
    validated.pop('__extras', None)
    params = _search_issues(organization_id=org_id,
                            include_datasets=True,
                            **validated)
    org_show = logic.get_action('organization_show')
    params['org'] = org_show({}, {'id': org_id})
    return params
def all_issues(get_query_dict):
    """Validate GET parameters and search issues across every dataset."""
    validated, errors = toolkit.navl_validate(
        dict(get_query_dict),
        schema.issue_dataset_controller_schema()
    )
    if errors:
        raise toolkit.ValidationError(errors)
    # Drop navl's '__extras' bucket before forwarding as kwargs.
    validated.pop('__extras', None)
    return _search_issues(include_datasets=True, **validated)
def _search_issues(dataset_id=None,
                   organization_id=None,
                   status=issuemodel.ISSUE_STATUS.open,
                   sort='newest',
                   visibility=None,
                   abuse_status=None,
                   q='',
                   page=1,
                   per_page=None,
                   include_datasets=False,
                   include_reports=True):
    """Run a paginated issue search and build template variables.

    Returns a dict with the page's issues, the echoed search parameters
    and a Pagination object built from the total result count.
    """
    # Resolve the page size lazily: evaluating get_issues_per_page() in
    # the signature would freeze its value at import time.
    if per_page is None:
        per_page = get_issues_per_page()[0]
    # convert per_page, page parameters to api limit/offset
    limit = per_page
    offset = (page - 1) * limit
    # Explicit dict instead of the old locals().copy(), which silently
    # broke whenever a new local variable was introduced above it.
    params = {
        'dataset_id': dataset_id,
        'organization_id': organization_id,
        'status': status,
        'sort': sort,
        'visibility': visibility,
        'abuse_status': abuse_status,
        'q': q,
        'include_datasets': include_datasets,
        'include_reports': include_reports,
        # fetch only the results for the current page
        'include_count': False,
        'limit': limit,
        'offset': offset,
    }
    results_for_current_page = toolkit.get_action('issue_search')(
        data_dict=params
    )
    issues = results_for_current_page['results']
    # fetch the total count of all the search results without dictizing
    params['include_count'] = True
    params['include_results'] = False
    params.pop('limit', None)
    params.pop('offset', None)
    all_search_results = toolkit.get_action('issue_search')(data_dict=params)
    issue_count = all_search_results['count']
    pagination = Pagination(page, limit, issue_count)
    template_variables = {
        'issues': issues,
        'status': status,
        'sort': sort,
        'q': q,
        'pagination': pagination,
    }
    if visibility:
        template_variables['visibility'] = visibility
    return template_variables
| 39.439655
| 87
| 0.541727
|
60f808dd83f212992375ab334aa1635ed70aaeba
| 403
|
py
|
Python
|
coding_challenge/ship_manager/admin.py
|
jojacobsen/coding_challenge
|
94335f00f57a6c4d64cbc2b282a0ca099445e866
|
[
"MIT"
] | 1
|
2022-03-06T15:40:56.000Z
|
2022-03-06T15:40:56.000Z
|
coding_challenge/ship_manager/admin.py
|
jojacobsen/coding_challenge
|
94335f00f57a6c4d64cbc2b282a0ca099445e866
|
[
"MIT"
] | null | null | null |
coding_challenge/ship_manager/admin.py
|
jojacobsen/coding_challenge
|
94335f00f57a6c4d64cbc2b282a0ca099445e866
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from coding_challenge.ship_manager.models import Ship
@admin.register(Ship)
class ShipAdmin(admin.ModelAdmin):
    """Admin configuration for Ship objects.

    Exposes the identifying fields and main dimensions in the change
    list so ships can be browsed, searched and filtered without opening
    each record.
    """

    list_display = ("code", "name", "length", "width")
    search_fields = ("name", "code")
    list_filter = ("length", "width")
| 25.1875
| 55
| 0.694789
|
079668f6b5a9c6adf905f75543b3c3a3bd83b8f9
| 266
|
py
|
Python
|
inventory/fill_db.py
|
Tallot/FilmStore
|
ca4ca749397470fee204838790266a9dc60f578f
|
[
"MIT"
] | null | null | null |
inventory/fill_db.py
|
Tallot/FilmStore
|
ca4ca749397470fee204838790266a9dc60f578f
|
[
"MIT"
] | 16
|
2020-04-22T15:24:18.000Z
|
2021-06-10T22:55:58.000Z
|
inventory/fill_db.py
|
Tallot/FilmStore
|
ca4ca749397470fee204838790266a9dc60f578f
|
[
"MIT"
] | null | null | null |
import json

import django

# Configure Django before importing any models.
django.setup()

from service_app.models import Film

# Load the seed fixture and persist every film record it describes.
with open('fixtures/mongo_init_data.json', encoding='utf-8') as data_file:
    json_data = json.load(data_file)

for film_data in json_data:
    Film(**film_data).save()
| 22.166667
| 56
| 0.710526
|
4df5ad99c7bc3e967cb51970fc520f9a7d0f100f
| 7,255
|
py
|
Python
|
django_cas_ng/views.py
|
clavay/django-cas-ng
|
a0ef1fdf1667e1d9439657fbf956d8f432e7ff1b
|
[
"MIT"
] | null | null | null |
django_cas_ng/views.py
|
clavay/django-cas-ng
|
a0ef1fdf1667e1d9439657fbf956d8f432e7ff1b
|
[
"MIT"
] | null | null | null |
django_cas_ng/views.py
|
clavay/django-cas-ng
|
a0ef1fdf1667e1d9439657fbf956d8f432e7ff1b
|
[
"MIT"
] | null | null | null |
"""CAS login/logout replacement views"""
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
import types
from django.utils.six.moves import urllib_parse
from django.conf import settings
from django.http import HttpResponseRedirect
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import (
logout as auth_logout,
login as auth_login,
authenticate
)
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_http_methods
from importlib import import_module
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
from datetime import timedelta
from .signals import cas_user_logout
from .models import ProxyGrantingTicket, SessionTicket
from .utils import (get_cas_client, get_service_url,
get_protocol, get_redirect_url,
get_user_from_session)
__all__ = ['login', 'logout', 'callback']
@csrf_exempt
@require_http_methods(["GET", "POST"])
def login(request, next_page=None, required=False):
    """Forwards to CAS login URL or verifies CAS ticket.

    Handles three cases: SAML single-log-out POSTs, ticket verification
    (``?ticket=``) and the initial redirect to the CAS server.
    """
    service_url = get_service_url(request, next_page)
    client = get_cas_client(service_url=service_url, request=request)

    if not next_page and settings.CAS_STORE_NEXT and 'CASNEXT' in request.session:
        next_page = request.session['CASNEXT']
        del request.session['CASNEXT']

    if not next_page:
        next_page = get_redirect_url(request)

    if request.method == 'POST' and request.POST.get('logoutRequest'):
        # Single-log-out request pushed by the CAS server.
        clean_sessions(client, request)
        return HttpResponseRedirect(next_page)

    # Older Django exposes is_authenticated as a method, newer as a
    # (callable) property; the callable check covers both without
    # version sniffing.
    is_user_authenticated = request.user.is_authenticated
    if callable(is_user_authenticated):
        is_user_authenticated = is_user_authenticated()

    if is_user_authenticated:
        if settings.CAS_LOGGED_MSG is not None:
            message = settings.CAS_LOGGED_MSG % request.user.get_username()
            messages.success(request, message)
        return HttpResponseRedirect(next_page)

    ticket = request.GET.get('ticket')
    if ticket:
        user = authenticate(ticket=ticket,
                            service=service_url,
                            request=request)
        pgtiou = request.session.get("pgtiou")
        if user is not None:
            if not request.session.exists(request.session.session_key):
                request.session.create()
            auth_login(request, user)
            # Remember the ticket so single log-out can find this session.
            SessionTicket.objects.create(
                session_key=request.session.session_key,
                ticket=ticket
            )

            if pgtiou and settings.CAS_PROXY_CALLBACK:
                # Delete old PGT
                ProxyGrantingTicket.objects.filter(
                    user=user,
                    session_key=request.session.session_key
                ).delete()
                # Set new PGT ticket
                try:
                    pgt = ProxyGrantingTicket.objects.get(pgtiou=pgtiou)
                    pgt.user = user
                    pgt.session_key = request.session.session_key
                    pgt.save()
                except ProxyGrantingTicket.DoesNotExist:
                    pass

            if settings.CAS_LOGIN_MSG is not None:
                name = user.get_username()
                message = settings.CAS_LOGIN_MSG % name
                messages.success(request, message)
            return HttpResponseRedirect(next_page)
        elif settings.CAS_RETRY_LOGIN or required:
            return HttpResponseRedirect(client.get_login_url())
        else:
            raise PermissionDenied(_('Login failed.'))
    else:
        if settings.CAS_STORE_NEXT:
            request.session['CASNEXT'] = next_page
        return HttpResponseRedirect(client.get_login_url())
@require_http_methods(["GET"])
def logout(request, next_page=None):
    """Redirects to CAS logout page.

    Fires the cas_user_logout signal, logs the user out locally, removes
    the session's CAS tickets and, when CAS_LOGOUT_COMPLETELY is set,
    also logs out of the CAS server itself.
    """
    # try to find the ticket matching current session for logout signal
    try:
        st = SessionTicket.objects.get(session_key=request.session.session_key)
        ticket = st.ticket
    except SessionTicket.DoesNotExist:
        ticket = None
    # send logout signal
    cas_user_logout.send(
        sender="manual",
        user=request.user,
        session=request.session,
        ticket=ticket,
        request=request,
    )
    auth_logout(request)
    # clean current session ProxyGrantingTicket and SessionTicket
    ProxyGrantingTicket.objects.filter(session_key=request.session.session_key).delete()
    SessionTicket.objects.filter(session_key=request.session.session_key).delete()
    next_page = next_page or get_redirect_url(request)
    if settings.CAS_LOGOUT_COMPLETELY:
        protocol = get_protocol(request)
        host = request.get_host()
        redirect_url = urllib_parse.urlunparse(
            (protocol, host, next_page, '', '', ''),
        )
        client = get_cas_client(request=request)
        return HttpResponseRedirect(client.get_logout_url(redirect_url))
    else:
        # This is in most cases pointless if not CAS_RENEW is set. The user will
        # simply be logged in again on next request requiring authorization.
        return HttpResponseRedirect(next_page)
@csrf_exempt
@require_http_methods(["GET", "POST"])
def callback(request):
    """Read PGT and PGTIOU sent by CAS.

    POST with a logoutRequest body is a SAML single-log-out
    notification; GET carries the pgtId/pgtIou pair of a new proxy
    granting ticket.
    """
    if request.method == 'POST' and request.POST.get('logoutRequest'):
        clean_sessions(get_cas_client(request=request), request)
        return HttpResponse("{0}\n".format(_('ok')), content_type="text/plain")
    elif request.method == 'GET':
        pgtid = request.GET.get('pgtId')
        pgtiou = request.GET.get('pgtIou')
        # objects.create() already persists the row, so the former extra
        # save() call was redundant.
        ProxyGrantingTicket.objects.create(pgtiou=pgtiou, pgt=pgtid)
        # Drop tickets that were never claimed by a login within 60s.
        ProxyGrantingTicket.objects.filter(
            session_key=None,
            date__lt=(timezone.now() - timedelta(seconds=60))
        ).delete()
        return HttpResponse("{0}\n".format(_('ok')), content_type="text/plain")
def clean_sessions(client, request):
    """Terminate every local session named in a SAML single-log-out request.

    For each session ticket found in the logoutRequest body: fire the
    cas_user_logout signal, flush the Django session and delete the
    associated ProxyGrantingTicket/SessionTicket rows.
    """
    for slo in client.get_saml_slos(request.POST.get('logoutRequest')):
        try:
            st = SessionTicket.objects.get(ticket=slo.text)
            session = SessionStore(session_key=st.session_key)
            # send logout signal
            cas_user_logout.send(
                sender="slo",
                user=get_user_from_session(session),
                session=session,
                ticket=slo.text,
                request=request,
            )
            session.flush()
            # clean logout session ProxyGrantingTicket and SessionTicket
            ProxyGrantingTicket.objects.filter(session_key=st.session_key).delete()
            SessionTicket.objects.filter(session_key=st.session_key).delete()
        except SessionTicket.DoesNotExist:
            # Ticket unknown locally: nothing to clean for this SLO entry.
            pass
| 37.015306
| 88
| 0.660924
|
37480afbf3c63aea2ffc2c31f0de900dec306904
| 523
|
py
|
Python
|
nativeconfig/exceptions.py
|
GreatFruitAndy/nativeconfig
|
02a538ce6945e09291c319c99268dce641f14e7e
|
[
"MIT"
] | null | null | null |
nativeconfig/exceptions.py
|
GreatFruitAndy/nativeconfig
|
02a538ce6945e09291c319c99268dce641f14e7e
|
[
"MIT"
] | null | null | null |
nativeconfig/exceptions.py
|
GreatFruitAndy/nativeconfig
|
02a538ce6945e09291c319c99268dce641f14e7e
|
[
"MIT"
] | null | null | null |
class Error(Exception):
    """Base class for all nativeconfig errors."""


class InitializationError(Error):
    """Raised when configuration cannot be initialized.

    The redundant __init__ that merely forwarded to super() has been
    removed; Exception already accepts the message argument.
    """


class ValidationError(Error):
    """Raised when a python value fails validation."""

    def __init__(self, msg, value):
        # Offending python value, kept for callers to inspect.
        self.value = value
        super().__init__(msg)


class SerializationError(Error):
    """Raised when a python value cannot be serialized."""

    def __init__(self, msg, value):
        # Offending python value, kept for callers to inspect.
        self.value = value
        super().__init__(msg)


class DeserializationError(Error):
    """Raised when a raw value cannot be deserialized."""

    def __init__(self, msg, raw_value):
        # Offending raw (serialized) value, kept for callers to inspect.
        self.raw_value = raw_value
        super().__init__(msg)
| 20.115385
| 39
| 0.650096
|
7ef678445c4fe411cb00d4757ca5a8a331312fac
| 6,712
|
py
|
Python
|
run_all_scripts.py
|
phenylazide/MolecularSimilarity
|
429f64c3c18daa5d341110380f761aa003ad290b
|
[
"MIT"
] | 1
|
2020-09-14T16:01:50.000Z
|
2020-09-14T16:01:50.000Z
|
run_all_scripts.py
|
phenylazide/MolecularSimilarity
|
429f64c3c18daa5d341110380f761aa003ad290b
|
[
"MIT"
] | 5
|
2019-04-20T06:23:01.000Z
|
2019-07-25T17:28:05.000Z
|
run_all_scripts.py
|
phenylazide/MolecularSimilarity
|
429f64c3c18daa5d341110380f761aa003ad290b
|
[
"MIT"
] | 1
|
2020-07-07T14:55:14.000Z
|
2020-07-07T14:55:14.000Z
|
#!/usr/bin/env python3
""""
runs all scripts
Usage:
python run_all_scripts.py
-a (optional input file with active molecules)
-i (optional input file with inactive molecules)
-t (optional input file with test molecules)
-c (optional input file with activity of test molecules)
-m (input configuration json file)
-o (output file)
-p (optional, type of input molecules files, default smi)
-d (optional directory where to store intermediate results)
"""
import argparse
import logging
import tempfile
import json
import extract_fragments
import inputoutput_utils
import compute_descriptors
import model_factory
import add_activity
import compute_evaluation
def _main():
    """Run the full pipeline: fragment extraction, descriptor computation,
    model training/scoring, activity merging and evaluation.

    Intermediate files are written into configuration["directory"].
    """
    # run extract_fragments
    configuration = _read_configuration()
    with open(configuration["model_configuration"], "r", encoding="utf-8") as input_stream:
        model_configuration = json.load(input_stream)
    try:
        new_model = model_factory.create_model(model_configuration["model_name"])
    except:
        print("Model does not exist!")
        exit(1)
    # Normalize optional boolean flags, defaulting both to False.
    if "kekule" not in model_configuration:
        model_configuration["kekule"] = False
    else:
        model_configuration["kekule"] = bool(model_configuration["kekule"])
    if "isomeric" not in model_configuration:
        model_configuration["isomeric"] = False
    else:
        model_configuration["isomeric"] = bool(model_configuration["isomeric"])
    if "fragments" not in model_configuration:
        model_configuration["fragments"] = "ecfp.6"
    # Parse the comma-separated fragment spec ("{TYPE}.{SIZE}" or "ap").
    parsed_types = []
    for item in model_configuration["fragments"].split(","):
        item_split = item.split(".")
        if item_split[0] != "ap":
            if not len(item_split) == 2:
                logging.error("Invalid fragment type: %s", item)
                logging.info("Expected format {TYPE}.{SIZE} or ap")
                exit(1)
            parsed_types.append({
                "name": item_split[0],
                "size": int(item_split[1])
            })
        else:
            parsed_types.append({
                "name": item_split[0],
            })
    model_configuration["fragments"] = parsed_types
    extraction_options = {
        "kekule": model_configuration["kekule"],
        "isomeric": model_configuration["isomeric"],
        "fragments": model_configuration["fragments"]
    }
    input_files = [configuration["input_actives"], configuration["input_inactives"],
                   configuration["test"]]
    directory = configuration["directory"]
    fragments_output_files = [directory+"/fragmentsa.json", directory+"/fragmentsi.json",
                              directory+"/fragmentst.json"]
    for file in fragments_output_files:
        inputoutput_utils.create_parent_directory(file)
    extract_fragments.extract_fragments(input_files, configuration["input_type"],
                                        fragments_output_files, extraction_options)
    # run extract_descriptors
    descriptors_output_files = [directory+"/descriptorsa.csv", directory+"/descriptorsi.csv",
                                directory+"/descriptorst.csv"]
    for file in descriptors_output_files:
        inputoutput_utils.create_parent_directory(file)
    # NOTE(review): '|' is a bitwise-or on booleans here; it works, but
    # logical 'or' was likely intended.
    if (model_configuration["model_name"] == "descriptors_model") |\
       ((model_configuration["model_name"] == "linear_regression_model") and (int(model_configuration["molecules"]) == 0)):
        compute_descriptors.compute_descriptors(fragments_output_files, descriptors_output_files,
                                                True)
    else:
        compute_descriptors.compute_descriptors(fragments_output_files, descriptors_output_files, False)
    # run create_model and score_molecules
    model = new_model.create_model(directory+"/fragmentsa.json", directory+"/fragmentsi.json",
                                   directory+"/descriptorsa.csv", directory+"/descriptorsi.csv",
                                   model_configuration)
    new_model.score_model(model, directory+"/fragmentst.json",
                          directory+"/descriptorst.csv", directory+"/score.json")
    # run add_activity
    activity = add_activity.read_activity(configuration["activity"])
    add_activity.add_activity_and_write_to_json(directory + "/score.json", activity,
                                                directory + "/activity.json")
    # run compute_evaluation
    score_act = compute_evaluation.read_file_with_score_and_activity(directory + "/activity.json")
    activity = compute_evaluation.sort_activity(score_act)
    compute_evaluation.evaluation(activity, configuration["output"])
def _read_configuration() -> dict:
    """Parse the command line into a configuration dict.

    Falls back to the system temp directory when -d is not given and
    lower-cases the input file type.  Reads sys.argv via argparse.
    """
    parser = argparse.ArgumentParser(description="run all scripts "
                                                 "See file header for more details.")
    parser.add_argument("-a", type=str, dest="input_actives",
                        help="input file with active molecules", required=False,
                        default="data/actives.smi")
    parser.add_argument("-i", type=str, dest="input_inactives",
                        help="input file with inactive molecules", required=False,
                        default="data/inactives.smi")
    parser.add_argument("-t", type=str, dest="test",
                        help="input file with test molecules", required=False,
                        default="data/test.smi")
    parser.add_argument("-c", type=str, dest="activity",
                        help="input file with activity of test molecules", required=False,
                        default="data/test_activity.json")
    parser.add_argument("-m", type=str, dest="model_configuration",
                        help="input json file with model configuration", required=True)
    parser.add_argument("-o", type=str, dest="output",
                        help="output json file", required=True)
    parser.add_argument("-p", type=str, dest="input_type",
                        help="type of input files with molecules smi/sdf, default is smi",
                        default="smi")
    parser.add_argument("-d", dest="directory",
                        help="directory where to store intermediate results", required=False)
    configuration = vars(parser.parse_args())
    if configuration["directory"] is None:
        configuration["directory"] = tempfile.gettempdir()
    configuration["input_type"] = configuration["input_type"].lower()
    return configuration
if __name__ == "__main__":
_main()
| 44.157895
| 125
| 0.619934
|
71c27b940f6078047fc0c2b7726b0718e791514d
| 574
|
py
|
Python
|
backend/selic/views.py
|
felipemaion/financial-management-gate-django
|
5f93a7d28a55852fed0a16d1830f92b0ee065948
|
[
"MIT"
] | 2
|
2019-04-15T20:36:48.000Z
|
2020-02-09T23:20:27.000Z
|
backend/selic/views.py
|
felipemaion/financial-management-gate-django
|
5f93a7d28a55852fed0a16d1830f92b0ee065948
|
[
"MIT"
] | 5
|
2020-02-12T00:06:06.000Z
|
2020-06-05T05:09:45.000Z
|
backend/selic/views.py
|
felipemaion/financial-management-gate-django
|
5f93a7d28a55852fed0a16d1830f92b0ee065948
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework.response import Response
from selic.models import Selic
# Create your views here.
class Corrige(APIView):
    """API endpoint correcting a monetary amount by the Selic rate.

    GET parameters: ``amount``, ``date`` and optional ``final_date``.
    """

    def get(self, request):
        # Keep the request object intact instead of shadowing it with
        # its query dict (the original rebound `request = request.GET`).
        params = request.GET
        amount = params['amount']
        date = params['date']
        # .get() returns None when absent, replacing the manual
        # membership check.
        final_date = params.get('final_date')
        selic = Selic.present_value(amount, date, final_date)
        return Response(selic)
| 24.956522
| 60
| 0.639373
|
eaa5964139bd2cd616033fc0072b19604c1d541c
| 39
|
py
|
Python
|
tarkov/inventory_dispatcher/__init__.py
|
JustEmuTarkov/jet_py
|
2f352b5e6f5d88594d08afc46e9458e919271788
|
[
"MIT"
] | 14
|
2021-02-24T02:32:48.000Z
|
2022-01-03T05:51:45.000Z
|
tarkov/inventory_dispatcher/__init__.py
|
JustEmuTarkov/jet_py
|
2f352b5e6f5d88594d08afc46e9458e919271788
|
[
"MIT"
] | 1
|
2021-03-08T09:02:29.000Z
|
2021-03-08T09:02:29.000Z
|
tarkov/inventory_dispatcher/__init__.py
|
JustEmuTarkov/jet_py
|
2f352b5e6f5d88594d08afc46e9458e919271788
|
[
"MIT"
] | 4
|
2021-04-14T01:47:01.000Z
|
2021-11-29T02:18:32.000Z
|
from .manager import DispatcherManager
| 19.5
| 38
| 0.871795
|
607e12344a467b2d02ca4e5a3610811c4c6ff69f
| 5,494
|
py
|
Python
|
QGNN_pytorch/train_node_cls.py
|
bojesomo/QGNN
|
c289708a7fc90ccdb6390b084abda1e359179664
|
[
"MIT"
] | 1
|
2021-11-26T17:24:33.000Z
|
2021-11-26T17:24:33.000Z
|
QGNN_pytorch/train_node_cls.py
|
bojesomo/QGNN
|
c289708a7fc90ccdb6390b084abda1e359179664
|
[
"MIT"
] | null | null | null |
QGNN_pytorch/train_node_cls.py
|
bojesomo/QGNN
|
c289708a7fc90ccdb6390b084abda1e359179664
|
[
"MIT"
] | null | null | null |
from __future__ import division
from __future__ import print_function
import time
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
np.random.seed(123)
torch.manual_seed(123)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
if torch.cuda.is_available():
torch.cuda.manual_seed_all(123)
from utils_node_cls import *
from q4gnn import *
# Parameters
# ==================================================
parser = ArgumentParser("QGNN", formatter_class=ArgumentDefaultsHelpFormatter, conflict_handler='resolve')
parser.add_argument("--dataset", default="cora", help="Name of the dataset.")
parser.add_argument('--epochs', type=int, default=100, help='Number of epochs to train.')
parser.add_argument('--learning_rate', type=float, default=0.05, help='Initial learning rate.')
parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).')
parser.add_argument('--hidden_size', type=int, default=16, help='Hidden_size//4 = number of quaternion units within each hidden layer.')
parser.add_argument('--dropout', type=float, default=0.5, help='Dropout rate (1 - keep probability).')
parser.add_argument('--fold', type=int, default=2, help='The fold index. 0-9.')
parser.add_argument('--fastmode', action='store_true', default=False, help='Validate during training pass.')
args = parser.parse_args()
# Load data
adj, features, labels, y_train, y_val, y_test, train_mask, val_mask, test_mask = \
load_data_new_split(args.dataset, '../splits/' + args.dataset + '_split_0.6_0.2_'+ str(args.fold) + '.npz')
labels = torch.from_numpy(labels).to(device)
labels = torch.where(labels==1)[1]
idx_train = torch.where(torch.from_numpy(train_mask)==True)
idx_val = torch.where(torch.from_numpy(val_mask)==True)
idx_test = torch.where(torch.from_numpy(test_mask)==True)
"""Convert a scipy sparse matrix to a torch sparse tensor."""
def sparse_mx_to_torch_sparse_tensor(sparse_mx):
    """Convert a scipy sparse matrix to a torch sparse COO tensor.

    The result is moved to the module-level global `device` chosen at
    import time.  Values are cast to float32, indices to int64.
    """
    sparse_mx = sparse_mx.tocoo().astype(np.float32)
    indices = torch.from_numpy(
        np.vstack((sparse_mx.row, sparse_mx.col)).astype(np.int64))
    values = torch.from_numpy(sparse_mx.data)
    shape = torch.Size(sparse_mx.shape)
    # NOTE(review): torch.sparse.FloatTensor is the legacy constructor;
    # newer torch prefers torch.sparse_coo_tensor — confirm torch version.
    return torch.sparse.FloatTensor(indices, values, shape).to(device)
""" quaternion preprocess for feature vectors """
def quaternion_preprocess_features(features):
    """Row-normalize the feature matrix and tile it 4x for quaternion input.

    Tiling replicates the real-valued features into the four quaternion
    components (per the A + Ai + Aj + Ak comment below).  Returns a
    dense torch tensor on the module-level `device`.
    """
    # Rows that sum to zero would give an inf inverse; zero them out.
    rowsum = np.array(features.sum(1))
    r_inv = np.power(rowsum, -1).flatten()
    r_inv[np.isinf(r_inv)] = 0.
    r_mat_inv = sp.diags(r_inv)
    features = r_mat_inv.dot(features)
    features = features.todense()
    features = np.tile(features, 4) # A + Ai + Aj + Ak
    return torch.from_numpy(features).to(device)
# Some preprocessing
features = quaternion_preprocess_features(features)
adj = normalize_adj(adj + sp.eye(adj.shape[0])).tocoo()
adj = sparse_mx_to_torch_sparse_tensor(adj)
# Accuracy
def accuracy(output, labels):
    """Fraction of rows in *output* whose argmax matches *labels*."""
    predictions = output.argmax(dim=1).type_as(labels)
    n_correct = (predictions == labels).double().sum()
    return n_correct / len(labels)
'''Quaternion graph neural network! 2-layer Q4GNN!'''
class QGNN(torch.nn.Module):
    """Two-layer quaternion graph neural network for node classification.

    Returns per-node log-probabilities over `nclass` classes.
    """
    def __init__(self, nfeat, nhid, nclass, dropout=0.5):
        super(QGNN, self).__init__()
        self.q4gnn1 = QGNNLayer(nfeat, nhid, dropout=dropout) # should tune whether relu or tanh
        self.q4gnn2 = QGNNLayer(nhid, nclass, dropout=dropout, quaternion_ff=False, act=lambda x:x) # quaternion_ff=False --> QGNN becomes GCN
    def forward(self, x, adj):
        # x: node features, adj: (sparse) normalized adjacency.
        x = self.q4gnn1(x, adj)
        x = self.q4gnn2(x, adj)
        return F.log_softmax(x, dim=1)
# Model and optimizer
model = QGNN(nfeat=features.size(1), nhid=args.hidden_size, nclass=y_train.shape[1], dropout=args.dropout).to(device)
optimizer = optim.Adam(model.parameters(), lr=args.learning_rate, weight_decay=args.weight_decay)
"""Adapted from https://github.com/tkipf/pygcn/blob/master/pygcn/train.py"""
def train(epoch):
t = time.time()
model.train()
optimizer.zero_grad()
output = model(features, adj)
loss_train = F.nll_loss(output[idx_train], labels[idx_train])
acc_train = accuracy(output[idx_train], labels[idx_train])
loss_train.backward()
optimizer.step()
if not args.fastmode:
# Evaluate validation set performance separately, deactivates dropout during validation run.
model.eval()
output = model(features, adj)
loss_val = F.nll_loss(output[idx_val], labels[idx_val])
acc_val = accuracy(output[idx_val], labels[idx_val])
print('Epoch: {:04d}'.format(epoch+1),
'loss_train: {:.4f}'.format(loss_train.item()),
'acc_train: {:.4f}'.format(acc_train.item()),
'loss_val: {:.4f}'.format(loss_val.item()),
'acc_val: {:.4f}'.format(acc_val.item()),
'time: {:.4f}s'.format(time.time() - t))
def test():
    """Evaluate the trained model on the held-out test nodes and print metrics."""
    model.eval()
    output = model(features, adj)
    loss_test = F.nll_loss(output[idx_test], labels[idx_test])
    acc_test = accuracy(output[idx_test], labels[idx_test])
    print("Test set results:",
          "loss= {:.4f}".format(loss_test.item()),
          "accuracy= {:.4f}".format(acc_test.item()))
# Train model: full-batch training for args.epochs epochs, then report timing.
t_total = time.time()
for epoch in range(args.epochs):
    train(epoch)
print("Optimization Finished!")
print("Total time elapsed: {:.4f}s".format(time.time() - t_total))
# Testing on the held-out split.
test()
| 40.10219
| 142
| 0.7004
|
e1d43e9681fd79622b312fb67d916724e60e9218
| 15,245
|
py
|
Python
|
twilio/rest/verify/v1/service/__init__.py
|
ethan-schaffer/MassMessenger
|
3042ed98864d012a7276a6a365f81690431d5157
|
[
"MIT"
] | null | null | null |
twilio/rest/verify/v1/service/__init__.py
|
ethan-schaffer/MassMessenger
|
3042ed98864d012a7276a6a365f81690431d5157
|
[
"MIT"
] | 2
|
2019-09-20T19:08:16.000Z
|
2021-04-02T13:28:29.000Z
|
twilio/rest/verify/v1/service/__init__.py
|
ethan-schaffer/MassMessenger
|
3042ed98864d012a7276a6a365f81690431d5157
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.verify.v1.service.verification import VerificationList
from twilio.rest.verify.v1.service.verification_check import VerificationCheckList
class ServiceList(ListResource):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version):
        """
        Initialize the ServiceList

        :param Version version: Version that contains the resource

        :returns: twilio.rest.verify.v1.service.ServiceList
        :rtype: twilio.rest.verify.v1.service.ServiceList
        """
        super(ServiceList, self).__init__(version)

        # Path Solution: the list endpoint takes no path parameters.
        self._solution = {}
        self._uri = '/Services'.format(**self._solution)

    def create(self, friendly_name, code_length=values.unset):
        """
        Create a new ServiceInstance

        :param unicode friendly_name: Friendly name of the service
        :param unicode code_length: Length of verification code. Valid values are 4-10

        :returns: Newly created ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        # values.of() drops parameters left at values.unset from the request body.
        data = values.of({'FriendlyName': friendly_name, 'CodeLength': code_length, })

        payload = self._version.create(
            'POST',
            self._uri,
            data=data,
        )

        return ServiceInstance(self._version, payload, )

    def stream(self, limit=None, page_size=None):
        """
        Streams ServiceInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.verify.v1.service.ServiceInstance]
        """
        limits = self._version.read_limits(limit, page_size)

        page = self.page(page_size=limits['page_size'], )

        return self._version.stream(page, limits['limit'], limits['page_limit'])

    def list(self, limit=None, page_size=None):
        """
        Lists ServiceInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.verify.v1.service.ServiceInstance]
        """
        # Eagerly drain the stream() generator into a list.
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of ServiceInstance records from the API.
        Request is executed immediately

        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServicePage
        """
        params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })

        response = self._version.page(
            'GET',
            self._uri,
            params=params,
        )

        return ServicePage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of ServiceInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServicePage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return ServicePage(self._version, response, self._solution)

    def get(self, sid):
        """
        Constructs a ServiceContext

        :param sid: Verification Service Instance SID.

        :returns: twilio.rest.verify.v1.service.ServiceContext
        :rtype: twilio.rest.verify.v1.service.ServiceContext
        """
        return ServiceContext(self._version, sid=sid, )

    def __call__(self, sid):
        """
        Constructs a ServiceContext

        :param sid: Verification Service Instance SID.

        :returns: twilio.rest.verify.v1.service.ServiceContext
        :rtype: twilio.rest.verify.v1.service.ServiceContext
        """
        # Calling the list like a function is an alias for get().
        return ServiceContext(self._version, sid=sid, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Verify.V1.ServiceList>'
class ServicePage(Page):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version, response, solution):
        """
        Initialize the ServicePage

        :param Version version: Version that contains the resource
        :param Response response: Response from the API

        :returns: twilio.rest.verify.v1.service.ServicePage
        :rtype: twilio.rest.verify.v1.service.ServicePage
        """
        super(ServicePage, self).__init__(version, response)

        # Path Solution: carried over from the originating ServiceList.
        self._solution = solution

    def get_instance(self, payload):
        """
        Build an instance of ServiceInstance

        :param dict payload: Payload response from the API

        :returns: twilio.rest.verify.v1.service.ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        return ServiceInstance(self._version, payload, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Verify.V1.ServicePage>'
class ServiceContext(InstanceContext):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version, sid):
        """
        Initialize the ServiceContext

        :param Version version: Version that contains the resource
        :param sid: Verification Service Instance SID.

        :returns: twilio.rest.verify.v1.service.ServiceContext
        :rtype: twilio.rest.verify.v1.service.ServiceContext
        """
        super(ServiceContext, self).__init__(version)

        # Path Solution
        self._solution = {'sid': sid, }
        self._uri = '/Services/{sid}'.format(**self._solution)

        # Dependents: sub-resource lists, created lazily by the properties below.
        self._verifications = None
        self._verification_checks = None

    def fetch(self):
        """
        Fetch a ServiceInstance

        :returns: Fetched ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        params = values.of({})

        payload = self._version.fetch(
            'GET',
            self._uri,
            params=params,
        )

        return ServiceInstance(self._version, payload, sid=self._solution['sid'], )

    def update(self, friendly_name=values.unset, code_length=values.unset):
        """
        Update the ServiceInstance

        :param unicode friendly_name: Friendly name of the service
        :param unicode code_length: Length of verification code. Valid values are 4-10

        :returns: Updated ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        # values.of() drops parameters left at values.unset from the request body.
        data = values.of({'FriendlyName': friendly_name, 'CodeLength': code_length, })

        payload = self._version.update(
            'POST',
            self._uri,
            data=data,
        )

        return ServiceInstance(self._version, payload, sid=self._solution['sid'], )

    @property
    def verifications(self):
        """
        Access the verifications

        :returns: twilio.rest.verify.v1.service.verification.VerificationList
        :rtype: twilio.rest.verify.v1.service.verification.VerificationList
        """
        # Lazily construct and cache the sub-resource list.
        if self._verifications is None:
            self._verifications = VerificationList(self._version, service_sid=self._solution['sid'], )
        return self._verifications

    @property
    def verification_checks(self):
        """
        Access the verification_checks

        :returns: twilio.rest.verify.v1.service.verification_check.VerificationCheckList
        :rtype: twilio.rest.verify.v1.service.verification_check.VerificationCheckList
        """
        # Lazily construct and cache the sub-resource list.
        if self._verification_checks is None:
            self._verification_checks = VerificationCheckList(self._version, service_sid=self._solution['sid'], )
        return self._verification_checks

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Verify.V1.ServiceContext {}>'.format(context)
class ServiceInstance(InstanceResource):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version, payload, sid=None):
        """
        Initialize the ServiceInstance

        :returns: twilio.rest.verify.v1.service.ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        super(ServiceInstance, self).__init__(version)

        # Marshaled Properties: raw API payload fields, with dates/integers
        # deserialized. Missing keys raise KeyError by design here.
        self._properties = {
            'sid': payload['sid'],
            'account_sid': payload['account_sid'],
            'friendly_name': payload['friendly_name'],
            'code_length': deserialize.integer(payload['code_length']),
            'date_created': deserialize.iso8601_datetime(payload['date_created']),
            'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
            'url': payload['url'],
            'links': payload['links'],
        }

        # Context: built lazily by _proxy; sid falls back to the payload's sid.
        self._context = None
        self._solution = {'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions.  All instance actions are proxied to the context

        :returns: ServiceContext for this ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceContext
        """
        if self._context is None:
            self._context = ServiceContext(self._version, sid=self._solution['sid'], )
        return self._context

    @property
    def sid(self):
        """
        :returns: A string that uniquely identifies this Service.
        :rtype: unicode
        """
        return self._properties['sid']

    @property
    def account_sid(self):
        """
        :returns: Account Sid.
        :rtype: unicode
        """
        return self._properties['account_sid']

    @property
    def friendly_name(self):
        """
        :returns: Friendly name of the service
        :rtype: unicode
        """
        return self._properties['friendly_name']

    @property
    def code_length(self):
        """
        :returns: Length of verification code. Valid values are 4-10
        :rtype: unicode
        """
        return self._properties['code_length']

    @property
    def date_created(self):
        """
        :returns: The date this Service was created
        :rtype: datetime
        """
        return self._properties['date_created']

    @property
    def date_updated(self):
        """
        :returns: The date this Service was updated
        :rtype: datetime
        """
        return self._properties['date_updated']

    @property
    def url(self):
        """
        :returns: The url
        :rtype: unicode
        """
        return self._properties['url']

    @property
    def links(self):
        """
        :returns: The links
        :rtype: unicode
        """
        return self._properties['links']

    def fetch(self):
        """
        Fetch a ServiceInstance

        :returns: Fetched ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        return self._proxy.fetch()

    def update(self, friendly_name=values.unset, code_length=values.unset):
        """
        Update the ServiceInstance

        :param unicode friendly_name: Friendly name of the service
        :param unicode code_length: Length of verification code. Valid values are 4-10

        :returns: Updated ServiceInstance
        :rtype: twilio.rest.verify.v1.service.ServiceInstance
        """
        return self._proxy.update(friendly_name=friendly_name, code_length=code_length, )

    @property
    def verifications(self):
        """
        Access the verifications

        :returns: twilio.rest.verify.v1.service.verification.VerificationList
        :rtype: twilio.rest.verify.v1.service.verification.VerificationList
        """
        return self._proxy.verifications

    @property
    def verification_checks(self):
        """
        Access the verification_checks

        :returns: twilio.rest.verify.v1.service.verification_check.VerificationCheckList
        :rtype: twilio.rest.verify.v1.service.verification_check.VerificationCheckList
        """
        return self._proxy.verification_checks

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Verify.V1.ServiceInstance {}>'.format(context)
| 32.926566
| 113
| 0.63083
|
bf4e1dc8cd05f667adf0da0da06f8e6993411e5d
| 34,141
|
py
|
Python
|
mmdet/core/bbox/transforms_rbbox.py
|
Zebraside/RRPDet
|
31dd0b3e158dcd18edb9890ff1ac84b639a9e3e9
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/bbox/transforms_rbbox.py
|
Zebraside/RRPDet
|
31dd0b3e158dcd18edb9890ff1ac84b639a9e3e9
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/bbox/transforms_rbbox.py
|
Zebraside/RRPDet
|
31dd0b3e158dcd18edb9890ff1ac84b639a9e3e9
|
[
"Apache-2.0"
] | 1
|
2021-12-17T12:39:36.000Z
|
2021-12-17T12:39:36.000Z
|
import mmcv
import numpy as np
import torch
import math
import cv2
import copy
# TODO: check the angle and module operation
def dbbox2delta(proposals, gt, means = [0, 0, 0, 0, 0], stds=[1, 1, 1, 1, 1]):
    """
    Encode rotated gt boxes as regression targets w.r.t. rotated proposals.

    :param proposals: (x_ctr, y_ctr, w, h, angle), shape (n, 5)
    :param gt: (x_ctr, y_ctr, w, h, angle), shape (n, 5)
    :param means: per-target normalisation means
    :param stds: per-target normalisation stds
    :return: encoded targets, shape (n, 5)
    """
    proposals = proposals.float()
    gt = gt.float()
    cos_p = torch.cos(proposals[..., 4])
    sin_p = torch.sin(proposals[..., 4])
    offset = gt[..., 0:2] - proposals[..., 0:2]
    # Project the centre offset onto the proposal's own (rotated) axes.
    dx = (cos_p * offset[..., 0] + sin_p * offset[..., 1]) / proposals[..., 2]
    dy = (-sin_p * offset[..., 0] + cos_p * offset[..., 1]) / proposals[..., 3]
    dw = torch.log(gt[..., 2] / proposals[..., 2])
    dh = torch.log(gt[..., 3] / proposals[..., 3])
    # Angle gap wrapped into [0, 2*pi) and scaled into [0, 1).
    dangle = (gt[..., 4] - proposals[..., 4]) % (2 * math.pi) / (2 * math.pi)
    deltas = torch.stack((dx, dy, dw, dh, dangle), -1)
    mean_t = deltas.new_tensor(means).unsqueeze(0)
    std_t = deltas.new_tensor(stds).unsqueeze(0)
    # TODO: expand bbox regression
    return deltas.sub_(mean_t).div_(std_t)
def delta2dbbox(Rrois,
                deltas,
                means=[0, 0, 0, 0, 0],
                stds=[1, 1, 1, 1, 1],
                max_shape=None,
                wh_ratio_clip=16 / 1000):
    """
    Decode (dx, dy, dw, dh, dtheta) regression deltas back into rotated boxes.

    :param Rrois: rotated rois (cx, cy, w, h, theta), shape (n, 5)
    :param deltas: predicted deltas, shape (n, 5*k)
    :param means: per-target denormalisation means
    :param stds: per-target denormalisation stds
    :param max_shape: unused (clipping not implemented for rotated boxes)
    :param wh_ratio_clip: bound on |dw|, |dh| before exponentiation
    :return: decoded boxes, same shape as `deltas`
    """
    reps = deltas.size(1) // 5
    mean_t = deltas.new_tensor(means).repeat(1, reps)
    std_t = deltas.new_tensor(stds).repeat(1, reps)
    denorm = deltas * std_t + mean_t
    dx, dy = denorm[:, 0::5], denorm[:, 1::5]
    dw, dh = denorm[:, 2::5], denorm[:, 3::5]
    dangle = denorm[:, 4::5]
    clip = np.abs(np.log(wh_ratio_clip))
    dw = dw.clamp(min=-clip, max=clip)
    dh = dh.clamp(min=-clip, max=clip)
    cx = Rrois[:, 0].unsqueeze(1).expand_as(dx)
    cy = Rrois[:, 1].unsqueeze(1).expand_as(dy)
    w = Rrois[:, 2].unsqueeze(1).expand_as(dw)
    h = Rrois[:, 3].unsqueeze(1).expand_as(dh)
    theta = Rrois[:, 4].unsqueeze(1).expand_as(dangle)
    cos_t, sin_t = torch.cos(theta), torch.sin(theta)
    # Centre offsets are expressed in the roi's own rotated frame.
    gx = dx * w * cos_t - dy * h * sin_t + cx
    gy = dx * w * sin_t + dy * h * cos_t + cy
    gw = w * dw.exp()
    gh = h * dh.exp()
    # TODO: check the hard code -- the angle delta spans a full revolution.
    gangle = ((2 * np.pi) * dangle + theta) % (2 * np.pi)
    if max_shape is not None:
        pass  # clipping to the image is not implemented for rotated boxes
    return torch.stack([gx, gy, gw, gh, gangle], dim=-1).view_as(deltas)
def dbbox2delta_v3(proposals, gt, means = [0, 0, 0, 0, 0], stds=[1, 1, 1, 1, 1]):
    """
    Encode rotated gt boxes w.r.t. rotated proposals.
    This version removes the module (wrap-around) operation on the angle.

    :param proposals: (x_ctr, y_ctr, w, h, angle), shape (n, 5)
    :param gt: (x_ctr, y_ctr, w, h, angle), shape (n, 5)
    :param means: per-target normalisation means
    :param stds: per-target normalisation stds
    :return: encoded targets, shape (n, 5)
    """
    proposals = proposals.float()
    gt = gt.float()
    cos_p = torch.cos(proposals[..., 4])
    sin_p = torch.sin(proposals[..., 4])
    offset = gt[..., 0:2] - proposals[..., 0:2]
    # Project the centre offset onto the proposal's own (rotated) axes.
    dx = (cos_p * offset[..., 0] + sin_p * offset[..., 1]) / proposals[..., 2]
    dy = (-sin_p * offset[..., 0] + cos_p * offset[..., 1]) / proposals[..., 3]
    dw = torch.log(gt[..., 2] / proposals[..., 2])
    dh = torch.log(gt[..., 3] / proposals[..., 3])
    # Raw angle difference, deliberately NOT wrapped (see function docstring).
    dangle = gt[..., 4] - proposals[..., 4]
    deltas = torch.stack((dx, dy, dw, dh, dangle), -1)
    mean_t = deltas.new_tensor(means).unsqueeze(0)
    std_t = deltas.new_tensor(stds).unsqueeze(0)
    return deltas.sub_(mean_t).div_(std_t)
def delta2dbbox_v3(Rrois,
                   deltas,
                   means=[0, 0, 0, 0, 0],
                   stds=[1, 1, 1, 1, 1],
                   max_shape=None,
                   wh_ratio_clip=16 / 1000):
    """
    Decode regression deltas into rotated boxes.
    This version removes the module (wrap-around) operation on the angle.

    :param Rrois: rotated rois (cx, cy, w, h, theta), shape (n, 5)
    :param deltas: predicted deltas, shape (n, 5*k)
    :param means: per-target denormalisation means
    :param stds: per-target denormalisation stds
    :param max_shape: unused (clipping not implemented for rotated boxes)
    :param wh_ratio_clip: bound on |dw|, |dh| before exponentiation
    :return: decoded boxes, same shape as `deltas`
    """
    reps = deltas.size(1) // 5
    mean_t = deltas.new_tensor(means).repeat(1, reps)
    std_t = deltas.new_tensor(stds).repeat(1, reps)
    denorm = deltas * std_t + mean_t
    dx, dy = denorm[:, 0::5], denorm[:, 1::5]
    dw, dh = denorm[:, 2::5], denorm[:, 3::5]
    dangle = denorm[:, 4::5]
    clip = np.abs(np.log(wh_ratio_clip))
    dw = dw.clamp(min=-clip, max=clip)
    dh = dh.clamp(min=-clip, max=clip)
    cx = Rrois[:, 0].unsqueeze(1).expand_as(dx)
    cy = Rrois[:, 1].unsqueeze(1).expand_as(dy)
    w = Rrois[:, 2].unsqueeze(1).expand_as(dw)
    h = Rrois[:, 3].unsqueeze(1).expand_as(dh)
    theta = Rrois[:, 4].unsqueeze(1).expand_as(dangle)
    cos_t, sin_t = torch.cos(theta), torch.sin(theta)
    # Centre offsets are expressed in the roi's own rotated frame.
    gx = dx * w * cos_t - dy * h * sin_t + cx
    gy = dx * w * sin_t + dy * h * cos_t + cy
    gw = w * dw.exp()
    gh = h * dh.exp()
    # Angle delta is added directly -- no 2*pi scaling, no modulo (v3 contract).
    gangle = dangle + theta
    if max_shape is not None:
        pass  # clipping to the image is not implemented for rotated boxes
    return torch.stack([gx, gy, gw, gh, gangle], dim=-1).view_as(deltas)
def dbbox2delta_v2(proposals, gt, means = [0, 0, 0, 0, 0], stds=[1, 1, 1, 1, 1]):
"""
:param proposals: (x_ctr, y_ctr, w, h, angle)
shape (n, 5)
:param gt: (x_ctr, y_ctr, w, h, angle)
:param means:
:param stds:
:return: encoded targets: shape (n, 5)
"""
gt_widths = gt[..., 2]
gt_heights = gt[..., 3]
gt_angle = gt[..., 4]
roi_widths = proposals[..., 2]
roi_heights = proposals[..., 3]
roi_angle = proposals[..., 4]
coord = gt[..., 0:2] - proposals[..., 0:2]
targets_dx = (torch.cos(roi_angle) * coord[..., 0] + torch.sin(roi_angle) * coord[:, 1]) / roi_widths
targets_dy = (-torch.sin(roi_angle) * coord[..., 0] + torch.cos(roi_angle) * coord[:, 1]) / roi_heights
targets_dw = torch.log(gt_widths / roi_widths)
targets_dh = torch.log(gt_heights / roi_heights)
targets_dangle = (gt_angle - roi_angle)
dist = targets_dangle % (2 * np.pi)
dist = torch.min(dist, np.pi * 2 - dist)
try:
assert np.all(dist.cpu().numpy() <= (np.pi/2. + 0.001) )
except:
import pdb
pdb.set_trace()
inds = torch.sin(targets_dangle) < 0
dist[inds] = -dist[inds]
# TODO: change the norm value
dist = dist / (np.pi / 2.)
deltas = torch.stack((targets_dx, targets_dy, targets_dw, targets_dh, dist), -1)
means = deltas.new_tensor(means).unsqueeze(0)
stds = deltas.new_tensor(stds).unsqueeze(0)
deltas = deltas.sub_(means).div_(stds)
return deltas
def delta2dbbox_v2(Rrois,
                   deltas,
                   means=[0, 0, 0, 0, 0],
                   stds=[1, 1, 1, 1, 1],
                   max_shape=None,
                   wh_ratio_clip=16 / 1000):
    """
    Decode dbbox2delta_v2-style deltas: the angle delta is interpreted on the
    range [-pi/2, pi/2] (scaled by pi/2), with no wrap-around applied.

    :param Rrois: rotated rois (cx, cy, w, h, theta), shape (n, 5)
    :param deltas: predicted deltas, shape (n, 5*k)
    :param means: per-target denormalisation means
    :param stds: per-target denormalisation stds
    :param max_shape: unused (clipping not implemented for rotated boxes)
    :param wh_ratio_clip: bound on |dw|, |dh| before exponentiation
    :return: decoded boxes, same shape as `deltas`
    """
    means = deltas.new_tensor(means).repeat(1, deltas.size(1) // 5)
    stds = deltas.new_tensor(stds).repeat(1, deltas.size(1) // 5)
    denorm_deltas = deltas * stds + means

    dx = denorm_deltas[:, 0::5]
    dy = denorm_deltas[:, 1::5]
    dw = denorm_deltas[:, 2::5]
    dh = denorm_deltas[:, 3::5]
    dangle = denorm_deltas[:, 4::5]

    max_ratio = np.abs(np.log(wh_ratio_clip))
    dw = dw.clamp(min=-max_ratio, max=max_ratio)
    dh = dh.clamp(min=-max_ratio, max=max_ratio)

    Rroi_x = (Rrois[:, 0]).unsqueeze(1).expand_as(dx)
    Rroi_y = (Rrois[:, 1]).unsqueeze(1).expand_as(dy)
    Rroi_w = (Rrois[:, 2]).unsqueeze(1).expand_as(dw)
    Rroi_h = (Rrois[:, 3]).unsqueeze(1).expand_as(dh)
    Rroi_angle = (Rrois[:, 4]).unsqueeze(1).expand_as(dangle)

    # Centre offsets are expressed in the roi's own rotated frame.
    gx = dx * Rroi_w * torch.cos(Rroi_angle) \
         - dy * Rroi_h * torch.sin(Rroi_angle) + Rroi_x
    gy = dx * Rroi_w * torch.sin(Rroi_angle) \
         + dy * Rroi_h * torch.cos(Rroi_angle) + Rroi_y
    gw = Rroi_w * dw.exp()
    gh = Rroi_h * dh.exp()
    gangle = (np.pi / 2.) * dangle + Rroi_angle
    if max_shape is not None:
        # TODO: finish it
        pass
    # NOTE: a leftover debug print of max(gangle) was removed here.
    bboxes = torch.stack([gx, gy, gw, gh, gangle], dim=-1).view_as(deltas)
    return bboxes
def choose_best_match_batch(Rrois, gt_rois):
"""
choose best match representation of gt_rois for a Rrois
:param Rrois: (x_ctr, y_ctr, w, h, angle)
shape: (n, 5)
:param gt_rois: (x_ctr, y_ctr, w, h, angle)
shape: (n, 5)
:return: gt_roi_news: gt_roi with new representation
shape: (n, 5)
"""
# TODO: check the dimensions
Rroi_angles = Rrois[:, 4].unsqueeze(1)
gt_xs, gt_ys, gt_ws, gt_hs, gt_angles = copy.deepcopy(gt_rois[:, 0]), copy.deepcopy(gt_rois[:, 1]), \
copy.deepcopy(gt_rois[:, 2]), copy.deepcopy(gt_rois[:, 3]), \
copy.deepcopy(gt_rois[:, 4])
gt_angle_extent = torch.cat((gt_angles[:, np.newaxis], (gt_angles + np.pi/2.)[:, np.newaxis],
(gt_angles + np.pi)[:, np.newaxis], (gt_angles + np.pi * 3/2.)[:, np.newaxis]), 1)
dist = (Rroi_angles - gt_angle_extent) % (2 * np.pi)
dist = torch.min(dist, np.pi * 2 - dist)
min_index = torch.argmin(dist, 1)
gt_rois_extent0 = copy.deepcopy(gt_rois)
gt_rois_extent1 = torch.cat((gt_xs.unsqueeze(1), gt_ys.unsqueeze(1), \
gt_hs.unsqueeze(1), gt_ws.unsqueeze(1), gt_angles.unsqueeze(1) + np.pi/2.), 1)
gt_rois_extent2 = torch.cat((gt_xs.unsqueeze(1), gt_ys.unsqueeze(1), \
gt_ws.unsqueeze(1), gt_hs.unsqueeze(1), gt_angles.unsqueeze(1) + np.pi), 1)
gt_rois_extent3 = torch.cat((gt_xs.unsqueeze(1), gt_ys.unsqueeze(1), \
gt_hs.unsqueeze(1), gt_ws.unsqueeze(1), gt_angles.unsqueeze(1) + np.pi * 3/2.), 1)
gt_rois_extent = torch.cat((gt_rois_extent0.unsqueeze(1),
gt_rois_extent1.unsqueeze(1),
gt_rois_extent2.unsqueeze(1),
gt_rois_extent3.unsqueeze(1)), 1)
gt_rois_new = torch.zeros_like(gt_rois)
# TODO: add pool.map here
for curiter, index in enumerate(min_index):
gt_rois_new[curiter, :] = gt_rois_extent[curiter, index, :]
gt_rois_new[:, 4] = gt_rois_new[:, 4] % (2 * np.pi)
return gt_rois_new
def choose_best_Rroi_batch(Rroi):
"""
There are many instances with large aspect ratio, so we choose the point, previous is long side,
after is short side, so it makes sure h < w
then angle % 180,
:param Rroi: (x_ctr, y_ctr, w, h, angle)
shape: (n, 5)
:return: Rroi_new: Rroi with new representation
"""
x_ctr, y_ctr, w, h, angle = copy.deepcopy(Rroi[:, 0]), copy.deepcopy(Rroi[:, 1]), \
copy.deepcopy(Rroi[:, 2]), copy.deepcopy(Rroi[:, 3]), copy.deepcopy(Rroi[:, 4])
indexes = w < h
Rroi[indexes, 2] = h[indexes]
Rroi[indexes, 3] = w[indexes]
Rroi[indexes, 4] = Rroi[indexes, 4] + np.pi / 2.
# TODO: check the module
Rroi[:, 4] = Rroi[:, 4] % np.pi
return Rroi
def best_match_dbbox2delta(Rrois, gt, means = [0, 0, 0, 0, 0], stds=[1, 1, 1, 1, 1]):
    """
    Encode gt boxes against rotated rois after choosing, per roi, the gt
    representation (of the four 90-degree-rotated equivalents) whose angle is
    closest to the roi's angle.

    :param Rrois: (x_ctr, y_ctr, w, h, angle), shape (n, 5); angles are
                  expected in [0, pi] (see choose_best_Rroi_batch)
    :param gt: (x_ctr, y_ctr, w, h, angle), shape (n, 5)
    :param means: per-target normalisation means
    :param stds: per-target normalisation stds
    :return: encoded targets, shape (n, 5)
    :raises ValueError: if a roi angle lies outside [0, pi].
    """
    # TODO: for comparison, do not change the regression range for angle in
    # 2-stage currently. This is a simplified version: instead of preprocessing
    # gt angles per category, pick the best-matching angle representation.
    gt_boxes_new = choose_best_match_batch(Rrois, gt)
    # Fail loudly on out-of-range angles (the old code dropped into pdb here).
    if not bool((Rrois[:, 4] <= (np.pi + 0.001)).all()):
        raise ValueError(
            'best_match_dbbox2delta: Rroi angles must lie in [0, pi]; '
            'max={:.4f}'.format(float(Rrois[:, 4].max())))
    bbox_targets = dbbox2delta_v2(Rrois, gt_boxes_new, means, stds)
    return bbox_targets
# TODO: check the negative situation of flip
def dbbox_flip(dbboxes, img_shape):
"""
Flip dbboxes horizontally
:param dbboxes: (Tensor): Shape (..., 5*k), (x_ctr, y_ctr, w, h, angle)
:param img_shape: (tuple): Image shape.
:return: Same type as 'dbboxes': Flipped dbboxes
"""
assert dbboxes.shape[-1] % 5 == 0
flipped = dbboxes.clone()
# flip x
flipped[:, 0::5] = img_shape[1] - dbboxes[:, 0::5] - 1
# flip angle
flipped[:, 4::5] = np.pi - dbboxes[:, 4::5]
return flipped
def dbbox_mapping(dbboxes, img_shape, scale_factor, flip):
    """
    Map rotated boxes from the original image scale to the testing scale.

    :param dbboxes: (Tensor) shape (..., 5*k), (x_ctr, y_ctr, w, h, angle)
    :param img_shape: testing image shape (h, w, ...)
    :param scale_factor: multiplicative scale applied to x, y, w, h
    :param flip: whether to flip horizontally after scaling
    :return: mapped boxes (a new tensor)
    """
    mapped = dbboxes.clone()
    for slot in range(4):  # scale x, y, w, h; the angle is scale-invariant
        mapped[..., slot::5] = dbboxes[..., slot::5] * scale_factor
    if flip:
        mapped = dbbox_flip(mapped, img_shape)
    return mapped
def dbbox_mapping_back(dbboxes, img_shape, scale_factor, flip):
    """
    Map rotated boxes from the testing scale back to the original image scale.

    :param dbboxes: (Tensor) shape (..., 5*k), (x_ctr, y_ctr, w, h, angle)
    :param img_shape: testing image shape (h, w, ...)
    :param scale_factor: scale that was applied to x, y, w, h (divided out here)
    :param flip: whether the boxes were horizontally flipped
    :return: mapped boxes (a new tensor; the input is never modified)
    """
    # Clone in the no-flip branch too: previously the caller's tensor was
    # scaled in place, unlike dbbox_mapping which always works on a copy.
    new_dbboxes = dbbox_flip(dbboxes, img_shape) if flip else dbboxes.clone()
    new_dbboxes[..., 0::5] = new_dbboxes[..., 0::5] / scale_factor
    new_dbboxes[..., 1::5] = new_dbboxes[..., 1::5] / scale_factor
    new_dbboxes[..., 2::5] = new_dbboxes[..., 2::5] / scale_factor
    new_dbboxes[..., 3::5] = new_dbboxes[..., 3::5] / scale_factor
    return new_dbboxes
def dbbox_rotate_mapping(bboxes, img_shape, angle):
    """
    Rotate rotated boxes from the original image orientation to the testing
    orientation. Only discrete multiples of 90 degrees are supported; single
    class layout and single image only.

    :param bboxes: (n, 5*k) tensor, (x, y, w, h, theta) repeated k times
    :param img_shape: original image shape (h, w, ...)
    :param angle: rotation in degrees
    :return: rotated boxes, same shape as `bboxes`
    """
    assert angle in [0, 90, 180, 270, -90, -180, -270]
    assert len(bboxes.size()) == 2
    num = bboxes.size(0)
    h, w = img_shape[:2]
    # A 90/270-degree rotation swaps the image's height and width.
    new_h, new_w = (w, h) if angle in [90, 270] else (h, w)
    rad = angle / 180 * np.pi

    center = torch.FloatTensor([(w) * 0.5, (h) * 0.5]).to(bboxes.device)
    new_center = torch.FloatTensor([(new_w) * 0.5, (new_h) * 0.5]).to(bboxes.device)
    rot = torch.FloatTensor([[np.cos(rad), np.sin(rad)],
                             [-np.sin(rad), np.cos(rad)]]).to(bboxes.device)

    # Rotate all centres about the old image centre, then re-anchor on the new one.
    xys = torch.cat((bboxes[..., 0::5].view(-1, 1), bboxes[..., 1::5].view(-1, 1)), -1)
    rotated_xys = (torch.matmul(xys - center, rot) + new_center).view(num, -1)

    rotated_dbboxes = torch.zeros(bboxes.size()).to(bboxes.device)
    rotated_dbboxes[..., 0::5] = rotated_xys[..., 0::2]
    rotated_dbboxes[..., 1::5] = rotated_xys[..., 1::2]
    rotated_dbboxes[..., 2::5] = bboxes[..., 2::5]
    rotated_dbboxes[..., 3::5] = bboxes[..., 3::5]
    rotated_dbboxes[..., 4::5] = bboxes[..., 4::5] + rad
    return rotated_dbboxes
def bbox_rotate_mapping(bboxes, img_shape, angle):
    """TODO: test this code
    Rotate axis-aligned boxes from the original image orientation to the
    testing orientation. Only discrete multiples of 90 degrees are supported;
    single class layout and single image only. Relies on the xy2wh_c / wh2xy_c
    helpers defined elsewhere in this module.

    :param bboxes: (n, 4*k) tensor, (xmin, ymin, xmax, ymax) repeated k times
    :param img_shape: original image shape (h, w, ...)
    :param angle: rotation in degrees
    :return: rotated boxes, same shape as `bboxes`
    """
    assert angle in [0, 90, 180, 270, -90, -180, -270]
    assert len(bboxes.size()) == 2
    num = bboxes.size(0)
    h, w = img_shape[:2]
    new_h, new_w = (w, h) if angle in [90, 270] else (h, w)
    rad = angle / 180 * np.pi

    # TODO: check (w - 1) or (w)
    center = torch.FloatTensor([(w) * 0.5, (h) * 0.5]).to(bboxes.device)
    new_center = torch.FloatTensor([(new_w) * 0.5, (new_h) * 0.5]).to(bboxes.device)
    rot = torch.FloatTensor([[np.cos(rad), np.sin(rad)],
                             [-np.sin(rad), np.cos(rad)]]).to(bboxes.device)

    c_bboxes = xy2wh_c(bboxes)
    # A 90/270-degree rotation swaps each box's width and height.
    if angle in [90, 270]:
        new_box_hs, new_box_ws = c_bboxes[..., 2::4], c_bboxes[..., 3::4]
    else:
        new_box_hs, new_box_ws = c_bboxes[..., 3::4], c_bboxes[..., 2::4]

    # Rotate all centres about the old image centre, then re-anchor on the new one.
    xys = torch.cat((c_bboxes[..., 0::4].view(-1, 1), c_bboxes[..., 1::4].view(-1, 1)), -1)
    rotated_xys = (torch.matmul(xys - center, rot) + new_center).view(num, -1)

    rotated_cbboxes = torch.zeros(bboxes.size()).to(bboxes.device)
    rotated_cbboxes[..., 0::4] = rotated_xys[..., 0::2]
    rotated_cbboxes[..., 1::4] = rotated_xys[..., 1::2]
    rotated_cbboxes[..., 2::4] = new_box_ws
    rotated_cbboxes[..., 3::4] = new_box_hs
    return wh2xy_c(rotated_cbboxes)
def dbbox2delta_warp(proposals, gt, means = [0, 0, 0, 0, 0], stds=[1, 1, 1, 1, 1]):
    """
    :param proposals: (xmin, ymin, xmax, ymax)
    :param gt: (x1, y_ctr, w, h, angle)
    :param means:
    :param stds:
    :return:
    """
    # NOTE(review): the body is empty (docstring only), so this always returns
    # None -- presumably a stub or an implementation lost in transit; confirm
    # against the upstream source before relying on it.
def TuplePoly2Poly(poly):
    """Flatten a 4-point polygon [(x1, y1), ..., (x4, y4)] into [x1, y1, ..., x4, y4]."""
    flat = []
    for i in range(4):  # exactly four vertices, in order
        flat.extend([poly[i][0], poly[i][1]])
    return flat
def Tuplelist2Polylist(tuple_poly_list):
    """Flatten every 4-point polygon in the list via TuplePoly2Poly."""
    return [TuplePoly2Poly(p) for p in tuple_poly_list]
#
# def mask2poly_single(binary_mask):
# """
#
# :param binary_mask:
# :return:
# """
# # try:
# contours, hierarchy = cv2.findContours(binary_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
# contour_lens = np.array(list(map(len, contours)))
# max_id = contour_lens.argmax()
# max_contour = contours[max_id]
# rect = cv2.minAreaRect(max_contour)
# poly = cv2.boxPoints(rect)
# # poly = TuplePoly2Poly(poly)
#
# return poly
# except:
# # TODO: assure there is no empty mask_poly
# return []
# TODO: test the function
def mask2poly_single(binary_mask):
    """
    Fit a minimum-area rotated rectangle around the largest contour of a
    binary mask.

    :param binary_mask: single-channel uint8 mask -- assumed; TODO confirm dtype
    :return: (4, 2) array of rectangle corner points (cv2.boxPoints order)
    :raises ValueError: if the mask contains no contour.
    """
    # cv2.findContours returns (contours, hierarchy) in OpenCV 4 but
    # (image, contours, hierarchy) in OpenCV 3; [-2] selects contours in both.
    contours = cv2.findContours(binary_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2]
    if not contours:
        # Fail loudly: the old code fell into a bare `except:` and pdb here.
        # TODO: assure there is no empty mask upstream.
        raise ValueError('mask2poly_single: mask contains no contour')
    # Keep only the contour with the most points.
    max_contour = max(contours, key=len)
    rect = cv2.minAreaRect(max_contour)
    poly = cv2.boxPoints(rect)
    return poly
def mask2poly(binary_mask_list):
    """Convert a list of binary masks to a list of min-area-rect polygons."""
    return [mask2poly_single(mask) for mask in binary_mask_list]
def gt_mask_bp_obbs(gt_masks, with_module=True):
    """Convert ground-truth masks to oriented boxes (cx, cy, w, h, theta).

    Pipeline: masks -> min-area-rect polygons -> canonical begin corner
    -> rotated rectangles.
    """
    polys = mask2poly(gt_masks)
    bp_polys = get_best_begin_point(polys)
    return polygonToRotRectangle_batch(bp_polys, with_module)
def gt_mask_bp_obbs_list(gt_masks_list):
    """Apply gt_mask_bp_obbs to every per-image mask collection."""
    return [gt_mask_bp_obbs(gt_masks) for gt_masks in gt_masks_list]
def cal_line_length(point1, point2):
    """Euclidean distance between two 2D points."""
    dx = point1[0] - point2[0]
    dy = point1[1] - point2[1]
    return math.hypot(dx, dy)
def get_best_begin_point_single(coordinate):
    """
    Rotate the corner order of a 4-point polygon so that its starting corner
    best matches the canonical axis-aligned order (tl, tr, br, bl).

    The "best" cyclic shift minimizes the summed corner-to-corner distance
    to the polygon's axis-aligned bounding-box corners; ties keep the
    earliest shift, matching the original strict-less comparison.

    :param coordinate: sequence of four (x, y) points.
    :return: list of four [x, y] lists — a cyclic rotation of the input.
    """
    points = [[coordinate[i][0], coordinate[i][1]] for i in range(4)]
    xs = [p[0] for p in points]
    ys = [p[1] for p in points]
    xmin, xmax = min(xs), max(xs)
    ymin, ymax = min(ys), max(ys)
    # Axis-aligned bounding-box corners in canonical order.
    dst = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]]
    best_rotation = points
    # BUG FIX: the original seeded the minimum with a hard-coded 1e8, which
    # silently picks shift 0 for very large coordinates; use infinity.
    # Also removed the dead `if force_flag != 0: pass` branch.
    best_force = float('inf')
    for shift in range(4):
        rotation = points[shift:] + points[:shift]
        force = sum(cal_line_length(rotation[k], dst[k]) for k in range(4))
        if force < best_force:
            best_force = force
            best_rotation = rotation
    return best_rotation
def get_best_begin_point_warp_single(coordinate):
    """Re-order the polygon's starting corner, then flatten to [x1, y1, ..., x4, y4]."""
    reordered = get_best_begin_point_single(coordinate)
    return TuplePoly2Poly(reordered)
def get_best_begin_point(coordinate_list):
    """Canonicalize the begin corner of every polygon; returns a stacked ndarray."""
    flattened = [get_best_begin_point_warp_single(c) for c in coordinate_list]
    return np.stack(flattened)
# def polygonToRotRectangle(polys):
# """
# pytorch version, batch operation
# :param polys: The polygon stored in format [x1, y1, x2, y2, x3, y3, x4, y4]
# shape [num_boxes, 8]
# :return: Rotated Rectangle in format [cx, cy, w, h, theta]
# shape [num_rot_recs, 5]
# """
# polys = polys.view(-1, 4, 2)
def xy2wh(boxes):
    """
    Convert corner-format boxes to center-format boxes.

    :param boxes: (xmin, ymin, xmax, ymax) Tensor of shape (n, 4).
    :return: (x_ctr, y_ctr, w, h) Tensor of shape (n, 4).
    """
    # +1.0: widths/heights follow the inclusive-pixel convention used
    # throughout this module. (Removed unused local `num_boxes`.)
    widths = boxes[..., 2] - boxes[..., 0] + 1.0
    heights = boxes[..., 3] - boxes[..., 1] + 1.0
    ctr_x = boxes[..., 0] + 0.5 * (widths - 1.0)
    ctr_y = boxes[..., 1] + 0.5 * (heights - 1.0)
    return torch.stack((ctr_x, ctr_y, widths, heights), dim=1)
def xy2wh_c(boxes):
    """
    Convert class-wise corner-format boxes to class-wise center-format boxes.

    :param boxes: (xmin, ymin, xmax, ymax) per class, Tensor (n, 4 * #C).
    :return: (x_ctr, y_ctr, w, h) per class, Tensor (n, 4 * #C).
    """
    # (Removed unused local `num_boxes`.)
    out_boxes = boxes.clone()
    # +1.0: inclusive-pixel widths/heights, matching xy2wh.
    widths = boxes[..., 2::4] - boxes[..., 0::4] + 1.0
    heights = boxes[..., 3::4] - boxes[..., 1::4] + 1.0
    out_boxes[..., 0::4] = boxes[..., 0::4] + 0.5 * (widths - 1.0)
    out_boxes[..., 1::4] = boxes[..., 1::4] + 0.5 * (heights - 1.0)
    out_boxes[..., 2::4] = widths
    out_boxes[..., 3::4] = heights
    return out_boxes
def wh2xy(bboxes):
    """
    Convert center-format boxes to corner-format boxes.

    :param bboxes: (x_ctr, y_ctr, w, h) Tensor of shape (n, 4).
    :return: (xmin, ymin, xmax, ymax) Tensor of shape (n, 4).
    """
    # -1: inclusive-pixel convention. (Removed unused local `num_boxes`.)
    half_w = (bboxes[..., 2] - 1) / 2.0
    half_h = (bboxes[..., 3] - 1) / 2.0
    xmins = bboxes[..., 0] - half_w
    ymins = bboxes[..., 1] - half_h
    xmaxs = bboxes[..., 0] + half_w
    ymaxs = bboxes[..., 1] + half_h
    return torch.stack((xmins, ymins, xmaxs, ymaxs), dim=1)
def wh2xy_c(bboxes):
    """
    Convert class-wise center-format boxes to class-wise corner-format boxes.

    :param bboxes: (x_ctr, y_ctr, w, h) per class, Tensor (n, 4 * #C).
    :return: (xmin, ymin, xmax, ymax) per class, Tensor (n, 4 * #C).
        (Docstring fixed: the original claimed (n, 4) but the output keeps
        the class-wise layout of the input.)
    """
    # (Removed unused local `num_boxes`.)
    out_bboxes = bboxes.clone()
    # -1: inclusive-pixel convention, matching wh2xy.
    half_w = (bboxes[..., 2::4] - 1) / 2.0
    half_h = (bboxes[..., 3::4] - 1) / 2.0
    out_bboxes[..., 0::4] = bboxes[..., 0::4] - half_w
    out_bboxes[..., 1::4] = bboxes[..., 1::4] - half_h
    out_bboxes[..., 2::4] = bboxes[..., 0::4] + half_w
    out_bboxes[..., 3::4] = bboxes[..., 1::4] + half_h
    return out_bboxes
def hbb2obb(bboxes):
    """
    Lift horizontal boxes to oriented boxes with a fixed -pi/2 angle.

    :param bboxes: (xmin, ymin, xmax, ymax) Tensor of shape (n, 4).
    :return: (x_ctr, y_ctr, w, h, angle) Tensor of shape (n, 5).
    """
    c_bboxes = xy2wh(bboxes)
    n = bboxes.size(0)
    # Every box gets the same initial orientation of -pi/2.
    angles = c_bboxes.new_full((n, 1), 1.0) * (-np.pi / 2)
    return torch.cat((c_bboxes, angles), 1)
def hbb2obb_v2(boxes):
    """
    Lift horizontal boxes to oriented boxes with a fixed -pi/2 angle
    ("fix a bug" variant of hbb2obb).

    With the -pi/2 convention, the y-span is stored in the `w` slot and the
    x-span in the `h` slot — the swap is intentional here.

    :param boxes: (xmin, ymin, xmax, ymax) Tensor of shape (n, 4).
    :return: (x_ctr, y_ctr, w, h, angle) Tensor of shape (n, 5).
    """
    x_span = boxes[..., 2] - boxes[..., 0] + 1.0  # goes into the 'h' slot
    y_span = boxes[..., 3] - boxes[..., 1] + 1.0  # goes into the 'w' slot
    ctr_x = boxes[..., 0] + 0.5 * (x_span - 1.0)
    ctr_y = boxes[..., 1] + 0.5 * (y_span - 1.0)
    c_bboxes = torch.stack((ctr_x, ctr_y, y_span, x_span), dim=1)
    angles = c_bboxes.new_full((boxes.size(0), 1), -np.pi / 2)
    return torch.cat((c_bboxes, angles), 1)
def roi2droi(rois):
    """
    Convert horizontal RoIs to rotated RoIs.

    :param rois: Tensor (n, 5): [batch_ind, x1, y1, x2, y2].
    :return: Tensor (n, 6): [batch_ind, x, y, w, h, theta].
    """
    batch_inds = rois[:, 0].unsqueeze(1)
    obbs = hbb2obb_v2(rois[:, 1:])
    return torch.cat((batch_inds, obbs), 1)
def polygonToRotRectangle_batch(bbox, with_module=True):
    """
    Convert polygons to rotated rectangles (batch version).

    :param bbox: polygons stored as [x1, y1, x2, y2, x3, y3, x4, y4],
        array-like of shape (num_boxes, 8).
    :param with_module: if True, wrap the angle into [0, 2*pi).
    :return: rotated rectangles [cx, cy, w, h, theta], shape (num_boxes, 5).
    """
    bbox = np.array(bbox, dtype=np.float32)
    # order='F' places the xs in bbox[:, 0, :] and the ys in bbox[:, 1, :].
    bbox = np.reshape(bbox, newshape=(-1, 2, 4), order='F')
    # Orientation of the first edge (p1 -> p2).
    angle = np.arctan2(-(bbox[:, 0, 1] - bbox[:, 0, 0]), bbox[:, 1, 1] - bbox[:, 1, 0])
    # Centroid of the four corners (accumulated in float64, as before).
    center = np.zeros((bbox.shape[0], 2, 1))
    for i in range(4):
        center[:, 0, 0] += bbox[:, 0, i]
        center[:, 1, 0] += bbox[:, 1, i]
    center = np.array(center, dtype=np.float32) / 4.0
    R = np.array([[np.cos(angle), -np.sin(angle)], [np.sin(angle), np.cos(angle)]], dtype=np.float32)
    # R is (2, 2, num_boxes); the transpose yields per-box R^T, i.e. each
    # polygon is rotated into its own axis-aligned frame.
    normalized = np.matmul(R.transpose((2, 1, 0)), bbox - center)
    xmin = np.min(normalized[:, 0, :], axis=1)
    xmax = np.max(normalized[:, 0, :], axis=1)
    ymin = np.min(normalized[:, 1, :], axis=1)
    ymax = np.max(normalized[:, 1, :], axis=1)
    # +1: inclusive-pixel convention used throughout this module.
    w = (xmax - xmin + 1)[:, np.newaxis]
    h = (ymax - ymin + 1)[:, np.newaxis]
    if with_module:
        angle = angle[:, np.newaxis] % (2 * np.pi)
    else:
        angle = angle[:, np.newaxis]
    # BUG FIX: `np.float` (an alias of builtin float) was removed in
    # NumPy 1.24; cast to np.float64 explicitly for identical behavior.
    dboxes = np.concatenate(
        (center[:, 0].astype(np.float64), center[:, 1].astype(np.float64), w, h, angle), axis=1)
    return dboxes
def RotBox2Polys(dboxes):
    """
    Convert rotated boxes (x_ctr, y_ctr, w, h, angle), shape (n, 5),
    to quadrangles (x1, y1, ..., x4, y4), shape (n, 8).
    """
    cs = np.cos(dboxes[:, 4])
    ss = np.sin(dboxes[:, 4])
    # -1: inclusive-pixel convention for spans.
    half_w = (dboxes[:, 2] - 1) / 2.0
    half_h = (dboxes[:, 3] - 1) / 2.0
    x_ctr = dboxes[:, 0]
    y_ctr = dboxes[:, 1]
    # Corner offsets in the box frame, in the original corner order.
    offsets = ((half_w, -half_h), (half_w, half_h), (-half_w, half_h), (-half_w, -half_h))
    columns = []
    for dx, dy in offsets:
        columns.append(x_ctr + cs * dx - ss * dy)
        columns.append(y_ctr + ss * dx + cs * dy)
    return np.stack(columns, axis=1)
def RotBox2Polys_torch(dboxes):
    """
    Torch version of RotBox2Polys.

    :param dboxes: (x_ctr, y_ctr, w, h, angle) Tensor of shape (n, 5).
    :return: quadrangle Tensor (x1, y1, ..., x4, y4) of shape (n, 8).
    """
    cs = torch.cos(dboxes[:, 4])
    ss = torch.sin(dboxes[:, 4])
    half_w = (dboxes[:, 2] - 1) / 2.0
    half_h = (dboxes[:, 3] - 1) / 2.0
    x_ctr = dboxes[:, 0]
    y_ctr = dboxes[:, 1]
    # Corner offsets in the box frame, in the original corner order.
    offsets = ((half_w, -half_h), (half_w, half_h), (-half_w, half_h), (-half_w, -half_h))
    columns = []
    for dx, dy in offsets:
        columns.append(x_ctr + cs * dx - ss * dy)
        columns.append(y_ctr + ss * dx + cs * dy)
    return torch.stack(columns, dim=1)
def poly2bbox(polys):
    """
    Axis-aligned bounding boxes of polygons (labels not included).

    :param polys: (x1, y1, ..., x4, y4) array of shape (n, 8).
    :return: (xmin, ymin, xmax, ymax) array of shape (n, 4).
    """
    n = polys.shape[0]
    # Reshape once (the original recomputed the same reshape for xs and ys).
    pts = np.reshape(polys, (n, 4, 2))
    xs = pts[:, :, 0]
    ys = pts[:, :, 1]
    xmin = np.min(xs, axis=1)
    ymin = np.min(ys, axis=1)
    xmax = np.max(xs, axis=1)
    ymax = np.max(ys, axis=1)
    return np.stack((xmin, ymin, xmax, ymax), axis=1)
def dbbox2roi(dbbox_list):
    """
    Convert a per-image list of dbboxes to droi format.

    :param dbbox_list: list[Tensor], one (k_i, >=5) tensor per image.
    :return: Tensor of shape (sum k_i, 6): [batch_ind, x_ctr, y_ctr, w, h, angle].
    """
    drois_list = []
    for img_id, dbboxes in enumerate(dbbox_list):
        if dbboxes.size(0) == 0:
            # Keep an empty placeholder so concatenation stays well-formed.
            drois_list.append(dbboxes.new_zeros((0, 6)))
        else:
            img_inds = dbboxes.new_full((dbboxes.size(0), 1), img_id)
            drois_list.append(torch.cat([img_inds, dbboxes[:, :5]], dim=-1))
    return torch.cat(drois_list, 0)
def droi2dbbox(drois):
    """Split drois (n, 6) back into a per-image list of dbboxes by batch index."""
    dbbox_list = []
    for img_id in torch.unique(drois[:, 0].cpu(), sorted=True):
        selected = drois[:, 0] == img_id.item()
        dbbox_list.append(drois[selected, 1:])
    return dbbox_list
def dbbox2result(dbboxes, labels, num_classes):
    """
    Split detection results into one numpy array per foreground class.

    :param dbboxes: (Tensor): shape (n, 9)
    :param labels: (Tensor): shape (n, )
    :param num_classes: (int), class number, including background class
    :return: list (ndarray): dbbox results of each class
    """
    # TODO: merge it with bbox2result
    num_fg = num_classes - 1
    if dbboxes.shape[0] == 0:
        return [np.zeros((0, 9), dtype=np.float32) for _ in range(num_fg)]
    dbboxes_np = dbboxes.cpu().numpy()
    labels_np = labels.cpu().numpy()
    return [dbboxes_np[labels_np == cls, :] for cls in range(num_fg)]
def distance2bbox(points, distance, max_shape=None):
    """Decode distance prediction to bounding box.
    Args:
        points (Tensor): Shape (n, 2), [x, y].
        distance (Tensor): Distance from the given point to 4
            boundaries (left, top, right, bottom).
        max_shape (tuple): Shape of the image, used to clamp boxes.
    Returns:
        Tensor: Decoded (x1, y1, x2, y2) bboxes, shape (n, 4).
    """
    top_left = points - distance[:, :2]
    bottom_right = points + distance[:, 2:4]
    x1, y1 = top_left[:, 0], top_left[:, 1]
    x2, y2 = bottom_right[:, 0], bottom_right[:, 1]
    if max_shape is not None:
        max_x = max_shape[1] - 1
        max_y = max_shape[0] - 1
        x1 = x1.clamp(min=0, max=max_x)
        y1 = y1.clamp(min=0, max=max_y)
        x2 = x2.clamp(min=0, max=max_x)
        y2 = y2.clamp(min=0, max=max_y)
    return torch.stack([x1, y1, x2, y2], -1)
| 34.87334
| 124
| 0.566943
|
bce7d96c9a7aa747eebfe1c95558baf99a2a6b5d
| 3,326
|
py
|
Python
|
tests/unit/returners/test_smtp_return.py
|
edusperoni/salt
|
c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b
|
[
"Apache-2.0"
] | 1
|
2017-06-26T18:14:56.000Z
|
2017-06-26T18:14:56.000Z
|
tests/unit/returners/test_smtp_return.py
|
edusperoni/salt
|
c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b
|
[
"Apache-2.0"
] | 1
|
2015-10-05T22:03:10.000Z
|
2015-10-05T22:03:10.000Z
|
tests/unit/returners/test_smtp_return.py
|
edusperoni/salt
|
c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Mike Place (mp@saltstack.com)`
tests.unit.returners.smtp_return_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
# Import salt libs
import salt.returners.smtp_return as smtp
from salt.utils.jinja import SaltCacheLoader
try:
import gnupg # pylint: disable=unused-import
HAS_GNUPG = True
except ImportError:
HAS_GNUPG = False
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SMTPReturnerTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test SMTP returner
    '''
    def setup_loader_modules(self):
        # No extra loader globals are needed; the smtp module runs with an
        # empty __salt__/__opts__ injection here.
        return {smtp: {}}
    def _test_returner(self, mocked_smtplib): # pylint: disable=unused-argument
        '''
        Test to see if the SMTP returner sends a message
        '''
        # Minimal job-return payload covering every field listed in
        # options['fields'] below.
        ret = {'id': '12345',
               'fun': 'mytest.func',
               'fun_args': 'myfunc args',
               'jid': '54321',
               'return': 'The room is on fire as shes fixing her hair'}
        options = {'username': '',
                   'tls': '',
                   'from': '',
                   'fields': 'id,fun,fun_args,jid,return',
                   'to': '',
                   'host': '',
                   'renderer': 'jinja|yaml',
                   'template': '',
                   'password': '',
                   'gpgowner': '',
                   'subject': ''}
        # Patch option loading and the jinja loader's file client so the
        # returner never touches real config or the fileserver.
        with patch('salt.returners.smtp_return._get_options', MagicMock(return_value=options)), \
                patch.object(SaltCacheLoader, 'file_client', MagicMock()):
            smtp.returner(ret)
            # The SMTP class itself is mocked by the caller; sending the mail
            # is observed via the mocked sendmail call.
            self.assertTrue(mocked_smtplib.return_value.sendmail.called)
    # test_returner is defined conditionally at class-creation time: when
    # gnupg is importable, the gnupg module must also be patched out.
    if HAS_GNUPG:
        def test_returner(self):
            # Verify that the SMTP returner sends a message (gnupg present).
            with patch.dict(smtp.__opts__, {'extension_modules': '',
                                            'renderer': 'jinja|yaml',
                                            'renderer_blacklist': [],
                                            'renderer_whitelist': [],
                                            'file_roots': [],
                                            'pillar_roots': [],
                                            'cachedir': '/'}), \
                    patch('salt.returners.smtp_return.gnupg'), \
                    patch('salt.returners.smtp_return.smtplib.SMTP') as mocked_smtplib:
                self._test_returner(mocked_smtplib)
    else:
        def test_returner(self):
            # Verify that the SMTP returner sends a message (no gnupg).
            with patch.dict(smtp.__opts__, {'extension_modules': '',
                                            'renderer': 'jinja|yaml',
                                            'renderer_blacklist': [],
                                            'renderer_whitelist': [],
                                            'file_roots': [],
                                            'pillar_roots': [],
                                            'cachedir': '/'}), \
                    patch('salt.returners.smtp_return.smtplib.SMTP') as mocked_smtplib:
                self._test_returner(mocked_smtplib)
| 38.229885
| 97
| 0.492183
|
9b964fd9dc0f99e4da561a45971153b1aa80f1eb
| 661
|
py
|
Python
|
tcfcli/cmds/native/generate_event/cli.py
|
tencentyun/scfcli
|
ef15508ad34a851cf0d2750dfaa5202f6a600887
|
[
"Apache-2.0"
] | 103
|
2019-06-11T06:09:56.000Z
|
2021-12-18T22:48:59.000Z
|
tcfcli/cmds/native/generate_event/cli.py
|
TencentCloud/Serverless-cli
|
57f98b24cfd10712770a4806212cfb69d981a11a
|
[
"Apache-2.0"
] | 8
|
2019-07-12T12:08:40.000Z
|
2020-10-20T07:18:17.000Z
|
tcfcli/cmds/native/generate_event/cli.py
|
TencentCloud/Serverless-cli
|
57f98b24cfd10712770a4806212cfb69d981a11a
|
[
"Apache-2.0"
] | 49
|
2019-06-11T06:26:05.000Z
|
2020-02-19T08:13:36.000Z
|
# -*- coding: utf-8 -*-
import click
from tcfcli.cmds.local.generate_event.generate_event_service import GenerateEventService
@click.command(name="generate-event", cls=GenerateEventService, short_help="Simulate a service generation event.")
def generate_event():
    # NOTE: click renders this docstring verbatim as the command's --help
    # text (the \b characters suppress paragraph re-wrapping), so the
    # docstring is user-facing output — do not edit it for style. The
    # actual sub-command dispatch is handled by cls=GenerateEventService.
    """
    \b
    Simulate a service generation event.
    \b
    Common usage:
        \b
        * Generate a cmq event
        \b
        $ scf native generate-event cmq notification --owner 19911112
        \b
        * Use the | to send to invoke
        \b
        $ scf native generate-event cmq notification --owner 19911112 | scf local invoke -t template.yaml
    """
    pass
| 27.541667
| 114
| 0.647504
|
448093d395bfbfece5527a5ae64e86454f7f65cd
| 1,031
|
py
|
Python
|
main.py
|
jpstroop/issue-reporter
|
974e3f2534c15b8674c80cdca2e39dcab81a555b
|
[
"BSD-2-Clause"
] | null | null | null |
main.py
|
jpstroop/issue-reporter
|
974e3f2534c15b8674c80cdca2e39dcab81a555b
|
[
"BSD-2-Clause"
] | 4
|
2019-10-28T19:23:00.000Z
|
2021-04-30T21:42:14.000Z
|
main.py
|
jpstroop/issue-reporter
|
974e3f2534c15b8674c80cdca2e39dcab81a555b
|
[
"BSD-2-Clause"
] | null | null | null |
from github_reporter import timestamp
from github_reporter.app_setup import load_config, load_secrets
from github_reporter.github_reporter import GithubReporter
from sys import stderr
def main(event=None, context=None):
    """Cloud Function entry point: build and commit the GitHub issue report.

    Args:
        event: trigger payload; recognized keys are ``is_google_run``
            (default True) and ``dump_to_stdout`` (default False).
        context: Cloud Functions context object (unused).

    Returns:
        True on success, False on any failure — Cloud Functions must
        return something.
    """
    # Avoid mutable default arguments; a missing payload means "empty".
    if event is None:
        event = {}
    try:
        is_google_run = event.get("is_google_run", True)
        dump_to_stdout = event.get("dump_to_stdout", False)
        secrets = load_secrets()
        config = load_config()
        if is_google_run:
            print(f"{timestamp()} - Google called me")
        if dump_to_stdout:
            print(f"{timestamp()} - Local run, will dump JSON to stdout")
        gh_reporter = GithubReporter(secrets, config, dump_to_stdout)
        commit_success = gh_reporter.run_report()
        print(f"{timestamp()} - Commit success: {commit_success}")
        return True  # Cloud Functions must return something
    except Exception as e:
        # Top-level boundary: log to stderr and signal failure to the caller.
        print(f"{timestamp()} - {e}", file=stderr)
        return False
if __name__ == "__main__":
    # Local/manual invocation: skip the Google-run log line and dump the
    # report JSON to stdout instead.
    main(event={"is_google_run": False, "dump_to_stdout": True})
| 36.821429
| 73
| 0.673133
|
0e4a7b68bc112675b898fdce3dc63184655446a2
| 2,271
|
py
|
Python
|
python_app/tenhou-bot/main.py
|
0xsuu/Project-Mahjong
|
e82edc67651ff93c8ec158b590cd728f28504be9
|
[
"Apache-2.0"
] | 9
|
2018-06-08T00:09:08.000Z
|
2021-11-17T11:05:11.000Z
|
python_app/tenhou-bot/main.py
|
0xsuu/Project-Mahjong
|
e82edc67651ff93c8ec158b590cd728f28504be9
|
[
"Apache-2.0"
] | 1
|
2020-04-25T12:43:26.000Z
|
2020-04-25T12:43:26.000Z
|
python_app/tenhou-bot/main.py
|
0xsuu/Project-Mahjong
|
e82edc67651ff93c8ec158b590cd728f28504be9
|
[
"Apache-2.0"
] | 2
|
2019-05-30T07:18:45.000Z
|
2019-11-05T09:15:13.000Z
|
# -*- coding: utf-8 -*-
"""
Endpoint to run bot. It will play a game on tenhou.net
"""
from optparse import OptionParser
from tenhou.main import connect_and_play
from utils.logger import set_up_logging
from utils.settings_handler import settings
from mahjong.myAI.slcnn_player import SLCNNPlayer
def parse_args_and_set_up_settings():
    """Parse command-line options and copy them into the global settings.

    Side effects: mutates the shared ``settings`` object (user id, game
    type, lobby, timeout, AI flag, tournament flag).
    """
    parser = OptionParser()
    parser.add_option('-u', '--user_id',
                      type='string',
                      default=settings.USER_ID,
                      help='Tenhou\'s user id. Example: IDXXXXXXXX-XXXXXXXX. Default is {0}'.format(settings.USER_ID))
    parser.add_option('-g', '--game_type',
                      type='string',
                      default=settings.GAME_TYPE,
                      help='The game type in Tenhou.net. Examples: 1 or 9. Default is {0}'.format(settings.GAME_TYPE))
    parser.add_option('-l', '--lobby',
                      type='string',
                      default=settings.LOBBY,
                      help='Lobby to play. Default is {0}'.format(settings.LOBBY))
    parser.add_option('-t', '--timeout',
                      type='int',
                      default=settings.WAITING_GAME_TIMEOUT_MINUTES,
                      help='How much minutes bot will looking for a game. '
                           'If game is not started in timeout, script will be ended. '
                           'Default is {0}'.format(settings.WAITING_GAME_TIMEOUT_MINUTES))
    parser.add_option('-c', '--championship',
                      type='string',
                      help='Tournament lobby to play.')
    # store_false: passing -d turns the AI OFF.
    # BUG FIX: the help text previously said 'Enable AI', contradicting the
    # flag's actual (disabling) effect.
    parser.add_option('-d', '--disable_ai',
                      action='store_false',
                      default=settings.ENABLE_AI,
                      help='Disable AI')
    opts, _ = parser.parse_args()
    settings.USER_ID = opts.user_id
    settings.GAME_TYPE = opts.game_type
    settings.LOBBY = opts.lobby
    settings.WAITING_GAME_TIMEOUT_MINUTES = opts.timeout
    settings.ENABLE_AI = opts.disable_ai
    if opts.championship:
        # A tournament lobby overrides the regular lobby setting.
        settings.IS_TOURNAMENT = True
        settings.LOBBY = opts.championship
def main():
    # Order matters: settings must be populated from argv before logging is
    # configured and the Tenhou connection is opened.
    parse_args_and_set_up_settings()
    set_up_logging()
    connect_and_play()
if __name__ == '__main__':
    main()
| 32.913043
| 118
| 0.587847
|
ceb3e1f51ed76dc34f4fc959fe56fccd6321e5cf
| 12,357
|
py
|
Python
|
Packs/RemoteAccess/Integrations/RemoteAccessv2/RemoteAccessv2.py
|
mazmat-panw/content
|
024a65c1dea2548e2637a9cbbe54966e9e34a722
|
[
"MIT"
] | 2
|
2021-12-06T21:38:24.000Z
|
2022-01-13T08:23:36.000Z
|
Packs/RemoteAccess/Integrations/RemoteAccessv2/RemoteAccessv2.py
|
mazmat-panw/content
|
024a65c1dea2548e2637a9cbbe54966e9e34a722
|
[
"MIT"
] | 87
|
2022-02-23T12:10:53.000Z
|
2022-03-31T11:29:05.000Z
|
Packs/RemoteAccess/Integrations/RemoteAccessv2/RemoteAccessv2.py
|
henry-sue-pa/content
|
043c6badfb4f9c80673cad9242fdea72efe301f7
|
[
"MIT"
] | 2
|
2022-01-05T15:27:01.000Z
|
2022-02-01T19:27:43.000Z
|
import tempfile
from io import StringIO
import paramiko
from paramiko import SSHClient, AutoAddPolicy, transport, Transport
from paramiko.ssh_exception import NoValidConnectionsError
from scp import SCPClient, SCPException
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
# Disable insecure warnings
requests.packages.urllib3.disable_warnings() # pylint: disable=no-member
DEFAULT_TIMEOUT = 10.0
''' HELPER FUNCTIONS '''
def perform_copy_command(ssh_client: SSHClient, file_path: str, destination_path: str, copy_to_remote: bool,
                         socket_timeout: float) -> Union[str, bytes]:
    """
    Function to perform copy to or copy from remote machine.
    This helper function was separated from command functions mainly for easier mocking in tests.
    Args:
        ssh_client (SSHClient): SSH client to perform copy from or to.
        file_path (str):
            - Copy to remote machine from Cortex XSOAR - the Cortex XSOAR file path.
            - Copy to Cortex XSOAR from remote machine - the remote machine file path.
        destination_path (str):
            - Copy to remote machine from Cortex XSOAR - the remote machine file path to contain the copied data.
            - Copy to Cortex XSOAR from remote machine - Temp file name to save the file, before extracting its data.
        copy_to_remote (bool): Whether a request to copy to remote was made.
        socket_timeout(float): Socket timeout.
    Returns:
        (str): Empty str if command requested was copy to.
        (bytes): The copied file's data when copying from the remote machine.
    Raises:
        (DemistoException): For known path/directory failures; re-raises anything unexpected.
    """
    try:
        with SCPClient(ssh_client.get_transport(), socket_timeout=socket_timeout) as scp_client:
            if copy_to_remote:
                scp_client.put(file_path, destination_path)
                return ''
            # Copy from remote: fetch into a temp dir, then read the bytes.
            with tempfile.TemporaryDirectory() as temp_dir:
                scp_client.get(file_path, f'{temp_dir}/{destination_path}')
                with open(f'{temp_dir}/{destination_path}', 'rb') as f:
                    return f.read()
    except (FileNotFoundError, SCPException) as e:
        if 'No such file or directory' in str(e):
            # BUG FIX: the original tested this same condition twice, so the
            # "remote machine" wording was unreachable; choose the message by
            # the direction of the copy instead.
            machine = 'local' if copy_to_remote else 'remote'
            raise DemistoException(f'Could not find the given path {file_path} in the {machine} machine.\n'
                                   'Please verify the path is correct.') from e
        if 'Not a directory' in str(e):
            raise DemistoException(f'Given destination path: {destination_path} does not exist in remote machine.\n'
                                   'Please verify destination path is valid.') from e
        raise e
def get_available_ciphers() -> Set[str]:
    """
    Gets a set of the available ciphers supported by server.
    Returns:
        (Set[str]): Set of supported ciphers.
    """
    tmp_socket = socket.socket()
    try:
        opts = transport.Transport(tmp_socket).get_security_options()
        return set(opts.ciphers)
    finally:
        # BUG FIX: the socket was leaked if Transport construction raised;
        # always close it.
        tmp_socket.close()
def get_available_key_algorithms() -> Set[str]:
    """
    Gets a set of the available key-exchange algorithms supported by server.
    Returns:
        (Set[str]): Set of supported key algorithms.
        (Docstring fixed: the original said "ciphers".)
    """
    tmp_socket = socket.socket()
    try:
        opts = transport.Transport(tmp_socket).get_security_options()
        return set(opts.kex)
    finally:
        # BUG FIX: the socket was leaked if Transport construction raised;
        # always close it.
        tmp_socket.close()
def create_paramiko_ssh_client(
        host_name: str, user_name: str, password: str, ciphers: Set[str], key_algorithms: Set[str], private_key: str = ''
) -> SSHClient:
    """
    Creates the Paramiko SSH client.
    Args:
        host_name (str): Hostname of the machine to create the SSH for.
        user_name (str): User to create the SSH session with the given host.
        password (str): Password of the given user.
        ciphers (Set[str]): Set of ciphers to be used, if given.
        key_algorithms (Set[str]): Set of key algorithms to be used, if given.
        private_key (str): The SSH certificate (should be PEM file based certificate only).
    Returns:
        (SSHClient): Paramiko SSH client if connection was successful, exception otherwise.
    """
    if ciphers:
        # Getting available ciphers from server, in order to print an appropriate error message upon no cipher match.
        available_ciphers = get_available_ciphers()
        if not ciphers.intersection(available_ciphers):
            raise DemistoException(f'Given ciphers are not available in server.\n'
                                   f'Ciphers available in server are: {available_ciphers}')
        # HACK: restricts the cipher list by monkeypatching a private
        # Transport class attribute — affects ALL Transports in this process.
        Transport._preferred_ciphers = (*ciphers,)  # type: ignore
    if key_algorithms:
        available_key_args = get_available_key_algorithms()
        if not key_algorithms.intersection(available_key_args):
            raise DemistoException(f'Given key algorithms are not available in server.\n'
                                   f'Key algorithms available in server are: {available_key_args}')
        # HACK: same process-wide monkeypatch, for key-exchange algorithms.
        Transport._preferred_kex = (*key_algorithms,)  # type: ignore
    client = SSHClient()
    # Auto-accept unknown host keys (no known_hosts verification).
    client.set_missing_host_key_policy(AutoAddPolicy())
    try:
        rsa_private_key = None
        if private_key:
            # authenticating with private key only works for certificates which are based on PEM files.
            # (RSA private keys)
            rsa_private_key = paramiko.RSAKey.from_private_key(StringIO(private_key))  # type: ignore # [assignment]
        client.connect(hostname=host_name, username=user_name, password=password, port=22, pkey=rsa_private_key)
    except NoValidConnectionsError as e:
        raise DemistoException(f'Unable to connect to port 22 on {host_name}') from e
    return client
''' COMMAND FUNCTIONS '''
def execute_shell_command(ssh_client: SSHClient, args: Dict[str, Any]) -> CommandResults:
    """
    Executes a shell command on the remote host over the given SSH session.
    Args:
        ssh_client (SSHClient): SSH client to perform the command with.
        args (Dict[str, Any]): Cortex XSOAR arguments ('cmd', optional 'timeout').
    Returns:
        (CommandResults).
    """
    command: str = args.get('cmd', '')
    timeout: Optional[int] = arg_to_number(args.get('timeout'))
    # exec_command returns (stdin, stdout, stderr); stdin carries no data.
    _, stdout, std_err = ssh_client.exec_command(command, timeout=timeout)
    output_text: str = stdout.read().decode()
    error_text: str = std_err.read().decode()
    if not output_text and not error_text:
        outputs: Optional[List[Dict]] = None
        readable_output = f'### Command {command} was executed successfully without any outputs.'
    else:
        outputs = [{
            'output': output_text,
            'error': error_text,
            'command': command,
            'success': not error_text
        }]
        readable_output = tableToMarkdown(f'Command {command} Outputs', outputs, removeNull=True)
    return CommandResults(
        outputs_prefix='RemoteAccess.Command',
        outputs=outputs,
        readable_output=readable_output
    )
def copy_to_command(ssh_client: SSHClient, args: Dict[str, Any]) -> CommandResults:
    """
    Executes a safe copy from Cortex XSOAR to remote machine.
    Args:
        ssh_client (SSHClient): SSH client to perform the command with.
        args (Dict[str, Any]): Cortex XSOAR arguments. Destination can be given
            as 'dest-dir' OR 'destination_path' (mutually exclusive); the file
            is identified by 'entry_id' (or legacy 'entry').
    Returns:
        (CommandResults).
    Raises:
        (DemistoException): on conflicting destination args, a missing entry
            ID, or an unresolvable entry path.
    """
    dest_dir_arg = args.get('dest-dir', '')
    destination_path_arg = args.get('destination_path', '')
    if dest_dir_arg and destination_path_arg:
        raise DemistoException('Please provide at most one of "dest-dir" argument or "destination_path", not both.')
    # Support `entry` argument to maintain BC:
    entry: str = args.get('entry', '')
    entry_id: str = args.get('entry_id', entry)
    if not entry_id:
        raise DemistoException('No entry ID path given. Please provide one of the "entry_id" (recommended) or "entry" inputs.')
    if timeout := args.get('timeout'):
        timeout = float(timeout)
    else:
        timeout = DEFAULT_TIMEOUT
    # Resolve the war-room entry to an on-disk file path and display name.
    file_path_data = demisto.getFilePath(entry_id)
    if not (file_path := file_path_data.get('path', '')):
        raise DemistoException('Could not find given entry ID path. Please assure given entry ID is correct.')
    file_name = file_path_data.get('name', '')
    # Precedence: dest-dir (keep original file name) > destination_path
    # (explicit full path) > bare file name in the remote home directory.
    if dest_dir_arg:
        destination_path = os.path.join(dest_dir_arg, file_name)
        destination_dir = dest_dir_arg
    elif destination_path_arg:
        destination_path = destination_path_arg
        destination_dir = os.path.split(destination_path)[0]
    else:
        destination_path = file_name
        destination_dir = ''
    # Create all folders to destination_path in the remote machine
    if destination_dir:
        execute_shell_command(ssh_client, args={'cmd': f'mkdir -p {destination_dir}'})
    perform_copy_command(ssh_client, file_path, destination_path, copy_to_remote=True, socket_timeout=timeout)
    return CommandResults(readable_output=f'### The file corresponding to entry ID: {entry_id} was copied to remote'
                                          ' host.')
def copy_from_command(ssh_client: SSHClient, args: Dict[str, Any]) -> Dict:
    """
    Executes a safe copy from the remote machine to the Cortex XSOAR machine.
    Args:
        ssh_client (SSHClient): SSH client to perform the command with.
        args (Dict[str, Any]): Cortex XSOAR arguments.
    Returns:
        (Dict): FileResult data.
    """
    timeout_arg = args.get('timeout')
    timeout = float(timeout_arg) if timeout_arg else DEFAULT_TIMEOUT
    # 'file' is kept for backward compatibility; 'file_path' wins when given.
    legacy_file: str = args.get('file', '')
    file_path: str = args.get('file_path', legacy_file)
    file_name: str = args.get('file_name', os.path.basename(file_path))
    remote_file_data = perform_copy_command(ssh_client, file_path, file_name, copy_to_remote=False,
                                            socket_timeout=timeout)
    return fileResult(file_name, remote_file_data)
''' MAIN FUNCTION '''
def main() -> None:
    """Integration entry point: build the SSH client and dispatch the command."""
    params = demisto.params()
    args = demisto.args()
    command = demisto.command()
    credentials: Dict[str, Any] = params.get('credentials') or {}
    user: str = credentials.get('identifier', '')
    password: str = credentials.get('password', '')
    # PEM-based RSA private key, if configured on the credential object.
    certificate: str = (credentials.get('credentials') or {}).get('sshkey', '')
    host_name: str = params.get('hostname', '')
    ciphers: Set[str] = set(argToList(params.get('ciphers')))
    key_algorithms: Set[str] = set(argToList(params.get('key_algorithms')))
    demisto.debug(f'Command being called is {demisto.command()}')
    # Optional second password gate: every command except test-module must
    # supply a matching 'additional_password' argument.
    if password_param := params.get('additional_password', {}).get('password'):
        if command != 'test-module' and password_param != args.get('additional_password'):
            raise DemistoException('Additional password to use the module have been supplied.\n'
                                   'Please supply "additional_password" argument that matches the "Additional Password"'
                                   ' parameter value.')
    client = None
    try:
        client = create_paramiko_ssh_client(host_name, user, password, ciphers, key_algorithms, certificate)
        if command == 'test-module':
            # Connecting successfully above is the whole test.
            return_results('ok')
        elif command == 'ssh':
            return_results(execute_shell_command(client, args))
        elif command == 'copy-to':
            return_results(copy_to_command(client, args))
        elif command == 'copy-from':
            return_results(copy_from_command(client, args))
        else:
            raise NotImplementedError(f'''Command '{command}' is not implemented.''')
        client.close()
    # Log exceptions and return errors
    except Exception as e:
        # Best-effort cleanup of the SSH session before reporting the error.
        if client:
            client.close()
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| 42.463918
| 127
| 0.658655
|
27b1d1c830175c294f891ea1bfb100948decab4c
| 5,701
|
py
|
Python
|
bindsnet_master/bindsnet/conversion/nodes.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 6
|
2021-06-01T03:43:35.000Z
|
2022-02-11T10:41:06.000Z
|
bindsnet_master/bindsnet/conversion/nodes.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 1
|
2022-03-31T03:22:14.000Z
|
2022-03-31T03:22:14.000Z
|
bindsnet_master/bindsnet/conversion/nodes.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 3
|
2021-10-30T02:30:40.000Z
|
2021-11-16T04:23:12.000Z
|
from typing import Optional, Iterable, Union
import torch
from bindsnet.network import nodes
class SubtractiveResetIFNodes(nodes.Nodes):
    # language=rst
    """
    Layer of `integrate-and-fire (IF) neurons <https://bit.ly/2EOk6YN>` using
    reset by subtraction: on a spike the threshold is subtracted from the
    membrane voltage rather than the voltage being clamped to a fixed value.
    """
    def __init__(
        self,
        n: Optional[int] = None,
        shape: Optional[Iterable[int]] = None,
        traces: bool = False,
        traces_additive: bool = False,
        tc_trace: Union[float, torch.Tensor] = 20.0,
        trace_scale: Union[float, torch.Tensor] = 1.0,
        sum_input: bool = False,
        thresh: Union[float, torch.Tensor] = -52.0,
        reset: Union[float, torch.Tensor] = -65.0,
        refrac: Union[int, torch.Tensor] = 5,
        lbound: Optional[float] = None,
        **kwargs,
    ) -> None:
        # language=rst
        """
        Instantiates a layer of IF neurons with the subtractive reset mechanism
        from `this paper <https://bit.ly/2ShuwrQ>`_.
        :param n: The number of neurons in the layer.
        :param shape: The dimensionality of the layer.
        :param traces: Whether to record spike traces.
        :param traces_additive: Whether to record spike traces additively.
        :param tc_trace: Time constant of spike trace decay.
        :param trace_scale: Scaling factor for spike trace.
        :param sum_input: Whether to sum all inputs.
        :param thresh: Spike threshold voltage.
        :param reset: Post-spike reset voltage (used by
            ``reset_state_variables`` / ``set_batch_size``, not by the
            per-step subtractive reset).
        :param refrac: Refractory (non-firing) period of the neuron.
        :param lbound: Lower bound of the voltage, or ``None`` for unbounded.
        """
        super().__init__(
            n=n,
            shape=shape,
            traces=traces,
            traces_additive=traces_additive,
            tc_trace=tc_trace,
            trace_scale=trace_scale,
            sum_input=sum_input,
        )
        # Buffers (not parameters) so they move with .to(device) and are
        # saved in state_dict without being trained.
        self.register_buffer(
            "reset", torch.tensor(reset, dtype=torch.float)
        )  # Post-spike reset voltage.
        self.register_buffer(
            "thresh", torch.tensor(thresh, dtype=torch.float)
        )  # Spike threshold voltage.
        self.register_buffer(
            "refrac", torch.tensor(refrac)
        )  # Post-spike refractory period.
        self.register_buffer("v", torch.FloatTensor())  # Neuron voltages.
        self.register_buffer(
            "refrac_count", torch.FloatTensor()
        )  # Refractory period counters.
        self.lbound = lbound  # Lower bound of voltage.
    def forward(self, x: torch.Tensor) -> None:
        # language=rst
        """
        Runs a single simulation step.  NOTE: statement order matters —
        integrate, tick refractory counters, detect spikes, then reset.
        :param x: Inputs to the layer.
        """
        # Integrate input voltages; neurons still in their refractory period
        # (refrac_count != 0) ignore input via the boolean mask.
        self.v += (self.refrac_count == 0).float() * x
        # Decrement refractory counters.
        # NOTE(review): ``self.dt`` appears to be attached by the parent
        # network, not defined in this class — confirm before reuse.
        self.refrac_count = (self.refrac_count > 0).float() * (
            self.refrac_count - self.dt
        )
        # Check for spiking neurons.
        self.s = self.v >= self.thresh
        # Refractoriness and voltage reset.
        self.refrac_count.masked_fill_(self.s, self.refrac)
        # Subtractive reset: subtract the threshold so any overshoot above
        # threshold is preserved for the next step.
        self.v[self.s] = self.v[self.s] - self.thresh
        # Voltage clipping to lower bound.
        if self.lbound is not None:
            self.v.masked_fill_(self.v < self.lbound, self.lbound)
        super().forward(x)
    def reset_state_variables(self) -> None:
        # language=rst
        """
        Resets relevant state variables.
        """
        super().reset_state_variables()
        self.v.fill_(self.reset)  # Neuron voltages.
        self.refrac_count.zero_()  # Refractory period counters.
    def set_batch_size(self, batch_size) -> None:
        # language=rst
        """
        Sets mini-batch size. Called when layer is added to a network.
        Reallocates ``v`` and ``refrac_count`` with shape
        ``(batch_size, *self.shape)``.
        :param batch_size: Mini-batch size.
        """
        super().set_batch_size(batch_size=batch_size)
        self.v = self.reset * torch.ones(batch_size, *self.shape, device=self.v.device)
        self.refrac_count = torch.zeros_like(self.v, device=self.refrac_count.device)
class PassThroughNodes(nodes.Nodes):
    # language=rst
    """
    Layer of nodes that relays its input unchanged: each forward step sets
    the layer's spike output ``s`` to the raw input ``x``.
    (The previous docstring described IF neurons — a copy-paste leftover;
    no integration or reset happens here.)
    """
    def __init__(
        self,
        n: Optional[int] = None,
        shape: Optional[Iterable[int]] = None,
        traces: bool = False,
        traces_additive: bool = False,
        tc_trace: Union[float, torch.Tensor] = 20.0,
        trace_scale: Union[float, torch.Tensor] = 1.0,
        sum_input: bool = False,
    ) -> None:
        # language=rst
        """
        Instantiates a layer of pass-through nodes.
        :param n: The number of neurons in the layer.
        :param shape: The dimensionality of the layer.
        :param traces: Whether to record spike traces.
        :param traces_additive: Whether to record spike traces additively.
        :param tc_trace: Time constant of spike trace decay.
        :param trace_scale: Scaling factor for spike trace.
        :param sum_input: Whether to sum all inputs.
        """
        super().__init__(
            n=n,
            shape=shape,
            traces=traces,
            traces_additive=traces_additive,
            tc_trace=tc_trace,
            trace_scale=trace_scale,
            sum_input=sum_input,
        )
        # Voltage buffer is registered but never updated by forward();
        # presumably kept for interface parity with other node types — TODO
        # confirm it is safe to rely on elsewhere.
        self.register_buffer("v", torch.zeros(self.shape))
    def forward(self, x: torch.Tensor) -> None:
        # language=rst
        """
        Runs a single simulation step: spikes are simply the input.
        :param x: Inputs to the layer, forwarded verbatim as spikes.
        """
        self.s = x
    def reset_state_variables(self) -> None:
        # language=rst
        """
        Resets relevant state variables (clears the spike tensor in place).
        """
        self.s.zero_()
| 32.20904
| 87
| 0.591651
|
a64bfdc4c8b28d9a26f518d1dc1545bb7a66f797
| 3,770
|
py
|
Python
|
pynsor/sensors/diskstats.py
|
dunkelstern/pynsor
|
4fc528ccf59cb1fd8ca69ab40fcd41241b285676
|
[
"BSD-3-Clause"
] | 2
|
2021-05-02T06:28:32.000Z
|
2021-05-14T15:44:40.000Z
|
pynsor/sensors/diskstats.py
|
dunkelstern/pynsor
|
4fc528ccf59cb1fd8ca69ab40fcd41241b285676
|
[
"BSD-3-Clause"
] | 2
|
2021-04-26T18:26:20.000Z
|
2021-04-29T23:50:06.000Z
|
pynsor/sensors/diskstats.py
|
dunkelstern/pynsor
|
4fc528ccf59cb1fd8ca69ab40fcd41241b285676
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import Optional, Dict, Any, List
from datetime import datetime
from .sensor import Sensor
from pynsor.postgres import Connection
class DiskStats(Sensor):
    """Sensor that samples ``/proc/diskstats`` and stores per-disk I/O counters.

    Raw snapshots are buffered by :meth:`gather`, parsed into one measurement
    per disk by :meth:`data`, and written to the ``diskstats`` table by
    :meth:`save`.
    """

    # /proc/diskstats columns starting at field index 3 (after major, minor
    # and device name).  Kernels < 4.18 emit only the first 11 of these
    # (no discard counters); kernels >= 5.5 append extra flush fields, which
    # are ignored here.
    _FIELDS = (
        "reads_completed",
        "reads_merged",
        "sectors_read",
        "millis_reading",
        "writes_completed",
        "writes_merged",
        "sectors_written",
        "millis_writing",
        "io_in_progress",
        "millis_io",
        "weighted_millis_io",
        "discards_completed",
        "discards_merged",
        "sectors_discarded",
        "millis_discarding",
    )

    def init(self, config: Dict[str, Any]) -> None:
        """Initialize the sensor with its configuration block."""
        super().init(config)

    def create_datamodel(self, connection: Connection) -> None:
        """Create the ``diskstats`` table (one nullable BIGINT per counter)
        and an index on the disk name."""
        columns: List[Dict[str, str]] = [
            {"name": "disk", "type": "TEXT", "null": "NOT NULL"}
        ]
        # Derive the columns from _FIELDS so table schema and parser can
        # never drift apart.
        columns.extend(
            {"name": field, "type": "BIGINT", "null": "NULL"}
            for field in self._FIELDS
        )
        connection.create_table('diskstats', columns)
        connection.create_index('diskstats', 'disk')

    def gather(self, timestamp: datetime):
        """Buffer one raw snapshot of ``/proc/diskstats`` tagged with *timestamp*."""
        try:
            with open('/proc/diskstats', 'r') as fp:
                self.raw_data.append({
                    "time": timestamp,
                    "data": fp.read()
                })
        except FileNotFoundError:
            # Not running on Linux (or procfs unavailable): skip silently,
            # matching the sensor's best-effort contract.
            pass

    def data(self) -> Optional[List[Dict[str, Any]]]:
        """Parse buffered snapshots into per-disk measurement dicts.

        :return: ``None`` if no snapshot buffer exists, otherwise one
            ``{disk_name: measurement}`` dict per snapshot.  Counters absent
            on older kernels (e.g. the discard fields before 4.18) are stored
            as ``None`` instead of raising ``IndexError``.
        """
        if self.raw_data is None:
            return None
        result = []
        for item in self.raw_data:
            data: Dict[str, Any] = {}
            for line in item['data'].splitlines():
                fields = line.split()
                # major + minor + name + the 11 classic counters is the
                # minimum well-formed line; skip anything shorter.
                if len(fields) < 14:
                    continue
                values = fields[3:]
                measurement: Dict[str, Any] = {
                    "time": item['time'],
                    "disk": fields[2],
                }
                for index, name in enumerate(self._FIELDS):
                    measurement[name] = (
                        int(values[index]) if index < len(values) else None
                    )
                data[fields[2]] = measurement
            result.append(data)
        return result

    def save(self, connection: Connection) -> None:
        """Persist all parsed measurements and clear the snapshot buffer."""
        data = self.data()
        if data is None:
            print("ERROR: Could not read sensordata from /proc/diskstats!")
            return
        for item in data:
            for measurement in item.values():
                connection.insert('diskstats', measurement)
        self.raw_data = []
Sensor.register(DiskStats)
| 41.428571
| 81
| 0.472149
|
7865d1f8a448f4e483793c37a7963781a3e4b27b
| 15,947
|
py
|
Python
|
tests/integration/test_modelldcatnofactory.py
|
Informasjonsforvaltning/jsonschematordf
|
dfeb039411b5a9797ad3b7769e0dd3489abc5502
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_modelldcatnofactory.py
|
Informasjonsforvaltning/jsonschematordf
|
dfeb039411b5a9797ad3b7769e0dd3489abc5502
|
[
"Apache-2.0"
] | 24
|
2021-08-19T08:33:39.000Z
|
2021-10-06T07:43:28.000Z
|
tests/integration/test_modelldcatnofactory.py
|
Informasjonsforvaltning/jsonschematordf
|
dfeb039411b5a9797ad3b7769e0dd3489abc5502
|
[
"Apache-2.0"
] | null | null | null |
"""Pytests."""
# flake8: noqa
import pytest
from pytest_mock.plugin import MockerFixture
from rdflib.graph import Graph
from jsonschematordf.modelldcatnofactory import create_model_element
from jsonschematordf.schema import Schema
from jsonschematordf.utils import add_elements_to_graph
from tests.testutils import assert_isomorphic, mock_uri_generator
BASE_URI = "http://uri.com"
@pytest.mark.integration
def test_object_type_attribute_property(mocker: MockerFixture) -> None:
    """String-typed properties become modelldcatno Attributes with SimpleTypes."""
    in_dict = {
        "Eiendom": {
            "properties": {
                "erstatter": {"type": "string"},
                "eiendomAddress": {"$ref": "#/Address"},
            }
        },
        "Address": {"type": "string"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/Eiendom")
    # Make skolemized URIs deterministic (mock_uri_0, mock_uri_1, ...) so
    # they can appear literally in the expected graph below.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=mock_uri_generator(BASE_URI),
    )
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#Eiendom> a modelldcatno:ObjectType ;
        dct:title "Eiendom" ;
        modelldcatno:hasProperty <http://uri.com/Eiendom#eiendomAddress>,
            <http://uri.com/Eiendom#erstatter> .
    <http://uri.com/#Address> a modelldcatno:SimpleType ;
        dct:title "Address" ;
        modelldcatno:hasProperty <http://uri.com/mock_uri_1> .
    <http://uri.com/Eiendom#eiendomAddress> a modelldcatno:Attribute ;
        dct:title "eiendomAddress" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        modelldcatno:hasSimpleType <http://uri.com/#Address> .
    <http://uri.com/Eiendom#erstatter> a modelldcatno:Attribute ;
        dct:title "erstatter" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        modelldcatno:hasSimpleType <http://uri.com/#string> .
    <http://uri.com/mock_uri_1> a modelldcatno:Specialization ;
        modelldcatno:hasGeneralConcept <http://uri.com/#string> .
    <http://uri.com/#string> a modelldcatno:SimpleType ;
        dct:title "string" ;
        modelldcatno:typeDefinitionReference <https://www.w3.org/2019/wot/json-schema#stringschema> .
    """
    # Graph isomorphism comparison makes the test independent of triple
    # order and blank-node labels.
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_object_type_role_property() -> None:
    """Object-typed properties become modelldcatno Roles; 'required' adds minOccurs."""
    in_dict = {
        "EiendomResultat": {
            "properties": {
                "data": {"$ref": "#/Eiendom"},
                "address": {"type": "object"},
            },
            "required": ["data"],
            "type": "object",
        },
        "Eiendom": {"type": "object"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/EiendomResultat")
    # Note: the expected graph contains no skolemized (mock) URIs, so no
    # skolemizer patch is needed here.
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#EiendomResultat> a modelldcatno:ObjectType ;
        dct:title "EiendomResultat" ;
        modelldcatno:hasProperty <http://uri.com/EiendomResultat#address>,
            <http://uri.com/EiendomResultat#data> .
    <http://uri.com/#Eiendom> a modelldcatno:ObjectType ;
        dct:title "Eiendom" .
    <http://uri.com/EiendomResultat#address> a modelldcatno:Role ;
        dct:title "address" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        modelldcatno:hasObjectType <http://uri.com/EiendomResultat/address#address> .
    <http://uri.com/EiendomResultat#data> a modelldcatno:Role ;
        dct:title "data" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        xsd:minOccurs 1 ;
        modelldcatno:hasObjectType <http://uri.com/#Eiendom> .
    <http://uri.com/EiendomResultat/address#address> a modelldcatno:ObjectType ;
        dct:title "address" .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_object_array_property() -> None:
    """Arrays of object refs become Roles with unbounded maxOccurs ("*")."""
    in_dict = {
        "KommuneResultat": {
            "type": "object",
            "properties": {
                "erstatter": {"items": {"$ref": "#/Kommune"}, "type": "array"}
            },
        },
        "Kommune": {"type": "object"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/KommuneResultat")
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#KommuneResultat> a modelldcatno:ObjectType ;
        dct:title "KommuneResultat" ;
        modelldcatno:hasProperty <http://uri.com/KommuneResultat#erstatter> .
    <http://uri.com/#Kommune> a modelldcatno:ObjectType ;
        dct:title "Kommune" .
    <http://uri.com/KommuneResultat#erstatter> a modelldcatno:Role ;
        dct:title "erstatter" ;
        xsd:maxOccurs "*" ;
        modelldcatno:hasObjectType <http://uri.com/#Kommune> .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_simple_type_array_property(mocker: MockerFixture) -> None:
    """Arrays of string refs become Attributes with SimpleTypes and maxOccurs "*"."""
    in_dict = {
        "KommuneResultat": {
            "type": "object",
            "properties": {
                "erstatter": {"items": {"$ref": "#/Kommune"}, "type": "array"}
            },
        },
        "Kommune": {"type": "string"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/KommuneResultat")
    # Deterministic skolemized URIs (mock_uri_0, ...) for the expected graph.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=mock_uri_generator(BASE_URI),
    )
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#KommuneResultat> a modelldcatno:ObjectType ;
        dct:title "KommuneResultat" ;
        modelldcatno:hasProperty <http://uri.com/KommuneResultat#erstatter> .
    <http://uri.com/#Kommune> a modelldcatno:SimpleType ;
        dct:title "Kommune" ;
        modelldcatno:hasProperty <http://uri.com/mock_uri_0> .
    <http://uri.com/#string> a modelldcatno:SimpleType ;
        dct:title "string" ;
        modelldcatno:typeDefinitionReference <https://www.w3.org/2019/wot/json-schema#stringschema> .
    <http://uri.com/KommuneResultat#erstatter> a modelldcatno:Attribute ;
        dct:title "erstatter" ;
        xsd:maxOccurs "*" ;
        modelldcatno:hasSimpleType <http://uri.com/#Kommune> .
    <http://uri.com/mock_uri_0> a modelldcatno:Specialization ;
        modelldcatno:hasGeneralConcept <http://uri.com/#string> .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_nested_objects() -> None:
    """Inline nested objects get path-derived URIs (e.g. .../KommuneResultat/data#data)."""
    in_dict = {
        "KommuneResultat": {
            "properties": {
                "code": {"format": "int32", "type": "integer"},
                "data": {
                    "required": ["kommune"],
                    "type": "object",
                    "properties": {
                        "erstatter": {"items": {"$ref": "#/Kommune"}, "type": "array"},
                        "erstattetav": {
                            "items": {"$ref": "#/Kommune"},
                            "type": "array",
                        },
                        "kommune": {"$ref": "#/Kommune", "type": "object"},
                    },
                },
            },
            "required": ["code", "data"],
            "type": "object",
        },
        "Kommune": {"type": "object"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/KommuneResultat")
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#KommuneResultat> a modelldcatno:ObjectType ;
        dct:title "KommuneResultat" ;
        modelldcatno:hasProperty <http://uri.com/KommuneResultat#code>,
            <http://uri.com/KommuneResultat#data> .
    <http://uri.com/#int32> a modelldcatno:SimpleType ;
        dct:title "int32" ;
        modelldcatno:typeDefinitionReference <https://www.w3.org/2019/wot/json-schema#integerschema> .
    <http://uri.com/KommuneResultat#code> a modelldcatno:Attribute ;
        dct:title "code" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        xsd:minOccurs 1 ;
        modelldcatno:hasSimpleType <http://uri.com/#int32> .
    <http://uri.com/KommuneResultat#data> a modelldcatno:Role ;
        dct:title "data" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        xsd:minOccurs 1 ;
        modelldcatno:hasObjectType <http://uri.com/KommuneResultat/data#data> .
    <http://uri.com/KommuneResultat/data#data> a modelldcatno:ObjectType ;
        dct:title "data" ;
        modelldcatno:hasProperty <http://uri.com/KommuneResultat/data#erstatter>,
            <http://uri.com/KommuneResultat/data#erstattetav>,
            <http://uri.com/KommuneResultat/data#kommune> .
    <http://uri.com/KommuneResultat/data#erstatter> a modelldcatno:Role ;
        dct:title "erstatter" ;
        xsd:maxOccurs "*" ;
        modelldcatno:hasObjectType <http://uri.com/#Kommune> .
    <http://uri.com/KommuneResultat/data#erstattetav> a modelldcatno:Role ;
        dct:title "erstattetav" ;
        xsd:maxOccurs "*" ;
        modelldcatno:hasObjectType <http://uri.com/#Kommune> .
    <http://uri.com/KommuneResultat/data#kommune> a modelldcatno:Role ;
        dct:title "kommune" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        xsd:minOccurs 1 ;
        modelldcatno:hasObjectType <http://uri.com/#Kommune> .
    <http://uri.com/#Kommune> a modelldcatno:ObjectType ;
        dct:title "Kommune" .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_multiplicity(mocker: MockerFixture) -> None:
    """maxItems on an array maps to a numeric xsd:maxOccurs (minItems 0 adds none)."""
    in_dict = {
        "Account": {
            "type": "object",
            "properties": {
                "links": {
                    "type": "array",
                    "items": {"$ref": "#/Link"},
                    "minItems": 0,
                    "maxItems": 10,
                }
            },
        },
        "Link": {"type": "string"},
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/Account")
    # Deterministic skolemized URIs for the expected graph below.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=mock_uri_generator(BASE_URI),
    )
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#Account> a modelldcatno:ObjectType ;
        dct:title "Account" ;
        modelldcatno:hasProperty <http://uri.com/Account#links> .
    <http://uri.com/#Link> a modelldcatno:SimpleType ;
        dct:title "Link" ;
        modelldcatno:hasProperty <http://uri.com/mock_uri_0> .
    <http://uri.com/#string> a modelldcatno:SimpleType ;
        dct:title "string" ;
        modelldcatno:typeDefinitionReference <https://www.w3.org/2019/wot/json-schema#stringschema> .
    <http://uri.com/Account#links> a modelldcatno:Attribute ;
        dct:title "links" ;
        xsd:maxOccurs "10"^^xsd:nonNegativeInteger ;
        modelldcatno:hasSimpleType <http://uri.com/#Link> .
    <http://uri.com/mock_uri_0> a modelldcatno:Specialization ;
        modelldcatno:hasGeneralConcept <http://uri.com/#string> .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = Graph().parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    assert_isomorphic(g1, g2)
@pytest.mark.integration
def test_code_list_and_element(mocker: MockerFixture) -> None:
    """Enums map to a CodeList plus one CodeElement per value (via orphan_elements)."""
    in_dict = {
        "Alphabet": {
            "type": "object",
            "properties": {"letters": {"type": "string", "enum": ["A", "B", "C"]}},
        }
    }
    schema = Schema(BASE_URI, in_dict)
    components = schema.get_components_by_path("#/Alphabet")
    # Deterministic skolemized URIs for the CodeElements in the expected graph.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=mock_uri_generator(BASE_URI),
    )
    modelldcatno_representation = create_model_element(components[0], schema)
    assert modelldcatno_representation is not None
    # CodeElements are not reachable from the root element; merge the
    # schema's orphan elements into one graph before comparing.
    collected_graph = Graph()
    collected_graph.parse(data=modelldcatno_representation.to_rdf(), format="turtle")
    collected_graph.parse(
        data=add_elements_to_graph(Graph(), schema.orphan_elements).serialize(
            format="turtle"
        ),
        format="turtle",
    )
    expected = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
    <http://uri.com/#Alphabet> a modelldcatno:ObjectType ;
        dct:title "Alphabet" ;
        modelldcatno:hasProperty <http://uri.com/Alphabet#letters> .
    <http://uri.com/mock_uri_1> a modelldcatno:CodeElement ;
        skos:inScheme <http://uri.com/Alphabet/letters#letters> ;
        skos:notation "A" .
    <http://uri.com/mock_uri_2> a modelldcatno:CodeElement ;
        skos:inScheme <http://uri.com/Alphabet/letters#letters> ;
        skos:notation "B" .
    <http://uri.com/mock_uri_3> a modelldcatno:CodeElement ;
        skos:inScheme <http://uri.com/Alphabet/letters#letters> ;
        skos:notation "C" .
    <http://uri.com/#string> a modelldcatno:SimpleType ;
        dct:title "string" ;
        modelldcatno:typeDefinitionReference <https://www.w3.org/2019/wot/json-schema#stringschema> .
    <http://uri.com/Alphabet#letters> a modelldcatno:Attribute ;
        dct:title "letters" ;
        xsd:maxOccurs "1"^^xsd:nonNegativeInteger ;
        modelldcatno:hasSimpleType <http://uri.com/#string> ;
        modelldcatno:hasValueFrom <http://uri.com/Alphabet/letters#letters> .
    <http://uri.com/Alphabet/letters#letters> a modelldcatno:CodeList ;
        dct:title "letters" .
    """
    g1 = Graph().parse(data=expected, format="turtle")
    g2 = collected_graph
    assert_isomorphic(g1, g2)
| 35.755605
| 102
| 0.630965
|
914a271e81805a398cf6e562e2ff20db45ef84e5
| 1,153
|
py
|
Python
|
Chapter10/example5.py
|
DeeMATT/AdvancedPythonProgramming
|
97091dae4f177fd2c06b20265be2aedf9d1c41e7
|
[
"MIT"
] | 66
|
2018-11-21T02:07:16.000Z
|
2021-11-08T13:13:31.000Z
|
Chapter10/example5.py
|
DeeMATT/AdvancedPythonProgramming
|
97091dae4f177fd2c06b20265be2aedf9d1c41e7
|
[
"MIT"
] | 2
|
2020-03-11T19:56:39.000Z
|
2021-11-15T14:07:05.000Z
|
Chapter10/example5.py
|
DeeMATT/AdvancedPythonProgramming
|
97091dae4f177fd2c06b20265be2aedf9d1c41e7
|
[
"MIT"
] | 58
|
2018-11-03T14:06:10.000Z
|
2022-03-17T14:06:55.000Z
|
# ch3/example5.py
import queue
import threading
import time
class MyThread(threading.Thread):
    """Worker thread that announces itself, drains the shared queue, then exits."""

    def __init__(self, name):
        # Use the modern super() form rather than the explicit base-class call.
        super().__init__()
        self.name = name

    def run(self):
        print('Starting thread %s.' % self.name)
        process_queue()
        print('Exiting thread %s.' % self.name)
def process_queue():
    """Pop numbers off the shared queue and print their factors until empty."""
    while True:
        try:
            item = my_queue.get(block=False)
        except queue.Empty:
            # Queue drained -- this worker is done.
            return
        print_factors(item)
        time.sleep(1)
def print_factors(x):
    """Print the positive factors of ``x``, then a 20-character separator line.

    :param x: positive integer to factorize.
    """
    # Collect factors first and join once: repeated `+=` on a string in a
    # loop is quadratic; str.join is linear and idiomatic.
    factors = [str(i) for i in range(1, x + 1) if x % i == 0]
    result_string = 'Positive factors of %i are: ' % x
    result_string += ''.join(f + ' ' for f in factors)
    result_string += '\n' + '_' * 20
    print(result_string)
# setting up variables
input_ = [1, 10, 4, 3]
# filling the queue
# queue.Queue is thread-safe, so the three workers below can drain it
# concurrently without additional locking.
my_queue = queue.Queue()
for x in input_:
    my_queue.put(x)
# initializing and starting 3 threads
thread1 = MyThread('A')
thread2 = MyThread('B')
thread3 = MyThread('C')
thread1.start()
thread2.start()
thread3.start()
# joining all 3 threads
# join() blocks until each worker's run() returns, i.e. until the shared
# queue has been emptied (process_queue returns on queue.Empty).
thread1.join()
thread2.join()
thread3.join()
print('Done.')
| 18.301587
| 54
| 0.60451
|
449c73af3bb15205bed8d6e74ee4afda47a00675
| 246,010
|
py
|
Python
|
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_generated/v7_1_preview/operations/_key_vault_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 1
|
2020-05-12T23:29:15.000Z
|
2020-05-12T23:29:15.000Z
|
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_generated/v7_1_preview/operations/_key_vault_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_shared/_generated/v7_1_preview/operations/_key_vault_client_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 1
|
2020-07-31T16:33:51.000Z
|
2020-07-31T16:33:51.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import map_error
from azure.core.paging import ItemPaged
from .. import models
import uuid
class KeyVaultClientOperationsMixin(object):
    # NOTE: AutoRest-generated method -- manual edits will be lost on
    # regeneration (see file header).
    def get_certificates(
            self, vault_base_url, maxresults=None, include_pending=None, cls=None, **kwargs):
        """List certificates in a specified key vault.
        The GetCertificates operation returns the set of certificates resources
        in the specified key vault. This operation requires the
        certificates/list permission.
        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :param include_pending: Specifies whether to include certificates
         which are not completely provisioned.
        :type include_pending: bool
        :return: An iterator like instance of CertificateItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.CertificateItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)
        # prepare_request builds the initial GET (query params included) or,
        # when following pagination, a GET against the service's next_link.
        def prepare_request(next_link=None):
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_certificates.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                if include_pending is not None:
                    query_parameters['includePending'] = self._serialize.query("include_pending", include_pending, 'bool')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        # extract_data turns one page's HTTP response into (next_link, items).
        def extract_data(response):
            deserialized = self._deserialize('CertificateListResult', response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)
        # get_next runs the pipeline for one page and raises on non-200.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response
        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_certificates.metadata = {'url': '/certificates'}
    # NOTE: AutoRest-generated method -- manual edits will be lost on
    # regeneration (see file header).
    def delete_certificate(self, vault_base_url, certificate_name, cls=None, **kwargs):
        """Deletes a certificate from a specified key vault.
        Deletes all versions of a certificate object along with its associated
        policy. Delete certificate cannot be used to remove individual versions
        of a certificate object. This operation requires the
        certificates/delete permission.
        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param certificate_name: The name of the certificate.
        :type certificate_name: str
        :param callable cls: A custom type or function that will be passed the
         direct response
        :return: DeletedCertificateBundle or the result of cls(response)
        :rtype: ~azure.keyvault.v7_1.models.DeletedCertificateBundle
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)
        # Construct URL
        # 'certificate-name' (with hyphen) must match the placeholder in the
        # metadata URL template below.
        url = self.delete_certificate.metadata['url']
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self._config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        # Construct and send request
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DeletedCertificateBundle', response)
        if cls:
            return cls(response, deserialized, None)
        return deserialized
    delete_certificate.metadata = {'url': '/certificates/{certificate-name}'}
    # NOTE: AutoRest-generated method -- manual edits will be lost on
    # regeneration (see file header).
    def set_certificate_contacts(self, vault_base_url, contact_list=None, cls=None, **kwargs):
        """Sets the certificate contacts for the specified key vault.
        Sets the certificate contacts for the specified key vault. This
        operation requires the certificates/managecontacts permission.
        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param contact_list: The contact list for the vault certificates.
        :type contact_list: list[~azure.keyvault.v7_1.models.Contact]
        :param callable cls: A custom type or function that will be passed the
         direct response
        :return: Contacts or the result of cls(response)
        :rtype: ~azure.keyvault.v7_1.models.Contacts
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)
        # Wrap the bare contact list in the Contacts model the service expects.
        contacts = models.Contacts(contact_list=contact_list)
        # Construct URL
        url = self.set_certificate_contacts.metadata['url']
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self._config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        # Construct body
        body_content = self._serialize.body(contacts, 'Contacts')
        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Contacts', response)
        if cls:
            return cls(response, deserialized, None)
        return deserialized
    set_certificate_contacts.metadata = {'url': '/certificates/contacts'}
def get_certificate_contacts(self, vault_base_url, cls=None, **kwargs):
"""Lists the certificate contacts for a specified key vault.
The GetCertificateContacts operation returns the set of certificate
contact resources in the specified key vault. This operation requires
the certificates/managecontacts permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: Contacts or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.Contacts
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_certificate_contacts.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Contacts', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_certificate_contacts.metadata = {'url': '/certificates/contacts'}
def delete_certificate_contacts(self, vault_base_url, cls=None, **kwargs):
"""Deletes the certificate contacts for a specified key vault.
Deletes the certificate contacts for a specified key vault certificate.
This operation requires the certificates/managecontacts permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: Contacts or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.Contacts
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.delete_certificate_contacts.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Contacts', response)
if cls:
return cls(response, deserialized, None)
return deserialized
delete_certificate_contacts.metadata = {'url': '/certificates/contacts'}
    def get_certificate_issuers(
            self, vault_base_url, maxresults=None, cls=None, **kwargs):
        """List certificate issuers for a specified key vault.

        The GetCertificateIssuers operation returns the set of certificate
        issuer resources in the specified key vault. This operation requires
        the certificates/manageissuers/getissuers permission. Pages are
        fetched lazily as the returned iterator is consumed.

        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :param callable cls: A custom type or function applied to each page's
         element list before it is yielded
        :return: An iterator like instance of CertificateIssuerItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.CertificateIssuerItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)
        # Build the GET request for either the first page (operation URL plus
        # query parameters) or a continuation page (service-supplied next_link).
        def prepare_request(next_link=None):
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_certificate_issuers.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                # next_link already carries its own query string; only the
                # vault-host placeholder is re-resolved here.
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Pull one page's items (and the link to the next page) out of a
        # deserialized list result; `cls` may transform the element list.
        def extract_data(response):
            deserialized = self._deserialize('CertificateIssuerListResult', response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)
        # Fetch a single page through the pipeline, mapping non-200 responses
        # to KeyVaultErrorException.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response
        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_certificate_issuers.metadata = {'url': '/certificates/issuers'}
def set_certificate_issuer(self, vault_base_url, issuer_name, provider, credentials=None, organization_details=None, attributes=None, cls=None, **kwargs):
"""Sets the specified certificate issuer.
The SetCertificateIssuer operation adds or updates the specified
certificate issuer. This operation requires the certificates/setissuers
permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param provider: The issuer provider.
:type provider: str
:param credentials: The credentials to be used for the issuer.
:type credentials: ~azure.keyvault.v7_1.models.IssuerCredentials
:param organization_details: Details of the organization as provided
to the issuer.
:type organization_details:
~azure.keyvault.v7_1.models.OrganizationDetails
:param attributes: Attributes of the issuer object.
:type attributes: ~azure.keyvault.v7_1.models.IssuerAttributes
:param callable cls: A custom type or function that will be passed the
direct response
:return: IssuerBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.IssuerBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameter = models.CertificateIssuerSetParameters(provider=provider, credentials=credentials, organization_details=organization_details, attributes=attributes)
# Construct URL
url = self.set_certificate_issuer.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameter, 'CertificateIssuerSetParameters')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IssuerBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
set_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}
def update_certificate_issuer(self, vault_base_url, issuer_name, provider=None, credentials=None, organization_details=None, attributes=None, cls=None, **kwargs):
"""Updates the specified certificate issuer.
The UpdateCertificateIssuer operation performs an update on the
specified certificate issuer entity. This operation requires the
certificates/setissuers permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param provider: The issuer provider.
:type provider: str
:param credentials: The credentials to be used for the issuer.
:type credentials: ~azure.keyvault.v7_1.models.IssuerCredentials
:param organization_details: Details of the organization as provided
to the issuer.
:type organization_details:
~azure.keyvault.v7_1.models.OrganizationDetails
:param attributes: Attributes of the issuer object.
:type attributes: ~azure.keyvault.v7_1.models.IssuerAttributes
:param callable cls: A custom type or function that will be passed the
direct response
:return: IssuerBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.IssuerBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameter = models.CertificateIssuerUpdateParameters(provider=provider, credentials=credentials, organization_details=organization_details, attributes=attributes)
# Construct URL
url = self.update_certificate_issuer.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameter, 'CertificateIssuerUpdateParameters')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IssuerBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
update_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}
def get_certificate_issuer(self, vault_base_url, issuer_name, cls=None, **kwargs):
"""Lists the specified certificate issuer.
The GetCertificateIssuer operation returns the specified certificate
issuer resources in the specified key vault. This operation requires
the certificates/manageissuers/getissuers permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: IssuerBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.IssuerBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_certificate_issuer.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IssuerBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}
def delete_certificate_issuer(self, vault_base_url, issuer_name, cls=None, **kwargs):
"""Deletes the specified certificate issuer.
The DeleteCertificateIssuer operation permanently removes the specified
certificate issuer from the vault. This operation requires the
certificates/manageissuers/deleteissuers permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param issuer_name: The name of the issuer.
:type issuer_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: IssuerBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.IssuerBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.delete_certificate_issuer.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IssuerBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
delete_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'}
def create_certificate(self, vault_base_url, certificate_name, certificate_policy=None, certificate_attributes=None, tags=None, cls=None, **kwargs):
"""Creates a new certificate.
If this is the first version, the certificate resource is created. This
operation requires the certificates/create permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param certificate_policy: The management policy for the certificate.
:type certificate_policy:
~azure.keyvault.v7_1.models.CertificatePolicy
:param certificate_attributes: The attributes of the certificate
(optional).
:type certificate_attributes:
~azure.keyvault.v7_1.models.CertificateAttributes
:param tags: Application specific metadata in the form of key-value
pairs.
:type tags: dict[str, str]
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateOperation or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateOperation
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.CertificateCreateParameters(certificate_policy=certificate_policy, certificate_attributes=certificate_attributes, tags=tags)
# Construct URL
url = self.create_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'CertificateCreateParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('CertificateOperation', response)
if cls:
return cls(response, deserialized, None)
return deserialized
create_certificate.metadata = {'url': '/certificates/{certificate-name}/create'}
def import_certificate(self, vault_base_url, certificate_name, base64_encoded_certificate, password=None, certificate_policy=None, certificate_attributes=None, tags=None, cls=None, **kwargs):
"""Imports a certificate into a specified key vault.
Imports an existing valid certificate, containing a private key, into
Azure Key Vault. The certificate to be imported can be in either PFX or
PEM format. If the certificate is in PEM format the PEM file must
contain the key as well as x509 certificates. This operation requires
the certificates/import permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param base64_encoded_certificate: Base64 encoded representation of
the certificate object to import. This certificate needs to contain
the private key.
:type base64_encoded_certificate: str
:param password: If the private key in base64EncodedCertificate is
encrypted, the password used for encryption.
:type password: str
:param certificate_policy: The management policy for the certificate.
:type certificate_policy:
~azure.keyvault.v7_1.models.CertificatePolicy
:param certificate_attributes: The attributes of the certificate
(optional).
:type certificate_attributes:
~azure.keyvault.v7_1.models.CertificateAttributes
:param tags: Application specific metadata in the form of key-value
pairs.
:type tags: dict[str, str]
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.CertificateImportParameters(base64_encoded_certificate=base64_encoded_certificate, password=password, certificate_policy=certificate_policy, certificate_attributes=certificate_attributes, tags=tags)
# Construct URL
url = self.import_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'CertificateImportParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
import_certificate.metadata = {'url': '/certificates/{certificate-name}/import'}
    def get_certificate_versions(
            self, vault_base_url, certificate_name, maxresults=None, cls=None, **kwargs):
        """List the versions of a certificate.

        The GetCertificateVersions operation returns the versions of a
        certificate in the specified key vault. This operation requires the
        certificates/list permission. Pages are fetched lazily as the
        returned iterator is consumed.

        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param certificate_name: The name of the certificate.
        :type certificate_name: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :param callable cls: A custom type or function applied to each page's
         element list before it is yielded
        :return: An iterator like instance of CertificateItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.CertificateItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)
        # Build the GET request for either the first page (operation URL plus
        # query parameters) or a continuation page (service-supplied next_link).
        def prepare_request(next_link=None):
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_certificate_versions.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                # next_link already carries its own query string; only the
                # path placeholders are re-resolved here.
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                    'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Pull one page's items (and the link to the next page) out of a
        # deserialized list result; `cls` may transform the element list.
        def extract_data(response):
            deserialized = self._deserialize('CertificateListResult', response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)
        # Fetch a single page through the pipeline, mapping non-200 responses
        # to KeyVaultErrorException.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response
        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_certificate_versions.metadata = {'url': '/certificates/{certificate-name}/versions'}
def get_certificate_policy(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Lists the policy for a certificate.
The GetCertificatePolicy operation returns the specified certificate
policy resources in the specified key vault. This operation requires
the certificates/get permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in a given key
vault.
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificatePolicy or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificatePolicy
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_certificate_policy.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificatePolicy', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'}
def update_certificate_policy(self, vault_base_url, certificate_name, certificate_policy, cls=None, **kwargs):
"""Updates the policy for a certificate.
Set specified members in the certificate policy. Leave others as null.
This operation requires the certificates/update permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given
vault.
:type certificate_name: str
:param certificate_policy: The policy for the certificate.
:type certificate_policy:
~azure.keyvault.v7_1.models.CertificatePolicy
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificatePolicy or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificatePolicy
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.update_certificate_policy.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(certificate_policy, 'CertificatePolicy')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificatePolicy', response)
if cls:
return cls(response, deserialized, None)
return deserialized
update_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'}
def update_certificate(self, vault_base_url, certificate_name, certificate_version, certificate_policy=None, certificate_attributes=None, tags=None, cls=None, **kwargs):
"""Updates the specified attributes associated with the given certificate.
The UpdateCertificate operation applies the specified update on the
given certificate; the only elements updated are the certificate's
attributes. This operation requires the certificates/update permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given key
vault.
:type certificate_name: str
:param certificate_version: The version of the certificate.
:type certificate_version: str
:param certificate_policy: The management policy for the certificate.
:type certificate_policy:
~azure.keyvault.v7_1.models.CertificatePolicy
:param certificate_attributes: The attributes of the certificate
(optional).
:type certificate_attributes:
~azure.keyvault.v7_1.models.CertificateAttributes
:param tags: Application specific metadata in the form of key-value
pairs.
:type tags: dict[str, str]
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.CertificateUpdateParameters(certificate_policy=certificate_policy, certificate_attributes=certificate_attributes, tags=tags)
# Construct URL
url = self.update_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'CertificateUpdateParameters')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
update_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'}
def get_certificate(self, vault_base_url, certificate_name, certificate_version, cls=None, **kwargs):
"""Gets information about a certificate.
Gets information about a specific certificate. This operation requires
the certificates/get permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate in the given
vault.
:type certificate_name: str
:param certificate_version: The version of the certificate.
:type certificate_version: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'),
'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'}
def update_certificate_operation(self, vault_base_url, certificate_name, cancellation_requested, cls=None, **kwargs):
"""Updates a certificate operation.
Updates a certificate creation operation that is already in progress.
This operation requires the certificates/update permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param cancellation_requested: Indicates if cancellation was requested
on the certificate operation.
:type cancellation_requested: bool
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateOperation or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateOperation
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
certificate_operation = models.CertificateOperationUpdateParameter(cancellation_requested=cancellation_requested)
# Construct URL
url = self.update_certificate_operation.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(certificate_operation, 'CertificateOperationUpdateParameter')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateOperation', response)
if cls:
return cls(response, deserialized, None)
return deserialized
update_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}
def get_certificate_operation(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Gets the creation operation of a certificate.
Gets the creation operation associated with a specified certificate.
This operation requires the certificates/get permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateOperation or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateOperation
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_certificate_operation.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateOperation', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}
def delete_certificate_operation(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Deletes the creation operation for a specific certificate.
Deletes the creation operation for a specified certificate that is in
the process of being created. The certificate is no longer created.
This operation requires the certificates/update permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateOperation or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateOperation
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.delete_certificate_operation.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateOperation', response)
if cls:
return cls(response, deserialized, None)
return deserialized
delete_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'}
def merge_certificate(self, vault_base_url, certificate_name, x509_certificates, certificate_attributes=None, tags=None, cls=None, **kwargs):
"""Merges a certificate or a certificate chain with a key pair existing on
the server.
The MergeCertificate operation performs the merging of a certificate or
certificate chain with a key pair currently available in the service.
This operation requires the certificates/create permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param x509_certificates: The certificate or the certificate chain to
merge.
:type x509_certificates: list[bytearray]
:param certificate_attributes: The attributes of the certificate
(optional).
:type certificate_attributes:
~azure.keyvault.v7_1.models.CertificateAttributes
:param tags: Application specific metadata in the form of key-value
pairs.
:type tags: dict[str, str]
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.CertificateMergeParameters(x509_certificates=x509_certificates, certificate_attributes=certificate_attributes, tags=tags)
# Construct URL
url = self.merge_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'CertificateMergeParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
merge_certificate.metadata = {'url': '/certificates/{certificate-name}/pending/merge'}
def backup_certificate(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Backs up the specified certificate.
Requests that a backup of the specified certificate be downloaded to
the client. All versions of the certificate will be downloaded. This
operation requires the certificates/backup permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate.
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: BackupCertificateResult or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.BackupCertificateResult
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.backup_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BackupCertificateResult', response)
if cls:
return cls(response, deserialized, None)
return deserialized
backup_certificate.metadata = {'url': '/certificates/{certificate-name}/backup'}
def restore_certificate(self, vault_base_url, certificate_bundle_backup, cls=None, **kwargs):
"""Restores a backed up certificate to a vault.
Restores a backed up certificate, and all its versions, to a vault.
This operation requires the certificates/restore permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_bundle_backup: The backup blob associated with a
certificate bundle.
:type certificate_bundle_backup: bytes
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.CertificateRestoreParameters(certificate_bundle_backup=certificate_bundle_backup)
# Construct URL
url = self.restore_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'CertificateRestoreParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
restore_certificate.metadata = {'url': '/certificates/restore'}
def get_deleted_certificates(
self, vault_base_url, maxresults=None, include_pending=None, cls=None, **kwargs):
"""Lists the deleted certificates in the specified vault currently
available for recovery.
The GetDeletedCertificates operation retrieves the certificates in the
current vault which are in a deleted state and ready for recovery or
purging. This operation includes deletion-specific information. This
operation requires the certificates/get/list permission. This operation
can only be enabled on soft-delete enabled vaults.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param maxresults: Maximum number of results to return in a page. If
not specified the service will return up to 25 results.
:type maxresults: int
:param include_pending: Specifies whether to include certificates
which are not completely provisioned.
:type include_pending: bool
:return: An iterator like instance of DeletedCertificateItem
:rtype:
~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.DeletedCertificateItem]
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
def prepare_request(next_link=None):
query_parameters = {}
if not next_link:
# Construct URL
url = self.get_deleted_certificates.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
if include_pending is not None:
query_parameters['includePending'] = self._serialize.query("include_pending", include_pending, 'bool')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(response):
deserialized = self._deserialize('DeletedCertificateListResult', response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
return response
# Deserialize response
return ItemPaged(
get_next, extract_data
)
get_deleted_certificates.metadata = {'url': '/deletedcertificates'}
def get_deleted_certificate(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Retrieves information about the specified deleted certificate.
The GetDeletedCertificate operation retrieves the deleted certificate
information plus its attributes, such as retention interval, scheduled
permanent deletion and the current deletion recovery level. This
operation requires the certificates/get permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: DeletedCertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.DeletedCertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_deleted_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DeletedCertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'}
def purge_deleted_certificate(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Permanently deletes the specified deleted certificate.
The PurgeDeletedCertificate operation performs an irreversible deletion
of the specified certificate, without possibility for recovery. The
operation is not available if the recovery level does not specify
'Purgeable'. This operation requires the certificate/purge permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the certificate
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.purge_deleted_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
if cls:
response_headers = {}
return cls(response, None, response_headers)
purge_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'}
def recover_deleted_certificate(self, vault_base_url, certificate_name, cls=None, **kwargs):
"""Recovers the deleted certificate back to its current version under
/certificates.
The RecoverDeletedCertificate operation performs the reversal of the
Delete operation. The operation is applicable in vaults enabled for
soft-delete, and must be issued during the retention interval
(available in the deleted certificate's attributes). This operation
requires the certificates/recover permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param certificate_name: The name of the deleted certificate
:type certificate_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: CertificateBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.CertificateBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.recover_deleted_certificate.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CertificateBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
recover_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}/recover'}
def create_key(self, vault_base_url, key_name, kty, key_size=None, key_ops=None, key_attributes=None, tags=None, curve=None, cls=None, **kwargs):
"""Creates a new key, stores it, then returns key parameters and
attributes to the client.
The create key operation can be used to create any key type in Azure
Key Vault. If the named key already exists, Azure Key Vault creates a
new version of the key. It requires the keys/create permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param key_name: The name for the new key. The system will generate
the version name for the new key.
:type key_name: str
:param kty: The type of key to create. For valid values, see
JsonWebKeyType. Possible values include: 'EC', 'EC-HSM', 'RSA',
'RSA-HSM', 'oct'
:type kty: str or ~azure.keyvault.v7_1.models.JsonWebKeyType
:param key_size: The key size in bits. For example: 2048, 3072, or
4096 for RSA.
:type key_size: int
:param key_ops:
:type key_ops: list[str or
~azure.keyvault.v7_1.models.JsonWebKeyOperation]
:param key_attributes:
:type key_attributes: ~azure.keyvault.v7_1.models.KeyAttributes
:param tags: Application specific metadata in the form of key-value
pairs.
:type tags: dict[str, str]
:param curve: Elliptic curve name. For valid values, see
JsonWebKeyCurveName. Possible values include: 'P-256', 'P-384',
'P-521', 'P-256K'
:type curve: str or ~azure.keyvault.v7_1.models.JsonWebKeyCurveName
:param callable cls: A custom type or function that will be passed the
direct response
:return: KeyBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.KeyBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.KeyCreateParameters(kty=kty, key_size=key_size, key_ops=key_ops, key_attributes=key_attributes, tags=tags, curve=curve)
# Construct URL
url = self.create_key.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'KeyCreateParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('KeyBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
create_key.metadata = {'url': '/keys/{key-name}/create'}
def import_key(self, vault_base_url, key_name, key, hsm=None, key_attributes=None, tags=None, cls=None, **kwargs):
    """Imports an externally created key into the vault and returns the
    stored key's parameters and attributes.

    Any key type may be imported. If a key with the given name already
    exists, a new version of it is created instead. Requires the
    keys/import permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: Name for the imported key.
    :type key_name: str
    :param key: The Json web key
    :type key: ~azure.keyvault.v7_1.models.JsonWebKey
    :param hsm: Whether to import as a hardware key (HSM) or software key.
    :type hsm: bool
    :param key_attributes: The key management attributes.
    :type key_attributes: ~azure.keyvault.v7_1.models.KeyAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold the loose arguments into the wire model for the request body.
    import_params = models.KeyImportParameters(hsm=hsm, key=key, key_attributes=key_attributes, tags=tags)

    # Resolve the URL template against the vault endpoint and key name.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
    }
    url = self._client.format_url(self.import_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(import_params, 'KeyImportParameters')

    # Import is a PUT against the key's base identifier.
    request = self._client.put(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyBundle', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
import_key.metadata = {'url': '/keys/{key-name}'}
def delete_key(self, vault_base_url, key_name, cls=None, **kwargs):
    """Deletes a key of any type from storage in Azure Key Vault.

    Individual versions cannot be removed — the whole key is deleted,
    together with its cryptographic material, so it can no longer be used
    for Sign/Verify, Wrap/Unwrap or Encrypt/Decrypt. Requires the
    keys/delete permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key to delete.
    :type key_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: DeletedKeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedKeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the URL template against the vault endpoint and key name.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.delete_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.delete(url, query, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedKeyBundle', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
delete_key.metadata = {'url': '/keys/{key-name}'}
def update_key(self, vault_base_url, key_name, key_version, key_ops=None, key_attributes=None, tags=None, cls=None, **kwargs):
    """Changes specified attributes of a stored key.

    Applies to any key type and key version already present in the vault;
    the cryptographic material of the key itself cannot be changed.
    Requires the keys/update permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of key to update.
    :type key_name: str
    :param key_version: The version of the key to update.
    :type key_version: str
    :param key_ops: Json web key operations. For more information on
     possible key operations, see JsonWebKeyOperation.
    :type key_ops: list[str or
     ~azure.keyvault.v7_1.models.JsonWebKeyOperation]
    :param key_attributes:
    :type key_attributes: ~azure.keyvault.v7_1.models.KeyAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold the loose arguments into the wire model for the request body.
    update_params = models.KeyUpdateParameters(key_ops=key_ops, key_attributes=key_attributes, tags=tags)

    # Resolve the URL template for this specific key version.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.update_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(update_params, 'KeyUpdateParameters')

    # Attribute changes go out as a PATCH.
    request = self._client.patch(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyBundle', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
update_key.metadata = {'url': '/keys/{key-name}/{key-version}'}
def get_key(self, vault_base_url, key_name, key_version, cls=None, **kwargs):
    """Gets the public part of a stored key.

    Applicable to all key types; for symmetric keys no key material is
    released in the response. Requires the keys/get permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key to get.
    :type key_name: str
    :param key_version: Adding the version parameter retrieves a specific
     version of a key.
    :type key_version: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the URL template for this specific key version.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.get_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.get(url, query, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyBundle', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
get_key.metadata = {'url': '/keys/{key-name}/{key-version}'}
def get_key_versions(
        self, vault_base_url, key_name, maxresults=None, cls=None, **kwargs):
    """Retrieves a list of individual key versions with the same key name.

    Each item carries the full key identifier, attributes and tags.
    Requires the keys/list permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of KeyItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.KeyItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def prepare_request(next_link=None):
        # First page uses the operation template plus query string; a
        # continuation link already embeds its own query parameters.
        query = {}
        if next_link:
            url = next_link
        else:
            url = self.get_key_versions.metadata['url']
            if maxresults is not None:
                query['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        path_args = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'key-name': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_args)

        headers = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query, headers)

    def extract_data(response):
        # Split one page into (continuation token, iterator of items).
        page = self._deserialize('KeyListResult', response)
        items = page.value
        if cls:
            items = cls(items)
        return page.next_link, iter(items)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(prepare_request(next_link), **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    # Lazily page through results as the caller iterates.
    return ItemPaged(get_next, extract_data)
get_key_versions.metadata = {'url': '/keys/{key-name}/versions'}
def get_keys(
        self, vault_base_url, maxresults=None, cls=None, **kwargs):
    """List keys in the specified vault.

    Returns JSON Web Key structures containing the public part of each
    stored key. Applicable to all key types, but only the base key
    identifier, attributes and tags are included; individual key versions
    are not listed. Requires the keys/list permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of KeyItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.KeyItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def prepare_request(next_link=None):
        # First page uses the operation template plus query string; a
        # continuation link already embeds its own query parameters.
        query = {}
        if next_link:
            url = next_link
        else:
            url = self.get_keys.metadata['url']
            if maxresults is not None:
                query['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        path_args = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_args)

        headers = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query, headers)

    def extract_data(response):
        # Split one page into (continuation token, iterator of items).
        page = self._deserialize('KeyListResult', response)
        items = page.value
        if cls:
            items = cls(items)
        return page.next_link, iter(items)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(prepare_request(next_link), **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    # Lazily page through results as the caller iterates.
    return ItemPaged(get_next, extract_data)
get_keys.metadata = {'url': '/keys'}
def backup_key(self, vault_base_url, key_name, cls=None, **kwargs):
    """Requests that a backup of the specified key be downloaded to the
    client.

    The exported blob is in a protected form usable only inside the Azure
    Key Vault system — the intent is GENERATE in one vault, BACKUP, then
    RESTORE into another vault. Any key type may be backed up, but not
    individual versions, and BACKUP/RESTORE only works within the same
    geographical boundary (e.g. a US backup cannot be restored in an EU
    area). Requires the key/backup permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: BackupKeyResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.BackupKeyResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the URL template against the vault endpoint and key name.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.backup_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    # Backup is a body-less POST to the key's backup sub-resource.
    request = self._client.post(url, query, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('BackupKeyResult', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
backup_key.metadata = {'url': '/keys/{key-name}/backup'}
def restore_key(self, vault_base_url, key_bundle_backup, cls=None, **kwargs):
    """Restores a backed up key to a vault.

    Imports a previously backed up key, restoring the key, its identifier,
    attributes and access control policies. All versions are restored with
    their version identifiers preserved; individual versions cannot be
    restored. The key keeps its original name (the operation is rejected
    if that name is unavailable in the target vault), but the final key
    identifier changes when restoring into a different vault. The target
    vault must belong to the same Microsoft Azure Subscription as the
    source vault and the caller must have RESTORE permission there.
    Requires the keys/restore permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_bundle_backup: The backup blob associated with a key
     bundle.
    :type key_bundle_backup: bytes
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Wrap the opaque backup blob in its wire model.
    restore_params = models.KeyRestoreParameters(key_bundle_backup=key_bundle_backup)

    # Only the vault endpoint is templated for this operation.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.restore_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(restore_params, 'KeyRestoreParameters')

    request = self._client.post(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyBundle', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
restore_key.metadata = {'url': '/keys/restore'}
def encrypt(self, vault_base_url, key_name, key_version, algorithm, value, cls=None, **kwargs):
    """Encrypts an arbitrary sequence of bytes using an encryption key that
    is stored in a key vault.

    Only a single block of data is supported; its size depends on the
    target key and algorithm. Strictly necessary only for symmetric keys
    (asymmetric protection can be done client-side with the public key),
    but supported for asymmetric keys as a convenience for callers that
    hold only a key reference. Requires the keys/encrypt permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: algorithm identifier. Possible values include:
     'RSA-OAEP', 'RSA-OAEP-256', 'RSA1_5'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeyEncryptionAlgorithm
    :param value:
    :type value: bytes
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyOperationResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyOperationResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold algorithm and plaintext into the wire model.
    op_params = models.KeyOperationsParameters(algorithm=algorithm, value=value)

    # Resolve the URL template for this specific key version.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.encrypt.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(op_params, 'KeyOperationsParameters')

    request = self._client.post(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyOperationResult', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
encrypt.metadata = {'url': '/keys/{key-name}/{key-version}/encrypt'}
def decrypt(self, vault_base_url, key_name, key_version, algorithm, value, cls=None, **kwargs):
    """Decrypts a single block of encrypted data.

    Reverse of the ENCRYPT operation: decrypts one well-formed block of
    ciphertext with the target key and algorithm; the block size depends
    on both. Applies to asymmetric and symmetric keys stored in Azure Key
    Vault since the private portion of the key is used. Requires the
    keys/decrypt permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: algorithm identifier. Possible values include:
     'RSA-OAEP', 'RSA-OAEP-256', 'RSA1_5'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeyEncryptionAlgorithm
    :param value:
    :type value: bytes
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyOperationResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyOperationResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold algorithm and ciphertext into the wire model.
    op_params = models.KeyOperationsParameters(algorithm=algorithm, value=value)

    # Resolve the URL template for this specific key version.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.decrypt.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(op_params, 'KeyOperationsParameters')

    request = self._client.post(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyOperationResult', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
decrypt.metadata = {'url': '/keys/{key-name}/{key-version}/decrypt'}
def sign(self, vault_base_url, key_name, key_version, algorithm, value, cls=None, **kwargs):
    """Creates a signature from a digest using the specified key.

    Applicable to asymmetric and symmetric keys stored in Azure Key Vault
    since the private portion of the key is used. Requires the keys/sign
    permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: The signing/verification algorithm identifier. For
     more information on possible algorithm types, see
     JsonWebKeySignatureAlgorithm. Possible values include: 'PS256',
     'PS384', 'PS512', 'RS256', 'RS384', 'RS512', 'RSNULL', 'ES256',
     'ES384', 'ES512', 'ES256K'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeySignatureAlgorithm
    :param value:
    :type value: bytes
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: KeyOperationResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyOperationResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold algorithm and digest into the wire model.
    sign_params = models.KeySignParameters(algorithm=algorithm, value=value)

    # Resolve the URL template for this specific key version.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.sign.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(sign_params, 'KeySignParameters')

    request = self._client.post(url, query, headers, body)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyOperationResult', response) if response.status_code == 200 else None
    if cls:
        return cls(response, deserialized, None)
    return deserialized
sign.metadata = {'url': '/keys/{key-name}/{key-version}/sign'}
def verify(self, vault_base_url, key_name, key_version, algorithm, digest, signature, cls=None, **kwargs):
    """Verify a signature using the specified key.

    VERIFY works against symmetric keys stored in Azure Key Vault. For
    asymmetric keys it is offered as a convenience for callers that hold
    only a key reference rather than the public key material. Requires
    the keys/verify permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: The signing/verification algorithm (see
     JsonWebKeySignatureAlgorithm). Possible values include: 'PS256',
     'PS384', 'PS512', 'RS256', 'RS384', 'RS512', 'RSNULL', 'ES256',
     'ES384', 'ES512', 'ES256K'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeySignatureAlgorithm
    :param digest: The digest used for signing.
    :type digest: bytes
    :param signature: The signature to be verified.
    :type signature: bytes
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: KeyVerifyResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyVerifyResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    verify_params = models.KeyVerifyParameters(algorithm=algorithm, digest=digest, signature=signature)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.verify.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(verify_params, 'KeyVerifyParameters')
    request = self._client.post(url, query, headers, body)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyVerifyResult', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
verify.metadata = {'url': '/keys/{key-name}/{key-version}/verify'}
def wrap_key(self, vault_base_url, key_name, key_version, algorithm, value, cls=None, **kwargs):
    """Wrap a symmetric key using the specified key.

    WRAP encrypts a symmetric key with a key-encryption key already
    stored in the vault. It is strictly required only for symmetric
    keys; for asymmetric keys it is a convenience when the caller has a
    key reference but not the public key material. Requires the
    keys/wrapKey permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: algorithm identifier. Possible values include:
     'RSA-OAEP', 'RSA-OAEP-256', 'RSA1_5'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeyEncryptionAlgorithm
    :param value:
    :type value: bytes
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: KeyOperationResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyOperationResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    wrap_params = models.KeyOperationsParameters(algorithm=algorithm, value=value)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.wrap_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(wrap_params, 'KeyOperationsParameters')
    request = self._client.post(url, query, headers, body)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyOperationResult', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
wrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/wrapkey'}
def unwrap_key(self, vault_base_url, key_name, key_version, algorithm, value, cls=None, **kwargs):
    """Unwrap a symmetric key using the key originally used to wrap it.

    UNWRAP is the reverse of the WRAP operation: it decrypts a wrapped
    symmetric key with the target key-encryption key. It applies to both
    asymmetric and symmetric vault keys since it uses the private key
    portion. Requires the keys/unwrapKey permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param key_version: The version of the key.
    :type key_version: str
    :param algorithm: algorithm identifier. Possible values include:
     'RSA-OAEP', 'RSA-OAEP-256', 'RSA1_5'
    :type algorithm: str or
     ~azure.keyvault.v7_1.models.JsonWebKeyEncryptionAlgorithm
    :param value:
    :type value: bytes
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: KeyOperationResult or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyOperationResult
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    unwrap_params = models.KeyOperationsParameters(algorithm=algorithm, value=value)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str'),
    }
    url = self._client.format_url(self.unwrap_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(unwrap_params, 'KeyOperationsParameters')
    request = self._client.post(url, query, headers, body)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyOperationResult', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
unwrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/unwrapkey'}
def get_deleted_keys(self, vault_base_url, maxresults=None, cls=None, **kwargs):
    """List the deleted keys in the specified vault.

    Returns the keys of the vault as JSON Web Key structures containing
    the public part of each deleted key, including deletion-specific
    information. Only meaningful for soft-delete enabled vaults; the
    service returns an error otherwise. Requires the keys/list
    permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of DeletedKeyItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.DeletedKeyItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def build_request(next_link=None):
        # First page uses the operation URL plus query parameters;
        # subsequent pages follow the service-provided next_link as-is.
        query = {}
        if next_link:
            url = next_link
        else:
            url = self.get_deleted_keys.metadata['url']
            if maxresults is not None:
                query['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        path_args = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_args)
        headers = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query, headers)

    def extract_data(response):
        page = self._deserialize('DeletedKeyListResult', response)
        elements = page.value
        if cls:
            elements = cls(elements)
        return page.next_link, iter(elements)

    def fetch_page(next_link=None):
        response = self._client._pipeline.run(build_request(next_link), **kwargs).http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    return ItemPaged(fetch_page, extract_data)
get_deleted_keys.metadata = {'url': '/deletedkeys'}
def get_deleted_key(self, vault_base_url, key_name, cls=None, **kwargs):
    """Get the public part of a deleted key.

    Only meaningful for soft-delete enabled vaults; the service returns
    an error otherwise. Requires the keys/get permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key.
    :type key_name: str
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: DeletedKeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedKeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.get_deleted_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.get(url, query, headers)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedKeyBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
get_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'}
def purge_deleted_key(self, vault_base_url, key_name, cls=None, **kwargs):
    """Permanently delete the specified key.

    Only meaningful for soft-delete enabled vaults; the service returns
    an error otherwise. Requires the keys/purge permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the key
    :type key_name: str
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.purge_deleted_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.delete(url, query, headers)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    # Purge returns no body: 204 is the only success status.
    if response.status_code != 204:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    if cls:
        return cls(response, None, {})
purge_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'}
def recover_deleted_key(self, vault_base_url, key_name, cls=None, **kwargs):
    """Recover a deleted key to its latest version.

    Applicable to deleted keys in soft-delete enabled vaults: the key is
    restored to its latest version under /keys. Attempting to recover a
    key that is not deleted returns an error. This is the inverse of the
    delete operation on soft-delete enabled vaults. Requires the
    keys/recover permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param key_name: The name of the deleted key.
    :type key_name: str
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: KeyBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.KeyBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
    }
    url = self._client.format_url(self.recover_deleted_key.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    # POST with no request body.
    request = self._client.post(url, query, headers)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('KeyBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
recover_deleted_key.metadata = {'url': '/deletedkeys/{key-name}/recover'}
def set_secret(self, vault_base_url, secret_name, value, tags=None, content_type=None, secret_attributes=None, cls=None, **kwargs):
    """Set a secret in the specified key vault.

    Adds a secret to the vault; if a secret with the given name already
    exists, a new version of it is created. Requires the secrets/set
    permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param value: The value of the secret.
    :type value: str
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param content_type: Type of the secret value such as a password.
    :type content_type: str
    :param secret_attributes: The secret management attributes.
    :type secret_attributes: ~azure.keyvault.v7_1.models.SecretAttributes
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: SecretBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SecretBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    set_params = models.SecretSetParameters(value=value, tags=tags, content_type=content_type, secret_attributes=secret_attributes)

    # Expand the URL template; secret names are constrained server-side
    # to alphanumerics and dashes, validated here via the pattern.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'),
    }
    url = self._client.format_url(self.set_secret.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(set_params, 'SecretSetParameters')
    request = self._client.put(url, query, headers, body)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SecretBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
set_secret.metadata = {'url': '/secrets/{secret-name}'}
def delete_secret(self, vault_base_url, secret_name, cls=None, **kwargs):
    """Delete a secret from the specified key vault.

    Applies to any secret stored in the vault; individual versions of a
    secret cannot be deleted. Requires the secrets/delete permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: DeletedSecretBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedSecretBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
    }
    url = self._client.format_url(self.delete_secret.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.delete(url, query, headers)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedSecretBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
delete_secret.metadata = {'url': '/secrets/{secret-name}'}
def update_secret(self, vault_base_url, secret_name, secret_version, content_type=None, secret_attributes=None, tags=None, cls=None, **kwargs):
    """Update attributes of an existing secret.

    Changes the specified attributes of a stored secret; attributes not
    given in the request are left unchanged. The secret value itself
    cannot be changed. Requires the secrets/set permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param secret_version: The version of the secret.
    :type secret_version: str
    :param content_type: Type of the secret value such as a password.
    :type content_type: str
    :param secret_attributes: The secret management attributes.
    :type secret_attributes: ~azure.keyvault.v7_1.models.SecretAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: SecretBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SecretBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    update_params = models.SecretUpdateParameters(content_type=content_type, secret_attributes=secret_attributes, tags=tags)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
        'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
    }
    url = self._client.format_url(self.update_secret.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    body = self._serialize.body(update_params, 'SecretUpdateParameters')
    request = self._client.patch(url, query, headers, body)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SecretBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
update_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'}
def get_secret(self, vault_base_url, secret_name, secret_version, cls=None, **kwargs):
    """Get a specified secret from the given key vault.

    Applicable to any secret stored in Azure Key Vault. Requires the
    secrets/get permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param secret_version: The version of the secret.
    :type secret_version: str
    :param callable cls: A custom type or function that will be passed
     the direct response
    :return: SecretBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SecretBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Expand the URL template from the operation metadata.
    path_args = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
        'secret-version': self._serialize.url("secret_version", secret_version, 'str'),
    }
    url = self._client.format_url(self.get_secret.metadata['url'], **path_args)

    query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    headers = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())

    request = self._client.get(url, query, headers)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SecretBundle', response)
    if cls:
        return cls(response, deserialized, None)
    return deserialized
get_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'}
def get_secrets(self, vault_base_url, maxresults=None, cls=None, **kwargs):
    """List secrets in the specified key vault.

    Only the base secret identifier and its attributes are provided in
    the response; individual secret versions and values are not listed.
    Requires the secrets/list permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified, the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of SecretItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.SecretItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def build_request(next_link=None):
        # First page uses the operation URL plus query parameters;
        # subsequent pages follow the service-provided next_link as-is.
        query = {}
        if next_link:
            url = next_link
        else:
            url = self.get_secrets.metadata['url']
            if maxresults is not None:
                query['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        path_args = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_args)
        headers = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query, headers)

    def extract_data(response):
        page = self._deserialize('SecretListResult', response)
        elements = page.value
        if cls:
            elements = cls(elements)
        return page.next_link, iter(elements)

    def fetch_page(next_link=None):
        response = self._client._pipeline.run(build_request(next_link), **kwargs).http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    return ItemPaged(fetch_page, extract_data)
get_secrets.metadata = {'url': '/secrets'}
def get_secret_versions(self, vault_base_url, secret_name, maxresults=None, cls=None, **kwargs):
    """List all versions of the specified secret.

    The full secret identifier and attributes are provided in the
    response; no secret values are returned. Requires the secrets/list
    permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param secret_name: The name of the secret.
    :type secret_name: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified, the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of SecretItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.SecretItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def build_request(next_link=None):
        # First page uses the operation URL plus query parameters;
        # subsequent pages follow the service-provided next_link as-is.
        query = {}
        if next_link:
            url = next_link
        else:
            url = self.get_secret_versions.metadata['url']
            if maxresults is not None:
                query['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        path_args = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'secret-name': self._serialize.url("secret_name", secret_name, 'str'),
        }
        url = self._client.format_url(url, **path_args)
        headers = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query, headers)

    def extract_data(response):
        page = self._deserialize('SecretListResult', response)
        elements = page.value
        if cls:
            elements = cls(elements)
        return page.next_link, iter(elements)

    def fetch_page(next_link=None):
        response = self._client._pipeline.run(build_request(next_link), **kwargs).http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    return ItemPaged(fetch_page, extract_data)
get_secret_versions.metadata = {'url': '/secrets/{secret-name}/versions'}
    def get_deleted_secrets(
            self, vault_base_url, maxresults=None, cls=None, **kwargs):
        """Lists deleted secrets for the specified vault.

        The Get Deleted Secrets operation returns the secrets that have been
        deleted for a vault enabled for soft-delete. This operation requires
        the secrets/list permission.

        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :return: An iterator like instance of DeletedSecretItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.DeletedSecretItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)

        def prepare_request(next_link=None):
            # Build the GET request for one page. The first page expands the
            # URL template and adds query parameters; later pages reuse the
            # service-supplied next_link (query string already included).
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_deleted_secrets.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    # Service-enforced page size bounds: 1..25.
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                # Per-request correlation id for service-side diagnostics.
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(response):
            # Convert one deserialized page into (continuation link, items).
            deserialized = self._deserialize('DeletedSecretListResult', response)
            list_of_elem = deserialized.value
            if cls:
                # Let the caller post-process each page of items.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page; anything other than 200 is an error.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response

        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_deleted_secrets.metadata = {'url': '/deletedsecrets'}
def get_deleted_secret(self, vault_base_url, secret_name, cls=None, **kwargs):
"""Gets the specified deleted secret.
The Get Deleted Secret operation returns the specified deleted secret
along with its attributes. This operation requires the secrets/get
permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: DeletedSecretBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.DeletedSecretBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_deleted_secret.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DeletedSecretBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'}
def purge_deleted_secret(self, vault_base_url, secret_name, cls=None, **kwargs):
"""Permanently deletes the specified secret.
The purge deleted secret operation removes the secret permanently,
without the possibility of recovery. This operation can only be enabled
on a soft-delete enabled vault. This operation requires the
secrets/purge permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.purge_deleted_secret.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
if cls:
response_headers = {}
return cls(response, None, response_headers)
purge_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'}
def recover_deleted_secret(self, vault_base_url, secret_name, cls=None, **kwargs):
"""Recovers the deleted secret to the latest version.
Recovers the deleted secret in the specified vault. This operation can
only be performed on a soft-delete enabled vault. This operation
requires the secrets/recover permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the deleted secret.
:type secret_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: SecretBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.SecretBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.recover_deleted_secret.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecretBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
recover_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}/recover'}
def backup_secret(self, vault_base_url, secret_name, cls=None, **kwargs):
"""Backs up the specified secret.
Requests that a backup of the specified secret be downloaded to the
client. All versions of the secret will be downloaded. This operation
requires the secrets/backup permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_name: The name of the secret.
:type secret_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: BackupSecretResult or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.BackupSecretResult
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.backup_secret.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'secret-name': self._serialize.url("secret_name", secret_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BackupSecretResult', response)
if cls:
return cls(response, deserialized, None)
return deserialized
backup_secret.metadata = {'url': '/secrets/{secret-name}/backup'}
def restore_secret(self, vault_base_url, secret_bundle_backup, cls=None, **kwargs):
"""Restores a backed up secret to a vault.
Restores a backed up secret, and all its versions, to a vault. This
operation requires the secrets/restore permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param secret_bundle_backup: The backup blob associated with a secret
bundle.
:type secret_bundle_backup: bytes
:param callable cls: A custom type or function that will be passed the
direct response
:return: SecretBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.SecretBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.SecretRestoreParameters(secret_bundle_backup=secret_bundle_backup)
# Construct URL
url = self.restore_secret.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'SecretRestoreParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SecretBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
restore_secret.metadata = {'url': '/secrets/restore'}
    def get_storage_accounts(
            self, vault_base_url, maxresults=None, cls=None, **kwargs):
        """List storage accounts managed by the specified key vault. This
        operation requires the storage/list permission.

        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :return: An iterator like instance of StorageAccountItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.StorageAccountItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)

        def prepare_request(next_link=None):
            # Build the GET request for one page. The first page expands the
            # URL template and adds query parameters; later pages reuse the
            # service-supplied next_link (query string already included).
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_storage_accounts.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    # Service-enforced page size bounds: 1..25.
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                # Per-request correlation id for service-side diagnostics.
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(response):
            # Convert one deserialized page into (continuation link, items).
            deserialized = self._deserialize('StorageListResult', response)
            list_of_elem = deserialized.value
            if cls:
                # Let the caller post-process each page of items.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page; anything other than 200 is an error.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response

        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_storage_accounts.metadata = {'url': '/storage'}
    def get_deleted_storage_accounts(
            self, vault_base_url, maxresults=None, cls=None, **kwargs):
        """Lists deleted storage accounts for the specified vault.

        The Get Deleted Storage Accounts operation returns the storage accounts
        that have been deleted for a vault enabled for soft-delete. This
        operation requires the storage/list permission.

        :param vault_base_url: The vault name, for example
         https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If
         not specified the service will return up to 25 results.
        :type maxresults: int
        :return: An iterator like instance of DeletedStorageAccountItem
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.DeletedStorageAccountItem]
        :raises:
         :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
        """
        error_map = kwargs.pop('error_map', None)

        def prepare_request(next_link=None):
            # Build the GET request for one page. The first page expands the
            # URL template and adds query parameters; later pages reuse the
            # service-supplied next_link (query string already included).
            query_parameters = {}
            if not next_link:
                # Construct URL
                url = self.get_deleted_storage_accounts.metadata['url']
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
                if maxresults is not None:
                    # Service-enforced page size bounds: 1..25.
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
                }
                url = self._client.format_url(url, **path_format_arguments)
            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self._config.generate_client_request_id:
                # Per-request correlation id for service-side diagnostics.
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(response):
            # Convert one deserialized page into (continuation link, items).
            deserialized = self._deserialize('DeletedStorageListResult', response)
            list_of_elem = deserialized.value
            if cls:
                # Let the caller post-process each page of items.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page; anything other than 200 is an error.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise models.KeyVaultErrorException(response, self._deserialize)
            return response

        # Deserialize response
        return ItemPaged(
            get_next, extract_data
        )
    get_deleted_storage_accounts.metadata = {'url': '/deletedstorage'}
def get_deleted_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
"""Gets the specified deleted storage account.
The Get Deleted Storage Account operation returns the specified deleted
storage account along with its attributes. This operation requires the
storage/get permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: DeletedStorageBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.DeletedStorageBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.get_deleted_storage_account.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DeletedStorageBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
get_deleted_storage_account.metadata = {'url': '/deletedstorage/{storage-account-name}'}
def purge_deleted_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
"""Permanently deletes the specified storage account.
The purge deleted storage account operation removes the secret
permanently, without the possibility of recovery. This operation can
only be performed on a soft-delete enabled vault. This operation
requires the storage/purge permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.purge_deleted_storage_account.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
if cls:
response_headers = {}
return cls(response, None, response_headers)
purge_deleted_storage_account.metadata = {'url': '/deletedstorage/{storage-account-name}'}
def recover_deleted_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
"""Recovers the deleted storage account.
Recovers the deleted storage account in the specified vault. This
operation can only be performed on a soft-delete enabled vault. This
operation requires the storage/recover permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: StorageBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.StorageBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.recover_deleted_storage_account.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('StorageBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
recover_deleted_storage_account.metadata = {'url': '/deletedstorage/{storage-account-name}/recover'}
def backup_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
"""Backs up the specified storage account.
Requests that a backup of the specified storage account be downloaded
to the client. This operation requires the storage/backup permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_account_name: The name of the storage account.
:type storage_account_name: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: BackupStorageResult or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.BackupStorageResult
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.backup_storage_account.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BackupStorageResult', response)
if cls:
return cls(response, deserialized, None)
return deserialized
backup_storage_account.metadata = {'url': '/storage/{storage-account-name}/backup'}
def restore_storage_account(self, vault_base_url, storage_bundle_backup, cls=None, **kwargs):
"""Restores a backed up storage account to a vault.
Restores a backed up storage account to a vault. This operation
requires the storage/restore permission.
:param vault_base_url: The vault name, for example
https://myvault.vault.azure.net.
:type vault_base_url: str
:param storage_bundle_backup: The backup blob associated with a
storage account.
:type storage_bundle_backup: bytes
:param callable cls: A custom type or function that will be passed the
direct response
:return: StorageBundle or the result of cls(response)
:rtype: ~azure.keyvault.v7_1.models.StorageBundle
:raises:
:class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
"""
error_map = kwargs.pop('error_map', None)
parameters = models.StorageRestoreParameters(storage_bundle_backup=storage_bundle_backup)
# Construct URL
url = self.restore_storage_account.metadata['url']
path_format_arguments = {
'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self._config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
# Construct body
body_content = self._serialize.body(parameters, 'StorageRestoreParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.KeyVaultErrorException(response, self._deserialize)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('StorageBundle', response)
if cls:
return cls(response, deserialized, None)
return deserialized
restore_storage_account.metadata = {'url': '/storage/restore'}
def delete_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
    """Delete a storage account from the vault.

    Requires the storage/delete permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: DeletedStorageBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedStorageBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.delete_storage_account.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the DELETE through the client pipeline.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedStorageBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
delete_storage_account.metadata = {'url': '/storage/{storage-account-name}'}
def get_storage_account(self, vault_base_url, storage_account_name, cls=None, **kwargs):
    """Retrieve information about a specified storage account.

    Requires the storage/get permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: StorageBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.StorageBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.get_storage_account.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the GET through the client pipeline.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('StorageBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
get_storage_account.metadata = {'url': '/storage/{storage-account-name}'}
def set_storage_account(self, vault_base_url, storage_account_name, resource_id, active_key_name, auto_regenerate_key, regeneration_period=None, storage_account_attributes=None, tags=None, cls=None, **kwargs):
    """Create or update a storage account managed by the vault.

    Requires the storage/set permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param resource_id: Storage account resource id.
    :type resource_id: str
    :param active_key_name: Current active storage account key name.
    :type active_key_name: str
    :param auto_regenerate_key: whether keyvault should manage the storage
     account for the user.
    :type auto_regenerate_key: bool
    :param regeneration_period: The key regeneration time duration
     specified in ISO-8601 format.
    :type regeneration_period: str
    :param storage_account_attributes: The attributes of the storage
     account.
    :type storage_account_attributes:
     ~azure.keyvault.v7_1.models.StorageAccountAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: StorageBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.StorageBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold the individual arguments into the wire-format parameter model.
    parameters = models.StorageAccountCreateParameters(
        resource_id=resource_id,
        active_key_name=active_key_name,
        auto_regenerate_key=auto_regenerate_key,
        regeneration_period=regeneration_period,
        storage_account_attributes=storage_account_attributes,
        tags=tags)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.set_storage_account.metadata['url'], **path_format_arguments)

    # Query string, headers and serialized body.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters')

    # Send the PUT through the client pipeline.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('StorageBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
set_storage_account.metadata = {'url': '/storage/{storage-account-name}'}
def update_storage_account(self, vault_base_url, storage_account_name, active_key_name=None, auto_regenerate_key=None, regeneration_period=None, storage_account_attributes=None, tags=None, cls=None, **kwargs):
    """Update attributes of an existing vault-managed storage account.

    Requires the storage/set/update permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param active_key_name: The current active storage account key name.
    :type active_key_name: str
    :param auto_regenerate_key: whether keyvault should manage the storage
     account for the user.
    :type auto_regenerate_key: bool
    :param regeneration_period: The key regeneration time duration
     specified in ISO-8601 format.
    :type regeneration_period: str
    :param storage_account_attributes: The attributes of the storage
     account.
    :type storage_account_attributes:
     ~azure.keyvault.v7_1.models.StorageAccountAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: StorageBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.StorageBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold the individual arguments into the wire-format parameter model.
    parameters = models.StorageAccountUpdateParameters(
        active_key_name=active_key_name,
        auto_regenerate_key=auto_regenerate_key,
        regeneration_period=regeneration_period,
        storage_account_attributes=storage_account_attributes,
        tags=tags)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.update_storage_account.metadata['url'], **path_format_arguments)

    # Query string, headers and serialized body.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters')

    # Send the PATCH through the client pipeline.
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('StorageBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
update_storage_account.metadata = {'url': '/storage/{storage-account-name}'}
def regenerate_storage_account_key(self, vault_base_url, storage_account_name, key_name, cls=None, **kwargs):
    """Regenerate the specified key value for the given storage account.

    Requires the storage/regeneratekey permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param key_name: The storage account key name.
    :type key_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: StorageBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.StorageBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # NOTE: 'StorageAccountRegenerteKeyParameters' (sic) is the generated
    # model's actual name; do not "fix" the spelling.
    parameters = models.StorageAccountRegenerteKeyParameters(key_name=key_name)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.regenerate_storage_account_key.metadata['url'], **path_format_arguments)

    # Query string, headers and serialized body.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    body_content = self._serialize.body(parameters, 'StorageAccountRegenerteKeyParameters')

    # Send the POST through the client pipeline.
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('StorageBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
regenerate_storage_account_key.metadata = {'url': '/storage/{storage-account-name}/regeneratekey'}
def get_sas_definitions(
        self, vault_base_url, storage_account_name, maxresults=None, cls=None, **kwargs):
    """List the SAS definitions stored for the given storage account.

    Requires the storage/listsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of SasDefinitionItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.SasDefinitionItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def prepare_request(next_link=None):
        # URL/query construction is deferred until iteration so that
        # serialization errors surface exactly when a page is fetched.
        query_parameters = {}
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        }
        if not next_link:
            # First page: use the operation's base URL plus paging params.
            url = self._client.format_url(self.get_sas_definitions.metadata['url'], **path_format_arguments)
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        else:
            # Subsequent pages: the service-provided link already embeds the query.
            url = self._client.format_url(next_link, **path_format_arguments)
        header_parameters = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query_parameters, header_parameters)

    def extract_data(response):
        deserialized = self._deserialize('SasDefinitionListResult', response)
        elements = deserialized.value
        if cls:
            elements = cls(elements)
        return deserialized.next_link, iter(elements)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(prepare_request(next_link), **kwargs)
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    # Lazily paged iterator over the SAS definition items.
    return ItemPaged(get_next, extract_data)
get_sas_definitions.metadata = {'url': '/storage/{storage-account-name}/sas'}
def get_deleted_sas_definitions(
        self, vault_base_url, storage_account_name, maxresults=None, cls=None, **kwargs):
    """List deleted SAS definitions for the specified vault and storage
    account.

    Returns the SAS definitions that have been deleted for a vault enabled
    for soft-delete. Requires the storage/listsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param maxresults: Maximum number of results to return in a page. If
     not specified the service will return up to 25 results.
    :type maxresults: int
    :return: An iterator like instance of DeletedSasDefinitionItem
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.keyvault.v7_1.models.DeletedSasDefinitionItem]
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    def prepare_request(next_link=None):
        # URL/query construction is deferred until iteration so that
        # serialization errors surface exactly when a page is fetched.
        query_parameters = {}
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        }
        if not next_link:
            # First page: use the operation's base URL plus paging params.
            url = self._client.format_url(self.get_deleted_sas_definitions.metadata['url'], **path_format_arguments)
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        else:
            # Subsequent pages: the service-provided link already embeds the query.
            url = self._client.format_url(next_link, **path_format_arguments)
        header_parameters = {'Accept': 'application/json'}
        if self._config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        return self._client.get(url, query_parameters, header_parameters)

    def extract_data(response):
        deserialized = self._deserialize('DeletedSasDefinitionListResult', response)
        elements = deserialized.value
        if cls:
            elements = cls(elements)
        return deserialized.next_link, iter(elements)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(prepare_request(next_link), **kwargs)
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise models.KeyVaultErrorException(response, self._deserialize)
        return response

    # Lazily paged iterator over the deleted SAS definition items.
    return ItemPaged(get_next, extract_data)
get_deleted_sas_definitions.metadata = {'url': '/deletedstorage/{storage-account-name}/sas'}
def get_deleted_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, cls=None, **kwargs):
    """Retrieve a deleted SAS definition along with its attributes.

    Only meaningful on a soft-delete enabled vault. Requires the
    storage/getsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: DeletedSasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedSasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.get_deleted_sas_definition.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the GET through the client pipeline.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedSasDefinitionBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
get_deleted_sas_definition.metadata = {'url': '/deletedstorage/{storage-account-name}/sas/{sas-definition-name}'}
def recover_deleted_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, cls=None, **kwargs):
    """Recover a deleted SAS definition for the specified storage account.

    Can only be performed on a soft-delete enabled vault. Requires the
    storage/recover permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: SasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.recover_deleted_sas_definition.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the POST (no body) through the client pipeline.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SasDefinitionBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
recover_deleted_sas_definition.metadata = {'url': '/deletedstorage/{storage-account-name}/sas/{sas-definition-name}/recover'}
def delete_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, cls=None, **kwargs):
    """Delete a SAS definition from a specified storage account.

    Requires the storage/deletesas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: DeletedSasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.DeletedSasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.delete_sas_definition.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the DELETE through the client pipeline.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('DeletedSasDefinitionBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
delete_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'}
def get_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, cls=None, **kwargs):
    """Retrieve a SAS definition for the specified storage account.

    Requires the storage/getsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: SasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.get_sas_definition.metadata['url'], **path_format_arguments)

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {'Accept': 'application/json'}
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())

    # Send the GET through the client pipeline.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SasDefinitionBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
get_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'}
def set_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, template_uri, sas_type, validity_period, sas_definition_attributes=None, tags=None, cls=None, **kwargs):
    """Create or update a SAS definition for the specified storage account.

    Requires the storage/setsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param template_uri: The SAS definition token template signed with an
     arbitrary key. Tokens created according to the SAS definition will
     have the same properties as the template.
    :type template_uri: str
    :param sas_type: The type of SAS token the SAS definition will create.
     Possible values include: 'account', 'service'
    :type sas_type: str or ~azure.keyvault.v7_1.models.SasTokenType
    :param validity_period: The validity period of SAS tokens created
     according to the SAS definition.
    :type validity_period: str
    :param sas_definition_attributes: The attributes of the SAS
     definition.
    :type sas_definition_attributes:
     ~azure.keyvault.v7_1.models.SasDefinitionAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: SasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)

    # Fold the individual arguments into the wire-format parameter model.
    parameters = models.SasDefinitionCreateParameters(
        template_uri=template_uri,
        sas_type=sas_type,
        validity_period=validity_period,
        sas_definition_attributes=sas_definition_attributes,
        tags=tags)

    # Resolve the operation URL from the method metadata.
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
    }
    url = self._client.format_url(self.set_sas_definition.metadata['url'], **path_format_arguments)

    # Query string, headers and serialized body.
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    body_content = self._serialize.body(parameters, 'SasDefinitionCreateParameters')

    # Send the PUT through the client pipeline.
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client._pipeline.run(request, stream=False, **kwargs).http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)

    deserialized = self._deserialize('SasDefinitionBundle', response) if response.status_code == 200 else None
    return cls(response, deserialized, None) if cls else deserialized
set_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'}
def update_sas_definition(self, vault_base_url, storage_account_name, sas_definition_name, template_uri=None, sas_type=None, validity_period=None, sas_definition_attributes=None, tags=None, cls=None, **kwargs):
    """Updates the specified attributes associated with the given SAS
    definition. This operation requires the storage/setsas permission.

    :param vault_base_url: The vault name, for example
     https://myvault.vault.azure.net.
    :type vault_base_url: str
    :param storage_account_name: The name of the storage account.
    :type storage_account_name: str
    :param sas_definition_name: The name of the SAS definition.
    :type sas_definition_name: str
    :param template_uri: The SAS definition token template signed with an
     arbitrary key. Tokens created according to the SAS definition will
     have the same properties as the template.
    :type template_uri: str
    :param sas_type: The type of SAS token the SAS definition will create.
     Possible values include: 'account', 'service'
    :type sas_type: str or ~azure.keyvault.v7_1.models.SasTokenType
    :param validity_period: The validity period of SAS tokens created
     according to the SAS definition.
    :type validity_period: str
    :param sas_definition_attributes: The attributes of the SAS
     definition.
    :type sas_definition_attributes:
     ~azure.keyvault.v7_1.models.SasDefinitionAttributes
    :param tags: Application specific metadata in the form of key-value
     pairs.
    :type tags: dict[str, str]
    :param callable cls: A custom type or function that will be passed the
     direct response
    :return: SasDefinitionBundle or the result of cls(response)
    :rtype: ~azure.keyvault.v7_1.models.SasDefinitionBundle
    :raises:
     :class:`KeyVaultErrorException<azure.keyvault.v7_1.models.KeyVaultErrorException>`
    """
    error_map = kwargs.pop('error_map', None)
    # Bundle the optional SAS settings into the generated request model;
    # unset fields are serialized as absent (PATCH semantics).
    parameters = models.SasDefinitionUpdateParameters(template_uri=template_uri, sas_type=sas_type, validity_period=validity_period, sas_definition_attributes=sas_definition_attributes, tags=tags)
    # Construct URL
    url = self.update_sas_definition.metadata['url']
    path_format_arguments = {
        # The vault base URL is a full https URL, so quoting must be skipped.
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        # Account and definition names are validated as alphanumeric-only.
        'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'),
        'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self._config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    # Construct body
    body_content = self._serialize.body(parameters, 'SasDefinitionUpdateParameters')
    # Construct and send request (PATCH: partial update of the SAS definition).
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    # Only HTTP 200 is a documented success for this operation.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise models.KeyVaultErrorException(response, self._deserialize)
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('SasDefinitionBundle', response)
    if cls:
        # Give the caller's hook access to the raw response as well.
        return cls(response, deserialized, None)
    return deserialized
update_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'}
| 47.083254
| 264
| 0.669139
|
0857a3279f118bca48d762e00248a2908f2a180e
| 45,074
|
py
|
Python
|
tensorflow/python/keras/callbacks.py
|
elielhojman/tensorflow
|
163aae337c875efce2518c3cd0fecb61968fe408
|
[
"Apache-2.0"
] | 3
|
2018-07-16T09:11:12.000Z
|
2018-07-17T13:38:16.000Z
|
tensorflow/python/keras/callbacks.py
|
elielhojman/tensorflow
|
163aae337c875efce2518c3cd0fecb61968fe408
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/keras/callbacks.py
|
elielhojman/tensorflow
|
163aae337c875efce2518c3cd0fecb61968fe408
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
"""Callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import deque
from collections import Iterable
from collections import OrderedDict
import csv
import json
import math
import os
import time
import numpy as np
import six
from tensorflow.python.framework import dtypes
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.engine.training_utils import standardize_input_data
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary as tf_summary
from tensorflow.python.training import saver
from tensorflow.python.util.tf_export import tf_export
# `requests` is an optional dependency used only by `RemoteMonitor`; defer the
# import failure until that callback is actually used (it raises ImportError
# in `on_epoch_end` when `requests` is None).
try:
  import requests
except ImportError:
  requests = None
class CallbackList(object):
  """Container abstracting a list of callbacks.

  Dispatches each lifecycle event to every contained callback and keeps
  running timing statistics so it can warn when callbacks are slow relative
  to the batch computation itself.

  Arguments:
      callbacks: List of `Callback` instances.
      queue_length: Queue length for keeping
          running statistics over callback execution time.
  """

  def __init__(self, callbacks=None, queue_length=10):
    callbacks = callbacks or []
    self.callbacks = [c for c in callbacks]
    self.queue_length = queue_length

  def append(self, callback):
    self.callbacks.append(callback)

  def set_params(self, params):
    # Propagate training parameters to every callback.
    for callback in self.callbacks:
      callback.set_params(params)

  def set_model(self, model):
    # Propagate the model reference to every callback.
    for callback in self.callbacks:
      callback.set_model(model)

  def on_epoch_begin(self, epoch, logs=None):
    """Called at the start of an epoch.

    Arguments:
        epoch: integer, index of epoch.
        logs: dictionary of logs.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_epoch_begin(epoch, logs)
    # Reset the per-batch timing state; the deques keep only the last
    # `queue_length` measurements so the medians track recent behavior.
    self._delta_t_batch = 0.
    self._delta_ts_batch_begin = deque([], maxlen=self.queue_length)
    self._delta_ts_batch_end = deque([], maxlen=self.queue_length)

  def on_epoch_end(self, epoch, logs=None):
    """Called at the end of an epoch.

    Arguments:
        epoch: integer, index of epoch.
        logs: dictionary of logs.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_epoch_end(epoch, logs)

  def on_batch_begin(self, batch, logs=None):
    """Called right before processing a batch.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dictionary of logs.
    """
    logs = logs or {}
    t_before_callbacks = time.time()
    for callback in self.callbacks:
      callback.on_batch_begin(batch, logs)
    self._delta_ts_batch_begin.append(time.time() - t_before_callbacks)
    delta_t_median = np.median(self._delta_ts_batch_begin)
    # Warn if callback overhead approaches the batch computation time
    # (and is non-trivial in absolute terms, > 0.1 s).
    if (self._delta_t_batch > 0. and
        delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1):
      logging.warning('Method on_batch_begin() is slow compared '
                      'to the batch update (%f). Check your callbacks.',
                      delta_t_median)
    self._t_enter_batch = time.time()

  def on_batch_end(self, batch, logs=None):
    """Called at the end of a batch.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dictionary of logs.
    """
    logs = logs or {}
    # Guard against on_batch_end being called without a prior on_batch_begin.
    if not hasattr(self, '_t_enter_batch'):
      self._t_enter_batch = time.time()
    self._delta_t_batch = time.time() - self._t_enter_batch
    t_before_callbacks = time.time()
    for callback in self.callbacks:
      callback.on_batch_end(batch, logs)
    self._delta_ts_batch_end.append(time.time() - t_before_callbacks)
    delta_t_median = np.median(self._delta_ts_batch_end)
    if (self._delta_t_batch > 0. and
        (delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1)):
      logging.warning('Method on_batch_end() is slow compared '
                      'to the batch update (%f). Check your callbacks.',
                      delta_t_median)

  def on_train_begin(self, logs=None):
    """Called at the beginning of training.

    Arguments:
        logs: dictionary of logs.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_train_begin(logs)

  def on_train_end(self, logs=None):
    """Called at the end of training.

    Arguments:
        logs: dictionary of logs.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_train_end(logs)

  def __iter__(self):
    return iter(self.callbacks)
@tf_export('keras.callbacks.Callback')
class Callback(object):
  """Abstract base class used to build new callbacks.

  Subclasses override the `on_*` hooks they care about; every hook is a
  no-op by default, so overriding is strictly opt-in.

  Attributes:
      params: dict. Training parameters
          (eg. verbosity, batch size, number of epochs...).
      model: instance of `keras.models.Model`.
          Reference of the model being trained.

  The `logs` dictionary that callback methods
  take as argument will contain keys for quantities relevant to
  the current batch or epoch.

  Currently, the `.fit()` method of the `Sequential` model class
  will include the following quantities in the `logs` that
  it passes to its callbacks:

      on_epoch_end: logs include `acc` and `loss`, and
          optionally include `val_loss`
          (if validation is enabled in `fit`), and `val_acc`
          (if validation and accuracy monitoring are enabled).
      on_batch_begin: logs include `size`,
          the number of samples in the current batch.
      on_batch_end: logs include `loss`, and optionally `acc`
          (if accuracy monitoring is enabled).
  """

  def __init__(self):
    # Both references are filled in later by the training loop.
    self.model = None
    self.validation_data = None

  def set_params(self, params):
    self.params = params

  def set_model(self, model):
    self.model = model

  def on_epoch_begin(self, epoch, logs=None):
    pass

  def on_epoch_end(self, epoch, logs=None):
    pass

  def on_batch_begin(self, batch, logs=None):
    pass

  def on_batch_end(self, batch, logs=None):
    pass

  def on_train_begin(self, logs=None):
    pass

  def on_train_end(self, logs=None):
    pass
@tf_export('keras.callbacks.BaseLogger')
class BaseLogger(Callback):
  """Callback that accumulates epoch averages of metrics.

  This callback is automatically applied to every Keras model.

  Arguments:
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is in `on_epoch_end`.
          All others will be averaged in `on_epoch_end`.
  """

  def __init__(self, stateful_metrics=None):
    super(BaseLogger, self).__init__()
    self.stateful_metrics = set(stateful_metrics or [])

  def on_epoch_begin(self, epoch, logs=None):
    # Restart the running sums for the new epoch.
    self.seen = 0
    self.totals = {}

  def on_batch_end(self, batch, logs=None):
    logs = logs or {}
    batch_size = logs.get('size', 0)
    self.seen += batch_size
    for metric, value in logs.items():
      if metric in self.stateful_metrics:
        # Stateful metrics already carry their own running value.
        self.totals[metric] = value
      elif metric in self.totals:
        # Weight each batch's contribution by its sample count.
        self.totals[metric] += value * batch_size
      else:
        self.totals[metric] = value * batch_size

  def on_epoch_end(self, epoch, logs=None):
    if logs is None:
      return
    for metric in self.params['metrics']:
      if metric not in self.totals:
        continue
      # Make value available to next callbacks.
      if metric in self.stateful_metrics:
        logs[metric] = self.totals[metric]
      else:
        logs[metric] = self.totals[metric] / self.seen
@tf_export('keras.callbacks.TerminateOnNaN')
class TerminateOnNaN(Callback):
  """Callback that terminates training when a NaN loss is encountered.
  """

  def on_batch_end(self, batch, logs=None):
    loss = (logs or {}).get('loss')
    if loss is None:
      return
    # Stop training as soon as the loss becomes NaN or infinite.
    if np.isnan(loss) or np.isinf(loss):
      print('Batch %d: Invalid loss, terminating training' % (batch))
      self.model.stop_training = True
@tf_export('keras.callbacks.ProgbarLogger')
class ProgbarLogger(Callback):
  """Callback that prints metrics to stdout.

  Arguments:
      count_mode: One of "steps" or "samples".
          Whether the progress bar should
          count samples seen or steps (batches) seen.
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is.
          All others will be averaged over time (e.g. loss, etc).

  Raises:
      ValueError: In case of invalid `count_mode`.
  """

  def __init__(self, count_mode='samples', stateful_metrics=None):
    super(ProgbarLogger, self).__init__()
    if count_mode == 'samples':
      self.use_steps = False
    elif count_mode == 'steps':
      self.use_steps = True
    else:
      raise ValueError('Unknown `count_mode`: ' + str(count_mode))
    self.stateful_metrics = set(stateful_metrics or [])

  def on_train_begin(self, logs=None):
    self.verbose = self.params['verbose']
    self.epochs = self.params['epochs']

  def on_epoch_begin(self, epoch, logs=None):
    if self.verbose:
      print('Epoch %d/%d' % (epoch + 1, self.epochs))
    # NOTE(review): `params['steps']` may be None for some training setups;
    # the `<` comparisons against `self.target` below would then raise on
    # Python 3 — confirm callers always provide a concrete count.
    if self.use_steps:
      target = self.params['steps']
    else:
      target = self.params['samples']
    self.target = target
    self.progbar = Progbar(
        target=self.target,
        verbose=self.verbose,
        stateful_metrics=self.stateful_metrics)
    self.seen = 0

  def on_batch_begin(self, batch, logs=None):
    # Start a fresh list of (name, value) pairs for this batch's display.
    if self.seen < self.target:
      self.log_values = []

  def on_batch_end(self, batch, logs=None):
    logs = logs or {}
    batch_size = logs.get('size', 0)
    if self.use_steps:
      self.seen += 1
    else:
      self.seen += batch_size
    for k in self.params['metrics']:
      if k in logs:
        self.log_values.append((k, logs[k]))
    # Skip progbar update for the last batch;
    # will be handled by on_epoch_end.
    if self.verbose and self.seen < self.target:
      self.progbar.update(self.seen, self.log_values)

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    for k in self.params['metrics']:
      if k in logs:
        self.log_values.append((k, logs[k]))
    if self.verbose:
      # Final update for the epoch, including validation metrics if present.
      self.progbar.update(self.seen, self.log_values)
@tf_export('keras.callbacks.History')
class History(Callback):
  """Callback that records events into a `History` object.

  This callback is automatically applied to
  every Keras model. The `History` object
  gets returned by the `fit` method of models.
  """

  def on_train_begin(self, logs=None):
    # Fresh containers at the start of each `fit` call.
    self.epoch = []
    self.history = {}

  def on_epoch_end(self, epoch, logs=None):
    self.epoch.append(epoch)
    # Append every logged metric under its own key.
    for key, value in (logs or {}).items():
      self.history.setdefault(key, []).append(value)
@tf_export('keras.callbacks.ModelCheckpoint')
class ModelCheckpoint(Callback):
  """Save the model after every epoch.

  `filepath` can contain named formatting options,
  which will be filled the value of `epoch` and
  keys in `logs` (passed in `on_epoch_end`).

  For example: if `filepath` is `weights.{epoch:02d}-{val_loss:.2f}.hdf5`,
  then the model checkpoints will be saved with the epoch number and
  the validation loss in the filename.

  Arguments:
      filepath: string, path to save the model file.
      monitor: quantity to monitor.
      verbose: verbosity mode, 0 or 1.
      save_best_only: if `save_best_only=True`,
          the latest best model according to
          the quantity monitored will not be overwritten.
      mode: one of {auto, min, max}.
          If `save_best_only=True`, the decision
          to overwrite the current save file is made
          based on either the maximization or the
          minimization of the monitored quantity. For `val_acc`,
          this should be `max`, for `val_loss` this should
          be `min`, etc. In `auto` mode, the direction is
          automatically inferred from the name of the monitored quantity.
      save_weights_only: if True, then only the model's weights will be
          saved (`model.save_weights(filepath)`), else the full model
          is saved (`model.save(filepath)`).
      period: Interval (number of epochs) between checkpoints.
  """

  def __init__(self,
               filepath,
               monitor='val_loss',
               verbose=0,
               save_best_only=False,
               save_weights_only=False,
               mode='auto',
               period=1):
    super(ModelCheckpoint, self).__init__()
    self.monitor = monitor
    self.verbose = verbose
    self.filepath = filepath
    self.save_best_only = save_best_only
    self.save_weights_only = save_weights_only
    self.period = period
    self.epochs_since_last_save = 0
    if mode not in ['auto', 'min', 'max']:
      logging.warning('ModelCheckpoint mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'
    # `monitor_op(current, best)` is True when `current` is an improvement;
    # `best` starts at the worst possible value for the chosen direction.
    if mode == 'min':
      self.monitor_op = np.less
      self.best = np.Inf
    elif mode == 'max':
      self.monitor_op = np.greater
      self.best = -np.Inf
    else:
      # 'auto' mode: infer the direction from the metric's name.
      if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
        self.monitor_op = np.greater
        self.best = -np.Inf
      else:
        self.monitor_op = np.less
        self.best = np.Inf

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    self.epochs_since_last_save += 1
    # Only checkpoint every `period` epochs.
    if self.epochs_since_last_save >= self.period:
      self.epochs_since_last_save = 0
      # Fill `{epoch}` and any metric placeholders in the filename.
      filepath = self.filepath.format(epoch=epoch + 1, **logs)
      if self.save_best_only:
        current = logs.get(self.monitor)
        if current is None:
          logging.warning('Can save best model only with %s available, '
                          'skipping.', self.monitor)
        else:
          if self.monitor_op(current, self.best):
            if self.verbose > 0:
              print('\nEpoch %05d: %s improved from %0.5f to %0.5f,'
                    ' saving model to %s' % (epoch + 1, self.monitor, self.best,
                                             current, filepath))
            self.best = current
            if self.save_weights_only:
              self.model.save_weights(filepath, overwrite=True)
            else:
              self.model.save(filepath, overwrite=True)
          else:
            if self.verbose > 0:
              print('\nEpoch %05d: %s did not improve from %0.5f' %
                    (epoch + 1, self.monitor, self.best))
      else:
        # Unconditional save (subject only to `period`).
        if self.verbose > 0:
          print('\nEpoch %05d: saving model to %s' % (epoch + 1, filepath))
        if self.save_weights_only:
          self.model.save_weights(filepath, overwrite=True)
        else:
          self.model.save(filepath, overwrite=True)
@tf_export('keras.callbacks.EarlyStopping')
class EarlyStopping(Callback):
  """Stop training when a monitored quantity has stopped improving.

  Arguments:
      monitor: quantity to be monitored.
      min_delta: minimum change in the monitored quantity
          to qualify as an improvement, i.e. an absolute
          change of less than min_delta, will count as no
          improvement.
      patience: number of epochs with no improvement
          after which training will be stopped.
      verbose: verbosity mode.
      mode: one of {auto, min, max}. In `min` mode,
          training will stop when the quantity
          monitored has stopped decreasing; in `max`
          mode it will stop when the quantity
          monitored has stopped increasing; in `auto`
          mode, the direction is automatically inferred
          from the name of the monitored quantity.
      baseline: baseline value for the monitored quantity.
          Training will stop if the model doesn't show improvement over the
          baseline.
  """

  def __init__(self,
               monitor='val_loss',
               min_delta=0,
               patience=0,
               verbose=0,
               mode='auto',
               baseline=None):
    super(EarlyStopping, self).__init__()
    self.monitor = monitor
    self.patience = patience
    self.verbose = verbose
    self.baseline = baseline
    self.min_delta = abs(min_delta)
    self.wait = 0
    self.stopped_epoch = 0
    if mode not in ['auto', 'min', 'max']:
      logging.warning('EarlyStopping mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'
    if mode == 'min':
      self.monitor_op = np.less
    elif mode == 'max':
      self.monitor_op = np.greater
    else:
      # 'auto' mode: metrics containing 'acc' are maximized, others minimized.
      if 'acc' in self.monitor:
        self.monitor_op = np.greater
      else:
        self.monitor_op = np.less
    # Flip the sign of min_delta so that `current - min_delta` (used in
    # on_epoch_end) always means "current adjusted by the required margin"
    # regardless of direction: for np.less the delta is subtracted from the
    # wrong side otherwise.
    if self.monitor_op == np.greater:
      self.min_delta *= 1
    else:
      self.min_delta *= -1

  def on_train_begin(self, logs=None):
    # Allow instances to be re-used
    self.wait = 0
    self.stopped_epoch = 0
    if self.baseline is not None:
      self.best = self.baseline
    else:
      # Start at the worst possible value for the chosen direction.
      self.best = np.Inf if self.monitor_op == np.less else -np.Inf

  def on_epoch_end(self, epoch, logs=None):
    current = logs.get(self.monitor)
    if current is None:
      logging.warning('Early stopping conditioned on metric `%s` '
                      'which is not available. Available metrics are: %s',
                      self.monitor, ','.join(list(logs.keys())))
      return
    # Improvement must exceed min_delta (sign-adjusted in __init__).
    if self.monitor_op(current - self.min_delta, self.best):
      self.best = current
      self.wait = 0
    else:
      self.wait += 1
      if self.wait >= self.patience:
        self.stopped_epoch = epoch
        self.model.stop_training = True

  def on_train_end(self, logs=None):
    if self.stopped_epoch > 0 and self.verbose > 0:
      print('Epoch %05d: early stopping' % (self.stopped_epoch + 1))
@tf_export('keras.callbacks.RemoteMonitor')
class RemoteMonitor(Callback):
  """Callback used to stream events to a server.

  Requires the `requests` library.
  Events are sent to `root + '/publish/epoch/end/'` by default. Calls are
  HTTP POST, with a `data` argument which is a
  JSON-encoded dictionary of event data.
  If send_as_json is set to True, the content type of the request will be
  application/json. Otherwise the serialized JSON will be sent within a form.

  Arguments:
      root: String; root url of the target server.
      path: String; path relative to `root` to which the events will be sent.
      field: String; JSON field under which the data will be stored.
          The field is used only if the payload is sent within a form
          (i.e. send_as_json is set to False).
      headers: Dictionary; optional custom HTTP headers.
      send_as_json: Boolean; whether the request should be
          sent as application/json.
  """

  def __init__(self,
               root='http://localhost:9000',
               path='/publish/epoch/end/',
               field='data',
               headers=None,
               send_as_json=False):
    super(RemoteMonitor, self).__init__()
    self.root = root
    self.path = path
    self.field = field
    self.headers = headers
    self.send_as_json = send_as_json

  def on_epoch_end(self, epoch, logs=None):
    # `requests` is imported at module level as an optional dependency.
    if requests is None:
      raise ImportError('RemoteMonitor requires the `requests` library.')
    logs = logs or {}
    send = {}
    send['epoch'] = epoch
    for k, v in logs.items():
      send[k] = v
    try:
      if self.send_as_json:
        requests.post(self.root + self.path, json=send, headers=self.headers)
      else:
        # Form-encoded: the JSON payload is nested under `self.field`.
        requests.post(
            self.root + self.path, {self.field: json.dumps(send)},
            headers=self.headers)
    except requests.exceptions.RequestException:
      # Best-effort delivery: network failures only produce a warning.
      logging.warning('Warning: could not reach RemoteMonitor '
                      'root server at ' + str(self.root))
@tf_export('keras.callbacks.LearningRateScheduler')
class LearningRateScheduler(Callback):
  """Learning rate scheduler.

  Arguments:
      schedule: a function that takes an epoch index as input
          (integer, indexed from 0) and returns a new
          learning rate as output (float).
      verbose: int. 0: quiet, 1: update messages.
  """

  def __init__(self, schedule, verbose=0):
    super(LearningRateScheduler, self).__init__()
    self.schedule = schedule
    self.verbose = verbose

  def on_epoch_begin(self, epoch, logs=None):
    if not hasattr(self.model.optimizer, 'lr'):
      raise ValueError('Optimizer must have a "lr" attribute.')
    # NOTE(review): this catches *any* TypeError, so a TypeError raised
    # inside a two-argument `schedule` would be silently retried via the
    # old one-argument API — confirm that is acceptable.
    try:  # new API
      lr = float(K.get_value(self.model.optimizer.lr))
      lr = self.schedule(epoch, lr)
    except TypeError:  # Support for old API for backward compatibility
      lr = self.schedule(epoch)
    if not isinstance(lr, (float, np.float32, np.float64)):
      raise ValueError('The output of the "schedule" function '
                       'should be float.')
    # Push the new learning rate into the optimizer's backend variable.
    K.set_value(self.model.optimizer.lr, lr)
    if self.verbose > 0:
      print('\nEpoch %05d: LearningRateScheduler reducing learning '
            'rate to %s.' % (epoch + 1, lr))
@tf_export('keras.callbacks.TensorBoard')
class TensorBoard(Callback):
# pylint: disable=line-too-long
"""Tensorboard basic visualizations.
This callback writes a log for TensorBoard, which allows
you to visualize dynamic graphs of your training and test
metrics, as well as activation histograms for the different
layers in your model.
TensorBoard is a visualization tool provided with TensorFlow.
If you have installed TensorFlow with pip, you should be able
to launch TensorBoard from the command line:
```sh
tensorboard --logdir=/full_path_to_your_logs
```
You can find more information about TensorBoard
[here](https://www.tensorflow.org/get_started/summaries_and_tensorboard).
Arguments:
log_dir: the path of the directory where to save the log
files to be parsed by TensorBoard.
histogram_freq: frequency (in epochs) at which to compute activation
and weight histograms for the layers of the model. If set to 0,
histograms won't be computed. Validation data (or split) must be
specified for histogram visualizations.
write_graph: whether to visualize the graph in TensorBoard.
The log file can become quite large when
write_graph is set to True.
write_grads: whether to visualize gradient histograms in TensorBoard.
`histogram_freq` must be greater than 0.
batch_size: size of batch of inputs to feed to the network
for histograms computation.
write_images: whether to write model weights to visualize as
image in TensorBoard.
embeddings_freq: frequency (in epochs) at which selected embedding
layers will be saved. If set to 0, embeddings won't be computed.
Data to be visualized in TensorBoard's Embedding tab must be passed
as `embeddings_data`.
embeddings_layer_names: a list of names of layers to keep eye on. If
None or empty list all the embedding layer will be watched.
embeddings_metadata: a dictionary which maps layer name to a file name
in which metadata for this embedding layer is saved. See the
[details](https://www.tensorflow.org/how_tos/embedding_viz/#metadata_optional)
about metadata files format. In case if the same metadata file is
used for all embedding layers, string can be passed.
embeddings_data: data to be embedded at layers specified in
`embeddings_layer_names`. Numpy array (if the model has a single
input) or list of Numpy arrays (if the model has multiple inputs).
Learn [more about embeddings](https://www.tensorflow.org/programmers_guide/embedding)
"""
# pylint: enable=line-too-long
def __init__(self,
log_dir='./logs',
histogram_freq=0,
batch_size=32,
write_graph=True,
write_grads=False,
write_images=False,
embeddings_freq=0,
embeddings_layer_names=None,
embeddings_metadata=None,
embeddings_data=None):
super(TensorBoard, self).__init__()
self.log_dir = log_dir
self.histogram_freq = histogram_freq
self.merged = None
self.write_graph = write_graph
self.write_grads = write_grads
self.write_images = write_images
self.batch_size = batch_size
self._current_batch = 0
# abstracted writer class to be able to stub for testing
self._writer_class = tf_summary.FileWriter
self.embeddings_freq = embeddings_freq
self.embeddings_layer_names = embeddings_layer_names
self.embeddings_metadata = embeddings_metadata
self.embeddings_data = embeddings_data
def set_model(self, model):
"""Sets Keras model and creates summary ops."""
self.model = model
self.sess = K.get_session()
# only make histogram summary op if it hasn't already been made
if self.histogram_freq and self.merged is None:
for layer in self.model.layers:
for weight in layer.weights:
mapped_weight_name = weight.name.replace(':', '_')
tf_summary.histogram(mapped_weight_name, weight)
if self.write_images:
w_img = array_ops.squeeze(weight)
shape = K.int_shape(w_img)
if len(shape) == 2: # dense layer kernel case
if shape[0] > shape[1]:
w_img = array_ops.transpose(w_img)
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
elif len(shape) == 3: # convnet case
if K.image_data_format() == 'channels_last':
# switch to channels_first to display
# every kernel as a separate image
w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img,
[shape[0], shape[1], shape[2], 1])
elif len(shape) == 1: # bias case
w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
else:
# not possible to handle 3D convnets etc.
continue
shape = K.int_shape(w_img)
assert len(shape) == 4 and shape[-1] in [1, 3, 4]
tf_summary.image(mapped_weight_name, w_img)
if self.write_grads:
for weight in layer.trainable_weights:
mapped_weight_name = weight.name.replace(':', '_')
grads = model.optimizer.get_gradients(model.total_loss, weight)
def is_indexed_slices(grad):
return type(grad).__name__ == 'IndexedSlices'
grads = [grad.values if is_indexed_slices(grad) else grad
for grad in grads]
tf_summary.histogram('{}_grad'.format(mapped_weight_name), grads)
if hasattr(layer, 'output'):
if isinstance(layer.output, list):
for i, output in enumerate(layer.output):
tf_summary.histogram('{}_out_{}'.format(layer.name, i), output)
else:
tf_summary.histogram('{}_out'.format(layer.name), layer.output)
self.merged = tf_summary.merge_all()
if self.write_graph:
self.writer = self._writer_class(self.log_dir, self.sess.graph)
else:
self.writer = self._writer_class(self.log_dir)
# If both embedding_freq and embeddings_data are available, we will
# visualize embeddings.
if self.embeddings_freq and self.embeddings_data is not None:
self.embeddings_data = standardize_input_data(self.embeddings_data,
model.input_names)
# If embedding_layer_names are not provided, get all of the embedding
# layers from the model.
embeddings_layer_names = self.embeddings_layer_names
if not embeddings_layer_names:
embeddings_layer_names = [
layer.name
for layer in self.model.layers
if type(layer).__name__ == 'Embedding'
]
self.assign_embeddings = []
embeddings_vars = {}
self.batch_id = batch_id = array_ops.placeholder(dtypes.int32)
self.step = step = array_ops.placeholder(dtypes.int32)
for layer in self.model.layers:
if layer.name in embeddings_layer_names:
embedding_input = self.model.get_layer(layer.name).output
embedding_size = np.prod(embedding_input.shape[1:])
embedding_input = array_ops.reshape(embedding_input,
(step, int(embedding_size)))
shape = (self.embeddings_data[0].shape[0], int(embedding_size))
embedding = variables.Variable(
array_ops.zeros(shape), name=layer.name + '_embedding')
embeddings_vars[layer.name] = embedding
batch = state_ops.assign(embedding[batch_id:batch_id + step],
embedding_input)
self.assign_embeddings.append(batch)
self.saver = saver.Saver(list(embeddings_vars.values()))
# Create embeddings_metadata dictionary
if isinstance(self.embeddings_metadata, str):
embeddings_metadata = {
layer_name: self.embeddings_metadata
for layer_name in embeddings_vars.keys()
}
else:
# If embedding_metadata is already a dictionary
embeddings_metadata = self.embeddings_metadata
try:
from tensorboard.plugins import projector
except ImportError:
raise ImportError('Failed to import TensorBoard. Please make sure that '
'TensorBoard integration is complete."')
# TODO(psv): Add integration tests to test embedding visualization
# with TensorBoard callback. We are unable to write a unit test for this
# because TensorBoard dependency assumes TensorFlow package is installed.
config = projector.ProjectorConfig()
for layer_name, tensor in embeddings_vars.items():
embedding = config.embeddings.add()
embedding.tensor_name = tensor.name
if (embeddings_metadata is not None and
layer_name in embeddings_metadata):
embedding.metadata_path = embeddings_metadata[layer_name]
projector.visualize_embeddings(self.writer, config)
def _fetch_callback(self, summary):
self.writer.add_summary(
summary,
self._epoch + self._current_val_batch / self._validation_batches)
self._current_val_batch += 1
def on_train_begin(self, logs=None):
"""Checks if histogram summaries can be run."""
if self.histogram_freq:
if 'validation_steps' in self.params:
self._validation_batches = self.params['validation_steps']
elif self.validation_data:
self._validation_batches = math.ceil(
self.validation_data[0].shape[0] / self.batch_size)
else:
raise ValueError('If printing histograms, validation data must be '
'provided.')
if self._validation_batches == 0:
raise ValueError(
'If printing histograms, validation data must have length > 0.')
def on_epoch_begin(self, epoch, logs=None):
"""Add histogram op to Model test_function callbacks, reset batch count."""
# check if histogram summary should be run for this epoch
if self.histogram_freq and epoch % self.histogram_freq == 0:
self._epoch = epoch
self._current_val_batch = 0
# add the histogram summary op if it should run this epoch
if self.merged not in self.model.test_function.fetches:
self.model.test_function.fetches.append(self.merged)
self.model.test_function.fetch_callbacks[
self.merged] = self._fetch_callback
  def on_epoch_end(self, epoch, logs=None):
    """Checks if summary ops should run next epoch, logs scalar summaries."""
    logs = logs or {}
    # pop the histogram summary op after each epoch
    if self.histogram_freq:
      if self.merged in self.model.test_function.fetches:
        self.model.test_function.fetches.remove(self.merged)
      if self.merged in self.model.test_function.fetch_callbacks:
        self.model.test_function.fetch_callbacks.pop(self.merged)
    if self.embeddings_data is None and self.embeddings_freq:
      raise ValueError('To visualize embeddings, embeddings_data must '
                       'be provided.')
    if self.embeddings_freq and self.embeddings_data is not None:
      if epoch % self.embeddings_freq == 0:
        # We need a second forward-pass here because we're passing
        # the `embeddings_data` explicitly. This design allows to pass
        # arbitrary data as `embeddings_data` and results from the fact
        # that we need to know the size of the `tf.Variable`s which
        # hold the embeddings in `set_model`. At this point, however,
        # the `validation_data` is not yet set.
        embeddings_data = self.embeddings_data
        n_samples = embeddings_data[0].shape[0]
        # Feed the embedding data through the model in batches of
        # self.batch_size, assigning the results into the embedding
        # variables and checkpointing them for the projector.
        i = 0
        while i < n_samples:
          step = min(self.batch_size, n_samples - i)
          batch = slice(i, i + step)
          # Multi-input models get one feed entry per input tensor;
          # single-input models feed the first (only) embeddings array.
          if isinstance(self.model.input, list):
            feed_dict = {
                model_input: embeddings_data[idx][batch]
                for idx, model_input in enumerate(self.model.input)
            }
          else:
            feed_dict = {self.model.input: embeddings_data[0][batch]}
          # batch_id/step tell the assign ops where this slice goes.
          feed_dict.update({self.batch_id: i, self.step: step})
          if self.model.uses_learning_phase:
            # Run in inference mode (learning phase off).
            feed_dict[K.learning_phase()] = False
          self.sess.run(self.assign_embeddings, feed_dict=feed_dict)
          self.saver.save(self.sess,
                          os.path.join(self.log_dir, 'keras_embedding.ckpt'),
                          epoch)
          i += self.batch_size
    # Log every scalar in `logs` (skipping batch bookkeeping keys) as a
    # simple-value summary for this epoch.
    for name, value in logs.items():
      if name in ['batch', 'size']:
        continue
      summary = tf_summary.Summary()
      summary_value = summary.value.add()
      # assumes `value` is a numpy scalar (has .item()) — TODO confirm
      summary_value.simple_value = value.item()
      summary_value.tag = name
      self.writer.add_summary(summary, epoch)
    self.writer.flush()
  def on_train_end(self, logs=None):
    # Release the summary writer's file handle once training finishes.
    self.writer.close()
@tf_export('keras.callbacks.ReduceLROnPlateau')
class ReduceLROnPlateau(Callback):
  """Reduce learning rate when a metric has stopped improving.

  Models often benefit from reducing the learning rate by a factor
  of 2-10 once learning stagnates. This callback monitors a
  quantity and if no improvement is seen for a 'patience' number
  of epochs, the learning rate is reduced.

  Example:

  ```python
  reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,
                                patience=5, min_lr=0.001)
  model.fit(X_train, Y_train, callbacks=[reduce_lr])
  ```

  Arguments:
      monitor: quantity to be monitored.
      factor: factor by which the learning rate will
          be reduced. new_lr = lr * factor
      patience: number of epochs with no improvement
          after which learning rate will be reduced.
      verbose: int. 0: quiet, 1: update messages.
      mode: one of {auto, min, max}. In `min` mode,
          lr will be reduced when the quantity
          monitored has stopped decreasing; in `max`
          mode it will be reduced when the quantity
          monitored has stopped increasing; in `auto`
          mode, the direction is automatically inferred
          from the name of the monitored quantity.
      min_delta: threshold for measuring the new optimum,
          to only focus on significant changes.
      cooldown: number of epochs to wait before resuming
          normal operation after lr has been reduced.
      min_lr: lower bound on the learning rate.
  """

  def __init__(self,
               monitor='val_loss',
               factor=0.1,
               patience=10,
               verbose=0,
               mode='auto',
               min_delta=1e-4,
               cooldown=0,
               min_lr=0,
               **kwargs):
    super(ReduceLROnPlateau, self).__init__()

    self.monitor = monitor
    if factor >= 1.0:
      raise ValueError('ReduceLROnPlateau does not support a factor >= 1.0.')
    # `epsilon` was the historical name for `min_delta`; accept it for
    # backward compatibility but warn callers.
    if 'epsilon' in kwargs:
      min_delta = kwargs.pop('epsilon')
      logging.warning('`epsilon` argument is deprecated and '
                      'will be removed, use `min_delta` instead.')
    self.factor = factor
    self.min_lr = min_lr
    self.min_delta = min_delta
    self.patience = patience
    self.verbose = verbose
    self.cooldown = cooldown
    self.cooldown_counter = 0  # Cooldown counter.
    self.wait = 0  # Epochs since last improvement.
    self.best = 0  # Best monitored value seen so far (set in _reset).
    self.mode = mode
    self.monitor_op = None  # Comparison function chosen in _reset.
    self._reset()

  def _reset(self):
    """Resets wait counter and cooldown counter.

    Also selects the comparison direction: smaller-is-better for `min`
    (and `auto` on non-accuracy metrics), larger-is-better otherwise.
    """
    if self.mode not in ['auto', 'min', 'max']:
      logging.warning('Learning Rate Plateau Reducing mode %s is unknown, '
                      'fallback to auto mode.', self.mode)
      self.mode = 'auto'
    if (self.mode == 'min' or
        (self.mode == 'auto' and 'acc' not in self.monitor)):
      self.monitor_op = lambda a, b: np.less(a, b - self.min_delta)
      # Bug fix: `np.Inf` was removed in NumPy 2.0; `np.inf` is the
      # supported spelling (identical value on older NumPy).
      self.best = np.inf
    else:
      self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)
      self.best = -np.inf
    self.cooldown_counter = 0
    self.wait = 0

  def on_train_begin(self, logs=None):
    # Start each training run from a clean plateau-detection state.
    self._reset()

  def on_epoch_end(self, epoch, logs=None):
    """Reduces the optimizer lr if `monitor` plateaued for `patience` epochs."""
    logs = logs or {}
    # Expose the current lr so other callbacks (e.g. CSVLogger) can see it.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
    current = logs.get(self.monitor)
    if current is None:
      logging.warning('Reduce LR on plateau conditioned on metric `%s` '
                      'which is not available. Available metrics are: %s',
                      self.monitor, ','.join(list(logs.keys())))

    else:
      if self.in_cooldown():
        self.cooldown_counter -= 1
        self.wait = 0

      if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
      elif not self.in_cooldown():
        self.wait += 1
        if self.wait >= self.patience:
          old_lr = float(K.get_value(self.model.optimizer.lr))
          if old_lr > self.min_lr:
            # Scale down, but never below the configured floor.
            new_lr = old_lr * self.factor
            new_lr = max(new_lr, self.min_lr)
            K.set_value(self.model.optimizer.lr, new_lr)
            if self.verbose > 0:
              print('\nEpoch %05d: ReduceLROnPlateau reducing learning '
                    'rate to %s.' % (epoch + 1, new_lr))
            self.cooldown_counter = self.cooldown
            self.wait = 0

  def in_cooldown(self):
    # True while we are still waiting after the last lr reduction.
    return self.cooldown_counter > 0
@tf_export('keras.callbacks.CSVLogger')
class CSVLogger(Callback):
  """Callback that streams epoch results to a csv file.

  Any value that can be rendered as a string is supported, including
  1D iterables such as np.ndarray.

  Example:

  ```python
  csv_logger = CSVLogger('training.log')
  model.fit(X_train, Y_train, callbacks=[csv_logger])
  ```

  Arguments:
      filename: filename of the csv file, e.g. 'run/log.csv'.
      separator: string used to separate elements in the csv file.
      append: True: append if file exists (useful for continuing
          training). False: overwrite existing file,
  """

  def __init__(self, filename, separator=',', append=False):
    super(CSVLogger, self).__init__()
    self.sep = separator
    self.filename = filename
    self.append = append
    self.writer = None       # Lazily created csv.DictWriter.
    self.keys = None         # Sorted log keys, fixed on first epoch.
    self.append_header = True
    # Python 2 on Windows needs binary mode to avoid newline mangling.
    self.file_flags = 'b' if six.PY2 and os.name == 'nt' else ''

  def on_train_begin(self, logs=None):
    """Opens the destination file, appending to it if requested."""
    mode = 'w'
    if self.append:
      if os.path.exists(self.filename):
        # Skip the header when appending to a non-empty file.
        with open(self.filename, 'r' + self.file_flags) as f:
          self.append_header = not bool(len(f.readline()))
      mode = 'a'
    self.csv_file = open(self.filename, mode + self.file_flags)

  def on_epoch_end(self, epoch, logs=None):
    """Writes one csv row with the epoch number and all logged values."""
    logs = logs or {}

    def handle_value(k):
      # 1D iterables become a quoted '[a, b, ...]' cell; scalars pass through.
      is_zero_dim_ndarray = isinstance(k, np.ndarray) and k.ndim == 0
      if isinstance(k, six.string_types):
        return k
      if isinstance(k, Iterable) and not is_zero_dim_ndarray:
        return '"[%s]"' % (', '.join(map(str, k)))
      return k

    if self.keys is None:
      self.keys = sorted(logs.keys())

    if self.model.stop_training:
      # We set NA so that csv parsers do not fail for this last epoch.
      logs = dict((k, logs.get(k, 'NA')) for k in self.keys)

    if not self.writer:

      class CustomDialect(csv.excel):
        delimiter = self.sep

      self.writer = csv.DictWriter(
          self.csv_file,
          fieldnames=['epoch'] + self.keys,
          dialect=CustomDialect)
      if self.append_header:
        self.writer.writeheader()

    row_dict = OrderedDict({'epoch': epoch})
    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
    self.writer.writerow(row_dict)
    self.csv_file.flush()

  def on_train_end(self, logs=None):
    # Close the file and drop the writer so the callback can be reused.
    self.csv_file.close()
    self.writer = None
@tf_export('keras.callbacks.LambdaCallback')
class LambdaCallback(Callback):
  r"""Callback for creating simple, custom callbacks on-the-fly.

  This callback is constructed with anonymous functions that will be called
  at the appropriate time. Note that the callbacks expects positional
  arguments, as:

  - `on_epoch_begin` and `on_epoch_end` expect two positional arguments:
    `epoch`, `logs`
  - `on_batch_begin` and `on_batch_end` expect two positional arguments:
    `batch`, `logs`
  - `on_train_begin` and `on_train_end` expect one positional argument:
    `logs`

  Arguments:
      on_epoch_begin: called at the beginning of every epoch.
      on_epoch_end: called at the end of every epoch.
      on_batch_begin: called at the beginning of every batch.
      on_batch_end: called at the end of every batch.
      on_train_begin: called at the beginning of model training.
      on_train_end: called at the end of model training.

  Example:

  ```python
  # Print the batch number at the beginning of every batch.
  batch_print_callback = LambdaCallback(
      on_batch_begin=lambda batch,logs: print(batch))

  # Stream the epoch loss to a file in JSON format. The file content
  # is not well-formed JSON but rather has a JSON object per line.
  import json
  json_log = open('loss_log.json', mode='wt', buffering=1)
  json_logging_callback = LambdaCallback(
      on_epoch_end=lambda epoch, logs: json_log.write(
          json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
      on_train_end=lambda logs: json_log.close()
  )

  # Terminate some processes after having finished model training.
  processes = ...
  cleanup_callback = LambdaCallback(
      on_train_end=lambda logs: [
          p.terminate() for p in processes if p.is_alive()])

  model.fit(...,
            callbacks=[batch_print_callback,
                       json_logging_callback,
                       cleanup_callback])
  ```
  """

  def __init__(self,
               on_epoch_begin=None,
               on_epoch_end=None,
               on_batch_begin=None,
               on_batch_end=None,
               on_train_begin=None,
               on_train_end=None,
               **kwargs):
    super(LambdaCallback, self).__init__()
    # Attach any extra keyword arguments as attributes on the instance.
    self.__dict__.update(kwargs)
    # For each hook, install the user-supplied callable or a no-op lambda
    # with the matching positional signature.
    self.on_epoch_begin = (
        on_epoch_begin if on_epoch_begin is not None else
        (lambda epoch, logs: None))
    self.on_epoch_end = (
        on_epoch_end if on_epoch_end is not None else
        (lambda epoch, logs: None))
    self.on_batch_begin = (
        on_batch_begin if on_batch_begin is not None else
        (lambda batch, logs: None))
    self.on_batch_end = (
        on_batch_end if on_batch_end is not None else
        (lambda batch, logs: None))
    self.on_train_begin = (
        on_train_begin if on_train_begin is not None else
        (lambda logs: None))
    self.on_train_end = (
        on_train_end if on_train_end is not None else
        (lambda logs: None))
| 35.379906
| 95
| 0.647158
|
58233f07d55a5270f6c2c9b4e2dae84a45f80b57
| 123
|
py
|
Python
|
habet/__init__.py
|
odra/habet
|
cb791c3367f06fff0a6557fe5697033b2262bfc8
|
[
"MIT"
] | null | null | null |
habet/__init__.py
|
odra/habet
|
cb791c3367f06fff0a6557fe5697033b2262bfc8
|
[
"MIT"
] | null | null | null |
habet/__init__.py
|
odra/habet
|
cb791c3367f06fff0a6557fe5697033b2262bfc8
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from .application import Application
from .response import Response
from .handler import Handler
| 24.6
| 36
| 0.764228
|
eeefc062c308b9adda22326d250e90a170325888
| 791,176
|
py
|
Python
|
gen/CPP14Parser.py
|
badrinath-reddy/PPL
|
c72d005719692f82769ad24a49f54d0075c8ffda
|
[
"MIT"
] | null | null | null |
gen/CPP14Parser.py
|
badrinath-reddy/PPL
|
c72d005719692f82769ad24a49f54d0075c8ffda
|
[
"MIT"
] | null | null | null |
gen/CPP14Parser.py
|
badrinath-reddy/PPL
|
c72d005719692f82769ad24a49f54d0075c8ffda
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u0098")
buf.write("\u09a7\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\t")
buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t")
buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4")
buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4")
buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4")
buf.write("q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4y\ty\4")
buf.write("z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080")
buf.write("\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084")
buf.write("\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087")
buf.write("\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a\4\u008b")
buf.write("\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e\t\u008e")
buf.write("\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091\4\u0092")
buf.write("\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095\t\u0095")
buf.write("\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098\4\u0099")
buf.write("\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c\t\u009c")
buf.write("\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f\4\u00a0")
buf.write("\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3\t\u00a3")
buf.write("\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6\4\u00a7")
buf.write("\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\4\u00aa\t\u00aa")
buf.write("\4\u00ab\t\u00ab\4\u00ac\t\u00ac\4\u00ad\t\u00ad\4\u00ae")
buf.write("\t\u00ae\4\u00af\t\u00af\4\u00b0\t\u00b0\4\u00b1\t\u00b1")
buf.write("\4\u00b2\t\u00b2\4\u00b3\t\u00b3\4\u00b4\t\u00b4\4\u00b5")
buf.write("\t\u00b5\4\u00b6\t\u00b6\4\u00b7\t\u00b7\4\u00b8\t\u00b8")
buf.write("\4\u00b9\t\u00b9\4\u00ba\t\u00ba\4\u00bb\t\u00bb\4\u00bc")
buf.write("\t\u00bc\4\u00bd\t\u00bd\4\u00be\t\u00be\4\u00bf\t\u00bf")
buf.write("\4\u00c0\t\u00c0\4\u00c1\t\u00c1\4\u00c2\t\u00c2\4\u00c3")
buf.write("\t\u00c3\4\u00c4\t\u00c4\4\u00c5\t\u00c5\4\u00c6\t\u00c6")
buf.write("\4\u00c7\t\u00c7\4\u00c8\t\u00c8\4\u00c9\t\u00c9\3\2\5")
buf.write("\2\u0194\n\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5")
buf.write("\3\u01a0\n\3\3\4\3\4\5\4\u01a4\n\4\3\5\3\5\3\5\3\5\3\5")
buf.write("\3\5\3\5\3\5\3\5\5\5\u01af\n\5\3\6\3\6\5\6\u01b3\n\6\3")
buf.write("\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5\7")
buf.write("\u01c2\n\7\3\7\3\7\3\7\3\7\3\7\5\7\u01c9\n\7\3\7\3\7\3")
buf.write("\7\7\7\u01ce\n\7\f\7\16\7\u01d1\13\7\3\b\3\b\5\b\u01d5")
buf.write("\n\b\3\b\3\b\3\t\3\t\5\t\u01db\n\t\3\t\3\t\3\n\3\n\3\n")
buf.write("\3\n\3\n\3\n\5\n\u01e5\n\n\3\13\3\13\3\f\3\f\3\f\5\f\u01ec")
buf.write("\n\f\3\f\3\f\3\f\3\f\5\f\u01f2\n\f\7\f\u01f4\n\f\f\f\16")
buf.write("\f\u01f7\13\f\3\r\3\r\5\r\u01fb\n\r\3\16\3\16\3\16\3\16")
buf.write("\5\16\u0201\n\16\3\17\3\17\3\17\3\17\3\17\5\17\u0208\n")
buf.write("\17\3\20\3\20\3\20\3\20\5\20\u020e\n\20\3\20\5\20\u0211")
buf.write("\n\20\3\20\5\20\u0214\n\20\3\20\5\20\u0217\n\20\3\21\3")
buf.write("\21\3\21\3\21\3\21\5\21\u021e\n\21\3\21\3\21\3\21\3\21")
buf.write("\3\21\5\21\u0225\n\21\3\21\3\21\3\21\3\21\3\21\3\21\3")
buf.write("\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21")
buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21")
buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21")
buf.write("\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21")
buf.write("\5\21\u0259\n\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3")
buf.write("\21\3\21\3\21\3\21\3\21\3\21\5\21\u0268\n\21\3\21\3\21")
buf.write("\3\21\3\21\5\21\u026e\n\21\3\21\3\21\3\21\3\21\5\21\u0274")
buf.write("\n\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21")
buf.write("\3\21\7\21\u0281\n\21\f\21\16\21\u0284\13\21\3\22\3\22")
buf.write("\3\23\3\23\3\24\3\24\3\25\5\25\u028d\n\25\3\25\3\25\3")
buf.write("\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25")
buf.write("\5\25\u029c\n\25\3\25\3\25\3\25\3\25\5\25\u02a2\n\25\3")
buf.write("\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26")
buf.write("\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26")
buf.write("\3\26\3\26\3\26\3\26\3\26\3\26\5\26\u02c0\n\26\3\27\3")
buf.write("\27\3\30\5\30\u02c5\n\30\3\30\3\30\5\30\u02c9\n\30\3\30")
buf.write("\3\30\5\30\u02cd\n\30\3\30\5\30\u02d0\n\30\3\30\3\30\5")
buf.write("\30\u02d4\n\30\3\30\3\30\3\30\3\30\5\30\u02da\n\30\5\30")
buf.write("\u02dc\n\30\3\31\3\31\3\31\3\31\3\32\3\32\5\32\u02e4\n")
buf.write("\32\3\33\3\33\5\33\u02e8\n\33\3\33\5\33\u02eb\n\33\3\34")
buf.write("\3\34\3\34\3\34\3\34\5\34\u02f2\n\34\3\34\3\34\3\34\3")
buf.write("\34\3\34\5\34\u02f9\n\34\7\34\u02fb\n\34\f\34\16\34\u02fe")
buf.write("\13\34\3\35\3\35\5\35\u0302\n\35\3\35\3\35\5\35\u0306")
buf.write("\n\35\3\36\5\36\u0309\n\36\3\36\3\36\3\36\5\36\u030e\n")
buf.write("\36\3\36\3\36\3\36\3\36\5\36\u0314\n\36\3\37\3\37\3\37")
buf.write("\3\37\3\37\3 \3 \3 \3 \3 \3 \5 \u0321\n \3!\3!\3!\3!\3")
buf.write("!\3!\3!\3!\3!\7!\u032c\n!\f!\16!\u032f\13!\3\"\3\"\3\"")
buf.write("\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\7\"\u033d\n\"\f\"")
buf.write("\16\"\u0340\13\"\3#\3#\3#\3#\3#\3#\3#\3#\3#\7#\u034b\n")
buf.write("#\f#\16#\u034e\13#\3$\3$\3$\3$\3$\3$\3$\7$\u0357\n$\f")
buf.write("$\16$\u035a\13$\3%\3%\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3")
buf.write("&\3&\3&\3&\3&\7&\u036d\n&\f&\16&\u0370\13&\3\'\3\'\3\'")
buf.write("\3\'\3\'\3\'\3\'\3\'\3\'\7\'\u037b\n\'\f\'\16\'\u037e")
buf.write("\13\'\3(\3(\3(\3(\3(\3(\7(\u0386\n(\f(\16(\u0389\13(\3")
buf.write(")\3)\3)\3)\3)\3)\7)\u0391\n)\f)\16)\u0394\13)\3*\3*\3")
buf.write("*\3*\3*\3*\7*\u039c\n*\f*\16*\u039f\13*\3+\3+\3+\3+\3")
buf.write("+\3+\3+\3+\3+\7+\u03aa\n+\f+\16+\u03ad\13+\3,\3,\3,\3")
buf.write(",\3,\3,\3,\3,\3,\7,\u03b8\n,\f,\16,\u03bb\13,\3-\3-\3")
buf.write("-\3-\3-\3-\3-\5-\u03c4\n-\3.\3.\3.\3.\3.\3.\5.\u03cc\n")
buf.write(".\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\7\60\u03d6\n\60")
buf.write("\f\60\16\60\u03d9\13\60\3\61\3\61\3\62\3\62\5\62\u03df")
buf.write("\n\62\3\62\3\62\5\62\u03e3\n\62\3\62\3\62\5\62\u03e7\n")
buf.write("\62\3\62\3\62\5\62\u03eb\n\62\3\62\3\62\5\62\u03ef\n\62")
buf.write("\3\62\3\62\3\62\5\62\u03f4\n\62\3\62\5\62\u03f7\n\62\3")
buf.write("\63\5\63\u03fa\n\63\3\63\3\63\3\63\3\63\5\63\u0400\n\63")
buf.write("\3\63\3\63\3\63\3\63\3\63\3\63\5\63\u0408\n\63\3\63\3")
buf.write("\63\3\63\5\63\u040d\n\63\3\64\5\64\u0410\n\64\3\64\3\64")
buf.write("\3\65\3\65\5\65\u0416\n\65\3\65\3\65\3\66\3\66\3\66\3")
buf.write("\66\3\66\7\66\u041f\n\66\f\66\16\66\u0422\13\66\3\67\3")
buf.write("\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67")
buf.write("\3\67\3\67\3\67\3\67\3\67\3\67\3\67\3\67\5\67\u0438\n")
buf.write("\67\38\38\58\u043c\n8\38\38\38\38\38\38\58\u0444\n8\3")
buf.write("8\38\38\38\58\u044a\n8\39\39\39\39\39\39\39\39\39\39\3")
buf.write("9\39\39\39\39\39\39\39\59\u045e\n9\39\39\59\u0462\n9\3")
buf.write("9\39\39\39\39\39\39\39\39\39\39\59\u046f\n9\3:\3:\5:\u0473")
buf.write("\n:\3;\5;\u0476\n;\3;\3;\3;\3<\3<\5<\u047d\n<\3=\3=\3")
buf.write("=\3=\3=\3=\5=\u0485\n=\3=\3=\3=\3=\3=\3=\3=\3=\5=\u048f")
buf.write("\n=\3>\3>\3?\3?\3?\3?\3?\7?\u0498\n?\f?\16?\u049b\13?")
buf.write("\3@\3@\3@\3@\3@\3@\3@\3@\3@\5@\u04a6\n@\3A\3A\3A\3A\3")
buf.write("A\3A\3A\3A\5A\u04b0\nA\3B\3B\3B\5B\u04b5\nB\3B\3B\3B\3")
buf.write("B\3C\5C\u04bc\nC\3C\5C\u04bf\nC\3C\3C\3C\5C\u04c4\nC\3")
buf.write("C\3C\3C\5C\u04c9\nC\3D\3D\3D\3D\3D\3D\3D\3D\3E\3E\3F\3")
buf.write("F\3F\3G\3G\3G\3G\3G\3G\5G\u04de\nG\3H\3H\5H\u04e2\nH\3")
buf.write("H\3H\3H\5H\u04e7\nH\3I\3I\3J\3J\3K\3K\3L\3L\3L\5L\u04f2")
buf.write("\nL\3M\3M\3M\3M\5M\u04f8\nM\3N\3N\5N\u04fc\nN\3N\3N\3")
buf.write("N\5N\u0501\nN\3O\3O\5O\u0505\nO\3O\3O\3O\5O\u050a\nO\3")
buf.write("P\5P\u050d\nP\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3")
buf.write("P\3P\3P\3P\3P\3P\3P\5P\u0523\nP\3Q\3Q\3Q\3Q\5Q\u0529\n")
buf.write("Q\3R\3R\3R\3R\3R\3R\3R\3R\3R\5R\u0534\nR\3S\3S\5S\u0538")
buf.write("\nS\3S\5S\u053b\nS\3S\3S\3S\3S\3S\3S\3S\3S\5S\u0545\n")
buf.write("S\3S\3S\3S\3S\5S\u054b\nS\3S\5S\u054e\nS\3T\3T\3U\3U\3")
buf.write("U\5U\u0555\nU\3U\3U\3U\3U\3U\3U\3U\3U\5U\u055f\nU\3V\3")
buf.write("V\5V\u0563\nV\3V\5V\u0566\nV\3V\5V\u0569\nV\3V\3V\5V\u056d")
buf.write("\nV\3V\3V\3V\5V\u0572\nV\5V\u0574\nV\3W\3W\5W\u0578\n")
buf.write("W\3W\3W\5W\u057c\nW\3W\3W\3X\3X\3X\3X\3X\5X\u0585\nX\3")
buf.write("Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3Z\7Z\u0590\nZ\fZ\16Z\u0593\13")
buf.write("Z\3[\3[\3[\3[\3[\5[\u059a\n[\3\\\3\\\3]\3]\5]\u05a0\n")
buf.write("]\3^\3^\3_\3_\5_\u05a6\n_\3`\3`\5`\u05aa\n`\3a\5a\u05ad")
buf.write("\na\3a\3a\3a\3a\3a\3a\3b\5b\u05b6\nb\3b\3b\3b\3b\3b\3")
buf.write("b\3c\5c\u05bf\nc\3c\3c\3c\3c\3c\3d\5d\u05c7\nd\3e\3e\3")
buf.write("f\3f\3f\3f\3f\3f\3g\5g\u05d2\ng\3g\3g\3h\3h\5h\u05d8\n")
buf.write("h\3h\3h\3h\3h\3h\3h\3h\3h\3h\5h\u05e3\nh\3i\5i\u05e6\n")
buf.write("i\3i\3i\3i\5i\u05eb\ni\3i\3i\3i\3j\3j\3j\3j\3j\3j\3k\3")
buf.write("k\3k\3k\5k\u05fa\nk\3k\3k\3k\3k\5k\u0600\nk\3l\3l\3l\3")
buf.write("l\3l\7l\u0607\nl\fl\16l\u060a\13l\3m\3m\3m\3m\3m\3m\3")
buf.write("m\5m\u0613\nm\3n\3n\3n\3n\5n\u0619\nn\3n\3n\3n\3n\3n\3")
buf.write("n\5n\u0621\nn\3n\3n\5n\u0625\nn\3o\3o\5o\u0629\no\3o\3")
buf.write("o\3o\5o\u062e\no\3o\3o\3o\5o\u0633\no\3o\3o\3o\3o\3o\7")
buf.write("o\u063a\no\fo\16o\u063d\13o\3p\3p\5p\u0641\np\3q\3q\5")
buf.write("q\u0645\nq\3r\3r\3r\3r\3s\3s\3t\3t\3t\3t\3u\3u\5u\u0653")
buf.write("\nu\3u\3u\7u\u0657\nu\fu\16u\u065a\13u\3v\3v\3v\3v\3v")
buf.write("\3v\3v\3v\3v\3v\3v\3v\3v\6v\u0669\nv\rv\16v\u066a\5v\u066d")
buf.write("\nv\3w\3w\3w\3w\3w\3w\7w\u0675\nw\fw\16w\u0678\13w\3x")
buf.write("\3x\5x\u067c\nx\3y\3y\3y\3y\3y\5y\u0683\ny\3z\3z\3z\3")
buf.write("z\5z\u0689\nz\3{\3{\3{\5{\u068e\n{\3{\3{\3{\3{\5{\u0694")
buf.write("\n{\3{\3{\3{\3{\3{\5{\u069b\n{\3{\3{\5{\u069f\n{\7{\u06a1")
buf.write("\n{\f{\16{\u06a4\13{\3|\3|\3|\3|\5|\u06aa\n|\3|\5|\u06ad")
buf.write("\n|\3|\5|\u06b0\n|\3|\5|\u06b3\n|\3}\3}\3}\5}\u06b8\n")
buf.write("}\3~\3~\5~\u06bc\n~\3~\5~\u06bf\n~\3~\3~\5~\u06c3\n~\3")
buf.write("~\3~\5~\u06c7\n~\3~\3~\3~\5~\u06cc\n~\3~\5~\u06cf\n~\5")
buf.write("~\u06d1\n~\3\177\3\177\5\177\u06d5\n\177\3\u0080\3\u0080")
buf.write("\3\u0081\3\u0081\3\u0082\5\u0082\u06dc\n\u0082\3\u0082")
buf.write("\3\u0082\3\u0083\3\u0083\5\u0083\u06e2\n\u0083\3\u0084")
buf.write("\3\u0084\5\u0084\u06e6\n\u0084\3\u0084\3\u0084\3\u0084")
buf.write("\3\u0084\5\u0084\u06ec\n\u0084\3\u0085\3\u0085\3\u0085")
buf.write("\5\u0085\u06f1\n\u0085\5\u0085\u06f3\n\u0085\3\u0086\3")
buf.write("\u0086\3\u0086\3\u0086\5\u0086\u06f9\n\u0086\3\u0086\3")
buf.write("\u0086\5\u0086\u06fd\n\u0086\3\u0086\3\u0086\3\u0086\3")
buf.write("\u0086\5\u0086\u0703\n\u0086\3\u0086\3\u0086\3\u0086\3")
buf.write("\u0086\3\u0086\5\u0086\u070a\n\u0086\3\u0086\3\u0086\5")
buf.write("\u0086\u070e\n\u0086\7\u0086\u0710\n\u0086\f\u0086\16")
buf.write("\u0086\u0713\13\u0086\3\u0087\3\u0087\3\u0087\3\u0087")
buf.write("\5\u0087\u0719\n\u0087\3\u0088\3\u0088\3\u0088\3\u0088")
buf.write("\3\u0088\3\u0088\3\u0088\3\u0088\5\u0088\u0723\n\u0088")
buf.write("\3\u0088\3\u0088\5\u0088\u0727\n\u0088\7\u0088\u0729\n")
buf.write("\u0088\f\u0088\16\u0088\u072c\13\u0088\3\u0089\5\u0089")
buf.write("\u072f\n\u0089\3\u0089\5\u0089\u0732\n\u0089\3\u0089\3")
buf.write("\u0089\3\u0089\3\u0089\5\u0089\u0738\n\u0089\3\u008a\3")
buf.write("\u008a\3\u008a\3\u008a\3\u008a\3\u008a\7\u008a\u0740\n")
buf.write("\u008a\f\u008a\16\u008a\u0743\13\u008a\3\u008b\5\u008b")
buf.write("\u0746\n\u008b\3\u008b\3\u008b\3\u008b\3\u008b\5\u008b")
buf.write("\u074c\n\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b")
buf.write("\3\u008b\5\u008b\u0754\n\u008b\3\u008b\3\u008b\5\u008b")
buf.write("\u0758\n\u008b\3\u008b\5\u008b\u075b\n\u008b\3\u008b\3")
buf.write("\u008b\5\u008b\u075f\n\u008b\3\u008b\3\u008b\3\u008b\5")
buf.write("\u008b\u0764\n\u008b\3\u008c\5\u008c\u0767\n\u008c\3\u008c")
buf.write("\5\u008c\u076a\n\u008c\3\u008c\3\u008c\5\u008c\u076e\n")
buf.write("\u008c\3\u008c\3\u008c\3\u008d\5\u008d\u0773\n\u008d\3")
buf.write("\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d")
buf.write("\3\u008d\5\u008d\u077d\n\u008d\3\u008e\3\u008e\3\u008e")
buf.write("\3\u008e\3\u008e\5\u008e\u0784\n\u008e\3\u008f\3\u008f")
buf.write("\3\u008f\5\u008f\u0789\n\u008f\3\u0090\3\u0090\5\u0090")
buf.write("\u078d\n\u0090\3\u0091\3\u0091\3\u0091\5\u0091\u0792\n")
buf.write("\u0091\3\u0091\3\u0091\3\u0091\3\u0091\5\u0091\u0798\n")
buf.write("\u0091\7\u0091\u079a\n\u0091\f\u0091\16\u0091\u079d\13")
buf.write("\u0091\3\u0092\3\u0092\3\u0092\5\u0092\u07a2\n\u0092\3")
buf.write("\u0092\3\u0092\3\u0092\3\u0092\5\u0092\u07a8\n\u0092\3")
buf.write("\u0093\3\u0093\5\u0093\u07ac\n\u0093\3\u0094\3\u0094\3")
buf.write("\u0094\5\u0094\u07b1\n\u0094\3\u0094\3\u0094\3\u0095\3")
buf.write("\u0095\5\u0095\u07b7\n\u0095\3\u0095\3\u0095\5\u0095\u07bb")
buf.write("\n\u0095\3\u0095\5\u0095\u07be\n\u0095\3\u0095\3\u0095")
buf.write("\5\u0095\u07c2\n\u0095\3\u0095\5\u0095\u07c5\n\u0095\5")
buf.write("\u0095\u07c7\n\u0095\3\u0096\5\u0096\u07ca\n\u0096\3\u0096")
buf.write("\3\u0096\3\u0097\3\u0097\3\u0098\3\u0098\3\u0099\3\u0099")
buf.write("\5\u0099\u07d4\n\u0099\3\u0099\3\u0099\3\u0099\5\u0099")
buf.write("\u07d9\n\u0099\5\u0099\u07db\n\u0099\3\u009a\5\u009a\u07de")
buf.write("\n\u009a\3\u009a\5\u009a\u07e1\n\u009a\3\u009a\5\u009a")
buf.write("\u07e4\n\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a")
buf.write("\3\u009a\3\u009a\5\u009a\u07ed\n\u009a\3\u009b\3\u009b")
buf.write("\3\u009b\3\u009b\3\u009b\3\u009b\7\u009b\u07f5\n\u009b")
buf.write("\f\u009b\16\u009b\u07f8\13\u009b\3\u009c\3\u009c\5\u009c")
buf.write("\u07fc\n\u009c\3\u009c\5\u009c\u07ff\n\u009c\3\u009c\3")
buf.write("\u009c\5\u009c\u0803\n\u009c\3\u009c\5\u009c\u0806\n\u009c")
buf.write("\3\u009c\5\u009c\u0809\n\u009c\3\u009c\3\u009c\5\u009c")
buf.write("\u080d\n\u009c\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d")
buf.write("\7\u009d\u0814\n\u009d\f\u009d\16\u009d\u0817\13\u009d")
buf.write("\3\u009e\3\u009e\3\u009f\3\u009f\3\u009f\3\u009f\3\u00a0")
buf.write("\3\u00a0\3\u00a0\3\u00a1\3\u00a1\3\u00a1\5\u00a1\u0825")
buf.write("\n\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1\5\u00a1\u082b")
buf.write("\n\u00a1\7\u00a1\u082d\n\u00a1\f\u00a1\16\u00a1\u0830")
buf.write("\13\u00a1\3\u00a2\5\u00a2\u0833\n\u00a2\3\u00a2\3\u00a2")
buf.write("\5\u00a2\u0837\n\u00a2\3\u00a2\3\u00a2\5\u00a2\u083b\n")
buf.write("\u00a2\3\u00a2\3\u00a2\5\u00a2\u083f\n\u00a2\3\u00a2\3")
buf.write("\u00a2\5\u00a2\u0843\n\u00a2\3\u00a2\3\u00a2\5\u00a2\u0847")
buf.write("\n\u00a2\3\u00a3\5\u00a3\u084a\n\u00a3\3\u00a3\3\u00a3")
buf.write("\5\u00a3\u084e\n\u00a3\3\u00a4\3\u00a4\3\u00a5\3\u00a5")
buf.write("\3\u00a6\3\u00a6\3\u00a6\3\u00a7\3\u00a7\5\u00a7\u0859")
buf.write("\n\u00a7\3\u00a8\3\u00a8\5\u00a8\u085d\n\u00a8\3\u00a9")
buf.write("\3\u00a9\3\u00a9\3\u00aa\3\u00aa\5\u00aa\u0864\n\u00aa")
buf.write("\3\u00aa\3\u00aa\5\u00aa\u0868\n\u00aa\3\u00aa\3\u00aa")
buf.write("\3\u00aa\5\u00aa\u086d\n\u00aa\3\u00ab\3\u00ab\3\u00ab")
buf.write("\5\u00ab\u0872\n\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab")
buf.write("\3\u00ab\5\u00ab\u0879\n\u00ab\3\u00ac\3\u00ac\5\u00ac")
buf.write("\u087d\n\u00ac\3\u00ad\3\u00ad\3\u00ad\3\u00ae\3\u00ae")
buf.write("\3\u00ae\3\u00ae\3\u00ae\5\u00ae\u0887\n\u00ae\3\u00af")
buf.write("\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af\3\u00b0\3\u00b0")
buf.write("\3\u00b0\3\u00b0\3\u00b0\3\u00b0\7\u00b0\u0895\n\u00b0")
buf.write("\f\u00b0\16\u00b0\u0898\13\u00b0\3\u00b1\3\u00b1\5\u00b1")
buf.write("\u089c\n\u00b1\3\u00b2\3\u00b2\5\u00b2\u08a0\n\u00b2\3")
buf.write("\u00b2\5\u00b2\u08a3\n\u00b2\3\u00b2\3\u00b2\5\u00b2\u08a7")
buf.write("\n\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b2\5\u00b2\u08ad")
buf.write("\n\u00b2\3\u00b2\5\u00b2\u08b0\n\u00b2\3\u00b2\3\u00b2")
buf.write("\5\u00b2\u08b4\n\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b2")
buf.write("\3\u00b2\3\u00b2\3\u00b2\3\u00b2\5\u00b2\u08be\n\u00b2")
buf.write("\3\u00b2\5\u00b2\u08c1\n\u00b2\3\u00b2\3\u00b2\3\u00b2")
buf.write("\3\u00b2\3\u00b2\3\u00b2\5\u00b2\u08c9\n\u00b2\3\u00b2")
buf.write("\3\u00b2\3\u00b2\5\u00b2\u08ce\n\u00b2\3\u00b3\3\u00b3")
buf.write("\3\u00b3\5\u00b3\u08d3\n\u00b3\3\u00b3\3\u00b3\3\u00b4")
buf.write("\3\u00b4\3\u00b4\3\u00b4\5\u00b4\u08db\n\u00b4\3\u00b4")
buf.write("\3\u00b4\3\u00b4\3\u00b4\3\u00b4\5\u00b4\u08e2\n\u00b4")
buf.write("\3\u00b4\3\u00b4\5\u00b4\u08e6\n\u00b4\3\u00b5\3\u00b5")
buf.write("\3\u00b6\3\u00b6\3\u00b6\5\u00b6\u08ed\n\u00b6\3\u00b6")
buf.write("\3\u00b6\3\u00b6\3\u00b6\5\u00b6\u08f3\n\u00b6\7\u00b6")
buf.write("\u08f5\n\u00b6\f\u00b6\16\u00b6\u08f8\13\u00b6\3\u00b7")
buf.write("\3\u00b7\3\u00b7\5\u00b7\u08fd\n\u00b7\3\u00b8\3\u00b8")
buf.write("\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b8\5\u00b8\u0906")
buf.write("\n\u00b8\3\u00b8\3\u00b8\5\u00b8\u090a\n\u00b8\3\u00b9")
buf.write("\5\u00b9\u090d\n\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00ba")
buf.write("\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00bb\3\u00bb\3\u00bb")
buf.write("\3\u00bb\3\u00bc\3\u00bc\5\u00bc\u091d\n\u00bc\3\u00bc")
buf.write("\3\u00bc\3\u00bc\3\u00bd\3\u00bd\5\u00bd\u0924\n\u00bd")
buf.write("\3\u00be\3\u00be\3\u00be\3\u00be\3\u00be\3\u00be\3\u00bf")
buf.write("\5\u00bf\u092d\n\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00bf")
buf.write("\5\u00bf\u0933\n\u00bf\3\u00bf\3\u00bf\5\u00bf\u0937\n")
buf.write("\u00bf\3\u00bf\5\u00bf\u093a\n\u00bf\3\u00c0\3\u00c0\5")
buf.write("\u00c0\u093e\n\u00c0\3\u00c1\3\u00c1\5\u00c1\u0942\n\u00c1")
buf.write("\3\u00c2\3\u00c2\3\u00c2\5\u00c2\u0947\n\u00c2\3\u00c2")
buf.write("\3\u00c2\3\u00c3\3\u00c3\3\u00c3\5\u00c3\u094e\n\u00c3")
buf.write("\3\u00c3\3\u00c3\3\u00c3\3\u00c3\5\u00c3\u0954\n\u00c3")
buf.write("\7\u00c3\u0956\n\u00c3\f\u00c3\16\u00c3\u0959\13\u00c3")
buf.write("\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\5\u00c4")
buf.write("\u0961\n\u00c4\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\5\u00c5\u0996\n\u00c5")
buf.write("\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6")
buf.write("\5\u00c6\u099f\n\u00c6\3\u00c7\3\u00c7\3\u00c8\3\u00c8")
buf.write("\3\u00c9\3\u00c9\3\u00c9\2$\f\26 \66@BDFJLNPRTV^j|\u00b2")
buf.write("\u00d6\u00dc\u00e8\u00ec\u00f4\u010a\u010e\u0112\u0120")
buf.write("\u0134\u0138\u0140\u015e\u016a\u0184\u00ca\2\4\6\b\n\f")
buf.write("\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@")
buf.write("BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
buf.write("\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096\u0098")
buf.write("\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa")
buf.write("\u00ac\u00ae\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc")
buf.write("\u00be\u00c0\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce")
buf.write("\u00d0\u00d2\u00d4\u00d6\u00d8\u00da\u00dc\u00de\u00e0")
buf.write("\u00e2\u00e4\u00e6\u00e8\u00ea\u00ec\u00ee\u00f0\u00f2")
buf.write("\u00f4\u00f6\u00f8\u00fa\u00fc\u00fe\u0100\u0102\u0104")
buf.write("\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116")
buf.write("\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128")
buf.write("\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0138\u013a")
buf.write("\u013c\u013e\u0140\u0142\u0144\u0146\u0148\u014a\u014c")
buf.write("\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e")
buf.write("\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170")
buf.write("\u0172\u0174\u0176\u0178\u017a\u017c\u017e\u0180\u0182")
buf.write("\u0184\u0186\u0188\u018a\u018c\u018e\u0190\2\20\4\2bb")
buf.write("ff\5\2\3\4\\^bd\3\2qr\5\2ffipst\7\2%%\60\60::@@GG\5\2")
buf.write("##--QQ\3\2V[\4\2\27\27SS\4\2\5\5bb\5\2\26\26CCNN\4\2\'")
buf.write("\'\66\66\3\2\679\4\2&&II\3\2\u0091\u0094\2\u0ac3\2\u0193")
buf.write("\3\2\2\2\4\u019f\3\2\2\2\6\u01a3\3\2\2\2\b\u01ae\3\2\2")
buf.write("\2\n\u01b0\3\2\2\2\f\u01c1\3\2\2\2\16\u01d2\3\2\2\2\20")
buf.write("\u01d8\3\2\2\2\22\u01e4\3\2\2\2\24\u01e6\3\2\2\2\26\u01e8")
buf.write("\3\2\2\2\30\u01fa\3\2\2\2\32\u0200\3\2\2\2\34\u0207\3")
buf.write("\2\2\2\36\u0209\3\2\2\2 \u0258\3\2\2\2\"\u0285\3\2\2\2")
buf.write("$\u0287\3\2\2\2&\u0289\3\2\2\2(\u02a1\3\2\2\2*\u02bf\3")
buf.write("\2\2\2,\u02c1\3\2\2\2.\u02db\3\2\2\2\60\u02dd\3\2\2\2")
buf.write("\62\u02e1\3\2\2\2\64\u02ea\3\2\2\2\66\u02ec\3\2\2\28\u0305")
buf.write("\3\2\2\2:\u0313\3\2\2\2<\u0315\3\2\2\2>\u0320\3\2\2\2")
buf.write("@\u0322\3\2\2\2B\u0330\3\2\2\2D\u0341\3\2\2\2F\u034f\3")
buf.write("\2\2\2H\u035b\3\2\2\2J\u035d\3\2\2\2L\u0371\3\2\2\2N\u037f")
buf.write("\3\2\2\2P\u038a\3\2\2\2R\u0395\3\2\2\2T\u03a0\3\2\2\2")
buf.write("V\u03ae\3\2\2\2X\u03c3\3\2\2\2Z\u03cb\3\2\2\2\\\u03cd")
buf.write("\3\2\2\2^\u03cf\3\2\2\2`\u03da\3\2\2\2b\u03f6\3\2\2\2")
buf.write("d\u040c\3\2\2\2f\u040f\3\2\2\2h\u0413\3\2\2\2j\u0419\3")
buf.write("\2\2\2l\u0437\3\2\2\2n\u0449\3\2\2\2p\u046e\3\2\2\2r\u0472")
buf.write("\3\2\2\2t\u0475\3\2\2\2v\u047c\3\2\2\2x\u048e\3\2\2\2")
buf.write("z\u0490\3\2\2\2|\u0492\3\2\2\2~\u04a5\3\2\2\2\u0080\u04af")
buf.write("\3\2\2\2\u0082\u04b1\3\2\2\2\u0084\u04c8\3\2\2\2\u0086")
buf.write("\u04ca\3\2\2\2\u0088\u04d2\3\2\2\2\u008a\u04d4\3\2\2\2")
buf.write("\u008c\u04dd\3\2\2\2\u008e\u04e6\3\2\2\2\u0090\u04e8\3")
buf.write("\2\2\2\u0092\u04ea\3\2\2\2\u0094\u04ec\3\2\2\2\u0096\u04f1")
buf.write("\3\2\2\2\u0098\u04f7\3\2\2\2\u009a\u0500\3\2\2\2\u009c")
buf.write("\u0509\3\2\2\2\u009e\u0522\3\2\2\2\u00a0\u0528\3\2\2\2")
buf.write("\u00a2\u0533\3\2\2\2\u00a4\u054d\3\2\2\2\u00a6\u054f\3")
buf.write("\2\2\2\u00a8\u055e\3\2\2\2\u00aa\u0573\3\2\2\2\u00ac\u0575")
buf.write("\3\2\2\2\u00ae\u0584\3\2\2\2\u00b0\u0586\3\2\2\2\u00b2")
buf.write("\u0589\3\2\2\2\u00b4\u0599\3\2\2\2\u00b6\u059b\3\2\2\2")
buf.write("\u00b8\u059f\3\2\2\2\u00ba\u05a1\3\2\2\2\u00bc\u05a5\3")
buf.write("\2\2\2\u00be\u05a9\3\2\2\2\u00c0\u05ac\3\2\2\2\u00c2\u05b5")
buf.write("\3\2\2\2\u00c4\u05be\3\2\2\2\u00c6\u05c6\3\2\2\2\u00c8")
buf.write("\u05c8\3\2\2\2\u00ca\u05ca\3\2\2\2\u00cc\u05d1\3\2\2\2")
buf.write("\u00ce\u05e2\3\2\2\2\u00d0\u05e5\3\2\2\2\u00d2\u05ef\3")
buf.write("\2\2\2\u00d4\u05ff\3\2\2\2\u00d6\u0601\3\2\2\2\u00d8\u0612")
buf.write("\3\2\2\2\u00da\u0624\3\2\2\2\u00dc\u062d\3\2\2\2\u00de")
buf.write("\u063e\3\2\2\2\u00e0\u0644\3\2\2\2\u00e2\u0646\3\2\2\2")
buf.write("\u00e4\u064a\3\2\2\2\u00e6\u064c\3\2\2\2\u00e8\u0650\3")
buf.write("\2\2\2\u00ea\u066c\3\2\2\2\u00ec\u066e\3\2\2\2\u00ee\u0679")
buf.write("\3\2\2\2\u00f0\u0682\3\2\2\2\u00f2\u0688\3\2\2\2\u00f4")
buf.write("\u0693\3\2\2\2\u00f6\u06a5\3\2\2\2\u00f8\u06b4\3\2\2\2")
buf.write("\u00fa\u06d0\3\2\2\2\u00fc\u06d2\3\2\2\2\u00fe\u06d6\3")
buf.write("\2\2\2\u0100\u06d8\3\2\2\2\u0102\u06db\3\2\2\2\u0104\u06df")
buf.write("\3\2\2\2\u0106\u06eb\3\2\2\2\u0108\u06f2\3\2\2\2\u010a")
buf.write("\u0702\3\2\2\2\u010c\u0718\3\2\2\2\u010e\u071a\3\2\2\2")
buf.write("\u0110\u0737\3\2\2\2\u0112\u0739\3\2\2\2\u0114\u0763\3")
buf.write("\2\2\2\u0116\u0766\3\2\2\2\u0118\u077c\3\2\2\2\u011a\u0783")
buf.write("\3\2\2\2\u011c\u0788\3\2\2\2\u011e\u078c\3\2\2\2\u0120")
buf.write("\u078e\3\2\2\2\u0122\u07a7\3\2\2\2\u0124\u07ab\3\2\2\2")
buf.write("\u0126\u07ad\3\2\2\2\u0128\u07c6\3\2\2\2\u012a\u07c9\3")
buf.write("\2\2\2\u012c\u07cd\3\2\2\2\u012e\u07cf\3\2\2\2\u0130\u07da")
buf.write("\3\2\2\2\u0132\u07ec\3\2\2\2\u0134\u07ee\3\2\2\2\u0136")
buf.write("\u080c\3\2\2\2\u0138\u080e\3\2\2\2\u013a\u0818\3\2\2\2")
buf.write("\u013c\u081a\3\2\2\2\u013e\u081e\3\2\2\2\u0140\u0821\3")
buf.write("\2\2\2\u0142\u0846\3\2\2\2\u0144\u084d\3\2\2\2\u0146\u084f")
buf.write("\3\2\2\2\u0148\u0851\3\2\2\2\u014a\u0853\3\2\2\2\u014c")
buf.write("\u0856\3\2\2\2\u014e\u085a\3\2\2\2\u0150\u085e\3\2\2\2")
buf.write("\u0152\u086c\3\2\2\2\u0154\u0878\3\2\2\2\u0156\u087c\3")
buf.write("\2\2\2\u0158\u087e\3\2\2\2\u015a\u0886\3\2\2\2\u015c\u0888")
buf.write("\3\2\2\2\u015e\u088e\3\2\2\2\u0160\u089b\3\2\2\2\u0162")
buf.write("\u08cd\3\2\2\2\u0164\u08cf\3\2\2\2\u0166\u08e5\3\2\2\2")
buf.write("\u0168\u08e7\3\2\2\2\u016a\u08e9\3\2\2\2\u016c\u08fc\3")
buf.write("\2\2\2\u016e\u0909\3\2\2\2\u0170\u090c\3\2\2\2\u0172\u0911")
buf.write("\3\2\2\2\u0174\u0916\3\2\2\2\u0176\u091a\3\2\2\2\u0178")
buf.write("\u0921\3\2\2\2\u017a\u0925\3\2\2\2\u017c\u0939\3\2\2\2")
buf.write("\u017e\u093b\3\2\2\2\u0180\u0941\3\2\2\2\u0182\u0943\3")
buf.write("\2\2\2\u0184\u094a\3\2\2\2\u0186\u0960\3\2\2\2\u0188\u0995")
buf.write("\3\2\2\2\u018a\u099e\3\2\2\2\u018c\u09a0\3\2\2\2\u018e")
buf.write("\u09a2\3\2\2\2\u0190\u09a4\3\2\2\2\u0192\u0194\5|?\2\u0193")
buf.write("\u0192\3\2\2\2\u0193\u0194\3\2\2\2\u0194\u0195\3\2\2\2")
buf.write("\u0195\u0196\7\2\2\3\u0196\3\3\2\2\2\u0197\u01a0\5\u018a")
buf.write("\u00c6\2\u0198\u01a0\7F\2\2\u0199\u019a\7V\2\2\u019a\u019b")
buf.write("\5^\60\2\u019b\u019c\7W\2\2\u019c\u01a0\3\2\2\2\u019d")
buf.write("\u01a0\5\6\4\2\u019e\u01a0\5\16\b\2\u019f\u0197\3\2\2")
buf.write("\2\u019f\u0198\3\2\2\2\u019f\u0199\3\2\2\2\u019f\u019d")
buf.write("\3\2\2\2\u019f\u019e\3\2\2\2\u01a0\5\3\2\2\2\u01a1\u01a4")
buf.write("\5\b\5\2\u01a2\u01a4\5\n\6\2\u01a3\u01a1\3\2\2\2\u01a3")
buf.write("\u01a2\3\2\2\2\u01a4\7\3\2\2\2\u01a5\u01af\7\u0087\2\2")
buf.write("\u01a6\u01af\5\u0158\u00ad\2\u01a7\u01af\5\u014a\u00a6")
buf.write("\2\u01a8\u01af\5\u015a\u00ae\2\u01a9\u01aa\7d\2\2\u01aa")
buf.write("\u01af\5\u0124\u0093\2\u01ab\u01ac\7d\2\2\u01ac\u01af")
buf.write("\5\u00a2R\2\u01ad\u01af\5\u0166\u00b4\2\u01ae\u01a5\3")
buf.write("\2\2\2\u01ae\u01a6\3\2\2\2\u01ae\u01a7\3\2\2\2\u01ae\u01a8")
buf.write("\3\2\2\2\u01ae\u01a9\3\2\2\2\u01ae\u01ab\3\2\2\2\u01ae")
buf.write("\u01ad\3\2\2\2\u01af\t\3\2\2\2\u01b0\u01b2\5\f\7\2\u01b1")
buf.write("\u01b3\7E\2\2\u01b2\u01b1\3\2\2\2\u01b2\u01b3\3\2\2\2")
buf.write("\u01b3\u01b4\3\2\2\2\u01b4\u01b5\5\b\5\2\u01b5\13\3\2")
buf.write("\2\2\u01b6\u01b7\b\7\1\2\u01b7\u01c2\7\u0082\2\2\u01b8")
buf.write("\u01b9\5\u00a0Q\2\u01b9\u01ba\7\u0082\2\2\u01ba\u01c2")
buf.write("\3\2\2\2\u01bb\u01bc\5\u00b8]\2\u01bc\u01bd\7\u0082\2")
buf.write("\2\u01bd\u01c2\3\2\2\2\u01be\u01bf\5\u00a2R\2\u01bf\u01c0")
buf.write("\7\u0082\2\2\u01c0\u01c2\3\2\2\2\u01c1\u01b6\3\2\2\2\u01c1")
buf.write("\u01b8\3\2\2\2\u01c1\u01bb\3\2\2\2\u01c1\u01be\3\2\2\2")
buf.write("\u01c2\u01cf\3\2\2\2\u01c3\u01c4\f\4\2\2\u01c4\u01c5\7")
buf.write("\u0087\2\2\u01c5\u01ce\7\u0082\2\2\u01c6\u01c8\f\3\2\2")
buf.write("\u01c7\u01c9\7E\2\2\u01c8\u01c7\3\2\2\2\u01c8\u01c9\3")
buf.write("\2\2\2\u01c9\u01ca\3\2\2\2\u01ca\u01cb\5\u0164\u00b3\2")
buf.write("\u01cb\u01cc\7\u0082\2\2\u01cc\u01ce\3\2\2\2\u01cd\u01c3")
buf.write("\3\2\2\2\u01cd\u01c6\3\2\2\2\u01ce\u01d1\3\2\2\2\u01cf")
buf.write("\u01cd\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\r\3\2\2\2\u01d1")
buf.write("\u01cf\3\2\2\2\u01d2\u01d4\5\20\t\2\u01d3\u01d5\5\36\20")
buf.write("\2\u01d4\u01d3\3\2\2\2\u01d4\u01d5\3\2\2\2\u01d5\u01d6")
buf.write("\3\2\2\2\u01d6\u01d7\5h\65\2\u01d7\17\3\2\2\2\u01d8\u01da")
buf.write("\7X\2\2\u01d9\u01db\5\22\n\2\u01da\u01d9\3\2\2\2\u01da")
buf.write("\u01db\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc\u01dd\7Y\2\2")
buf.write("\u01dd\21\3\2\2\2\u01de\u01e5\5\24\13\2\u01df\u01e5\5")
buf.write("\26\f\2\u01e0\u01e1\5\24\13\2\u01e1\u01e2\7}\2\2\u01e2")
buf.write("\u01e3\5\26\f\2\u01e3\u01e5\3\2\2\2\u01e4\u01de\3\2\2")
buf.write("\2\u01e4\u01df\3\2\2\2\u01e4\u01e0\3\2\2\2\u01e5\23\3")
buf.write("\2\2\2\u01e6\u01e7\t\2\2\2\u01e7\25\3\2\2\2\u01e8\u01e9")
buf.write("\b\f\1\2\u01e9\u01eb\5\30\r\2\u01ea\u01ec\7\u0086\2\2")
buf.write("\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01f5\3")
buf.write("\2\2\2\u01ed\u01ee\f\3\2\2\u01ee\u01ef\7}\2\2\u01ef\u01f1")
buf.write("\5\30\r\2\u01f0\u01f2\7\u0086\2\2\u01f1\u01f0\3\2\2\2")
buf.write("\u01f1\u01f2\3\2\2\2\u01f2\u01f4\3\2\2\2\u01f3\u01ed\3")
buf.write("\2\2\2\u01f4\u01f7\3\2\2\2\u01f5\u01f3\3\2\2\2\u01f5\u01f6")
buf.write("\3\2\2\2\u01f6\27\3\2\2\2\u01f7\u01f5\3\2\2\2\u01f8\u01fb")
buf.write("\5\32\16\2\u01f9\u01fb\5\34\17\2\u01fa\u01f8\3\2\2\2\u01fa")
buf.write("\u01f9\3\2\2\2\u01fb\31\3\2\2\2\u01fc\u0201\7\u0087\2")
buf.write("\2\u01fd\u01fe\7b\2\2\u01fe\u0201\7\u0087\2\2\u01ff\u0201")
buf.write("\7F\2\2\u0200\u01fc\3\2\2\2\u0200\u01fd\3\2\2\2\u0200")
buf.write("\u01ff\3\2\2\2\u0201\33\3\2\2\2\u0202\u0203\7\u0087\2")
buf.write("\2\u0203\u0208\5\u011a\u008e\2\u0204\u0205\7b\2\2\u0205")
buf.write("\u0206\7\u0087\2\2\u0206\u0208\5\u011a\u008e\2\u0207\u0202")
buf.write("\3\2\2\2\u0207\u0204\3\2\2\2\u0208\35\3\2\2\2\u0209\u020a")
buf.write("\7V\2\2\u020a\u020b\5\u0110\u0089\2\u020b\u020d\7W\2\2")
buf.write("\u020c\u020e\7\60\2\2\u020d\u020c\3\2\2\2\u020d\u020e")
buf.write("\3\2\2\2\u020e\u0210\3\2\2\2\u020f\u0211\5\u0180\u00c1")
buf.write("\2\u0210\u020f\3\2\2\2\u0210\u0211\3\2\2\2\u0211\u0213")
buf.write("\3\2\2\2\u0212\u0214\5\u00d6l\2\u0213\u0212\3\2\2\2\u0213")
buf.write("\u0214\3\2\2\2\u0214\u0216\3\2\2\2\u0215\u0217\5\u00f8")
buf.write("}\2\u0216\u0215\3\2\2\2\u0216\u0217\3\2\2\2\u0217\37\3")
buf.write("\2\2\2\u0218\u0219\b\21\1\2\u0219\u0259\5\4\3\2\u021a")
buf.write("\u021b\5\u009eP\2\u021b\u021d\7V\2\2\u021c\u021e\5&\24")
buf.write("\2\u021d\u021c\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u021f")
buf.write("\3\2\2\2\u021f\u0220\7W\2\2\u0220\u0259\3\2\2\2\u0221")
buf.write("\u0222\5\u016e\u00b8\2\u0222\u0224\7V\2\2\u0223\u0225")
buf.write("\5&\24\2\u0224\u0223\3\2\2\2\u0224\u0225\3\2\2\2\u0225")
buf.write("\u0226\3\2\2\2\u0226\u0227\7W\2\2\u0227\u0259\3\2\2\2")
buf.write("\u0228\u0229\5\u009eP\2\u0229\u022a\5\u0122\u0092\2\u022a")
buf.write("\u0259\3\2\2\2\u022b\u022c\5\u016e\u00b8\2\u022c\u022d")
buf.write("\5\u0122\u0092\2\u022d\u0259\3\2\2\2\u022e\u022f\7 \2")
buf.write("\2\u022f\u0230\7g\2\2\u0230\u0231\5\u0104\u0083\2\u0231")
buf.write("\u0232\7h\2\2\u0232\u0233\7V\2\2\u0233\u0234\5^\60\2\u0234")
buf.write("\u0235\7W\2\2\u0235\u0259\3\2\2\2\u0236\u0237\7B\2\2\u0237")
buf.write("\u0238\7g\2\2\u0238\u0239\5\u0104\u0083\2\u0239\u023a")
buf.write("\7h\2\2\u023a\u023b\7V\2\2\u023b\u023c\5^\60\2\u023c\u023d")
buf.write("\7W\2\2\u023d\u0259\3\2\2\2\u023e\u023f\7;\2\2\u023f\u0240")
buf.write("\7g\2\2\u0240\u0241\5\u0104\u0083\2\u0241\u0242\7h\2\2")
buf.write("\u0242\u0243\7V\2\2\u0243\u0244\5^\60\2\u0244\u0245\7")
buf.write("W\2\2\u0245\u0259\3\2\2\2\u0246\u0247\7\31\2\2\u0247\u0248")
buf.write("\7g\2\2\u0248\u0249\5\u0104\u0083\2\u0249\u024a\7h\2\2")
buf.write("\u024a\u024b\7V\2\2\u024b\u024c\5^\60\2\u024c\u024d\7")
buf.write("W\2\2\u024d\u0259\3\2\2\2\u024e\u024f\5$\23\2\u024f\u0250")
buf.write("\7V\2\2\u0250\u0251\5^\60\2\u0251\u0252\7W\2\2\u0252\u0259")
buf.write("\3\2\2\2\u0253\u0254\5$\23\2\u0254\u0255\7V\2\2\u0255")
buf.write("\u0256\5\u0104\u0083\2\u0256\u0257\7W\2\2\u0257\u0259")
buf.write("\3\2\2\2\u0258\u0218\3\2\2\2\u0258\u021a\3\2\2\2\u0258")
buf.write("\u0221\3\2\2\2\u0258\u0228\3\2\2\2\u0258\u022b\3\2\2\2")
buf.write("\u0258\u022e\3\2\2\2\u0258\u0236\3\2\2\2\u0258\u023e\3")
buf.write("\2\2\2\u0258\u0246\3\2\2\2\u0258\u024e\3\2\2\2\u0258\u0253")
buf.write("\3\2\2\2\u0259\u0282\3\2\2\2\u025a\u025b\f\25\2\2\u025b")
buf.write("\u025c\7X\2\2\u025c\u025d\5^\60\2\u025d\u025e\7Y\2\2\u025e")
buf.write("\u0281\3\2\2\2\u025f\u0260\f\24\2\2\u0260\u0261\7X\2\2")
buf.write("\u0261\u0262\5\u0122\u0092\2\u0262\u0263\7Y\2\2\u0263")
buf.write("\u0281\3\2\2\2\u0264\u0265\f\23\2\2\u0265\u0267\7V\2\2")
buf.write("\u0266\u0268\5&\24\2\u0267\u0266\3\2\2\2\u0267\u0268\3")
buf.write("\2\2\2\u0268\u0269\3\2\2\2\u0269\u0281\7W\2\2\u026a\u026b")
buf.write("\f\16\2\2\u026b\u026d\7\u0084\2\2\u026c\u026e\7E\2\2\u026d")
buf.write("\u026c\3\2\2\2\u026d\u026e\3\2\2\2\u026e\u026f\3\2\2\2")
buf.write("\u026f\u0281\5\6\4\2\u0270\u0271\f\r\2\2\u0271\u0273\7")
buf.write("\177\2\2\u0272\u0274\7E\2\2\u0273\u0272\3\2\2\2\u0273")
buf.write("\u0274\3\2\2\2\u0274\u0275\3\2\2\2\u0275\u0281\5\6\4\2")
buf.write("\u0276\u0277\f\f\2\2\u0277\u0278\7\u0084\2\2\u0278\u0281")
buf.write("\5(\25\2\u0279\u027a\f\13\2\2\u027a\u027b\7\177\2\2\u027b")
buf.write("\u0281\5(\25\2\u027c\u027d\f\n\2\2\u027d\u0281\7{\2\2")
buf.write("\u027e\u027f\f\t\2\2\u027f\u0281\7|\2\2\u0280\u025a\3")
buf.write("\2\2\2\u0280\u025f\3\2\2\2\u0280\u0264\3\2\2\2\u0280\u026a")
buf.write("\3\2\2\2\u0280\u0270\3\2\2\2\u0280\u0276\3\2\2\2\u0280")
buf.write("\u0279\3\2\2\2\u0280\u027c\3\2\2\2\u0280\u027e\3\2\2\2")
buf.write("\u0281\u0284\3\2\2\2\u0282\u0280\3\2\2\2\u0282\u0283\3")
buf.write("\2\2\2\u0283!\3\2\2\2\u0284\u0282\3\2\2\2\u0285\u0286")
buf.write("\7L\2\2\u0286#\3\2\2\2\u0287\u0288\7L\2\2\u0288%\3\2\2")
buf.write("\2\u0289\u028a\5\u0120\u0091\2\u028a\'\3\2\2\2\u028b\u028d")
buf.write("\5\f\7\2\u028c\u028b\3\2\2\2\u028c\u028d\3\2\2\2\u028d")
buf.write("\u028e\3\2\2\2\u028e\u028f\5\u00a0Q\2\u028f\u0290\7\u0082")
buf.write("\2\2\u0290\u0291\7d\2\2\u0291\u0292\5\u00a0Q\2\u0292\u02a2")
buf.write("\3\2\2\2\u0293\u0294\5\f\7\2\u0294\u0295\7E\2\2\u0295")
buf.write("\u0296\5\u0164\u00b3\2\u0296\u0297\7\u0082\2\2\u0297\u0298")
buf.write("\7d\2\2\u0298\u0299\5\u00a0Q\2\u0299\u02a2\3\2\2\2\u029a")
buf.write("\u029c\5\f\7\2\u029b\u029a\3\2\2\2\u029b\u029c\3\2\2\2")
buf.write("\u029c\u029d\3\2\2\2\u029d\u029e\7d\2\2\u029e\u02a2\5")
buf.write("\u00a0Q\2\u029f\u02a0\7d\2\2\u02a0\u02a2\5\u00a2R\2\u02a1")
buf.write("\u028c\3\2\2\2\u02a1\u0293\3\2\2\2\u02a1\u029b\3\2\2\2")
buf.write("\u02a1\u029f\3\2\2\2\u02a2)\3\2\2\2\u02a3\u02c0\5 \21")
buf.write("\2\u02a4\u02a5\7{\2\2\u02a5\u02c0\5> \2\u02a6\u02a7\7")
buf.write("|\2\2\u02a7\u02c0\5> \2\u02a8\u02a9\5,\27\2\u02a9\u02aa")
buf.write("\5> \2\u02aa\u02c0\3\2\2\2\u02ab\u02ac\7?\2\2\u02ac\u02c0")
buf.write("\5*\26\2\u02ad\u02ae\7?\2\2\u02ae\u02af\7V\2\2\u02af\u02b0")
buf.write("\5\u0104\u0083\2\u02b0\u02b1\7W\2\2\u02b1\u02c0\3\2\2")
buf.write("\2\u02b2\u02b3\7?\2\2\u02b3\u02b4\7\u0086\2\2\u02b4\u02b5")
buf.write("\7V\2\2\u02b5\u02b6\7\u0087\2\2\u02b6\u02c0\7W\2\2\u02b7")
buf.write("\u02b8\7\f\2\2\u02b8\u02b9\7V\2\2\u02b9\u02ba\5\u0104")
buf.write("\u0083\2\u02ba\u02bb\7W\2\2\u02bb\u02c0\3\2\2\2\u02bc")
buf.write("\u02c0\5<\37\2\u02bd\u02c0\5.\30\2\u02be\u02c0\5:\36\2")
buf.write("\u02bf\u02a3\3\2\2\2\u02bf\u02a4\3\2\2\2\u02bf\u02a6\3")
buf.write("\2\2\2\u02bf\u02a8\3\2\2\2\u02bf\u02ab\3\2\2\2\u02bf\u02ad")
buf.write("\3\2\2\2\u02bf\u02b2\3\2\2\2\u02bf\u02b7\3\2\2\2\u02bf")
buf.write("\u02bc\3\2\2\2\u02bf\u02bd\3\2\2\2\u02bf\u02be\3\2\2\2")
buf.write("\u02c0+\3\2\2\2\u02c1\u02c2\t\3\2\2\u02c2-\3\2\2\2\u02c3")
buf.write("\u02c5\7\u0082\2\2\u02c4\u02c3\3\2\2\2\u02c4\u02c5\3\2")
buf.write("\2\2\u02c5\u02c6\3\2\2\2\u02c6\u02c8\7\62\2\2\u02c7\u02c9")
buf.write("\5\60\31\2\u02c8\u02c7\3\2\2\2\u02c8\u02c9\3\2\2\2\u02c9")
buf.write("\u02ca\3\2\2\2\u02ca\u02cc\5\62\32\2\u02cb\u02cd\58\35")
buf.write("\2\u02cc\u02cb\3\2\2\2\u02cc\u02cd\3\2\2\2\u02cd\u02dc")
buf.write("\3\2\2\2\u02ce\u02d0\7\u0082\2\2\u02cf\u02ce\3\2\2\2\u02cf")
buf.write("\u02d0\3\2\2\2\u02d0\u02d1\3\2\2\2\u02d1\u02d3\7\62\2")
buf.write("\2\u02d2\u02d4\5\60\31\2\u02d3\u02d2\3\2\2\2\u02d3\u02d4")
buf.write("\3\2\2\2\u02d4\u02d5\3\2\2\2\u02d5\u02d6\7V\2\2\u02d6")
buf.write("\u02d7\5\u0104\u0083\2\u02d7\u02d9\7W\2\2\u02d8\u02da")
buf.write("\58\35\2\u02d9\u02d8\3\2\2\2\u02d9\u02da\3\2\2\2\u02da")
buf.write("\u02dc\3\2\2\2\u02db\u02c4\3\2\2\2\u02db\u02cf\3\2\2\2")
buf.write("\u02dc/\3\2\2\2\u02dd\u02de\7V\2\2\u02de\u02df\5&\24\2")
buf.write("\u02df\u02e0\7W\2\2\u02e0\61\3\2\2\2\u02e1\u02e3\5\u009a")
buf.write("N\2\u02e2\u02e4\5\64\33\2\u02e3\u02e2\3\2\2\2\u02e3\u02e4")
buf.write("\3\2\2\2\u02e4\63\3\2\2\2\u02e5\u02e7\5\u00fa~\2\u02e6")
buf.write("\u02e8\5\64\33\2\u02e7\u02e6\3\2\2\2\u02e7\u02e8\3\2\2")
buf.write("\2\u02e8\u02eb\3\2\2\2\u02e9\u02eb\5\66\34\2\u02ea\u02e5")
buf.write("\3\2\2\2\u02ea\u02e9\3\2\2\2\u02eb\65\3\2\2\2\u02ec\u02ed")
buf.write("\b\34\1\2\u02ed\u02ee\7X\2\2\u02ee\u02ef\5^\60\2\u02ef")
buf.write("\u02f1\7Y\2\2\u02f0\u02f2\5\u00d6l\2\u02f1\u02f0\3\2\2")
buf.write("\2\u02f1\u02f2\3\2\2\2\u02f2\u02fc\3\2\2\2\u02f3\u02f4")
buf.write("\f\3\2\2\u02f4\u02f5\7X\2\2\u02f5\u02f6\5`\61\2\u02f6")
buf.write("\u02f8\7Y\2\2\u02f7\u02f9\5\u00d6l\2\u02f8\u02f7\3\2\2")
buf.write("\2\u02f8\u02f9\3\2\2\2\u02f9\u02fb\3\2\2\2\u02fa\u02f3")
buf.write("\3\2\2\2\u02fb\u02fe\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc")
buf.write("\u02fd\3\2\2\2\u02fd\67\3\2\2\2\u02fe\u02fc\3\2\2\2\u02ff")
buf.write("\u0301\7V\2\2\u0300\u0302\5&\24\2\u0301\u0300\3\2\2\2")
buf.write("\u0301\u0302\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0306\7")
buf.write("W\2\2\u0304\u0306\5\u0122\u0092\2\u0305\u02ff\3\2\2\2")
buf.write("\u0305\u0304\3\2\2\2\u03069\3\2\2\2\u0307\u0309\7\u0082")
buf.write("\2\2\u0308\u0307\3\2\2\2\u0308\u0309\3\2\2\2\u0309\u030a")
buf.write("\3\2\2\2\u030a\u030b\7\35\2\2\u030b\u0314\5> \2\u030c")
buf.write("\u030e\7\u0082\2\2\u030d\u030c\3\2\2\2\u030d\u030e\3\2")
buf.write("\2\2\u030e\u030f\3\2\2\2\u030f\u0310\7\35\2\2\u0310\u0311")
buf.write("\7X\2\2\u0311\u0312\7Y\2\2\u0312\u0314\5> \2\u0313\u0308")
buf.write("\3\2\2\2\u0313\u030d\3\2\2\2\u0314;\3\2\2\2\u0315\u0316")
buf.write("\7\63\2\2\u0316\u0317\7V\2\2\u0317\u0318\5^\60\2\u0318")
buf.write("\u0319\7W\2\2\u0319=\3\2\2\2\u031a\u0321\5*\26\2\u031b")
buf.write("\u031c\7V\2\2\u031c\u031d\5\u0104\u0083\2\u031d\u031e")
buf.write("\7W\2\2\u031e\u031f\5> \2\u031f\u0321\3\2\2\2\u0320\u031a")
buf.write("\3\2\2\2\u0320\u031b\3\2\2\2\u0321?\3\2\2\2\u0322\u0323")
buf.write("\b!\1\2\u0323\u0324\5> \2\u0324\u032d\3\2\2\2\u0325\u0326")
buf.write("\f\4\2\2\u0326\u0327\7\u0085\2\2\u0327\u032c\5> \2\u0328")
buf.write("\u0329\f\3\2\2\u0329\u032a\7~\2\2\u032a\u032c\5> \2\u032b")
buf.write("\u0325\3\2\2\2\u032b\u0328\3\2\2\2\u032c\u032f\3\2\2\2")
buf.write("\u032d\u032b\3\2\2\2\u032d\u032e\3\2\2\2\u032eA\3\2\2")
buf.write("\2\u032f\u032d\3\2\2\2\u0330\u0331\b\"\1\2\u0331\u0332")
buf.write("\5@!\2\u0332\u033e\3\2\2\2\u0333\u0334\f\5\2\2\u0334\u0335")
buf.write("\7^\2\2\u0335\u033d\5@!\2\u0336\u0337\f\4\2\2\u0337\u0338")
buf.write("\7_\2\2\u0338\u033d\5@!\2\u0339\u033a\f\3\2\2\u033a\u033b")
buf.write("\7`\2\2\u033b\u033d\5@!\2\u033c\u0333\3\2\2\2\u033c\u0336")
buf.write("\3\2\2\2\u033c\u0339\3\2\2\2\u033d\u0340\3\2\2\2\u033e")
buf.write("\u033c\3\2\2\2\u033e\u033f\3\2\2\2\u033fC\3\2\2\2\u0340")
buf.write("\u033e\3\2\2\2\u0341\u0342\b#\1\2\u0342\u0343\5B\"\2\u0343")
buf.write("\u034c\3\2\2\2\u0344\u0345\f\4\2\2\u0345\u0346\7\\\2\2")
buf.write("\u0346\u034b\5B\"\2\u0347\u0348\f\3\2\2\u0348\u0349\7")
buf.write("]\2\2\u0349\u034b\5B\"\2\u034a\u0344\3\2\2\2\u034a\u0347")
buf.write("\3\2\2\2\u034b\u034e\3\2\2\2\u034c\u034a\3\2\2\2\u034c")
buf.write("\u034d\3\2\2\2\u034dE\3\2\2\2\u034e\u034c\3\2\2\2\u034f")
buf.write("\u0350\b$\1\2\u0350\u0351\5D#\2\u0351\u0358\3\2\2\2\u0352")
buf.write("\u0353\f\3\2\2\u0353\u0354\5H%\2\u0354\u0355\5D#\2\u0355")
buf.write("\u0357\3\2\2\2\u0356\u0352\3\2\2\2\u0357\u035a\3\2\2\2")
buf.write("\u0358\u0356\3\2\2\2\u0358\u0359\3\2\2\2\u0359G\3\2\2")
buf.write("\2\u035a\u0358\3\2\2\2\u035b\u035c\t\4\2\2\u035cI\3\2")
buf.write("\2\2\u035d\u035e\b&\1\2\u035e\u035f\5F$\2\u035f\u036e")
buf.write("\3\2\2\2\u0360\u0361\f\6\2\2\u0361\u0362\7g\2\2\u0362")
buf.write("\u036d\5F$\2\u0363\u0364\f\5\2\2\u0364\u0365\7h\2\2\u0365")
buf.write("\u036d\5F$\2\u0366\u0367\f\4\2\2\u0367\u0368\7w\2\2\u0368")
buf.write("\u036d\5F$\2\u0369\u036a\f\3\2\2\u036a\u036b\7x\2\2\u036b")
buf.write("\u036d\5F$\2\u036c\u0360\3\2\2\2\u036c\u0363\3\2\2\2\u036c")
buf.write("\u0366\3\2\2\2\u036c\u0369\3\2\2\2\u036d\u0370\3\2\2\2")
buf.write("\u036e\u036c\3\2\2\2\u036e\u036f\3\2\2\2\u036fK\3\2\2")
buf.write("\2\u0370\u036e\3\2\2\2\u0371\u0372\b\'\1\2\u0372\u0373")
buf.write("\5J&\2\u0373\u037c\3\2\2\2\u0374\u0375\f\4\2\2\u0375\u0376")
buf.write("\7u\2\2\u0376\u037b\5J&\2\u0377\u0378\f\3\2\2\u0378\u0379")
buf.write("\7v\2\2\u0379\u037b\5J&\2\u037a\u0374\3\2\2\2\u037a\u0377")
buf.write("\3\2\2\2\u037b\u037e\3\2\2\2\u037c\u037a\3\2\2\2\u037c")
buf.write("\u037d\3\2\2\2\u037dM\3\2\2\2\u037e\u037c\3\2\2\2\u037f")
buf.write("\u0380\b(\1\2\u0380\u0381\5L\'\2\u0381\u0387\3\2\2\2\u0382")
buf.write("\u0383\f\3\2\2\u0383\u0384\7b\2\2\u0384\u0386\5L\'\2\u0385")
buf.write("\u0382\3\2\2\2\u0386\u0389\3\2\2\2\u0387\u0385\3\2\2\2")
buf.write("\u0387\u0388\3\2\2\2\u0388O\3\2\2\2\u0389\u0387\3\2\2")
buf.write("\2\u038a\u038b\b)\1\2\u038b\u038c\5N(\2\u038c\u0392\3")
buf.write("\2\2\2\u038d\u038e\f\3\2\2\u038e\u038f\7a\2\2\u038f\u0391")
buf.write("\5N(\2\u0390\u038d\3\2\2\2\u0391\u0394\3\2\2\2\u0392\u0390")
buf.write("\3\2\2\2\u0392\u0393\3\2\2\2\u0393Q\3\2\2\2\u0394\u0392")
buf.write("\3\2\2\2\u0395\u0396\b*\1\2\u0396\u0397\5P)\2\u0397\u039d")
buf.write("\3\2\2\2\u0398\u0399\f\3\2\2\u0399\u039a\7c\2\2\u039a")
buf.write("\u039c\5P)\2\u039b\u0398\3\2\2\2\u039c\u039f\3\2\2\2\u039d")
buf.write("\u039b\3\2\2\2\u039d\u039e\3\2\2\2\u039eS\3\2\2\2\u039f")
buf.write("\u039d\3\2\2\2\u03a0\u03a1\b+\1\2\u03a1\u03a2\5R*\2\u03a2")
buf.write("\u03ab\3\2\2\2\u03a3\u03a4\f\4\2\2\u03a4\u03a5\7\5\2\2")
buf.write("\u03a5\u03aa\5R*\2\u03a6\u03a7\f\3\2\2\u03a7\u03a8\7\6")
buf.write("\2\2\u03a8\u03aa\5R*\2\u03a9\u03a3\3\2\2\2\u03a9\u03a6")
buf.write("\3\2\2\2\u03aa\u03ad\3\2\2\2\u03ab\u03a9\3\2\2\2\u03ab")
buf.write("\u03ac\3\2\2\2\u03acU\3\2\2\2\u03ad\u03ab\3\2\2\2\u03ae")
buf.write("\u03af\b,\1\2\u03af\u03b0\5T+\2\u03b0\u03b9\3\2\2\2\u03b1")
buf.write("\u03b2\f\4\2\2\u03b2\u03b3\7\7\2\2\u03b3\u03b8\5T+\2\u03b4")
buf.write("\u03b5\f\3\2\2\u03b5\u03b6\7\b\2\2\u03b6\u03b8\5T+\2\u03b7")
buf.write("\u03b1\3\2\2\2\u03b7\u03b4\3\2\2\2\u03b8\u03bb\3\2\2\2")
buf.write("\u03b9\u03b7\3\2\2\2\u03b9\u03ba\3\2\2\2\u03baW\3\2\2")
buf.write("\2\u03bb\u03b9\3\2\2\2\u03bc\u03c4\5V,\2\u03bd\u03be\5")
buf.write("V,\2\u03be\u03bf\7\u0080\2\2\u03bf\u03c0\5^\60\2\u03c0")
buf.write("\u03c1\7\u0081\2\2\u03c1\u03c2\5Z.\2\u03c2\u03c4\3\2\2")
buf.write("\2\u03c3\u03bc\3\2\2\2\u03c3\u03bd\3\2\2\2\u03c4Y\3\2")
buf.write("\2\2\u03c5\u03cc\5X-\2\u03c6\u03c7\5V,\2\u03c7\u03c8\5")
buf.write("\\/\2\u03c8\u03c9\5\u011e\u0090\2\u03c9\u03cc\3\2\2\2")
buf.write("\u03ca\u03cc\5\u017e\u00c0\2\u03cb\u03c5\3\2\2\2\u03cb")
buf.write("\u03c6\3\2\2\2\u03cb\u03ca\3\2\2\2\u03cc[\3\2\2\2\u03cd")
buf.write("\u03ce\t\5\2\2\u03ce]\3\2\2\2\u03cf\u03d0\b\60\1\2\u03d0")
buf.write("\u03d1\5Z.\2\u03d1\u03d7\3\2\2\2\u03d2\u03d3\f\3\2\2\u03d3")
buf.write("\u03d4\7}\2\2\u03d4\u03d6\5Z.\2\u03d5\u03d2\3\2\2\2\u03d6")
buf.write("\u03d9\3\2\2\2\u03d7\u03d5\3\2\2\2\u03d7\u03d8\3\2\2\2")
buf.write("\u03d8_\3\2\2\2\u03d9\u03d7\3\2\2\2\u03da\u03db\5X-\2")
buf.write("\u03dba\3\2\2\2\u03dc\u03f7\5d\63\2\u03dd\u03df\5\u00d6")
buf.write("l\2\u03de\u03dd\3\2\2\2\u03de\u03df\3\2\2\2\u03df\u03e0")
buf.write("\3\2\2\2\u03e0\u03f7\5f\64\2\u03e1\u03e3\5\u00d6l\2\u03e2")
buf.write("\u03e1\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3\u03e4\3\2\2\2")
buf.write("\u03e4\u03f7\5h\65\2\u03e5\u03e7\5\u00d6l\2\u03e6\u03e5")
buf.write("\3\2\2\2\u03e6\u03e7\3\2\2\2\u03e7\u03e8\3\2\2\2\u03e8")
buf.write("\u03f7\5l\67\2\u03e9\u03eb\5\u00d6l\2\u03ea\u03e9\3\2")
buf.write("\2\2\u03ea\u03eb\3\2\2\2\u03eb\u03ec\3\2\2\2\u03ec\u03f7")
buf.write("\5p9\2\u03ed\u03ef\5\u00d6l\2\u03ee\u03ed\3\2\2\2\u03ee")
buf.write("\u03ef\3\2\2\2\u03ef\u03f0\3\2\2\2\u03f0\u03f7\5x=\2\u03f1")
buf.write("\u03f7\5z>\2\u03f2\u03f4\5\u00d6l\2\u03f3\u03f2\3\2\2")
buf.write("\2\u03f3\u03f4\3\2\2\2\u03f4\u03f5\3\2\2\2\u03f5\u03f7")
buf.write("\5\u0174\u00bb\2\u03f6\u03dc\3\2\2\2\u03f6\u03de\3\2\2")
buf.write("\2\u03f6\u03e2\3\2\2\2\u03f6\u03e6\3\2\2\2\u03f6\u03ea")
buf.write("\3\2\2\2\u03f6\u03ee\3\2\2\2\u03f6\u03f1\3\2\2\2\u03f6")
buf.write("\u03f3\3\2\2\2\u03f7c\3\2\2\2\u03f8\u03fa\5\u00d6l\2\u03f9")
buf.write("\u03f8\3\2\2\2\u03f9\u03fa\3\2\2\2\u03fa\u03fb\3\2\2\2")
buf.write("\u03fb\u03fc\7\u0087\2\2\u03fc\u03fd\7\u0081\2\2\u03fd")
buf.write("\u040d\5b\62\2\u03fe\u0400\5\u00d6l\2\u03ff\u03fe\3\2")
buf.write("\2\2\u03ff\u0400\3\2\2\2\u0400\u0401\3\2\2\2\u0401\u0402")
buf.write("\7\21\2\2\u0402\u0403\5`\61\2\u0403\u0404\7\u0081\2\2")
buf.write("\u0404\u0405\5b\62\2\u0405\u040d\3\2\2\2\u0406\u0408\5")
buf.write("\u00d6l\2\u0407\u0406\3\2\2\2\u0407\u0408\3\2\2\2\u0408")
buf.write("\u0409\3\2\2\2\u0409\u040a\7\34\2\2\u040a\u040b\7\u0081")
buf.write("\2\2\u040b\u040d\5b\62\2\u040c\u03f9\3\2\2\2\u040c\u03ff")
buf.write("\3\2\2\2\u040c\u0407\3\2\2\2\u040de\3\2\2\2\u040e\u0410")
buf.write("\5^\60\2\u040f\u040e\3\2\2\2\u040f\u0410\3\2\2\2\u0410")
buf.write("\u0411\3\2\2\2\u0411\u0412\7\u0083\2\2\u0412g\3\2\2\2")
buf.write("\u0413\u0415\7Z\2\2\u0414\u0416\5j\66\2\u0415\u0414\3")
buf.write("\2\2\2\u0415\u0416\3\2\2\2\u0416\u0417\3\2\2\2\u0417\u0418")
buf.write("\7[\2\2\u0418i\3\2\2\2\u0419\u041a\b\66\1\2\u041a\u041b")
buf.write("\5b\62\2\u041b\u0420\3\2\2\2\u041c\u041d\f\3\2\2\u041d")
buf.write("\u041f\5b\62\2\u041e\u041c\3\2\2\2\u041f\u0422\3\2\2\2")
buf.write("\u0420\u041e\3\2\2\2\u0420\u0421\3\2\2\2\u0421k\3\2\2")
buf.write("\2\u0422\u0420\3\2\2\2\u0423\u0424\7,\2\2\u0424\u0425")
buf.write("\7V\2\2\u0425\u0426\5n8\2\u0426\u0427\7W\2\2\u0427\u0428")
buf.write("\5b\62\2\u0428\u0438\3\2\2\2\u0429\u042a\7,\2\2\u042a")
buf.write("\u042b\7V\2\2\u042b\u042c\5n8\2\u042c\u042d\7W\2\2\u042d")
buf.write("\u042e\5b\62\2\u042e\u042f\7!\2\2\u042f\u0430\5b\62\2")
buf.write("\u0430\u0438\3\2\2\2\u0431\u0432\7D\2\2\u0432\u0433\7")
buf.write("V\2\2\u0433\u0434\5n8\2\u0434\u0435\7W\2\2\u0435\u0436")
buf.write("\5b\62\2\u0436\u0438\3\2\2\2\u0437\u0423\3\2\2\2\u0437")
buf.write("\u0429\3\2\2\2\u0437\u0431\3\2\2\2\u0438m\3\2\2\2\u0439")
buf.write("\u044a\5^\60\2\u043a\u043c\5\u00d6l\2\u043b\u043a\3\2")
buf.write("\2\2\u043b\u043c\3\2\2\2\u043c\u043d\3\2\2\2\u043d\u043e")
buf.write("\5\u008eH\2\u043e\u043f\5\u00f0y\2\u043f\u0440\7f\2\2")
buf.write("\u0440\u0441\5\u011e\u0090\2\u0441\u044a\3\2\2\2\u0442")
buf.write("\u0444\5\u00d6l\2\u0443\u0442\3\2\2\2\u0443\u0444\3\2")
buf.write("\2\2\u0444\u0445\3\2\2\2\u0445\u0446\5\u008eH\2\u0446")
buf.write("\u0447\5\u00f0y\2\u0447\u0448\5\u0122\u0092\2\u0448\u044a")
buf.write("\3\2\2\2\u0449\u0439\3\2\2\2\u0449\u043b\3\2\2\2\u0449")
buf.write("\u0443\3\2\2\2\u044ao\3\2\2\2\u044b\u044c\7U\2\2\u044c")
buf.write("\u044d\7V\2\2\u044d\u044e\5n8\2\u044e\u044f\7W\2\2\u044f")
buf.write("\u0450\5b\62\2\u0450\u046f\3\2\2\2\u0451\u0452\7\36\2")
buf.write("\2\u0452\u0453\5b\62\2\u0453\u0454\7U\2\2\u0454\u0455")
buf.write("\7V\2\2\u0455\u0456\5^\60\2\u0456\u0457\7W\2\2\u0457\u0458")
buf.write("\7\u0083\2\2\u0458\u046f\3\2\2\2\u0459\u045a\7)\2\2\u045a")
buf.write("\u045b\7V\2\2\u045b\u045d\5r:\2\u045c\u045e\5n8\2\u045d")
buf.write("\u045c\3\2\2\2\u045d\u045e\3\2\2\2\u045e\u045f\3\2\2\2")
buf.write("\u045f\u0461\7\u0083\2\2\u0460\u0462\5^\60\2\u0461\u0460")
buf.write("\3\2\2\2\u0461\u0462\3\2\2\2\u0462\u0463\3\2\2\2\u0463")
buf.write("\u0464\7W\2\2\u0464\u0465\5b\62\2\u0465\u046f\3\2\2\2")
buf.write("\u0466\u0467\7)\2\2\u0467\u0468\7V\2\2\u0468\u0469\5t")
buf.write(";\2\u0469\u046a\7\u0081\2\2\u046a\u046b\5v<\2\u046b\u046c")
buf.write("\7W\2\2\u046c\u046d\5b\62\2\u046d\u046f\3\2\2\2\u046e")
buf.write("\u044b\3\2\2\2\u046e\u0451\3\2\2\2\u046e\u0459\3\2\2\2")
buf.write("\u046e\u0466\3\2\2\2\u046fq\3\2\2\2\u0470\u0473\5f\64")
buf.write("\2\u0471\u0473\5\u0084C\2\u0472\u0470\3\2\2\2\u0472\u0471")
buf.write("\3\2\2\2\u0473s\3\2\2\2\u0474\u0476\5\u00d6l\2\u0475\u0474")
buf.write("\3\2\2\2\u0475\u0476\3\2\2\2\u0476\u0477\3\2\2\2\u0477")
buf.write("\u0478\5\u008eH\2\u0478\u0479\5\u00f0y\2\u0479u\3\2\2")
buf.write("\2\u047a\u047d\5^\60\2\u047b\u047d\5\u0122\u0092\2\u047c")
buf.write("\u047a\3\2\2\2\u047c\u047b\3\2\2\2\u047dw\3\2\2\2\u047e")
buf.write("\u047f\7\20\2\2\u047f\u048f\7\u0083\2\2\u0480\u0481\7")
buf.write("\32\2\2\u0481\u048f\7\u0083\2\2\u0482\u0484\7<\2\2\u0483")
buf.write("\u0485\5^\60\2\u0484\u0483\3\2\2\2\u0484\u0485\3\2\2\2")
buf.write("\u0485\u0486\3\2\2\2\u0486\u048f\7\u0083\2\2\u0487\u0488")
buf.write("\7<\2\2\u0488\u0489\5\u0122\u0092\2\u0489\u048a\7\u0083")
buf.write("\2\2\u048a\u048f\3\2\2\2\u048b\u048c\7+\2\2\u048c\u048d")
buf.write("\7\u0087\2\2\u048d\u048f\7\u0083\2\2\u048e\u047e\3\2\2")
buf.write("\2\u048e\u0480\3\2\2\2\u048e\u0482\3\2\2\2\u048e\u0487")
buf.write("\3\2\2\2\u048e\u048b\3\2\2\2\u048fy\3\2\2\2\u0490\u0491")
buf.write("\5\u0080A\2\u0491{\3\2\2\2\u0492\u0493\b?\1\2\u0493\u0494")
buf.write("\5~@\2\u0494\u0499\3\2\2\2\u0495\u0496\f\3\2\2\u0496\u0498")
buf.write("\5~@\2\u0497\u0495\3\2\2\2\u0498\u049b\3\2\2\2\u0499\u0497")
buf.write("\3\2\2\2\u0499\u049a\3\2\2\2\u049a}\3\2\2\2\u049b\u0499")
buf.write("\3\2\2\2\u049c\u04a6\5\u0080A\2\u049d\u04a6\5\u0116\u008c")
buf.write("\2\u049e\u04a6\5\u015c\u00af\2\u049f\u04a6\5\u0170\u00b9")
buf.write("\2\u04a0\u04a6\5\u0172\u00ba\2\u04a1\u04a6\5\u00d4k\2")
buf.write("\u04a2\u04a6\5\u00bc_\2\u04a3\u04a6\5\u0088E\2\u04a4\u04a6")
buf.write("\5\u008aF\2\u04a5\u049c\3\2\2\2\u04a5\u049d\3\2\2\2\u04a5")
buf.write("\u049e\3\2\2\2\u04a5\u049f\3\2\2\2\u04a5\u04a0\3\2\2\2")
buf.write("\u04a5\u04a1\3\2\2\2\u04a5\u04a2\3\2\2\2\u04a5\u04a3\3")
buf.write("\2\2\2\u04a5\u04a4\3\2\2\2\u04a6\177\3\2\2\2\u04a7\u04b0")
buf.write("\5\u0084C\2\u04a8\u04b0\5\u00d2j\2\u04a9\u04b0\5\u00ca")
buf.write("f\2\u04aa\u04b0\5\u00ceh\2\u04ab\u04b0\5\u00d0i\2\u04ac")
buf.write("\u04b0\5\u0086D\2\u04ad\u04b0\5\u0082B\2\u04ae\u04b0\5")
buf.write("\u00acW\2\u04af\u04a7\3\2\2\2\u04af\u04a8\3\2\2\2\u04af")
buf.write("\u04a9\3\2\2\2\u04af\u04aa\3\2\2\2\u04af\u04ab\3\2\2\2")
buf.write("\u04af\u04ac\3\2\2\2\u04af\u04ad\3\2\2\2\u04af\u04ae\3")
buf.write("\2\2\2\u04b0\u0081\3\2\2\2\u04b1\u04b2\7P\2\2\u04b2\u04b4")
buf.write("\7\u0087\2\2\u04b3\u04b5\5\u00d6l\2\u04b4\u04b3\3\2\2")
buf.write("\2\u04b4\u04b5\3\2\2\2\u04b5\u04b6\3\2\2\2\u04b6\u04b7")
buf.write("\7f\2\2\u04b7\u04b8\5\u0104\u0083\2\u04b8\u04b9\7\u0083")
buf.write("\2\2\u04b9\u0083\3\2\2\2\u04ba\u04bc\5\u008eH\2\u04bb")
buf.write("\u04ba\3\2\2\2\u04bb\u04bc\3\2\2\2\u04bc\u04be\3\2\2\2")
buf.write("\u04bd\u04bf\5\u00ecw\2\u04be\u04bd\3\2\2\2\u04be\u04bf")
buf.write("\3\2\2\2\u04bf\u04c0\3\2\2\2\u04c0\u04c9\7\u0083\2\2\u04c1")
buf.write("\u04c3\5\u00d6l\2\u04c2\u04c4\5\u008eH\2\u04c3\u04c2\3")
buf.write("\2\2\2\u04c3\u04c4\3\2\2\2\u04c4\u04c5\3\2\2\2\u04c5\u04c6")
buf.write("\5\u00ecw\2\u04c6\u04c7\7\u0083\2\2\u04c7\u04c9\3\2\2")
buf.write("\2\u04c8\u04bb\3\2\2\2\u04c8\u04c1\3\2\2\2\u04c9\u0085")
buf.write("\3\2\2\2\u04ca\u04cb\7A\2\2\u04cb\u04cc\7V\2\2\u04cc\u04cd")
buf.write("\5`\61\2\u04cd\u04ce\7}\2\2\u04ce\u04cf\7\u0090\2\2\u04cf")
buf.write("\u04d0\7W\2\2\u04d0\u04d1\7\u0083\2\2\u04d1\u0087\3\2")
buf.write("\2\2\u04d2\u04d3\7\u0083\2\2\u04d3\u0089\3\2\2\2\u04d4")
buf.write("\u04d5\5\u00d6l\2\u04d5\u04d6\7\u0083\2\2\u04d6\u008b")
buf.write("\3\2\2\2\u04d7\u04de\5\u0090I\2\u04d8\u04de\5\u0096L\2")
buf.write("\u04d9\u04de\5\u0092J\2\u04da\u04de\7*\2\2\u04db\u04de")
buf.write("\7K\2\2\u04dc\u04de\7\30\2\2\u04dd\u04d7\3\2\2\2\u04dd")
buf.write("\u04d8\3\2\2\2\u04dd\u04d9\3\2\2\2\u04dd\u04da\3\2\2\2")
buf.write("\u04dd\u04db\3\2\2\2\u04dd\u04dc\3\2\2\2\u04de\u008d\3")
buf.write("\2\2\2\u04df\u04e1\5\u008cG\2\u04e0\u04e2\5\u00d6l\2\u04e1")
buf.write("\u04e0\3\2\2\2\u04e1\u04e2\3\2\2\2\u04e2\u04e7\3\2\2\2")
buf.write("\u04e3\u04e4\5\u008cG\2\u04e4\u04e5\5\u008eH\2\u04e5\u04e7")
buf.write("\3\2\2\2\u04e6\u04df\3\2\2\2\u04e6\u04e3\3\2\2\2\u04e7")
buf.write("\u008f\3\2\2\2\u04e8\u04e9\t\6\2\2\u04e9\u0091\3\2\2\2")
buf.write("\u04ea\u04eb\t\7\2\2\u04eb\u0093\3\2\2\2\u04ec\u04ed\7")
buf.write("\u0087\2\2\u04ed\u0095\3\2\2\2\u04ee\u04f2\5\u0098M\2")
buf.write("\u04ef\u04f2\5\u0126\u0094\2\u04f0\u04f2\5\u00a8U\2\u04f1")
buf.write("\u04ee\3\2\2\2\u04f1\u04ef\3\2\2\2\u04f1\u04f0\3\2\2\2")
buf.write("\u04f2\u0097\3\2\2\2\u04f3\u04f8\5\u009eP\2\u04f4\u04f8")
buf.write("\5\u00a4S\2\u04f5\u04f8\5\u016e\u00b8\2\u04f6\u04f8\5")
buf.write("\u00fe\u0080\2\u04f7\u04f3\3\2\2\2\u04f7\u04f4\3\2\2\2")
buf.write("\u04f7\u04f5\3\2\2\2\u04f7\u04f6\3\2\2\2\u04f8\u0099\3")
buf.write("\2\2\2\u04f9\u04fb\5\u0096L\2\u04fa\u04fc\5\u00d6l\2\u04fb")
buf.write("\u04fa\3\2\2\2\u04fb\u04fc\3\2\2\2\u04fc\u0501\3\2\2\2")
buf.write("\u04fd\u04fe\5\u0096L\2\u04fe\u04ff\5\u009aN\2\u04ff\u0501")
buf.write("\3\2\2\2\u0500\u04f9\3\2\2\2\u0500\u04fd\3\2\2\2\u0501")
buf.write("\u009b\3\2\2\2\u0502\u0504\5\u0098M\2\u0503\u0505\5\u00d6")
buf.write("l\2\u0504\u0503\3\2\2\2\u0504\u0505\3\2\2\2\u0505\u050a")
buf.write("\3\2\2\2\u0506\u0507\5\u0098M\2\u0507\u0508\5\u009cO\2")
buf.write("\u0508\u050a\3\2\2\2\u0509\u0502\3\2\2\2\u0509\u0506\3")
buf.write("\2\2\2\u050a\u009d\3\2\2\2\u050b\u050d\5\f\7\2\u050c\u050b")
buf.write("\3\2\2\2\u050c\u050d\3\2\2\2\u050d\u050e\3\2\2\2\u050e")
buf.write("\u0523\5\u00a0Q\2\u050f\u0510\5\f\7\2\u0510\u0511\7E\2")
buf.write("\2\u0511\u0512\5\u0164\u00b3\2\u0512\u0523\3\2\2\2\u0513")
buf.write("\u0523\7\23\2\2\u0514\u0523\7\24\2\2\u0515\u0523\7\25")
buf.write("\2\2\u0516\u0523\7T\2\2\u0517\u0523\7\17\2\2\u0518\u0523")
buf.write("\7=\2\2\u0519\u0523\7.\2\2\u051a\u0523\7/\2\2\u051b\u0523")
buf.write("\7>\2\2\u051c\u0523\7O\2\2\u051d\u0523\7(\2\2\u051e\u0523")
buf.write("\7\37\2\2\u051f\u0523\7R\2\2\u0520\u0523\7\16\2\2\u0521")
buf.write("\u0523\5\u00a2R\2\u0522\u050c\3\2\2\2\u0522\u050f\3\2")
buf.write("\2\2\u0522\u0513\3\2\2\2\u0522\u0514\3\2\2\2\u0522\u0515")
buf.write("\3\2\2\2\u0522\u0516\3\2\2\2\u0522\u0517\3\2\2\2\u0522")
buf.write("\u0518\3\2\2\2\u0522\u0519\3\2\2\2\u0522\u051a\3\2\2\2")
buf.write("\u0522\u051b\3\2\2\2\u0522\u051c\3\2\2\2\u0522\u051d\3")
buf.write("\2\2\2\u0522\u051e\3\2\2\2\u0522\u051f\3\2\2\2\u0522\u0520")
buf.write("\3\2\2\2\u0522\u0521\3\2\2\2\u0523\u009f\3\2\2\2\u0524")
buf.write("\u0529\5\u0124\u0093\2\u0525\u0529\5\u00a6T\2\u0526\u0529")
buf.write("\5\u0094K\2\u0527\u0529\5\u0164\u00b3\2\u0528\u0524\3")
buf.write("\2\2\2\u0528\u0525\3\2\2\2\u0528\u0526\3\2\2\2\u0528\u0527")
buf.write("\3\2\2\2\u0529\u00a1\3\2\2\2\u052a\u052b\7\33\2\2\u052b")
buf.write("\u052c\7V\2\2\u052c\u052d\5^\60\2\u052d\u052e\7W\2\2\u052e")
buf.write("\u0534\3\2\2\2\u052f\u0530\7\33\2\2\u0530\u0531\7V\2\2")
buf.write("\u0531\u0532\7\16\2\2\u0532\u0534\7W\2\2\u0533\u052a\3")
buf.write("\2\2\2\u0533\u052f\3\2\2\2\u0534\u00a3\3\2\2\2\u0535\u0537")
buf.write("\5\u012e\u0098\2\u0536\u0538\5\u00d6l\2\u0537\u0536\3")
buf.write("\2\2\2\u0537\u0538\3\2\2\2\u0538\u053a\3\2\2\2\u0539\u053b")
buf.write("\5\f\7\2\u053a\u0539\3\2\2\2\u053a\u053b\3\2\2\2\u053b")
buf.write("\u053c\3\2\2\2\u053c\u053d\7\u0087\2\2\u053d\u054e\3\2")
buf.write("\2\2\u053e\u053f\5\u012e\u0098\2\u053f\u0540\5\u0164\u00b3")
buf.write("\2\u0540\u054e\3\2\2\2\u0541\u0542\5\u012e\u0098\2\u0542")
buf.write("\u0544\5\f\7\2\u0543\u0545\7E\2\2\u0544\u0543\3\2\2\2")
buf.write("\u0544\u0545\3\2\2\2\u0545\u0546\3\2\2\2\u0546\u0547\5")
buf.write("\u0164\u00b3\2\u0547\u054e\3\2\2\2\u0548\u054a\7\"\2\2")
buf.write("\u0549\u054b\5\f\7\2\u054a\u0549\3\2\2\2\u054a\u054b\3")
buf.write("\2\2\2\u054b\u054c\3\2\2\2\u054c\u054e\7\u0087\2\2\u054d")
buf.write("\u0535\3\2\2\2\u054d\u053e\3\2\2\2\u054d\u0541\3\2\2\2")
buf.write("\u054d\u0548\3\2\2\2\u054e\u00a5\3\2\2\2\u054f\u0550\7")
buf.write("\u0087\2\2\u0550\u00a7\3\2\2\2\u0551\u0552\5\u00aaV\2")
buf.write("\u0552\u0554\7Z\2\2\u0553\u0555\5\u00b2Z\2\u0554\u0553")
buf.write("\3\2\2\2\u0554\u0555\3\2\2\2\u0555\u0556\3\2\2\2\u0556")
buf.write("\u0557\7[\2\2\u0557\u055f\3\2\2\2\u0558\u0559\5\u00aa")
buf.write("V\2\u0559\u055a\7Z\2\2\u055a\u055b\5\u00b2Z\2\u055b\u055c")
buf.write("\7}\2\2\u055c\u055d\7[\2\2\u055d\u055f\3\2\2\2\u055e\u0551")
buf.write("\3\2\2\2\u055e\u0558\3\2\2\2\u055f\u00a9\3\2\2\2\u0560")
buf.write("\u0562\5\u00aeX\2\u0561\u0563\5\u00d6l\2\u0562\u0561\3")
buf.write("\2\2\2\u0562\u0563\3\2\2\2\u0563\u0565\3\2\2\2\u0564\u0566")
buf.write("\7\u0087\2\2\u0565\u0564\3\2\2\2\u0565\u0566\3\2\2\2\u0566")
buf.write("\u0568\3\2\2\2\u0567\u0569\5\u00b0Y\2\u0568\u0567\3\2")
buf.write("\2\2\u0568\u0569\3\2\2\2\u0569\u0574\3\2\2\2\u056a\u056c")
buf.write("\5\u00aeX\2\u056b\u056d\5\u00d6l\2\u056c\u056b\3\2\2\2")
buf.write("\u056c\u056d\3\2\2\2\u056d\u056e\3\2\2\2\u056e\u056f\5")
buf.write("\f\7\2\u056f\u0571\7\u0087\2\2\u0570\u0572\5\u00b0Y\2")
buf.write("\u0571\u0570\3\2\2\2\u0571\u0572\3\2\2\2\u0572\u0574\3")
buf.write("\2\2\2\u0573\u0560\3\2\2\2\u0573\u056a\3\2\2\2\u0574\u00ab")
buf.write("\3\2\2\2\u0575\u0577\5\u00aeX\2\u0576\u0578\5\u00d6l\2")
buf.write("\u0577\u0576\3\2\2\2\u0577\u0578\3\2\2\2\u0578\u0579\3")
buf.write("\2\2\2\u0579\u057b\7\u0087\2\2\u057a\u057c\5\u00b0Y\2")
buf.write("\u057b\u057a\3\2\2\2\u057b\u057c\3\2\2\2\u057c\u057d\3")
buf.write("\2\2\2\u057d\u057e\7\u0083\2\2\u057e\u00ad\3\2\2\2\u057f")
buf.write("\u0585\7\"\2\2\u0580\u0581\7\"\2\2\u0581\u0585\7\26\2")
buf.write("\2\u0582\u0583\7\"\2\2\u0583\u0585\7C\2\2\u0584\u057f")
buf.write("\3\2\2\2\u0584\u0580\3\2\2\2\u0584\u0582\3\2\2\2\u0585")
buf.write("\u00af\3\2\2\2\u0586\u0587\7\u0081\2\2\u0587\u0588\5\u009a")
buf.write("N\2\u0588\u00b1\3\2\2\2\u0589\u058a\bZ\1\2\u058a\u058b")
buf.write("\5\u00b4[\2\u058b\u0591\3\2\2\2\u058c\u058d\f\3\2\2\u058d")
buf.write("\u058e\7}\2\2\u058e\u0590\5\u00b4[\2\u058f\u058c\3\2\2")
buf.write("\2\u0590\u0593\3\2\2\2\u0591\u058f\3\2\2\2\u0591\u0592")
buf.write("\3\2\2\2\u0592\u00b3\3\2\2\2\u0593\u0591\3\2\2\2\u0594")
buf.write("\u059a\5\u00b6\\\2\u0595\u0596\5\u00b6\\\2\u0596\u0597")
buf.write("\7f\2\2\u0597\u0598\5`\61\2\u0598\u059a\3\2\2\2\u0599")
buf.write("\u0594\3\2\2\2\u0599\u0595\3\2\2\2\u059a\u00b5\3\2\2\2")
buf.write("\u059b\u059c\7\u0087\2\2\u059c\u00b7\3\2\2\2\u059d\u05a0")
buf.write("\5\u00ba^\2\u059e\u05a0\5\u00c8e\2\u059f\u059d\3\2\2\2")
buf.write("\u059f\u059e\3\2\2\2\u05a0\u00b9\3\2\2\2\u05a1\u05a2\7")
buf.write("\u0087\2\2\u05a2\u00bb\3\2\2\2\u05a3\u05a6\5\u00be`\2")
buf.write("\u05a4\u05a6\5\u00c4c\2\u05a5\u05a3\3\2\2\2\u05a5\u05a4")
buf.write("\3\2\2\2\u05a6\u00bd\3\2\2\2\u05a7\u05aa\5\u00c0a\2\u05a8")
buf.write("\u05aa\5\u00c2b\2\u05a9\u05a7\3\2\2\2\u05a9\u05a8\3\2")
buf.write("\2\2\u05aa\u00bf\3\2\2\2\u05ab\u05ad\7-\2\2\u05ac\u05ab")
buf.write("\3\2\2\2\u05ac\u05ad\3\2\2\2\u05ad\u05ae\3\2\2\2\u05ae")
buf.write("\u05af\7\61\2\2\u05af\u05b0\7\u0087\2\2\u05b0\u05b1\7")
buf.write("Z\2\2\u05b1\u05b2\5\u00c6d\2\u05b2\u05b3\7[\2\2\u05b3")
buf.write("\u00c1\3\2\2\2\u05b4\u05b6\7-\2\2\u05b5\u05b4\3\2\2\2")
buf.write("\u05b5\u05b6\3\2\2\2\u05b6\u05b7\3\2\2\2\u05b7\u05b8\7")
buf.write("\61\2\2\u05b8\u05b9\5\u00ba^\2\u05b9\u05ba\7Z\2\2\u05ba")
buf.write("\u05bb\5\u00c6d\2\u05bb\u05bc\7[\2\2\u05bc\u00c3\3\2\2")
buf.write("\2\u05bd\u05bf\7-\2\2\u05be\u05bd\3\2\2\2\u05be\u05bf")
buf.write("\3\2\2\2\u05bf\u05c0\3\2\2\2\u05c0\u05c1\7\61\2\2\u05c1")
buf.write("\u05c2\7Z\2\2\u05c2\u05c3\5\u00c6d\2\u05c3\u05c4\7[\2")
buf.write("\2\u05c4\u00c5\3\2\2\2\u05c5\u05c7\5|?\2\u05c6\u05c5\3")
buf.write("\2\2\2\u05c6\u05c7\3\2\2\2\u05c7\u00c7\3\2\2\2\u05c8\u05c9")
buf.write("\7\u0087\2\2\u05c9\u00c9\3\2\2\2\u05ca\u05cb\7\61\2\2")
buf.write("\u05cb\u05cc\7\u0087\2\2\u05cc\u05cd\7f\2\2\u05cd\u05ce")
buf.write("\5\u00ccg\2\u05ce\u05cf\7\u0083\2\2\u05cf\u00cb\3\2\2")
buf.write("\2\u05d0\u05d2\5\f\7\2\u05d1\u05d0\3\2\2\2\u05d1\u05d2")
buf.write("\3\2\2\2\u05d2\u05d3\3\2\2\2\u05d3\u05d4\5\u00b8]\2\u05d4")
buf.write("\u00cd\3\2\2\2\u05d5\u05d7\7P\2\2\u05d6\u05d8\7M\2\2\u05d7")
buf.write("\u05d6\3\2\2\2\u05d7\u05d8\3\2\2\2\u05d8\u05d9\3\2\2\2")
buf.write("\u05d9\u05da\5\f\7\2\u05da\u05db\5\b\5\2\u05db\u05dc\7")
buf.write("\u0083\2\2\u05dc\u05e3\3\2\2\2\u05dd\u05de\7P\2\2\u05de")
buf.write("\u05df\7\u0082\2\2\u05df\u05e0\5\b\5\2\u05e0\u05e1\7\u0083")
buf.write("\2\2\u05e1\u05e3\3\2\2\2\u05e2\u05d5\3\2\2\2\u05e2\u05dd")
buf.write("\3\2\2\2\u05e3\u00cf\3\2\2\2\u05e4\u05e6\5\u00d6l\2\u05e5")
buf.write("\u05e4\3\2\2\2\u05e5\u05e6\3\2\2\2\u05e6\u05e7\3\2\2\2")
buf.write("\u05e7\u05e8\7P\2\2\u05e8\u05ea\7\61\2\2\u05e9\u05eb\5")
buf.write("\f\7\2\u05ea\u05e9\3\2\2\2\u05ea\u05eb\3\2\2\2\u05eb\u05ec")
buf.write("\3\2\2\2\u05ec\u05ed\5\u00b8]\2\u05ed\u05ee\7\u0083\2")
buf.write("\2\u05ee\u00d1\3\2\2\2\u05ef\u05f0\7\r\2\2\u05f0\u05f1")
buf.write("\7V\2\2\u05f1\u05f2\7\u0090\2\2\u05f2\u05f3\7W\2\2\u05f3")
buf.write("\u05f4\7\u0083\2\2\u05f4\u00d3\3\2\2\2\u05f5\u05f6\7%")
buf.write("\2\2\u05f6\u05f7\7\u0090\2\2\u05f7\u05f9\7Z\2\2\u05f8")
buf.write("\u05fa\5|?\2\u05f9\u05f8\3\2\2\2\u05f9\u05fa\3\2\2\2\u05fa")
buf.write("\u05fb\3\2\2\2\u05fb\u0600\7[\2\2\u05fc\u05fd\7%\2\2\u05fd")
buf.write("\u05fe\7\u0090\2\2\u05fe\u0600\5~@\2\u05ff\u05f5\3\2\2")
buf.write("\2\u05ff\u05fc\3\2\2\2\u0600\u00d5\3\2\2\2\u0601\u0602")
buf.write("\bl\1\2\u0602\u0603\5\u00d8m\2\u0603\u0608\3\2\2\2\u0604")
buf.write("\u0605\f\3\2\2\u0605\u0607\5\u00d8m\2\u0606\u0604\3\2")
buf.write("\2\2\u0607\u060a\3\2\2\2\u0608\u0606\3\2\2\2\u0608\u0609")
buf.write("\3\2\2\2\u0609\u00d7\3\2\2\2\u060a\u0608\3\2\2\2\u060b")
buf.write("\u060c\7X\2\2\u060c\u060d\7X\2\2\u060d\u060e\5\u00dco")
buf.write("\2\u060e\u060f\7Y\2\2\u060f\u0610\7Y\2\2\u0610\u0613\3")
buf.write("\2\2\2\u0611\u0613\5\u00dan\2\u0612\u060b\3\2\2\2\u0612")
buf.write("\u0611\3\2\2\2\u0613\u00d9\3\2\2\2\u0614\u0615\7\13\2")
buf.write("\2\u0615\u0616\7V\2\2\u0616\u0618\5\u0104\u0083\2\u0617")
buf.write("\u0619\7\u0086\2\2\u0618\u0617\3\2\2\2\u0618\u0619\3\2")
buf.write("\2\2\u0619\u061a\3\2\2\2\u061a\u061b\7W\2\2\u061b\u0625")
buf.write("\3\2\2\2\u061c\u061d\7\13\2\2\u061d\u061e\7V\2\2\u061e")
buf.write("\u0620\5`\61\2\u061f\u0621\7\u0086\2\2\u0620\u061f\3\2")
buf.write("\2\2\u0620\u0621\3\2\2\2\u0621\u0622\3\2\2\2\u0622\u0623")
buf.write("\7W\2\2\u0623\u0625\3\2\2\2\u0624\u0614\3\2\2\2\u0624")
buf.write("\u061c\3\2\2\2\u0625\u00db\3\2\2\2\u0626\u0628\bo\1\2")
buf.write("\u0627\u0629\5\u00dep\2\u0628\u0627\3\2\2\2\u0628\u0629")
buf.write("\3\2\2\2\u0629\u062e\3\2\2\2\u062a\u062b\5\u00dep\2\u062b")
buf.write("\u062c\7\u0086\2\2\u062c\u062e\3\2\2\2\u062d\u0626\3\2")
buf.write("\2\2\u062d\u062a\3\2\2\2\u062e\u063b\3\2\2\2\u062f\u0630")
buf.write("\f\5\2\2\u0630\u0632\7}\2\2\u0631\u0633\5\u00dep\2\u0632")
buf.write("\u0631\3\2\2\2\u0632\u0633\3\2\2\2\u0633\u063a\3\2\2\2")
buf.write("\u0634\u0635\f\3\2\2\u0635\u0636\7}\2\2\u0636\u0637\5")
buf.write("\u00dep\2\u0637\u0638\7\u0086\2\2\u0638\u063a\3\2\2\2")
buf.write("\u0639\u062f\3\2\2\2\u0639\u0634\3\2\2\2\u063a\u063d\3")
buf.write("\2\2\2\u063b\u0639\3\2\2\2\u063b\u063c\3\2\2\2\u063c\u00dd")
buf.write("\3\2\2\2\u063d\u063b\3\2\2\2\u063e\u0640\5\u00e0q\2\u063f")
buf.write("\u0641\5\u00e6t\2\u0640\u063f\3\2\2\2\u0640\u0641\3\2")
buf.write("\2\2\u0641\u00df\3\2\2\2\u0642\u0645\7\u0087\2\2\u0643")
buf.write("\u0645\5\u00e2r\2\u0644\u0642\3\2\2\2\u0644\u0643\3\2")
buf.write("\2\2\u0645\u00e1\3\2\2\2\u0646\u0647\5\u00e4s\2\u0647")
buf.write("\u0648\7\u0082\2\2\u0648\u0649\7\u0087\2\2\u0649\u00e3")
buf.write("\3\2\2\2\u064a\u064b\7\u0087\2\2\u064b\u00e5\3\2\2\2\u064c")
buf.write("\u064d\7V\2\2\u064d\u064e\5\u00e8u\2\u064e\u064f\7W\2")
buf.write("\2\u064f\u00e7\3\2\2\2\u0650\u0652\bu\1\2\u0651\u0653")
buf.write("\5\u00eav\2\u0652\u0651\3\2\2\2\u0652\u0653\3\2\2\2\u0653")
buf.write("\u0658\3\2\2\2\u0654\u0655\f\3\2\2\u0655\u0657\5\u00ea")
buf.write("v\2\u0656\u0654\3\2\2\2\u0657\u065a\3\2\2\2\u0658\u0656")
buf.write("\3\2\2\2\u0658\u0659\3\2\2\2\u0659\u00e9\3\2\2\2\u065a")
buf.write("\u0658\3\2\2\2\u065b\u065c\7V\2\2\u065c\u065d\5\u00e8")
buf.write("u\2\u065d\u065e\7W\2\2\u065e\u066d\3\2\2\2\u065f\u0660")
buf.write("\7X\2\2\u0660\u0661\5\u00e8u\2\u0661\u0662\7Y\2\2\u0662")
buf.write("\u066d\3\2\2\2\u0663\u0664\7Z\2\2\u0664\u0665\5\u00e8")
buf.write("u\2\u0665\u0666\7[\2\2\u0666\u066d\3\2\2\2\u0667\u0669")
buf.write("\n\b\2\2\u0668\u0667\3\2\2\2\u0669\u066a\3\2\2\2\u066a")
buf.write("\u0668\3\2\2\2\u066a\u066b\3\2\2\2\u066b\u066d\3\2\2\2")
buf.write("\u066c\u065b\3\2\2\2\u066c\u065f\3\2\2\2\u066c\u0663\3")
buf.write("\2\2\2\u066c\u0668\3\2\2\2\u066d\u00eb\3\2\2\2\u066e\u066f")
buf.write("\bw\1\2\u066f\u0670\5\u00eex\2\u0670\u0676\3\2\2\2\u0671")
buf.write("\u0672\f\3\2\2\u0672\u0673\7}\2\2\u0673\u0675\5\u00ee")
buf.write("x\2\u0674\u0671\3\2\2\2\u0675\u0678\3\2\2\2\u0676\u0674")
buf.write("\3\2\2\2\u0676\u0677\3\2\2\2\u0677\u00ed\3\2\2\2\u0678")
buf.write("\u0676\3\2\2\2\u0679\u067b\5\u00f0y\2\u067a\u067c\5\u011a")
buf.write("\u008e\2\u067b\u067a\3\2\2\2\u067b\u067c\3\2\2\2\u067c")
buf.write("\u00ef\3\2\2\2\u067d\u0683\5\u00f2z\2\u067e\u067f\5\u00f4")
buf.write("{\2\u067f\u0680\5\u00f6|\2\u0680\u0681\5\u00f8}\2\u0681")
buf.write("\u0683\3\2\2\2\u0682\u067d\3\2\2\2\u0682\u067e\3\2\2\2")
buf.write("\u0683\u00f1\3\2\2\2\u0684\u0689\5\u00f4{\2\u0685\u0686")
buf.write("\5\u00fa~\2\u0686\u0687\5\u00f2z\2\u0687\u0689\3\2\2\2")
buf.write("\u0688\u0684\3\2\2\2\u0688\u0685\3\2\2\2\u0689\u00f3\3")
buf.write("\2\2\2\u068a\u068b\b{\1\2\u068b\u068d\5\u0102\u0082\2")
buf.write("\u068c\u068e\5\u00d6l\2\u068d\u068c\3\2\2\2\u068d\u068e")
buf.write("\3\2\2\2\u068e\u0694\3\2\2\2\u068f\u0690\7V\2\2\u0690")
buf.write("\u0691\5\u00f2z\2\u0691\u0692\7W\2\2\u0692\u0694\3\2\2")
buf.write("\2\u0693\u068a\3\2\2\2\u0693\u068f\3\2\2\2\u0694\u06a2")
buf.write("\3\2\2\2\u0695\u0696\f\5\2\2\u0696\u06a1\5\u00f6|\2\u0697")
buf.write("\u0698\f\4\2\2\u0698\u069a\7X\2\2\u0699\u069b\5`\61\2")
buf.write("\u069a\u0699\3\2\2\2\u069a\u069b\3\2\2\2\u069b\u069c\3")
buf.write("\2\2\2\u069c\u069e\7Y\2\2\u069d\u069f\5\u00d6l\2\u069e")
buf.write("\u069d\3\2\2\2\u069e\u069f\3\2\2\2\u069f\u06a1\3\2\2\2")
buf.write("\u06a0\u0695\3\2\2\2\u06a0\u0697\3\2\2\2\u06a1\u06a4\3")
buf.write("\2\2\2\u06a2\u06a0\3\2\2\2\u06a2\u06a3\3\2\2\2\u06a3\u00f5")
buf.write("\3\2\2\2\u06a4\u06a2\3\2\2\2\u06a5\u06a6\7V\2\2\u06a6")
buf.write("\u06a7\5\u0110\u0089\2\u06a7\u06a9\7W\2\2\u06a8\u06aa")
buf.write("\5\u00fc\177\2\u06a9\u06a8\3\2\2\2\u06a9\u06aa\3\2\2\2")
buf.write("\u06aa\u06ac\3\2\2\2\u06ab\u06ad\5\u0100\u0081\2\u06ac")
buf.write("\u06ab\3\2\2\2\u06ac\u06ad\3\2\2\2\u06ad\u06af\3\2\2\2")
buf.write("\u06ae\u06b0\5\u0180\u00c1\2\u06af\u06ae\3\2\2\2\u06af")
buf.write("\u06b0\3\2\2\2\u06b0\u06b2\3\2\2\2\u06b1\u06b3\5\u00d6")
buf.write("l\2\u06b2\u06b1\3\2\2\2\u06b2\u06b3\3\2\2\2\u06b3\u00f7")
buf.write("\3\2\2\2\u06b4\u06b5\7\177\2\2\u06b5\u06b7\5\u009cO\2")
buf.write("\u06b6\u06b8\5\u0106\u0084\2\u06b7\u06b6\3\2\2\2\u06b7")
buf.write("\u06b8\3\2\2\2\u06b8\u00f9\3\2\2\2\u06b9\u06bb\7^\2\2")
buf.write("\u06ba\u06bc\5\u00d6l\2\u06bb\u06ba\3\2\2\2\u06bb\u06bc")
buf.write("\3\2\2\2\u06bc\u06be\3\2\2\2\u06bd\u06bf\5\u00fc\177\2")
buf.write("\u06be\u06bd\3\2\2\2\u06be\u06bf\3\2\2\2\u06bf\u06d1\3")
buf.write("\2\2\2\u06c0\u06c2\7b\2\2\u06c1\u06c3\5\u00d6l\2\u06c2")
buf.write("\u06c1\3\2\2\2\u06c2\u06c3\3\2\2\2\u06c3\u06d1\3\2\2\2")
buf.write("\u06c4\u06c6\7\5\2\2\u06c5\u06c7\5\u00d6l\2\u06c6\u06c5")
buf.write("\3\2\2\2\u06c6\u06c7\3\2\2\2\u06c7\u06d1\3\2\2\2\u06c8")
buf.write("\u06c9\5\f\7\2\u06c9\u06cb\7^\2\2\u06ca\u06cc\5\u00d6")
buf.write("l\2\u06cb\u06ca\3\2\2\2\u06cb\u06cc\3\2\2\2\u06cc\u06ce")
buf.write("\3\2\2\2\u06cd\u06cf\5\u00fc\177\2\u06ce\u06cd\3\2\2\2")
buf.write("\u06ce\u06cf\3\2\2\2\u06cf\u06d1\3\2\2\2\u06d0\u06b9\3")
buf.write("\2\2\2\u06d0\u06c0\3\2\2\2\u06d0\u06c4\3\2\2\2\u06d0\u06c8")
buf.write("\3\2\2\2\u06d1\u00fb\3\2\2\2\u06d2\u06d4\5\u00fe\u0080")
buf.write("\2\u06d3\u06d5\5\u00fc\177\2\u06d4\u06d3\3\2\2\2\u06d4")
buf.write("\u06d5\3\2\2\2\u06d5\u00fd\3\2\2\2\u06d6\u06d7\t\t\2\2")
buf.write("\u06d7\u00ff\3\2\2\2\u06d8\u06d9\t\n\2\2\u06d9\u0101\3")
buf.write("\2\2\2\u06da\u06dc\7\u0086\2\2\u06db\u06da\3\2\2\2\u06db")
buf.write("\u06dc\3\2\2\2\u06dc\u06dd\3\2\2\2\u06dd\u06de\5\6\4\2")
buf.write("\u06de\u0103\3\2\2\2\u06df\u06e1\5\u009aN\2\u06e0\u06e2")
buf.write("\5\u0106\u0084\2\u06e1\u06e0\3\2\2\2\u06e1\u06e2\3\2\2")
buf.write("\2\u06e2\u0105\3\2\2\2\u06e3\u06ec\5\u0108\u0085\2\u06e4")
buf.write("\u06e6\5\u010a\u0086\2\u06e5\u06e4\3\2\2\2\u06e5\u06e6")
buf.write("\3\2\2\2\u06e6\u06e7\3\2\2\2\u06e7\u06e8\5\u00f6|\2\u06e8")
buf.write("\u06e9\5\u00f8}\2\u06e9\u06ec\3\2\2\2\u06ea\u06ec\5\u010c")
buf.write("\u0087\2\u06eb\u06e3\3\2\2\2\u06eb\u06e5\3\2\2\2\u06eb")
buf.write("\u06ea\3\2\2\2\u06ec\u0107\3\2\2\2\u06ed\u06f3\5\u010a")
buf.write("\u0086\2\u06ee\u06f0\5\u00fa~\2\u06ef\u06f1\5\u0108\u0085")
buf.write("\2\u06f0\u06ef\3\2\2\2\u06f0\u06f1\3\2\2\2\u06f1\u06f3")
buf.write("\3\2\2\2\u06f2\u06ed\3\2\2\2\u06f2\u06ee\3\2\2\2\u06f3")
buf.write("\u0109\3\2\2\2\u06f4\u06f5\b\u0086\1\2\u06f5\u0703\5\u00f6")
buf.write("|\2\u06f6\u06f8\7X\2\2\u06f7\u06f9\5`\61\2\u06f8\u06f7")
buf.write("\3\2\2\2\u06f8\u06f9\3\2\2\2\u06f9\u06fa\3\2\2\2\u06fa")
buf.write("\u06fc\7Y\2\2\u06fb\u06fd\5\u00d6l\2\u06fc\u06fb\3\2\2")
buf.write("\2\u06fc\u06fd\3\2\2\2\u06fd\u0703\3\2\2\2\u06fe\u06ff")
buf.write("\7V\2\2\u06ff\u0700\5\u0108\u0085\2\u0700\u0701\7W\2\2")
buf.write("\u0701\u0703\3\2\2\2\u0702\u06f4\3\2\2\2\u0702\u06f6\3")
buf.write("\2\2\2\u0702\u06fe\3\2\2\2\u0703\u0711\3\2\2\2\u0704\u0705")
buf.write("\f\7\2\2\u0705\u0710\5\u00f6|\2\u0706\u0707\f\5\2\2\u0707")
buf.write("\u0709\7X\2\2\u0708\u070a\5`\61\2\u0709\u0708\3\2\2\2")
buf.write("\u0709\u070a\3\2\2\2\u070a\u070b\3\2\2\2\u070b\u070d\7")
buf.write("Y\2\2\u070c\u070e\5\u00d6l\2\u070d\u070c\3\2\2\2\u070d")
buf.write("\u070e\3\2\2\2\u070e\u0710\3\2\2\2\u070f\u0704\3\2\2\2")
buf.write("\u070f\u0706\3\2\2\2\u0710\u0713\3\2\2\2\u0711\u070f\3")
buf.write("\2\2\2\u0711\u0712\3\2\2\2\u0712\u010b\3\2\2\2\u0713\u0711")
buf.write("\3\2\2\2\u0714\u0719\5\u010e\u0088\2\u0715\u0716\5\u00fa")
buf.write("~\2\u0716\u0717\5\u010c\u0087\2\u0717\u0719\3\2\2\2\u0718")
buf.write("\u0714\3\2\2\2\u0718\u0715\3\2\2\2\u0719\u010d\3\2\2\2")
buf.write("\u071a\u071b\b\u0088\1\2\u071b\u071c\7\u0086\2\2\u071c")
buf.write("\u072a\3\2\2\2\u071d\u071e\f\5\2\2\u071e\u0729\5\u00f6")
buf.write("|\2\u071f\u0720\f\4\2\2\u0720\u0722\7X\2\2\u0721\u0723")
buf.write("\5`\61\2\u0722\u0721\3\2\2\2\u0722\u0723\3\2\2\2\u0723")
buf.write("\u0724\3\2\2\2\u0724\u0726\7Y\2\2\u0725\u0727\5\u00d6")
buf.write("l\2\u0726\u0725\3\2\2\2\u0726\u0727\3\2\2\2\u0727\u0729")
buf.write("\3\2\2\2\u0728\u071d\3\2\2\2\u0728\u071f\3\2\2\2\u0729")
buf.write("\u072c\3\2\2\2\u072a\u0728\3\2\2\2\u072a\u072b\3\2\2\2")
buf.write("\u072b\u010f\3\2\2\2\u072c\u072a\3\2\2\2\u072d\u072f\5")
buf.write("\u0112\u008a\2\u072e\u072d\3\2\2\2\u072e\u072f\3\2\2\2")
buf.write("\u072f\u0731\3\2\2\2\u0730\u0732\7\u0086\2\2\u0731\u0730")
buf.write("\3\2\2\2\u0731\u0732\3\2\2\2\u0732\u0738\3\2\2\2\u0733")
buf.write("\u0734\5\u0112\u008a\2\u0734\u0735\7}\2\2\u0735\u0736")
buf.write("\7\u0086\2\2\u0736\u0738\3\2\2\2\u0737\u072e\3\2\2\2\u0737")
buf.write("\u0733\3\2\2\2\u0738\u0111\3\2\2\2\u0739\u073a\b\u008a")
buf.write("\1\2\u073a\u073b\5\u0114\u008b\2\u073b\u0741\3\2\2\2\u073c")
buf.write("\u073d\f\3\2\2\u073d\u073e\7}\2\2\u073e\u0740\5\u0114")
buf.write("\u008b\2\u073f\u073c\3\2\2\2\u0740\u0743\3\2\2\2\u0741")
buf.write("\u073f\3\2\2\2\u0741\u0742\3\2\2\2\u0742\u0113\3\2\2\2")
buf.write("\u0743\u0741\3\2\2\2\u0744\u0746\5\u00d6l\2\u0745\u0744")
buf.write("\3\2\2\2\u0745\u0746\3\2\2\2\u0746\u0747\3\2\2\2\u0747")
buf.write("\u0748\5\u008eH\2\u0748\u0749\5\u00f0y\2\u0749\u0764\3")
buf.write("\2\2\2\u074a\u074c\5\u00d6l\2\u074b\u074a\3\2\2\2\u074b")
buf.write("\u074c\3\2\2\2\u074c\u074d\3\2\2\2\u074d\u074e\5\u008e")
buf.write("H\2\u074e\u074f\5\u00f0y\2\u074f\u0750\7f\2\2\u0750\u0751")
buf.write("\5\u011e\u0090\2\u0751\u0764\3\2\2\2\u0752\u0754\5\u00d6")
buf.write("l\2\u0753\u0752\3\2\2\2\u0753\u0754\3\2\2\2\u0754\u0755")
buf.write("\3\2\2\2\u0755\u0757\5\u008eH\2\u0756\u0758\5\u0106\u0084")
buf.write("\2\u0757\u0756\3\2\2\2\u0757\u0758\3\2\2\2\u0758\u0764")
buf.write("\3\2\2\2\u0759\u075b\5\u00d6l\2\u075a\u0759\3\2\2\2\u075a")
buf.write("\u075b\3\2\2\2\u075b\u075c\3\2\2\2\u075c\u075e\5\u008e")
buf.write("H\2\u075d\u075f\5\u0106\u0084\2\u075e\u075d\3\2\2\2\u075e")
buf.write("\u075f\3\2\2\2\u075f\u0760\3\2\2\2\u0760\u0761\7f\2\2")
buf.write("\u0761\u0762\5\u011e\u0090\2\u0762\u0764\3\2\2\2\u0763")
buf.write("\u0745\3\2\2\2\u0763\u074b\3\2\2\2\u0763\u0753\3\2\2\2")
buf.write("\u0763\u075a\3\2\2\2\u0764\u0115\3\2\2\2\u0765\u0767\5")
buf.write("\u00d6l\2\u0766\u0765\3\2\2\2\u0766\u0767\3\2\2\2\u0767")
buf.write("\u0769\3\2\2\2\u0768\u076a\5\u008eH\2\u0769\u0768\3\2")
buf.write("\2\2\u0769\u076a\3\2\2\2\u076a\u076b\3\2\2\2\u076b\u076d")
buf.write("\5\u00f0y\2\u076c\u076e\5\u0138\u009d\2\u076d\u076c\3")
buf.write("\2\2\2\u076d\u076e\3\2\2\2\u076e\u076f\3\2\2\2\u076f\u0770")
buf.write("\5\u0118\u008d\2\u0770\u0117\3\2\2\2\u0771\u0773\5\u0150")
buf.write("\u00a9\2\u0772\u0771\3\2\2\2\u0772\u0773\3\2\2\2\u0773")
buf.write("\u0774\3\2\2\2\u0774\u077d\5h\65\2\u0775\u077d\5\u0176")
buf.write("\u00bc\2\u0776\u0777\7f\2\2\u0777\u0778\7\34\2\2\u0778")
buf.write("\u077d\7\u0083\2\2\u0779\u077a\7f\2\2\u077a\u077b\7\35")
buf.write("\2\2\u077b\u077d\7\u0083\2\2\u077c\u0772\3\2\2\2\u077c")
buf.write("\u0775\3\2\2\2\u077c\u0776\3\2\2\2\u077c\u0779\3\2\2\2")
buf.write("\u077d\u0119\3\2\2\2\u077e\u0784\5\u011c\u008f\2\u077f")
buf.write("\u0780\7V\2\2\u0780\u0781\5&\24\2\u0781\u0782\7W\2\2\u0782")
buf.write("\u0784\3\2\2\2\u0783\u077e\3\2\2\2\u0783\u077f\3\2\2\2")
buf.write("\u0784\u011b\3\2\2\2\u0785\u0786\7f\2\2\u0786\u0789\5")
buf.write("\u011e\u0090\2\u0787\u0789\5\u0122\u0092\2\u0788\u0785")
buf.write("\3\2\2\2\u0788\u0787\3\2\2\2\u0789\u011d\3\2\2\2\u078a")
buf.write("\u078d\5Z.\2\u078b\u078d\5\u0122\u0092\2\u078c\u078a\3")
buf.write("\2\2\2\u078c\u078b\3\2\2\2\u078d\u011f\3\2\2\2\u078e\u078f")
buf.write("\b\u0091\1\2\u078f\u0791\5\u011e\u0090\2\u0790\u0792\7")
buf.write("\u0086\2\2\u0791\u0790\3\2\2\2\u0791\u0792\3\2\2\2\u0792")
buf.write("\u079b\3\2\2\2\u0793\u0794\f\3\2\2\u0794\u0795\7}\2\2")
buf.write("\u0795\u0797\5\u011e\u0090\2\u0796\u0798\7\u0086\2\2\u0797")
buf.write("\u0796\3\2\2\2\u0797\u0798\3\2\2\2\u0798\u079a\3\2\2\2")
buf.write("\u0799\u0793\3\2\2\2\u079a\u079d\3\2\2\2\u079b\u0799\3")
buf.write("\2\2\2\u079b\u079c\3\2\2\2\u079c\u0121\3\2\2\2\u079d\u079b")
buf.write("\3\2\2\2\u079e\u079f\7Z\2\2\u079f\u07a1\5\u0120\u0091")
buf.write("\2\u07a0\u07a2\7}\2\2\u07a1\u07a0\3\2\2\2\u07a1\u07a2")
buf.write("\3\2\2\2\u07a2\u07a3\3\2\2\2\u07a3\u07a4\7[\2\2\u07a4")
buf.write("\u07a8\3\2\2\2\u07a5\u07a6\7Z\2\2\u07a6\u07a8\7[\2\2\u07a7")
buf.write("\u079e\3\2\2\2\u07a7\u07a5\3\2\2\2\u07a8\u0123\3\2\2\2")
buf.write("\u07a9\u07ac\7\u0087\2\2\u07aa\u07ac\5\u0164\u00b3\2\u07ab")
buf.write("\u07a9\3\2\2\2\u07ab\u07aa\3\2\2\2\u07ac\u0125\3\2\2\2")
buf.write("\u07ad\u07ae\5\u0128\u0095\2\u07ae\u07b0\7Z\2\2\u07af")
buf.write("\u07b1\5\u0130\u0099\2\u07b0\u07af\3\2\2\2\u07b0\u07b1")
buf.write("\3\2\2\2\u07b1\u07b2\3\2\2\2\u07b2\u07b3\7[\2\2\u07b3")
buf.write("\u0127\3\2\2\2\u07b4\u07b6\5\u012e\u0098\2\u07b5\u07b7")
buf.write("\5\u00d6l\2\u07b6\u07b5\3\2\2\2\u07b6\u07b7\3\2\2\2\u07b7")
buf.write("\u07b8\3\2\2\2\u07b8\u07ba\5\u012a\u0096\2\u07b9\u07bb")
buf.write("\5\u012c\u0097\2\u07ba\u07b9\3\2\2\2\u07ba\u07bb\3\2\2")
buf.write("\2\u07bb\u07bd\3\2\2\2\u07bc\u07be\5\u013e\u00a0\2\u07bd")
buf.write("\u07bc\3\2\2\2\u07bd\u07be\3\2\2\2\u07be\u07c7\3\2\2\2")
buf.write("\u07bf\u07c1\5\u012e\u0098\2\u07c0\u07c2\5\u00d6l\2\u07c1")
buf.write("\u07c0\3\2\2\2\u07c1\u07c2\3\2\2\2\u07c2\u07c4\3\2\2\2")
buf.write("\u07c3\u07c5\5\u013e\u00a0\2\u07c4\u07c3\3\2\2\2\u07c4")
buf.write("\u07c5\3\2\2\2\u07c5\u07c7\3\2\2\2\u07c6\u07b4\3\2\2\2")
buf.write("\u07c6\u07bf\3\2\2\2\u07c7\u0129\3\2\2\2\u07c8\u07ca\5")
buf.write("\f\7\2\u07c9\u07c8\3\2\2\2\u07c9\u07ca\3\2\2\2\u07ca\u07cb")
buf.write("\3\2\2\2\u07cb\u07cc\5\u0124\u0093\2\u07cc\u012b\3\2\2")
buf.write("\2\u07cd\u07ce\7\'\2\2\u07ce\u012d\3\2\2\2\u07cf\u07d0")
buf.write("\t\13\2\2\u07d0\u012f\3\2\2\2\u07d1\u07d3\5\u0132\u009a")
buf.write("\2\u07d2\u07d4\5\u0130\u0099\2\u07d3\u07d2\3\2\2\2\u07d3")
buf.write("\u07d4\3\2\2\2\u07d4\u07db\3\2\2\2\u07d5\u07d6\5\u0148")
buf.write("\u00a5\2\u07d6\u07d8\7\u0081\2\2\u07d7\u07d9\5\u0130\u0099")
buf.write("\2\u07d8\u07d7\3\2\2\2\u07d8\u07d9\3\2\2\2\u07d9\u07db")
buf.write("\3\2\2\2\u07da\u07d1\3\2\2\2\u07da\u07d5\3\2\2\2\u07db")
buf.write("\u0131\3\2\2\2\u07dc\u07de\5\u00d6l\2\u07dd\u07dc\3\2")
buf.write("\2\2\u07dd\u07de\3\2\2\2\u07de\u07e0\3\2\2\2\u07df\u07e1")
buf.write("\5\u008eH\2\u07e0\u07df\3\2\2\2\u07e0\u07e1\3\2\2\2\u07e1")
buf.write("\u07e3\3\2\2\2\u07e2\u07e4\5\u0134\u009b\2\u07e3\u07e2")
buf.write("\3\2\2\2\u07e3\u07e4\3\2\2\2\u07e4\u07e5\3\2\2\2\u07e5")
buf.write("\u07ed\7\u0083\2\2\u07e6\u07ed\5\u0116\u008c\2\u07e7\u07ed")
buf.write("\5\u00ceh\2\u07e8\u07ed\5\u0086D\2\u07e9\u07ed\5\u015c")
buf.write("\u00af\2\u07ea\u07ed\5\u0082B\2\u07eb\u07ed\5\u0088E\2")
buf.write("\u07ec\u07dd\3\2\2\2\u07ec\u07e6\3\2\2\2\u07ec\u07e7\3")
buf.write("\2\2\2\u07ec\u07e8\3\2\2\2\u07ec\u07e9\3\2\2\2\u07ec\u07ea")
buf.write("\3\2\2\2\u07ec\u07eb\3\2\2\2\u07ed\u0133\3\2\2\2\u07ee")
buf.write("\u07ef\b\u009b\1\2\u07ef\u07f0\5\u0136\u009c\2\u07f0\u07f6")
buf.write("\3\2\2\2\u07f1\u07f2\f\3\2\2\u07f2\u07f3\7}\2\2\u07f3")
buf.write("\u07f5\5\u0136\u009c\2\u07f4\u07f1\3\2\2\2\u07f5\u07f8")
buf.write("\3\2\2\2\u07f6\u07f4\3\2\2\2\u07f6\u07f7\3\2\2\2\u07f7")
buf.write("\u0135\3\2\2\2\u07f8\u07f6\3\2\2\2\u07f9\u07fb\5\u00f0")
buf.write("y\2\u07fa\u07fc\5\u0138\u009d\2\u07fb\u07fa\3\2\2\2\u07fb")
buf.write("\u07fc\3\2\2\2\u07fc\u07fe\3\2\2\2\u07fd\u07ff\5\u013c")
buf.write("\u009f\2\u07fe\u07fd\3\2\2\2\u07fe\u07ff\3\2\2\2\u07ff")
buf.write("\u080d\3\2\2\2\u0800\u0802\5\u00f0y\2\u0801\u0803\5\u011c")
buf.write("\u008f\2\u0802\u0801\3\2\2\2\u0802\u0803\3\2\2\2\u0803")
buf.write("\u080d\3\2\2\2\u0804\u0806\7\u0087\2\2\u0805\u0804\3\2")
buf.write("\2\2\u0805\u0806\3\2\2\2\u0806\u0808\3\2\2\2\u0807\u0809")
buf.write("\5\u00d6l\2\u0808\u0807\3\2\2\2\u0808\u0809\3\2\2\2\u0809")
buf.write("\u080a\3\2\2\2\u080a\u080b\7\u0081\2\2\u080b\u080d\5`")
buf.write("\61\2\u080c\u07f9\3\2\2\2\u080c\u0800\3\2\2\2\u080c\u0805")
buf.write("\3\2\2\2\u080d\u0137\3\2\2\2\u080e\u080f\b\u009d\1\2\u080f")
buf.write("\u0810\5\u013a\u009e\2\u0810\u0815\3\2\2\2\u0811\u0812")
buf.write("\f\3\2\2\u0812\u0814\5\u013a\u009e\2\u0813\u0811\3\2\2")
buf.write("\2\u0814\u0817\3\2\2\2\u0815\u0813\3\2\2\2\u0815\u0816")
buf.write("\3\2\2\2\u0816\u0139\3\2\2\2\u0817\u0815\3\2\2\2\u0818")
buf.write("\u0819\t\f\2\2\u0819\u013b\3\2\2\2\u081a\u081b\7f\2\2")
buf.write("\u081b\u081c\7\u008a\2\2\u081c\u081d\b\u009f\1\2\u081d")
buf.write("\u013d\3\2\2\2\u081e\u081f\7\u0081\2\2\u081f\u0820\5\u0140")
buf.write("\u00a1\2\u0820\u013f\3\2\2\2\u0821\u0822\b\u00a1\1\2\u0822")
buf.write("\u0824\5\u0142\u00a2\2\u0823\u0825\7\u0086\2\2\u0824\u0823")
buf.write("\3\2\2\2\u0824\u0825\3\2\2\2\u0825\u082e\3\2\2\2\u0826")
buf.write("\u0827\f\3\2\2\u0827\u0828\7}\2\2\u0828\u082a\5\u0142")
buf.write("\u00a2\2\u0829\u082b\7\u0086\2\2\u082a\u0829\3\2\2\2\u082a")
buf.write("\u082b\3\2\2\2\u082b\u082d\3\2\2\2\u082c\u0826\3\2\2\2")
buf.write("\u082d\u0830\3\2\2\2\u082e\u082c\3\2\2\2\u082e\u082f\3")
buf.write("\2\2\2\u082f\u0141\3\2\2\2\u0830\u082e\3\2\2\2\u0831\u0833")
buf.write("\5\u00d6l\2\u0832\u0831\3\2\2\2\u0832\u0833\3\2\2\2\u0833")
buf.write("\u0834\3\2\2\2\u0834\u0847\5\u0146\u00a4\2\u0835\u0837")
buf.write("\5\u00d6l\2\u0836\u0835\3\2\2\2\u0836\u0837\3\2\2\2\u0837")
buf.write("\u0838\3\2\2\2\u0838\u083a\7Q\2\2\u0839\u083b\5\u0148")
buf.write("\u00a5\2\u083a\u0839\3\2\2\2\u083a\u083b\3\2\2\2\u083b")
buf.write("\u083c\3\2\2\2\u083c\u0847\5\u0146\u00a4\2\u083d\u083f")
buf.write("\5\u00d6l\2\u083e\u083d\3\2\2\2\u083e\u083f\3\2\2\2\u083f")
buf.write("\u0840\3\2\2\2\u0840\u0842\5\u0148\u00a5\2\u0841\u0843")
buf.write("\7Q\2\2\u0842\u0841\3\2\2\2\u0842\u0843\3\2\2\2\u0843")
buf.write("\u0844\3\2\2\2\u0844\u0845\5\u0146\u00a4\2\u0845\u0847")
buf.write("\3\2\2\2\u0846\u0832\3\2\2\2\u0846\u0836\3\2\2\2\u0846")
buf.write("\u083e\3\2\2\2\u0847\u0143\3\2\2\2\u0848\u084a\5\f\7\2")
buf.write("\u0849\u0848\3\2\2\2\u0849\u084a\3\2\2\2\u084a\u084b\3")
buf.write("\2\2\2\u084b\u084e\5\u0124\u0093\2\u084c\u084e\5\u00a2")
buf.write("R\2\u084d\u0849\3\2\2\2\u084d\u084c\3\2\2\2\u084e\u0145")
buf.write("\3\2\2\2\u084f\u0850\5\u0144\u00a3\2\u0850\u0147\3\2\2")
buf.write("\2\u0851\u0852\t\r\2\2\u0852\u0149\3\2\2\2\u0853\u0854")
buf.write("\7\65\2\2\u0854\u0855\5\u014c\u00a7\2\u0855\u014b\3\2")
buf.write("\2\2\u0856\u0858\5\u009aN\2\u0857\u0859\5\u014e\u00a8")
buf.write("\2\u0858\u0857\3\2\2\2\u0858\u0859\3\2\2\2\u0859\u014d")
buf.write("\3\2\2\2\u085a\u085c\5\u00fa~\2\u085b\u085d\5\u014e\u00a8")
buf.write("\2\u085c\u085b\3\2\2\2\u085c\u085d\3\2\2\2\u085d\u014f")
buf.write("\3\2\2\2\u085e\u085f\7\u0081\2\2\u085f\u0860\5\u0152\u00aa")
buf.write("\2\u0860\u0151\3\2\2\2\u0861\u0863\5\u0154\u00ab\2\u0862")
buf.write("\u0864\7\u0086\2\2\u0863\u0862\3\2\2\2\u0863\u0864\3\2")
buf.write("\2\2\u0864\u086d\3\2\2\2\u0865\u0867\5\u0154\u00ab\2\u0866")
buf.write("\u0868\7\u0086\2\2\u0867\u0866\3\2\2\2\u0867\u0868\3\2")
buf.write("\2\2\u0868\u0869\3\2\2\2\u0869\u086a\7}\2\2\u086a\u086b")
buf.write("\5\u0152\u00aa\2\u086b\u086d\3\2\2\2\u086c\u0861\3\2\2")
buf.write("\2\u086c\u0865\3\2\2\2\u086d\u0153\3\2\2\2\u086e\u086f")
buf.write("\5\u0156\u00ac\2\u086f\u0871\7V\2\2\u0870\u0872\5&\24")
buf.write("\2\u0871\u0870\3\2\2\2\u0871\u0872\3\2\2\2\u0872\u0873")
buf.write("\3\2\2\2\u0873\u0874\7W\2\2\u0874\u0879\3\2\2\2\u0875")
buf.write("\u0876\5\u0156\u00ac\2\u0876\u0877\5\u0122\u0092\2\u0877")
buf.write("\u0879\3\2\2\2\u0878\u086e\3\2\2\2\u0878\u0875\3\2\2\2")
buf.write("\u0879\u0155\3\2\2\2\u087a\u087d\5\u0144\u00a3\2\u087b")
buf.write("\u087d\7\u0087\2\2\u087c\u087a\3\2\2\2\u087c\u087b\3\2")
buf.write("\2\2\u087d\u0157\3\2\2\2\u087e\u087f\7\65\2\2\u087f\u0880")
buf.write("\5\u0188\u00c5\2\u0880\u0159\3\2\2\2\u0881\u0882\7\65")
buf.write("\2\2\u0882\u0883\7\u0090\2\2\u0883\u0887\7\u0087\2\2\u0884")
buf.write("\u0885\7\65\2\2\u0885\u0887\7\u0093\2\2\u0886\u0881\3")
buf.write("\2\2\2\u0886\u0884\3\2\2\2\u0887\u015b\3\2\2\2\u0888\u0889")
buf.write("\7E\2\2\u0889\u088a\7g\2\2\u088a\u088b\5\u015e\u00b0\2")
buf.write("\u088b\u088c\7h\2\2\u088c\u088d\5~@\2\u088d\u015d\3\2")
buf.write("\2\2\u088e\u088f\b\u00b0\1\2\u088f\u0890\5\u0160\u00b1")
buf.write("\2\u0890\u0896\3\2\2\2\u0891\u0892\f\3\2\2\u0892\u0893")
buf.write("\7}\2\2\u0893\u0895\5\u0160\u00b1\2\u0894\u0891\3\2\2")
buf.write("\2\u0895\u0898\3\2\2\2\u0896\u0894\3\2\2\2\u0896\u0897")
buf.write("\3\2\2\2\u0897\u015f\3\2\2\2\u0898\u0896\3\2\2\2\u0899")
buf.write("\u089c\5\u0162\u00b2\2\u089a\u089c\5\u0114\u008b\2\u089b")
buf.write("\u0899\3\2\2\2\u089b\u089a\3\2\2\2\u089c\u0161\3\2\2\2")
buf.write("\u089d\u089f\7\26\2\2\u089e\u08a0\7\u0086\2\2\u089f\u089e")
buf.write("\3\2\2\2\u089f\u08a0\3\2\2\2\u08a0\u08a2\3\2\2\2\u08a1")
buf.write("\u08a3\7\u0087\2\2\u08a2\u08a1\3\2\2\2\u08a2\u08a3\3\2")
buf.write("\2\2\u08a3\u08ce\3\2\2\2\u08a4\u08a6\7\26\2\2\u08a5\u08a7")
buf.write("\7\u0087\2\2\u08a6\u08a5\3\2\2\2\u08a6\u08a7\3\2\2\2\u08a7")
buf.write("\u08a8\3\2\2\2\u08a8\u08a9\7f\2\2\u08a9\u08ce\5\u0104")
buf.write("\u0083\2\u08aa\u08ac\7M\2\2\u08ab\u08ad\7\u0086\2\2\u08ac")
buf.write("\u08ab\3\2\2\2\u08ac\u08ad\3\2\2\2\u08ad\u08af\3\2\2\2")
buf.write("\u08ae\u08b0\7\u0087\2\2\u08af\u08ae\3\2\2\2\u08af\u08b0")
buf.write("\3\2\2\2\u08b0\u08ce\3\2\2\2\u08b1\u08b3\7M\2\2\u08b2")
buf.write("\u08b4\7\u0087\2\2\u08b3\u08b2\3\2\2\2\u08b3\u08b4\3\2")
buf.write("\2\2\u08b4\u08b5\3\2\2\2\u08b5\u08b6\7f\2\2\u08b6\u08ce")
buf.write("\5\u0104\u0083\2\u08b7\u08b8\7E\2\2\u08b8\u08b9\7g\2\2")
buf.write("\u08b9\u08ba\5\u015e\u00b0\2\u08ba\u08bb\7h\2\2\u08bb")
buf.write("\u08bd\7\26\2\2\u08bc\u08be\7\u0086\2\2\u08bd\u08bc\3")
buf.write("\2\2\2\u08bd\u08be\3\2\2\2\u08be\u08c0\3\2\2\2\u08bf\u08c1")
buf.write("\7\u0087\2\2\u08c0\u08bf\3\2\2\2\u08c0\u08c1\3\2\2\2\u08c1")
buf.write("\u08ce\3\2\2\2\u08c2\u08c3\7E\2\2\u08c3\u08c4\7g\2\2\u08c4")
buf.write("\u08c5\5\u015e\u00b0\2\u08c5\u08c6\7h\2\2\u08c6\u08c8")
buf.write("\7\26\2\2\u08c7\u08c9\7\u0087\2\2\u08c8\u08c7\3\2\2\2")
buf.write("\u08c8\u08c9\3\2\2\2\u08c9\u08ca\3\2\2\2\u08ca\u08cb\7")
buf.write("f\2\2\u08cb\u08cc\5\6\4\2\u08cc\u08ce\3\2\2\2\u08cd\u089d")
buf.write("\3\2\2\2\u08cd\u08a4\3\2\2\2\u08cd\u08aa\3\2\2\2\u08cd")
buf.write("\u08b1\3\2\2\2\u08cd\u08b7\3\2\2\2\u08cd\u08c2\3\2\2\2")
buf.write("\u08ce\u0163\3\2\2\2\u08cf\u08d0\5\u0168\u00b5\2\u08d0")
buf.write("\u08d2\7g\2\2\u08d1\u08d3\5\u016a\u00b6\2\u08d2\u08d1")
buf.write("\3\2\2\2\u08d2\u08d3\3\2\2\2\u08d3\u08d4\3\2\2\2\u08d4")
buf.write("\u08d5\7h\2\2\u08d5\u0165\3\2\2\2\u08d6\u08e6\5\u0164")
buf.write("\u00b3\2\u08d7\u08d8\5\u0158\u00ad\2\u08d8\u08da\7g\2")
buf.write("\2\u08d9\u08db\5\u016a\u00b6\2\u08da\u08d9\3\2\2\2\u08da")
buf.write("\u08db\3\2\2\2\u08db\u08dc\3\2\2\2\u08dc\u08dd\7h\2\2")
buf.write("\u08dd\u08e6\3\2\2\2\u08de\u08df\5\u015a\u00ae\2\u08df")
buf.write("\u08e1\7g\2\2\u08e0\u08e2\5\u016a\u00b6\2\u08e1\u08e0")
buf.write("\3\2\2\2\u08e1\u08e2\3\2\2\2\u08e2\u08e3\3\2\2\2\u08e3")
buf.write("\u08e4\7h\2\2\u08e4\u08e6\3\2\2\2\u08e5\u08d6\3\2\2\2")
buf.write("\u08e5\u08d7\3\2\2\2\u08e5\u08de\3\2\2\2\u08e6\u0167\3")
buf.write("\2\2\2\u08e7\u08e8\7\u0087\2\2\u08e8\u0169\3\2\2\2\u08e9")
buf.write("\u08ea\b\u00b6\1\2\u08ea\u08ec\5\u016c\u00b7\2\u08eb\u08ed")
buf.write("\7\u0086\2\2\u08ec\u08eb\3\2\2\2\u08ec\u08ed\3\2\2\2\u08ed")
buf.write("\u08f6\3\2\2\2\u08ee\u08ef\f\3\2\2\u08ef\u08f0\7}\2\2")
buf.write("\u08f0\u08f2\5\u016c\u00b7\2\u08f1\u08f3\7\u0086\2\2\u08f2")
buf.write("\u08f1\3\2\2\2\u08f2\u08f3\3\2\2\2\u08f3\u08f5\3\2\2\2")
buf.write("\u08f4\u08ee\3\2\2\2\u08f5\u08f8\3\2\2\2\u08f6\u08f4\3")
buf.write("\2\2\2\u08f6\u08f7\3\2\2\2\u08f7\u016b\3\2\2\2\u08f8\u08f6")
buf.write("\3\2\2\2\u08f9\u08fd\5\u0104\u0083\2\u08fa\u08fd\5`\61")
buf.write("\2\u08fb\u08fd\5\6\4\2\u08fc\u08f9\3\2\2\2\u08fc\u08fa")
buf.write("\3\2\2\2\u08fc\u08fb\3\2\2\2\u08fd\u016d\3\2\2\2\u08fe")
buf.write("\u08ff\7M\2\2\u08ff\u0900\5\f\7\2\u0900\u0901\7\u0087")
buf.write("\2\2\u0901\u090a\3\2\2\2\u0902\u0903\7M\2\2\u0903\u0905")
buf.write("\5\f\7\2\u0904\u0906\7E\2\2\u0905\u0904\3\2\2\2\u0905")
buf.write("\u0906\3\2\2\2\u0906\u0907\3\2\2\2\u0907\u0908\5\u0164")
buf.write("\u00b3\2\u0908\u090a\3\2\2\2\u0909\u08fe\3\2\2\2\u0909")
buf.write("\u0902\3\2\2\2\u090a\u016f\3\2\2\2\u090b\u090d\7%\2\2")
buf.write("\u090c\u090b\3\2\2\2\u090c\u090d\3\2\2\2\u090d\u090e\3")
buf.write("\2\2\2\u090e\u090f\7E\2\2\u090f\u0910\5~@\2\u0910\u0171")
buf.write("\3\2\2\2\u0911\u0912\7E\2\2\u0912\u0913\7g\2\2\u0913\u0914")
buf.write("\7h\2\2\u0914\u0915\5~@\2\u0915\u0173\3\2\2\2\u0916\u0917")
buf.write("\7J\2\2\u0917\u0918\5h\65\2\u0918\u0919\5\u0178\u00bd")
buf.write("\2\u0919\u0175\3\2\2\2\u091a\u091c\7J\2\2\u091b\u091d")
buf.write("\5\u0150\u00a9\2\u091c\u091b\3\2\2\2\u091c\u091d\3\2\2")
buf.write("\2\u091d\u091e\3\2\2\2\u091e\u091f\5h\65\2\u091f\u0920")
buf.write("\5\u0178\u00bd\2\u0920\u0177\3\2\2\2\u0921\u0923\5\u017a")
buf.write("\u00be\2\u0922\u0924\5\u0178\u00bd\2\u0923\u0922\3\2\2")
buf.write("\2\u0923\u0924\3\2\2\2\u0924\u0179\3\2\2\2\u0925\u0926")
buf.write("\7\22\2\2\u0926\u0927\7V\2\2\u0927\u0928\5\u017c\u00bf")
buf.write("\2\u0928\u0929\7W\2\2\u0929\u092a\5h\65\2\u092a\u017b")
buf.write("\3\2\2\2\u092b\u092d\5\u00d6l\2\u092c\u092b\3\2\2\2\u092c")
buf.write("\u092d\3\2\2\2\u092d\u092e\3\2\2\2\u092e\u092f\5\u009a")
buf.write("N\2\u092f\u0930\5\u00f0y\2\u0930\u093a\3\2\2\2\u0931\u0933")
buf.write("\5\u00d6l\2\u0932\u0931\3\2\2\2\u0932\u0933\3\2\2\2\u0933")
buf.write("\u0934\3\2\2\2\u0934\u0936\5\u009aN\2\u0935\u0937\5\u0106")
buf.write("\u0084\2\u0936\u0935\3\2\2\2\u0936\u0937\3\2\2\2\u0937")
buf.write("\u093a\3\2\2\2\u0938\u093a\7\u0086\2\2\u0939\u092c\3\2")
buf.write("\2\2\u0939\u0932\3\2\2\2\u0939\u0938\3\2\2\2\u093a\u017d")
buf.write("\3\2\2\2\u093b\u093d\7H\2\2\u093c\u093e\5Z.\2\u093d\u093c")
buf.write("\3\2\2\2\u093d\u093e\3\2\2\2\u093e\u017f\3\2\2\2\u093f")
buf.write("\u0942\5\u0182\u00c2\2\u0940\u0942\5\u0186\u00c4\2\u0941")
buf.write("\u093f\3\2\2\2\u0941\u0940\3\2\2\2\u0942\u0181\3\2\2\2")
buf.write("\u0943\u0944\7H\2\2\u0944\u0946\7V\2\2\u0945\u0947\5\u0184")
buf.write("\u00c3\2\u0946\u0945\3\2\2\2\u0946\u0947\3\2\2\2\u0947")
buf.write("\u0948\3\2\2\2\u0948\u0949\7W\2\2\u0949\u0183\3\2\2\2")
buf.write("\u094a\u094b\b\u00c3\1\2\u094b\u094d\5\u0104\u0083\2\u094c")
buf.write("\u094e\7\u0086\2\2\u094d\u094c\3\2\2\2\u094d\u094e\3\2")
buf.write("\2\2\u094e\u0957\3\2\2\2\u094f\u0950\f\3\2\2\u0950\u0951")
buf.write("\7}\2\2\u0951\u0953\5\u0104\u0083\2\u0952\u0954\7\u0086")
buf.write("\2\2\u0953\u0952\3\2\2\2\u0953\u0954\3\2\2\2\u0954\u0956")
buf.write("\3\2\2\2\u0955\u094f\3\2\2\2\u0956\u0959\3\2\2\2\u0957")
buf.write("\u0955\3\2\2\2\u0957\u0958\3\2\2\2\u0958\u0185\3\2\2\2")
buf.write("\u0959\u0957\3\2\2\2\u095a\u095b\7\63\2\2\u095b\u095c")
buf.write("\7V\2\2\u095c\u095d\5`\61\2\u095d\u095e\7W\2\2\u095e\u0961")
buf.write("\3\2\2\2\u095f\u0961\7\63\2\2\u0960\u095a\3\2\2\2\u0960")
buf.write("\u095f\3\2\2\2\u0961\u0187\3\2\2\2\u0962\u0996\7\62\2")
buf.write("\2\u0963\u0996\7\35\2\2\u0964\u0965\7\62\2\2\u0965\u0966")
buf.write("\7X\2\2\u0966\u0996\7Y\2\2\u0967\u0968\7\35\2\2\u0968")
buf.write("\u0969\7X\2\2\u0969\u0996\7Y\2\2\u096a\u0996\7\\\2\2\u096b")
buf.write("\u0996\7]\2\2\u096c\u0996\7^\2\2\u096d\u0996\7_\2\2\u096e")
buf.write("\u0996\7`\2\2\u096f\u0996\7a\2\2\u0970\u0996\7b\2\2\u0971")
buf.write("\u0996\7c\2\2\u0972\u0996\7d\2\2\u0973\u0996\7\3\2\2\u0974")
buf.write("\u0996\7\4\2\2\u0975\u0996\7f\2\2\u0976\u0996\7g\2\2\u0977")
buf.write("\u0996\7h\2\2\u0978\u0996\7i\2\2\u0979\u0996\7j\2\2\u097a")
buf.write("\u0996\7k\2\2\u097b\u0996\7l\2\2\u097c\u0996\7m\2\2\u097d")
buf.write("\u0996\7n\2\2\u097e\u0996\7o\2\2\u097f\u0996\7p\2\2\u0980")
buf.write("\u0996\7q\2\2\u0981\u0996\7r\2\2\u0982\u0996\7t\2\2\u0983")
buf.write("\u0996\7s\2\2\u0984\u0996\7u\2\2\u0985\u0996\7v\2\2\u0986")
buf.write("\u0996\7w\2\2\u0987\u0996\7x\2\2\u0988\u0996\7\5\2\2\u0989")
buf.write("\u0996\7\6\2\2\u098a\u0996\7\7\2\2\u098b\u0996\7\b\2\2")
buf.write("\u098c\u0996\7{\2\2\u098d\u0996\7|\2\2\u098e\u0996\7}")
buf.write("\2\2\u098f\u0996\7~\2\2\u0990\u0996\7\177\2\2\u0991\u0992")
buf.write("\7V\2\2\u0992\u0996\7W\2\2\u0993\u0994\7X\2\2\u0994\u0996")
buf.write("\7Y\2\2\u0995\u0962\3\2\2\2\u0995\u0963\3\2\2\2\u0995")
buf.write("\u0964\3\2\2\2\u0995\u0967\3\2\2\2\u0995\u096a\3\2\2\2")
buf.write("\u0995\u096b\3\2\2\2\u0995\u096c\3\2\2\2\u0995\u096d\3")
buf.write("\2\2\2\u0995\u096e\3\2\2\2\u0995\u096f\3\2\2\2\u0995\u0970")
buf.write("\3\2\2\2\u0995\u0971\3\2\2\2\u0995\u0972\3\2\2\2\u0995")
buf.write("\u0973\3\2\2\2\u0995\u0974\3\2\2\2\u0995\u0975\3\2\2\2")
buf.write("\u0995\u0976\3\2\2\2\u0995\u0977\3\2\2\2\u0995\u0978\3")
buf.write("\2\2\2\u0995\u0979\3\2\2\2\u0995\u097a\3\2\2\2\u0995\u097b")
buf.write("\3\2\2\2\u0995\u097c\3\2\2\2\u0995\u097d\3\2\2\2\u0995")
buf.write("\u097e\3\2\2\2\u0995\u097f\3\2\2\2\u0995\u0980\3\2\2\2")
buf.write("\u0995\u0981\3\2\2\2\u0995\u0982\3\2\2\2\u0995\u0983\3")
buf.write("\2\2\2\u0995\u0984\3\2\2\2\u0995\u0985\3\2\2\2\u0995\u0986")
buf.write("\3\2\2\2\u0995\u0987\3\2\2\2\u0995\u0988\3\2\2\2\u0995")
buf.write("\u0989\3\2\2\2\u0995\u098a\3\2\2\2\u0995\u098b\3\2\2\2")
buf.write("\u0995\u098c\3\2\2\2\u0995\u098d\3\2\2\2\u0995\u098e\3")
buf.write("\2\2\2\u0995\u098f\3\2\2\2\u0995\u0990\3\2\2\2\u0995\u0991")
buf.write("\3\2\2\2\u0995\u0993\3\2\2\2\u0996\u0189\3\2\2\2\u0997")
buf.write("\u099f\7\u0088\2\2\u0998\u099f\7\u008e\2\2\u0999\u099f")
buf.write("\7\u008f\2\2\u099a\u099f\7\u0090\2\2\u099b\u099f\5\u018c")
buf.write("\u00c7\2\u099c\u099f\5\u018e\u00c8\2\u099d\u099f\5\u0190")
buf.write("\u00c9\2\u099e\u0997\3\2\2\2\u099e\u0998\3\2\2\2\u099e")
buf.write("\u0999\3\2\2\2\u099e\u099a\3\2\2\2\u099e\u099b\3\2\2\2")
buf.write("\u099e\u099c\3\2\2\2\u099e\u099d\3\2\2\2\u099f\u018b\3")
buf.write("\2\2\2\u09a0\u09a1\t\16\2\2\u09a1\u018d\3\2\2\2\u09a2")
buf.write("\u09a3\7\64\2\2\u09a3\u018f\3\2\2\2\u09a4\u09a5\t\17\2")
buf.write("\2\u09a5\u0191\3\2\2\2\u013e\u0193\u019f\u01a3\u01ae\u01b2")
buf.write("\u01c1\u01c8\u01cd\u01cf\u01d4\u01da\u01e4\u01eb\u01f1")
buf.write("\u01f5\u01fa\u0200\u0207\u020d\u0210\u0213\u0216\u021d")
buf.write("\u0224\u0258\u0267\u026d\u0273\u0280\u0282\u028c\u029b")
buf.write("\u02a1\u02bf\u02c4\u02c8\u02cc\u02cf\u02d3\u02d9\u02db")
buf.write("\u02e3\u02e7\u02ea\u02f1\u02f8\u02fc\u0301\u0305\u0308")
buf.write("\u030d\u0313\u0320\u032b\u032d\u033c\u033e\u034a\u034c")
buf.write("\u0358\u036c\u036e\u037a\u037c\u0387\u0392\u039d\u03a9")
buf.write("\u03ab\u03b7\u03b9\u03c3\u03cb\u03d7\u03de\u03e2\u03e6")
buf.write("\u03ea\u03ee\u03f3\u03f6\u03f9\u03ff\u0407\u040c\u040f")
buf.write("\u0415\u0420\u0437\u043b\u0443\u0449\u045d\u0461\u046e")
buf.write("\u0472\u0475\u047c\u0484\u048e\u0499\u04a5\u04af\u04b4")
buf.write("\u04bb\u04be\u04c3\u04c8\u04dd\u04e1\u04e6\u04f1\u04f7")
buf.write("\u04fb\u0500\u0504\u0509\u050c\u0522\u0528\u0533\u0537")
buf.write("\u053a\u0544\u054a\u054d\u0554\u055e\u0562\u0565\u0568")
buf.write("\u056c\u0571\u0573\u0577\u057b\u0584\u0591\u0599\u059f")
buf.write("\u05a5\u05a9\u05ac\u05b5\u05be\u05c6\u05d1\u05d7\u05e2")
buf.write("\u05e5\u05ea\u05f9\u05ff\u0608\u0612\u0618\u0620\u0624")
buf.write("\u0628\u062d\u0632\u0639\u063b\u0640\u0644\u0652\u0658")
buf.write("\u066a\u066c\u0676\u067b\u0682\u0688\u068d\u0693\u069a")
buf.write("\u069e\u06a0\u06a2\u06a9\u06ac\u06af\u06b2\u06b7\u06bb")
buf.write("\u06be\u06c2\u06c6\u06cb\u06ce\u06d0\u06d4\u06db\u06e1")
buf.write("\u06e5\u06eb\u06f0\u06f2\u06f8\u06fc\u0702\u0709\u070d")
buf.write("\u070f\u0711\u0718\u0722\u0726\u0728\u072a\u072e\u0731")
buf.write("\u0737\u0741\u0745\u074b\u0753\u0757\u075a\u075e\u0763")
buf.write("\u0766\u0769\u076d\u0772\u077c\u0783\u0788\u078c\u0791")
buf.write("\u0797\u079b\u07a1\u07a7\u07ab\u07b0\u07b6\u07ba\u07bd")
buf.write("\u07c1\u07c4\u07c6\u07c9\u07d3\u07d8\u07da\u07dd\u07e0")
buf.write("\u07e3\u07ec\u07f6\u07fb\u07fe\u0802\u0805\u0808\u080c")
buf.write("\u0815\u0824\u082a\u082e\u0832\u0836\u083a\u083e\u0842")
buf.write("\u0846\u0849\u084d\u0858\u085c\u0863\u0867\u086c\u0871")
buf.write("\u0878\u087c\u0886\u0896\u089b\u089f\u08a2\u08a6\u08ac")
buf.write("\u08af\u08b3\u08bd\u08c0\u08c8\u08cd\u08d2\u08da\u08e1")
buf.write("\u08e5\u08ec\u08f2\u08f6\u08fc\u0905\u0909\u090c\u091c")
buf.write("\u0923\u092c\u0932\u0936\u0939\u093d\u0941\u0946\u094d")
buf.write("\u0953\u0957\u0960\u0995\u099e")
return buf.getvalue()
class CPP14Parser ( Parser ):
# Name of the ANTLR grammar file this parser was generated from.
grammarFileName = "CPP14.g4"

# Deserialize the augmented transition network (ATN) from its serialized
# string form once, at class-definition time; all instances share it.
atn = ATNDeserializer().deserialize(serializedATN())

# One DFA per ATN decision point; shared across parser instances so that
# adaptive-prediction results are cached process-wide.
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

# Shared prediction-context cache for the ATN simulator.
sharedContextCache = PredictionContextCache()
# Display literal for each token type (index = token type number);
# "<INVALID>" marks token types with no single fixed literal text.
literalNames = [ "<INVALID>", "'!'", "'not'", "'&&'", "'and'", "'||'",
        "'or'", "<INVALID>", "<INVALID>", "'alignas'", "'alignof'",
        "'asm'", "'auto'", "'bool'", "'break'", "'case'", "'catch'",
        "'char'", "'char16_t'", "'char32_t'", "'class'", "'const'",
        "'constexpr'", "'const_cast'", "'continue'", "'decltype'",
        "'default'", "'delete'", "'do'", "'double'", "'dynamic_cast'",
        "'else'", "'enum'", "'explicit'", "'export'", "'extern'",
        "'false'", "'final'", "'float'", "'for'", "'friend'",
        "'goto'", "'if'", "'inline'", "'int'", "'long'", "'mutable'",
        "'namespace'", "'new'", "'noexcept'", "'nullptr'",
        "'operator'", "'override'", "'private'", "'protected'",
        "'public'", "'register'", "'reinterpret_cast'", "'return'",
        "'short'", "'signed'", "'sizeof'", "'static'", "'static_assert'",
        "'static_cast'", "'struct'", "'switch'", "'template'",
        "'this'", "'thread_local'", "'throw'", "'true'", "'try'",
        "'typedef'", "'typeid'", "'typename'", "'union'", "'unsigned'",
        "'using'", "'virtual'", "'void'", "'volatile'", "'wchar_t'",
        "'while'", "'('", "')'", "'['", "']'", "'{'", "'}'",
        "'+'", "'-'", "'*'", "'/'", "'%'", "'^'", "'&'", "'|'",
        "'~'", "<INVALID>", "'='", "'<'", "'>'", "'+='", "'-='",
        "'*='", "'/='", "'%='", "'^='", "'&='", "'|='", "'<<'",
        "'>>'", "'<<='", "'>>='", "'=='", "'!='", "'<='", "'>='",
        "<INVALID>", "<INVALID>", "'++'", "'--'", "','", "'->*'",
        "'->'", "'?'", "':'", "'::'", "';'", "'.'", "'.*'",
        "'...'" ]

# Symbolic (grammar) name for each token type, parallel to literalNames.
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
        "<INVALID>", "<INVALID>", "<INVALID>", "MultiLineMacro",
        "Directive", "Alignas", "Alignof", "Asm", "Auto",
        "Bool", "Break", "Case", "Catch", "Char", "Char16",
        "Char32", "Class", "Const", "Constexpr", "Const_cast",
        "Continue", "Decltype", "Default", "Delete", "Do",
        "Double", "Dynamic_cast", "Else", "Enum", "Explicit",
        "Export", "Extern", "BFalse", "Final", "Float", "For",
        "Friend", "Goto", "If", "Inline", "Int", "Long", "Mutable",
        "Namespace", "New", "Noexcept", "Nullptr", "Operator",
        "Override", "Private", "Protected", "Public", "Register",
        "Reinterpret_cast", "Return", "Short", "Signed", "Sizeof",
        "Static", "Static_assert", "Static_cast", "Struct",
        "Switch", "Template", "This", "Thread_local", "Throw",
        "BTrue", "Try", "Typedef", "Typeid_", "Typename_",
        "Union", "Unsigned", "Using", "Virtual", "Void", "Volatile",
        "Wchar", "While", "LeftParen", "RightParen", "LeftBracket",
        "RightBracket", "LeftBrace", "RightBrace", "Plus",
        "Minus", "Star", "Div", "Mod", "Caret", "And", "Or",
        "Tilde", "Not", "Assign", "Less", "Greater", "PlusAssign",
        "MinusAssign", "StarAssign", "DivAssign", "ModAssign",
        "XorAssign", "AndAssign", "OrAssign", "LeftShift",
        "RightShift", "LeftShiftAssign", "RightShiftAssign",
        "Equal", "NotEqual", "LessEqual", "GreaterEqual",
        "AndAnd", "OrOr", "PlusPlus", "MinusMinus", "Comma",
        "ArrowStar", "Arrow", "Question", "Colon", "Doublecolon",
        "Semi", "Dot", "DotStar", "Ellipsis", "Identifier",
        "Integerliteral", "Decimalliteral", "Octalliteral",
        "Hexadecimalliteral", "Binaryliteral", "Integersuffix",
        "Characterliteral", "Floatingliteral", "Stringliteral",
        "Userdefinedintegerliteral", "Userdefinedfloatingliteral",
        "Userdefinedstringliteral", "Userdefinedcharacterliteral",
        "Whitespace", "Newline", "BlockComment", "LineComment" ]
# Rule index constants: one per grammar rule, in declaration order.
# These are the indices passed to enterRule()/getRuleIndex() and used to
# index ruleNames below.
RULE_translationunit = 0
RULE_primaryexpression = 1
RULE_idexpression = 2
RULE_unqualifiedid = 3
RULE_qualifiedid = 4
RULE_nestednamespecifier = 5
RULE_lambdaexpression = 6
RULE_lambdaintroducer = 7
RULE_lambdacapture = 8
RULE_capturedefault = 9
RULE_capturelist = 10
RULE_capture = 11
RULE_simplecapture = 12
RULE_initcapture = 13
RULE_lambdadeclarator = 14
RULE_postfixexpression = 15
RULE_typeidofexpr = 16
RULE_typeidofthetypeid = 17
RULE_expressionlist = 18
RULE_pseudodestructorname = 19
RULE_unaryexpression = 20
RULE_unaryoperator = 21
RULE_newexpression = 22
RULE_newplacement = 23
RULE_newtypeid = 24
RULE_newdeclarator = 25
RULE_noptrnewdeclarator = 26
RULE_newinitializer = 27
RULE_deleteexpression = 28
RULE_noexceptexpression = 29
RULE_castexpression = 30
RULE_pmexpression = 31
RULE_multiplicativeexpression = 32
RULE_additiveexpression = 33
RULE_shiftexpression = 34
RULE_shiftoperator = 35
RULE_relationalexpression = 36
RULE_equalityexpression = 37
RULE_andexpression = 38
RULE_exclusiveorexpression = 39
RULE_inclusiveorexpression = 40
RULE_logicalandexpression = 41
RULE_logicalorexpression = 42
RULE_conditionalexpression = 43
RULE_assignmentexpression = 44
RULE_assignmentoperator = 45
RULE_expression = 46
RULE_constantexpression = 47
RULE_statement = 48
RULE_labeledstatement = 49
RULE_expressionstatement = 50
RULE_compoundstatement = 51
RULE_statementseq = 52
RULE_selectionstatement = 53
RULE_condition = 54
RULE_iterationstatement = 55
RULE_forinitstatement = 56
RULE_forrangedeclaration = 57
RULE_forrangeinitializer = 58
RULE_jumpstatement = 59
RULE_declarationstatement = 60
RULE_declarationseq = 61
RULE_declaration = 62
RULE_blockdeclaration = 63
RULE_aliasdeclaration = 64
RULE_simpledeclaration = 65
RULE_static_assertdeclaration = 66
RULE_emptydeclaration = 67
RULE_attributedeclaration = 68
RULE_declspecifier = 69
RULE_declspecifierseq = 70
RULE_storageclassspecifier = 71
RULE_functionspecifier = 72
RULE_typedefname = 73
RULE_typespecifier = 74
RULE_trailingtypespecifier = 75
RULE_typespecifierseq = 76
RULE_trailingtypespecifierseq = 77
RULE_simpletypespecifier = 78
RULE_thetypename = 79
RULE_decltypespecifier = 80
RULE_elaboratedtypespecifier = 81
RULE_enumname = 82
RULE_enumspecifier = 83
RULE_enumhead = 84
RULE_opaqueenumdeclaration = 85
RULE_enumkey = 86
RULE_enumbase = 87
RULE_enumeratorlist = 88
RULE_enumeratordefinition = 89
RULE_enumerator = 90
RULE_namespacename = 91
RULE_originalnamespacename = 92
RULE_namespacedefinition = 93
RULE_namednamespacedefinition = 94
RULE_originalnamespacedefinition = 95
RULE_extensionnamespacedefinition = 96
RULE_unnamednamespacedefinition = 97
RULE_namespacebody = 98
RULE_namespacealias = 99
RULE_namespacealiasdefinition = 100
RULE_qualifiednamespacespecifier = 101
RULE_usingdeclaration = 102
RULE_usingdirective = 103
RULE_asmdefinition = 104
RULE_linkagespecification = 105
RULE_attributespecifierseq = 106
RULE_attributespecifier = 107
RULE_alignmentspecifier = 108
RULE_attributelist = 109
RULE_attribute = 110
RULE_attributetoken = 111
RULE_attributescopedtoken = 112
RULE_attributenamespace = 113
RULE_attributeargumentclause = 114
RULE_balancedtokenseq = 115
RULE_balancedtoken = 116
RULE_initdeclaratorlist = 117
RULE_initdeclarator = 118
RULE_declarator = 119
RULE_ptrdeclarator = 120
RULE_noptrdeclarator = 121
RULE_parametersandqualifiers = 122
RULE_trailingreturntype = 123
RULE_ptroperator = 124
RULE_cvqualifierseq = 125
RULE_cvqualifier = 126
RULE_refqualifier = 127
RULE_declaratorid = 128
RULE_thetypeid = 129
RULE_abstractdeclarator = 130
RULE_ptrabstractdeclarator = 131
RULE_noptrabstractdeclarator = 132
RULE_abstractpackdeclarator = 133
RULE_noptrabstractpackdeclarator = 134
RULE_parameterdeclarationclause = 135
RULE_parameterdeclarationlist = 136
RULE_parameterdeclaration = 137
RULE_functiondefinition = 138
RULE_functionbody = 139
RULE_initializer = 140
RULE_braceorequalinitializer = 141
RULE_initializerclause = 142
RULE_initializerlist = 143
RULE_bracedinitlist = 144
RULE_classname = 145
RULE_classspecifier = 146
RULE_classhead = 147
RULE_classheadname = 148
RULE_classvirtspecifier = 149
RULE_classkey = 150
RULE_memberspecification = 151
RULE_memberdeclaration = 152
RULE_memberdeclaratorlist = 153
RULE_memberdeclarator = 154
RULE_virtspecifierseq = 155
RULE_virtspecifier = 156
RULE_purespecifier = 157
RULE_baseclause = 158
RULE_basespecifierlist = 159
RULE_basespecifier = 160
RULE_classordecltype = 161
RULE_basetypespecifier = 162
RULE_accessspecifier = 163
RULE_conversionfunctionid = 164
RULE_conversiontypeid = 165
RULE_conversiondeclarator = 166
RULE_ctorinitializer = 167
RULE_meminitializerlist = 168
RULE_meminitializer = 169
RULE_meminitializerid = 170
RULE_operatorfunctionid = 171
RULE_literaloperatorid = 172
RULE_templatedeclaration = 173
RULE_templateparameterlist = 174
RULE_templateparameter = 175
RULE_typeparameter = 176
RULE_simpletemplateid = 177
RULE_templateid = 178
RULE_templatename = 179
RULE_templateargumentlist = 180
RULE_templateargument = 181
RULE_typenamespecifier = 182
RULE_explicitinstantiation = 183
RULE_explicitspecialization = 184
RULE_tryblock = 185
RULE_functiontryblock = 186
RULE_handlerseq = 187
RULE_handler = 188
RULE_exceptiondeclaration = 189
RULE_throwexpression = 190
RULE_exceptionspecification = 191
RULE_dynamicexceptionspecification = 192
RULE_typeidlist = 193
RULE_noexceptspecification = 194
RULE_theoperator = 195
RULE_literal = 196
RULE_booleanliteral = 197
RULE_pointerliteral = 198
RULE_userdefinedliteral = 199
# Grammar rule names, indexed by the RULE_* constants above.
ruleNames = [ "translationunit", "primaryexpression", "idexpression",
        "unqualifiedid", "qualifiedid", "nestednamespecifier",
        "lambdaexpression", "lambdaintroducer", "lambdacapture",
        "capturedefault", "capturelist", "capture", "simplecapture",
        "initcapture", "lambdadeclarator", "postfixexpression",
        "typeidofexpr", "typeidofthetypeid", "expressionlist",
        "pseudodestructorname", "unaryexpression", "unaryoperator",
        "newexpression", "newplacement", "newtypeid", "newdeclarator",
        "noptrnewdeclarator", "newinitializer", "deleteexpression",
        "noexceptexpression", "castexpression", "pmexpression",
        "multiplicativeexpression", "additiveexpression", "shiftexpression",
        "shiftoperator", "relationalexpression", "equalityexpression",
        "andexpression", "exclusiveorexpression", "inclusiveorexpression",
        "logicalandexpression", "logicalorexpression", "conditionalexpression",
        "assignmentexpression", "assignmentoperator", "expression",
        "constantexpression", "statement", "labeledstatement",
        "expressionstatement", "compoundstatement", "statementseq",
        "selectionstatement", "condition", "iterationstatement",
        "forinitstatement", "forrangedeclaration", "forrangeinitializer",
        "jumpstatement", "declarationstatement", "declarationseq",
        "declaration", "blockdeclaration", "aliasdeclaration",
        "simpledeclaration", "static_assertdeclaration", "emptydeclaration",
        "attributedeclaration", "declspecifier", "declspecifierseq",
        "storageclassspecifier", "functionspecifier", "typedefname",
        "typespecifier", "trailingtypespecifier", "typespecifierseq",
        "trailingtypespecifierseq", "simpletypespecifier", "thetypename",
        "decltypespecifier", "elaboratedtypespecifier", "enumname",
        "enumspecifier", "enumhead", "opaqueenumdeclaration",
        "enumkey", "enumbase", "enumeratorlist", "enumeratordefinition",
        "enumerator", "namespacename", "originalnamespacename",
        "namespacedefinition", "namednamespacedefinition", "originalnamespacedefinition",
        "extensionnamespacedefinition", "unnamednamespacedefinition",
        "namespacebody", "namespacealias", "namespacealiasdefinition",
        "qualifiednamespacespecifier", "usingdeclaration", "usingdirective",
        "asmdefinition", "linkagespecification", "attributespecifierseq",
        "attributespecifier", "alignmentspecifier", "attributelist",
        "attribute", "attributetoken", "attributescopedtoken",
        "attributenamespace", "attributeargumentclause", "balancedtokenseq",
        "balancedtoken", "initdeclaratorlist", "initdeclarator",
        "declarator", "ptrdeclarator", "noptrdeclarator", "parametersandqualifiers",
        "trailingreturntype", "ptroperator", "cvqualifierseq",
        "cvqualifier", "refqualifier", "declaratorid", "thetypeid",
        "abstractdeclarator", "ptrabstractdeclarator", "noptrabstractdeclarator",
        "abstractpackdeclarator", "noptrabstractpackdeclarator",
        "parameterdeclarationclause", "parameterdeclarationlist",
        "parameterdeclaration", "functiondefinition", "functionbody",
        "initializer", "braceorequalinitializer", "initializerclause",
        "initializerlist", "bracedinitlist", "classname", "classspecifier",
        "classhead", "classheadname", "classvirtspecifier", "classkey",
        "memberspecification", "memberdeclaration", "memberdeclaratorlist",
        "memberdeclarator", "virtspecifierseq", "virtspecifier",
        "purespecifier", "baseclause", "basespecifierlist", "basespecifier",
        "classordecltype", "basetypespecifier", "accessspecifier",
        "conversionfunctionid", "conversiontypeid", "conversiondeclarator",
        "ctorinitializer", "meminitializerlist", "meminitializer",
        "meminitializerid", "operatorfunctionid", "literaloperatorid",
        "templatedeclaration", "templateparameterlist", "templateparameter",
        "typeparameter", "simpletemplateid", "templateid", "templatename",
        "templateargumentlist", "templateargument", "typenamespecifier",
        "explicitinstantiation", "explicitspecialization", "tryblock",
        "functiontryblock", "handlerseq", "handler", "exceptiondeclaration",
        "throwexpression", "exceptionspecification", "dynamicexceptionspecification",
        "typeidlist", "noexceptspecification", "theoperator",
        "literal", "booleanliteral", "pointerliteral", "userdefinedliteral" ]
# Token type constants, one per lexer token (values match symbolicNames
# indices); T__0..T__5 are implicit literal tokens from the grammar.
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
MultiLineMacro=7
Directive=8
Alignas=9
Alignof=10
Asm=11
Auto=12
Bool=13
Break=14
Case=15
Catch=16
Char=17
Char16=18
Char32=19
Class=20
Const=21
Constexpr=22
Const_cast=23
Continue=24
Decltype=25
Default=26
Delete=27
Do=28
Double=29
Dynamic_cast=30
Else=31
Enum=32
Explicit=33
Export=34
Extern=35
BFalse=36
Final=37
Float=38
For=39
Friend=40
Goto=41
If=42
Inline=43
Int=44
Long=45
Mutable=46
Namespace=47
New=48
Noexcept=49
Nullptr=50
Operator=51
Override=52
Private=53
Protected=54
Public=55
Register=56
Reinterpret_cast=57
Return=58
Short=59
Signed=60
Sizeof=61
Static=62
Static_assert=63
Static_cast=64
Struct=65
Switch=66
Template=67
This=68
Thread_local=69
Throw=70
BTrue=71
Try=72
Typedef=73
Typeid_=74
Typename_=75
Union=76
Unsigned=77
Using=78
Virtual=79
Void=80
Volatile=81
Wchar=82
While=83
LeftParen=84
RightParen=85
LeftBracket=86
RightBracket=87
LeftBrace=88
RightBrace=89
Plus=90
Minus=91
Star=92
Div=93
Mod=94
Caret=95
And=96
Or=97
Tilde=98
Not=99
Assign=100
Less=101
Greater=102
PlusAssign=103
MinusAssign=104
StarAssign=105
DivAssign=106
ModAssign=107
XorAssign=108
AndAssign=109
OrAssign=110
LeftShift=111
RightShift=112
LeftShiftAssign=113
RightShiftAssign=114
Equal=115
NotEqual=116
LessEqual=117
GreaterEqual=118
AndAnd=119
OrOr=120
PlusPlus=121
MinusMinus=122
Comma=123
ArrowStar=124
Arrow=125
Question=126
Colon=127
Doublecolon=128
Semi=129
Dot=130
DotStar=131
Ellipsis=132
Identifier=133
Integerliteral=134
Decimalliteral=135
Octalliteral=136
Hexadecimalliteral=137
Binaryliteral=138
Integersuffix=139
Characterliteral=140
Floatingliteral=141
Stringliteral=142
Userdefinedintegerliteral=143
Userdefinedfloatingliteral=144
Userdefinedstringliteral=145
Userdefinedcharacterliteral=146
Whitespace=147
Newline=148
BlockComment=149
LineComment=150
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
    """Create a CPP14 parser over *input*, writing diagnostics to *output*.

    Wires a ParserATNSimulator to the class-level ATN, DFA table and
    prediction-context cache, which are shared by all parser instances.
    """
    super().__init__(input, output)
    # Assert the ANTLR runtime matches the version that generated this file.
    self.checkVersion("4.8")
    self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
    self._predicates = None
class TranslationunitContext(ParserRuleContext):
    """Parse-tree node for the `translationunit` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- child accessors -------------------------------------------------

    def EOF(self):
        # The terminating EOF token of the translation unit.
        return self.getToken(CPP14Parser.EOF, 0)

    def declarationseq(self):
        # Optional top-level declaration sequence (may be absent).
        return self.getTypedRuleContext(CPP14Parser.DeclarationseqContext, 0)

    # --- tree-walking hooks ----------------------------------------------

    def getRuleIndex(self):
        return CPP14Parser.RULE_translationunit

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterTranslationunit
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitTranslationunit
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Prefer the rule-specific visit method; fall back to visitChildren.
        try:
            visit = visitor.visitTranslationunit
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def translationunit(self):
    """Parse the `translationunit` start rule: an optional declarationseq
    followed by EOF. Returns the populated TranslationunitContext; on a
    recognition error the exception is recorded on the context and the
    error handler attempts recovery.
    """
    localctx = CPP14Parser.TranslationunitContext(self, self._ctx, self.state)
    self.enterRule(localctx, 0, self.RULE_translationunit)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 401
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated 64-bit-word bitset membership test: is the lookahead
        # token in FIRST(declarationseq)? If so, parse the optional seq.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Asm) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Namespace) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
            self.state = 400
            self.declarationseq(0)

        self.state = 403
        self.match(CPP14Parser.EOF)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PrimaryexpressionContext(ParserRuleContext):
    """Parse-tree node for the `primaryexpression` rule (a literal, `this`,
    a parenthesized expression, an id-expression, or a lambda)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- child accessors -------------------------------------------------

    def literal(self):
        return self.getTypedRuleContext(CPP14Parser.LiteralContext, 0)

    def This(self):
        return self.getToken(CPP14Parser.This, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def idexpression(self):
        return self.getTypedRuleContext(CPP14Parser.IdexpressionContext, 0)

    def lambdaexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LambdaexpressionContext, 0)

    # --- tree-walking hooks ----------------------------------------------

    def getRuleIndex(self):
        return CPP14Parser.RULE_primaryexpression

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterPrimaryexpression
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitPrimaryexpression
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Prefer the rule-specific visit method; fall back to visitChildren.
        try:
            visit = visitor.visitPrimaryexpression
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def primaryexpression(self):
    """Parse `primaryexpression`, selecting the alternative by a single
    token of lookahead: literal | `this` | `(` expression `)` |
    idexpression | lambdaexpression. Raises via the error handler (and
    records the exception on the context) when no alternative matches.
    """
    localctx = CPP14Parser.PrimaryexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 2, self.RULE_primaryexpression)
    try:
        self.state = 413
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alternative 1: any literal token kind.
        if token in [CPP14Parser.BFalse, CPP14Parser.Nullptr, CPP14Parser.BTrue, CPP14Parser.Integerliteral, CPP14Parser.Characterliteral, CPP14Parser.Floatingliteral, CPP14Parser.Stringliteral, CPP14Parser.Userdefinedintegerliteral, CPP14Parser.Userdefinedfloatingliteral, CPP14Parser.Userdefinedstringliteral, CPP14Parser.Userdefinedcharacterliteral]:
            self.enterOuterAlt(localctx, 1)
            self.state = 405
            self.literal()
            pass
        # Alternative 2: the `this` keyword.
        elif token in [CPP14Parser.This]:
            self.enterOuterAlt(localctx, 2)
            self.state = 406
            self.match(CPP14Parser.This)
            pass
        # Alternative 3: parenthesized expression.
        elif token in [CPP14Parser.LeftParen]:
            self.enterOuterAlt(localctx, 3)
            self.state = 407
            self.match(CPP14Parser.LeftParen)
            self.state = 408
            self.expression(0)
            self.state = 409
            self.match(CPP14Parser.RightParen)
            pass
        # Alternative 4: id-expression (identifier, operator-id, dtor name, ...).
        elif token in [CPP14Parser.Decltype, CPP14Parser.Operator, CPP14Parser.Tilde, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 4)
            self.state = 411
            self.idexpression()
            pass
        # Alternative 5: lambda expression (starts with `[`).
        elif token in [CPP14Parser.LeftBracket]:
            self.enterOuterAlt(localctx, 5)
            self.state = 412
            self.lambdaexpression()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IdexpressionContext(ParserRuleContext):
    """Parse-tree node for the `idexpression` rule (unqualified or
    qualified id)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- child accessors -------------------------------------------------

    def unqualifiedid(self):
        return self.getTypedRuleContext(CPP14Parser.UnqualifiedidContext, 0)

    def qualifiedid(self):
        return self.getTypedRuleContext(CPP14Parser.QualifiedidContext, 0)

    # --- tree-walking hooks ----------------------------------------------

    def getRuleIndex(self):
        return CPP14Parser.RULE_idexpression

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterIdexpression
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitIdexpression
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Prefer the rule-specific visit method; fall back to visitChildren.
        try:
            visit = visitor.visitIdexpression
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def idexpression(self):
    """Parse `idexpression`: unqualifiedid | qualifiedid. The two
    alternatives share a prefix, so the choice is made by full ALL(*)
    adaptive prediction (decision index 2) rather than one lookahead
    token.
    """
    localctx = CPP14Parser.IdexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 4, self.RULE_idexpression)
    try:
        self.state = 417
        self._errHandler.sync(self)
        # Let the ATN simulator resolve the ambiguous prefix.
        la_ = self._interp.adaptivePredict(self._input,2,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 415
            self.unqualifiedid()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 416
            self.qualifiedid()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UnqualifiedidContext(ParserRuleContext):
    """Parse-tree node for the `unqualifiedid` rule (identifier,
    operator-function-id, conversion-function-id, literal-operator-id,
    `~classname`/`~decltype(...)` destructor name, or template-id)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- child accessors -------------------------------------------------

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def operatorfunctionid(self):
        return self.getTypedRuleContext(CPP14Parser.OperatorfunctionidContext, 0)

    def conversionfunctionid(self):
        return self.getTypedRuleContext(CPP14Parser.ConversionfunctionidContext, 0)

    def literaloperatorid(self):
        return self.getTypedRuleContext(CPP14Parser.LiteraloperatoridContext, 0)

    def Tilde(self):
        return self.getToken(CPP14Parser.Tilde, 0)

    def classname(self):
        return self.getTypedRuleContext(CPP14Parser.ClassnameContext, 0)

    def decltypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.DecltypespecifierContext, 0)

    def templateid(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateidContext, 0)

    # --- tree-walking hooks ----------------------------------------------

    def getRuleIndex(self):
        return CPP14Parser.RULE_unqualifiedid

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterUnqualifiedid
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitUnqualifiedid
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Prefer the rule-specific visit method; fall back to visitChildren.
        try:
            visit = visitor.visitUnqualifiedid
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def unqualifiedid(self):
    """Parse `unqualifiedid` (7 alternatives).

    Alternatives: Identifier | operatorfunctionid | conversionfunctionid
    | literaloperatorid | '~' classname | '~' decltypespecifier | templateid.
    """
    localctx = CPP14Parser.UnqualifiedidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 6, self.RULE_unqualifiedid)
    try:
        self.state = 428
        self._errHandler.sync(self)
        # Decision 3: adaptive prediction picks one of the 7 alternatives.
        la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 419
            self.match(CPP14Parser.Identifier)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 420
            self.operatorfunctionid()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 421
            self.conversionfunctionid()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 422
            self.literaloperatorid()
            pass

        elif la_ == 5:
            # Destructor name: '~' classname
            self.enterOuterAlt(localctx, 5)
            self.state = 423
            self.match(CPP14Parser.Tilde)
            self.state = 424
            self.classname()
            pass

        elif la_ == 6:
            # Destructor name via decltype: '~' decltypespecifier
            self.enterOuterAlt(localctx, 6)
            self.state = 425
            self.match(CPP14Parser.Tilde)
            self.state = 426
            self.decltypespecifier()
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 427
            self.templateid()
            pass


    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class QualifiedidContext(ParserRuleContext):
    """Parse-tree node for `qualifiedid : nestednamespecifier Template? unqualifiedid`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def unqualifiedid(self):
        return self.getTypedRuleContext(CPP14Parser.UnqualifiedidContext,0)

    def Template(self):
        # Optional 'template' disambiguator keyword; None when absent.
        return self.getToken(CPP14Parser.Template, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_qualifiedid

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterQualifiedid" ):
            listener.enterQualifiedid(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitQualifiedid" ):
            listener.exitQualifiedid(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitQualifiedid" ):
            return visitor.visitQualifiedid(self)
        else:
            return visitor.visitChildren(self)
def qualifiedid(self):
    """Parse `qualifiedid : nestednamespecifier 'template'? unqualifiedid`."""
    localctx = CPP14Parser.QualifiedidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_qualifiedid)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 430
        # nestednamespecifier is left-recursive; 0 = minimum precedence.
        self.nestednamespecifier(0)
        self.state = 432
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Template:
            # Optional 'template' keyword before the unqualified-id.
            self.state = 431
            self.match(CPP14Parser.Template)


        self.state = 434
        self.unqualifiedid()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NestednamespecifierContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `nestednamespecifier` rule.

    Base alternatives: '::' | thetypename '::' | namespacename '::'
    | decltypespecifier '::'; recursive suffixes append Identifier '::'
    or 'template'? simpletemplateid '::'.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Doublecolon(self):
        return self.getToken(CPP14Parser.Doublecolon, 0)

    def thetypename(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypenameContext,0)

    def namespacename(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacenameContext,0)

    def decltypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.DecltypespecifierContext,0)

    def nestednamespecifier(self):
        # Left-recursive child: the prefix specifier this one extends.
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_nestednamespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNestednamespecifier" ):
            listener.enterNestednamespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNestednamespecifier" ):
            listener.exitNestednamespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNestednamespecifier" ):
            return visitor.visitNestednamespecifier(self)
        else:
            return visitor.visitChildren(self)
def nestednamespecifier(self, _p:int=0):
    """Parse the left-recursive `nestednamespecifier` rule.

    ANTLR rewrites left recursion into: parse one base alternative, then
    loop appending suffix operators while the precedence predicate holds.
    `_p` is the minimum precedence required to continue the loop.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.NestednamespecifierContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 10
    self.enterRecursionRule(localctx, 10, self.RULE_nestednamespecifier, _p)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 447
        self._errHandler.sync(self)
        # Decision 5: choose the non-recursive base alternative.
        la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
        if la_ == 1:
            # '::' (global namespace)
            self.state = 437
            self.match(CPP14Parser.Doublecolon)
            pass

        elif la_ == 2:
            self.state = 438
            self.thetypename()
            self.state = 439
            self.match(CPP14Parser.Doublecolon)
            pass

        elif la_ == 3:
            self.state = 441
            self.namespacename()
            self.state = 442
            self.match(CPP14Parser.Doublecolon)
            pass

        elif la_ == 4:
            self.state = 444
            self.decltypespecifier()
            self.state = 445
            self.match(CPP14Parser.Doublecolon)
            pass


        self._ctx.stop = self._input.LT(-1)
        self.state = 461
        self._errHandler.sync(self)
        # Decision 8: keep consuming left-recursive suffixes while viable.
        _alt = self._interp.adaptivePredict(self._input,8,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 459
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,7,self._ctx)
                if la_ == 1:
                    # Suffix: Identifier '::'
                    localctx = CPP14Parser.NestednamespecifierContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_nestednamespecifier)
                    self.state = 449
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 450
                    self.match(CPP14Parser.Identifier)
                    self.state = 451
                    self.match(CPP14Parser.Doublecolon)
                    pass

                elif la_ == 2:
                    # Suffix: 'template'? simpletemplateid '::'
                    localctx = CPP14Parser.NestednamespecifierContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_nestednamespecifier)
                    self.state = 452
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 454
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    if _la==CPP14Parser.Template:
                        self.state = 453
                        self.match(CPP14Parser.Template)


                    self.state = 456
                    self.simpletemplateid()
                    self.state = 457
                    self.match(CPP14Parser.Doublecolon)
                    pass


            self.state = 463
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,8,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion rules unwind contexts instead of the plain exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class LambdaexpressionContext(ParserRuleContext):
    """Parse-tree node for `lambdaexpression : lambdaintroducer lambdadeclarator? compoundstatement`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def lambdaintroducer(self):
        return self.getTypedRuleContext(CPP14Parser.LambdaintroducerContext,0)

    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def lambdadeclarator(self):
        # Optional declarator (parameter list etc.); None when absent.
        return self.getTypedRuleContext(CPP14Parser.LambdadeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_lambdaexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLambdaexpression" ):
            listener.enterLambdaexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLambdaexpression" ):
            listener.exitLambdaexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLambdaexpression" ):
            return visitor.visitLambdaexpression(self)
        else:
            return visitor.visitChildren(self)
def lambdaexpression(self):
    """Parse `lambdaexpression : lambdaintroducer lambdadeclarator? compoundstatement`."""
    localctx = CPP14Parser.LambdaexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_lambdaexpression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 464
        self.lambdaintroducer()
        self.state = 466
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.LeftParen:
            # A '(' after the introducer means a declarator is present.
            self.state = 465
            self.lambdadeclarator()


        self.state = 468
        self.compoundstatement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LambdaintroducerContext(ParserRuleContext):
    """Parse-tree node for `lambdaintroducer : '[' lambdacapture? ']'`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def lambdacapture(self):
        # Optional capture clause; None for an empty '[]'.
        return self.getTypedRuleContext(CPP14Parser.LambdacaptureContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_lambdaintroducer

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLambdaintroducer" ):
            listener.enterLambdaintroducer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLambdaintroducer" ):
            listener.exitLambdaintroducer(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLambdaintroducer" ):
            return visitor.visitLambdaintroducer(self)
        else:
            return visitor.visitChildren(self)
def lambdaintroducer(self):
    """Parse `lambdaintroducer : '[' lambdacapture? ']'`."""
    localctx = CPP14Parser.LambdaintroducerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 14, self.RULE_lambdaintroducer)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 470
        self.match(CPP14Parser.LeftBracket)
        self.state = 472
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Bitset membership test generated from the capture FIRST set:
        # 'this', '&', '=', or Identifier can start a lambdacapture.
        if ((((_la - 68)) & ~0x3f) == 0 and ((1 << (_la - 68)) & ((1 << (CPP14Parser.This - 68)) | (1 << (CPP14Parser.And - 68)) | (1 << (CPP14Parser.Assign - 68)))) != 0) or _la==CPP14Parser.Identifier:
            self.state = 471
            self.lambdacapture()


        self.state = 474
        self.match(CPP14Parser.RightBracket)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LambdacaptureContext(ParserRuleContext):
    """Parse-tree node for `lambdacapture : capturedefault | capturelist | capturedefault ',' capturelist`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def capturedefault(self):
        return self.getTypedRuleContext(CPP14Parser.CapturedefaultContext,0)

    def capturelist(self):
        return self.getTypedRuleContext(CPP14Parser.CapturelistContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_lambdacapture

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLambdacapture" ):
            listener.enterLambdacapture(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLambdacapture" ):
            listener.exitLambdacapture(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLambdacapture" ):
            return visitor.visitLambdacapture(self)
        else:
            return visitor.visitChildren(self)
def lambdacapture(self):
    """Parse `lambdacapture` (3 alternatives).

    Alternatives: capturedefault | capturelist | capturedefault ',' capturelist.
    """
    localctx = CPP14Parser.LambdacaptureContext(self, self._ctx, self.state)
    self.enterRule(localctx, 16, self.RULE_lambdacapture)
    try:
        self.state = 482
        self._errHandler.sync(self)
        # Decision 11: disambiguates the alternatives (default alone vs list
        # vs default followed by a list).
        la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 476
            self.capturedefault()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 477
            self.capturelist(0)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 478
            self.capturedefault()
            self.state = 479
            self.match(CPP14Parser.Comma)
            self.state = 480
            self.capturelist(0)
            pass


    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CapturedefaultContext(ParserRuleContext):
    """Parse-tree node for `capturedefault : '&' | '='`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def And(self):
        # '&' capture-by-reference default.
        return self.getToken(CPP14Parser.And, 0)

    def Assign(self):
        # '=' capture-by-copy default.
        return self.getToken(CPP14Parser.Assign, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_capturedefault

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCapturedefault" ):
            listener.enterCapturedefault(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCapturedefault" ):
            listener.exitCapturedefault(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCapturedefault" ):
            return visitor.visitCapturedefault(self)
        else:
            return visitor.visitChildren(self)
def capturedefault(self):
    """Parse `capturedefault : '&' | '='` (single-token set match)."""
    localctx = CPP14Parser.CapturedefaultContext(self, self._ctx, self.state)
    self.enterRule(localctx, 18, self.RULE_capturedefault)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 484
        _la = self._input.LA(1)
        if not(_la==CPP14Parser.And or _la==CPP14Parser.Assign):
            # Token not in the expected set: single-token error recovery.
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CapturelistContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `capturelist` rule.

    Base: capture '...'?; recursive suffix: capturelist ',' capture '...'?.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def capture(self):
        return self.getTypedRuleContext(CPP14Parser.CaptureContext,0)

    def Ellipsis(self):
        # Optional pack-expansion '...'; None when absent.
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def capturelist(self):
        # Left-recursive child: the preceding part of the list.
        return self.getTypedRuleContext(CPP14Parser.CapturelistContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_capturelist

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCapturelist" ):
            listener.enterCapturelist(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCapturelist" ):
            listener.exitCapturelist(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCapturelist" ):
            return visitor.visitCapturelist(self)
        else:
            return visitor.visitChildren(self)
def capturelist(self, _p:int=0):
    """Parse the left-recursive `capturelist` rule.

    Parses one `capture '...'?` then loops over `',' capture '...'?`
    suffixes while the precedence predicate (`_p`) allows continuation.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.CapturelistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 20
    self.enterRecursionRule(localctx, 20, self.RULE_capturelist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 487
        self.capture()
        self.state = 489
        self._errHandler.sync(self)
        # Decision 12: optional '...' after the first capture.
        la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
        if la_ == 1:
            self.state = 488
            self.match(CPP14Parser.Ellipsis)


        self._ctx.stop = self._input.LT(-1)
        self.state = 499
        self._errHandler.sync(self)
        # Decision 14: keep appending ',' capture '...'? suffixes while viable.
        _alt = self._interp.adaptivePredict(self._input,14,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.CapturelistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_capturelist)
                self.state = 491
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 492
                self.match(CPP14Parser.Comma)
                self.state = 493
                self.capture()
                self.state = 495
                self._errHandler.sync(self)
                # Decision 13: optional '...' after this capture.
                la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
                if la_ == 1:
                    self.state = 494
                    self.match(CPP14Parser.Ellipsis)


            self.state = 501
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,14,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion rules unwind contexts instead of the plain exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class CaptureContext(ParserRuleContext):
    """Parse-tree node for `capture : simplecapture | initcapture`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def simplecapture(self):
        return self.getTypedRuleContext(CPP14Parser.SimplecaptureContext,0)

    def initcapture(self):
        return self.getTypedRuleContext(CPP14Parser.InitcaptureContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_capture

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCapture" ):
            listener.enterCapture(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCapture" ):
            listener.exitCapture(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCapture" ):
            return visitor.visitCapture(self)
        else:
            return visitor.visitChildren(self)
def capture(self):
    """Parse `capture : simplecapture | initcapture`."""
    localctx = CPP14Parser.CaptureContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_capture)
    try:
        self.state = 504
        self._errHandler.sync(self)
        # Decision 15: lookahead distinguishes plain captures from
        # init-captures (identifier followed by an initializer).
        la_ = self._interp.adaptivePredict(self._input,15,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 502
            self.simplecapture()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 503
            self.initcapture()
            pass


    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SimplecaptureContext(ParserRuleContext):
    """Parse-tree node for `simplecapture : Identifier | '&' Identifier | 'this'`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def This(self):
        return self.getToken(CPP14Parser.This, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_simplecapture

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterSimplecapture" ):
            listener.enterSimplecapture(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitSimplecapture" ):
            listener.exitSimplecapture(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitSimplecapture" ):
            return visitor.visitSimplecapture(self)
        else:
            return visitor.visitChildren(self)
def simplecapture(self):
    """Parse `simplecapture : Identifier | '&' Identifier | 'this'`.

    A single token of lookahead suffices here, so the alternative is
    chosen by LA(1) rather than adaptive prediction.
    """
    localctx = CPP14Parser.SimplecaptureContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_simplecapture)
    try:
        self.state = 510
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Identifier]:
            # Capture by copy: name
            self.enterOuterAlt(localctx, 1)
            self.state = 506
            self.match(CPP14Parser.Identifier)
            pass
        elif token in [CPP14Parser.And]:
            # Capture by reference: '&' name
            self.enterOuterAlt(localctx, 2)
            self.state = 507
            self.match(CPP14Parser.And)
            self.state = 508
            self.match(CPP14Parser.Identifier)
            pass
        elif token in [CPP14Parser.This]:
            self.enterOuterAlt(localctx, 3)
            self.state = 509
            self.match(CPP14Parser.This)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitcaptureContext(ParserRuleContext):
    """Parse-tree node for `initcapture : Identifier initializer | '&' Identifier initializer`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def initializer(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerContext,0)

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_initcapture

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInitcapture" ):
            listener.enterInitcapture(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInitcapture" ):
            listener.exitInitcapture(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInitcapture" ):
            return visitor.visitInitcapture(self)
        else:
            return visitor.visitChildren(self)
def initcapture(self):
    """Parse `initcapture : Identifier initializer | '&' Identifier initializer`.

    LA(1) alone decides the alternative (Identifier vs '&').
    """
    localctx = CPP14Parser.InitcaptureContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_initcapture)
    try:
        self.state = 517
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Identifier]:
            # By-copy init-capture: name initializer
            self.enterOuterAlt(localctx, 1)
            self.state = 512
            self.match(CPP14Parser.Identifier)
            self.state = 513
            self.initializer()
            pass
        elif token in [CPP14Parser.And]:
            # By-reference init-capture: '&' name initializer
            self.enterOuterAlt(localctx, 2)
            self.state = 514
            self.match(CPP14Parser.And)
            self.state = 515
            self.match(CPP14Parser.Identifier)
            self.state = 516
            self.initializer()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LambdadeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `lambdadeclarator` rule.

    Shape: '(' parameterdeclarationclause ')' 'mutable'?
    exceptionspecification? attributespecifierseq? trailingreturntype?.
    All accessors after RightParen return None when the part is absent.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def parameterdeclarationclause(self):
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationclauseContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def Mutable(self):
        return self.getToken(CPP14Parser.Mutable, 0)

    def exceptionspecification(self):
        return self.getTypedRuleContext(CPP14Parser.ExceptionspecificationContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def trailingreturntype(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingreturntypeContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_lambdadeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLambdadeclarator" ):
            listener.enterLambdadeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLambdadeclarator" ):
            listener.exitLambdadeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLambdadeclarator" ):
            return visitor.visitLambdadeclarator(self)
        else:
            return visitor.visitChildren(self)
def lambdadeclarator(self):
    """Parse the `lambdadeclarator` rule.

    Sequence: '(' parameterdeclarationclause ')' followed by four
    independent optional parts, each gated by a one-token lookahead test.
    """
    localctx = CPP14Parser.LambdadeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_lambdadeclarator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 519
        self.match(CPP14Parser.LeftParen)
        self.state = 520
        self.parameterdeclarationclause()
        self.state = 521
        self.match(CPP14Parser.RightParen)
        self.state = 523
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Mutable:
            # Optional 'mutable'
            self.state = 522
            self.match(CPP14Parser.Mutable)


        self.state = 526
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Noexcept or _la==CPP14Parser.Throw:
            # Optional exception specification ('noexcept' or 'throw' forms)
            self.state = 525
            self.exceptionspecification()


        self.state = 529
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            # Optional attribute sequence ('alignas' or '[[' ... ']]')
            self.state = 528
            self.attributespecifierseq(0)


        self.state = 532
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Arrow:
            # Optional trailing return type ('->' type)
            self.state = 531
            self.trailingreturntype()


    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PostfixexpressionContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `postfixexpression` rule.

    Covers (per the accessors): primary expressions, functional casts
    (simpletypespecifier/typenamespecifier with '(...)' or braced-init-list),
    named casts (dynamic/static/reinterpret/const_cast '<' thetypeid '>'
    '(' expression ')'), typeid, and the recursive postfix suffixes:
    subscript, call, member access ('.'/'->', optionally 'template',
    pseudo-destructor names), and '++'/'--'.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def primaryexpression(self):
        return self.getTypedRuleContext(CPP14Parser.PrimaryexpressionContext,0)

    def simpletypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletypespecifierContext,0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def expressionlist(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionlistContext,0)

    def typenamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TypenamespecifierContext,0)

    def bracedinitlist(self):
        return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext,0)

    def Dynamic_cast(self):
        return self.getToken(CPP14Parser.Dynamic_cast, 0)

    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

    def Static_cast(self):
        return self.getToken(CPP14Parser.Static_cast, 0)

    def Reinterpret_cast(self):
        return self.getToken(CPP14Parser.Reinterpret_cast, 0)

    def Const_cast(self):
        return self.getToken(CPP14Parser.Const_cast, 0)

    def typeidofthetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.TypeidofthetypeidContext,0)

    def postfixexpression(self):
        # Left-recursive child: the expression this postfix operator applies to.
        return self.getTypedRuleContext(CPP14Parser.PostfixexpressionContext,0)

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def Dot(self):
        return self.getToken(CPP14Parser.Dot, 0)

    def idexpression(self):
        return self.getTypedRuleContext(CPP14Parser.IdexpressionContext,0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def Arrow(self):
        return self.getToken(CPP14Parser.Arrow, 0)

    def pseudodestructorname(self):
        return self.getTypedRuleContext(CPP14Parser.PseudodestructornameContext,0)

    def PlusPlus(self):
        return self.getToken(CPP14Parser.PlusPlus, 0)

    def MinusMinus(self):
        return self.getToken(CPP14Parser.MinusMinus, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_postfixexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPostfixexpression" ):
            listener.enterPostfixexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPostfixexpression" ):
            listener.exitPostfixexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitPostfixexpression" ):
            return visitor.visitPostfixexpression(self)
        else:
            return visitor.visitChildren(self)
    def postfixexpression(self, _p:int=0):
        """Parse the left-recursive `postfixexpression` rule (ANTLR-generated).

        The primary alternatives (selected by adaptive prediction, decision 24)
        cover: primary expression, functional casts of the form
        ``simpletypespecifier/typenamespecifier ( expressionlist? )`` or
        ``... bracedinitlist``, the four C++ cast keywords
        (``dynamic_cast``/``static_cast``/``reinterpret_cast``/``const_cast``),
        and the two ``typeid`` forms.  The trailing while-loop then consumes
        left-recursive suffixes — ``[expression]``, ``[bracedinitlist]``, call
        argument lists, ``.``/``->`` member access (optionally ``template``-
        qualified), pseudo-destructor names, and postfix ``++``/``--`` — each
        guarded by a precedence predicate (``precpred``).
        """
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.PostfixexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 30
        self.enterRecursionRule(localctx, 30, self.RULE_postfixexpression, _p)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 598
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,24,self._ctx)
            if la_ == 1:
                self.state = 535
                self.primaryexpression()
                pass

            elif la_ == 2:
                # simpletypespecifier ( expressionlist? )  — functional cast
                self.state = 536
                self.simpletypespecifier()
                self.state = 537
                self.match(CPP14Parser.LeftParen)
                self.state = 539
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Bitmask test: is the lookahead token in FIRST(expressionlist)?
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << 
                (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                    self.state = 538
                    self.expressionlist()

                self.state = 541
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 3:
                # typenamespecifier ( expressionlist? )
                self.state = 543
                self.typenamespecifier()
                self.state = 544
                self.match(CPP14Parser.LeftParen)
                self.state = 546
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Same FIRST(expressionlist) bitmask test as above.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << 
                (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                    self.state = 545
                    self.expressionlist()

                self.state = 548
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 4:
                # simpletypespecifier bracedinitlist
                self.state = 550
                self.simpletypespecifier()
                self.state = 551
                self.bracedinitlist()
                pass

            elif la_ == 5:
                # typenamespecifier bracedinitlist
                self.state = 553
                self.typenamespecifier()
                self.state = 554
                self.bracedinitlist()
                pass

            elif la_ == 6:
                # dynamic_cast < thetypeid > ( expression )
                self.state = 556
                self.match(CPP14Parser.Dynamic_cast)
                self.state = 557
                self.match(CPP14Parser.Less)
                self.state = 558
                self.thetypeid()
                self.state = 559
                self.match(CPP14Parser.Greater)
                self.state = 560
                self.match(CPP14Parser.LeftParen)
                self.state = 561
                self.expression(0)
                self.state = 562
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 7:
                # static_cast < thetypeid > ( expression )
                self.state = 564
                self.match(CPP14Parser.Static_cast)
                self.state = 565
                self.match(CPP14Parser.Less)
                self.state = 566
                self.thetypeid()
                self.state = 567
                self.match(CPP14Parser.Greater)
                self.state = 568
                self.match(CPP14Parser.LeftParen)
                self.state = 569
                self.expression(0)
                self.state = 570
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 8:
                # reinterpret_cast < thetypeid > ( expression )
                self.state = 572
                self.match(CPP14Parser.Reinterpret_cast)
                self.state = 573
                self.match(CPP14Parser.Less)
                self.state = 574
                self.thetypeid()
                self.state = 575
                self.match(CPP14Parser.Greater)
                self.state = 576
                self.match(CPP14Parser.LeftParen)
                self.state = 577
                self.expression(0)
                self.state = 578
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 9:
                # const_cast < thetypeid > ( expression )
                self.state = 580
                self.match(CPP14Parser.Const_cast)
                self.state = 581
                self.match(CPP14Parser.Less)
                self.state = 582
                self.thetypeid()
                self.state = 583
                self.match(CPP14Parser.Greater)
                self.state = 584
                self.match(CPP14Parser.LeftParen)
                self.state = 585
                self.expression(0)
                self.state = 586
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 10:
                # typeid ( expression )
                self.state = 588
                self.typeidofthetypeid()
                self.state = 589
                self.match(CPP14Parser.LeftParen)
                self.state = 590
                self.expression(0)
                self.state = 591
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 11:
                # typeid ( thetypeid )
                self.state = 593
                self.typeidofthetypeid()
                self.state = 594
                self.match(CPP14Parser.LeftParen)
                self.state = 595
                self.thetypeid()
                self.state = 596
                self.match(CPP14Parser.RightParen)
                pass

            # Left-recursion loop: keep consuming postfix suffixes while the
            # prediction (decision 29) says another one follows.
            self._ctx.stop = self._input.LT(-1)
            self.state = 640
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,29,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 638
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,28,self._ctx)
                    if la_ == 1:
                        # postfixexpression [ expression ]
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 600
                        if not self.precpred(self._ctx, 19):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 19)")
                        self.state = 601
                        self.match(CPP14Parser.LeftBracket)
                        self.state = 602
                        self.expression(0)
                        self.state = 603
                        self.match(CPP14Parser.RightBracket)
                        pass

                    elif la_ == 2:
                        # postfixexpression [ bracedinitlist ]
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 605
                        if not self.precpred(self._ctx, 18):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 18)")
                        self.state = 606
                        self.match(CPP14Parser.LeftBracket)
                        self.state = 607
                        self.bracedinitlist()
                        self.state = 608
                        self.match(CPP14Parser.RightBracket)
                        pass

                    elif la_ == 3:
                        # postfixexpression ( expressionlist? )  — call
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 610
                        if not self.precpred(self._ctx, 17):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 17)")
                        self.state = 611
                        self.match(CPP14Parser.LeftParen)
                        self.state = 613
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                        # Same FIRST(expressionlist) bitmask test as in the
                        # functional-cast alternatives above.
                        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << 
                        (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                            self.state = 612
                            self.expressionlist()

                        self.state = 615
                        self.match(CPP14Parser.RightParen)
                        pass

                    elif la_ == 4:
                        # postfixexpression . template? idexpression
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 616
                        if not self.precpred(self._ctx, 12):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 12)")
                        self.state = 617
                        self.match(CPP14Parser.Dot)
                        self.state = 619
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                        if _la==CPP14Parser.Template:
                            self.state = 618
                            self.match(CPP14Parser.Template)

                        self.state = 621
                        self.idexpression()
                        pass

                    elif la_ == 5:
                        # postfixexpression -> template? idexpression
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 622
                        if not self.precpred(self._ctx, 11):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 11)")
                        self.state = 623
                        self.match(CPP14Parser.Arrow)
                        self.state = 625
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                        if _la==CPP14Parser.Template:
                            self.state = 624
                            self.match(CPP14Parser.Template)

                        self.state = 627
                        self.idexpression()
                        pass

                    elif la_ == 6:
                        # postfixexpression . pseudodestructorname
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 628
                        if not self.precpred(self._ctx, 10):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 10)")
                        self.state = 629
                        self.match(CPP14Parser.Dot)
                        self.state = 630
                        self.pseudodestructorname()
                        pass

                    elif la_ == 7:
                        # postfixexpression -> pseudodestructorname
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 631
                        if not self.precpred(self._ctx, 9):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
                        self.state = 632
                        self.match(CPP14Parser.Arrow)
                        self.state = 633
                        self.pseudodestructorname()
                        pass

                    elif la_ == 8:
                        # postfixexpression ++
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 634
                        if not self.precpred(self._ctx, 8):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
                        self.state = 635
                        self.match(CPP14Parser.PlusPlus)
                        pass

                    elif la_ == 9:
                        # postfixexpression --
                        localctx = CPP14Parser.PostfixexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_postfixexpression)
                        self.state = 636
                        if not self.precpred(self._ctx, 7):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 7)")
                        self.state = 637
                        self.match(CPP14Parser.MinusMinus)
                        pass

                self.state = 642
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,29,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class TypeidofexprContext(ParserRuleContext):
        """Parse-tree node for the `typeidofexpr` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # Accessor for the matched `typeid` keyword token.
        def Typeid_(self):
            return self.getToken(CPP14Parser.Typeid_, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_typeidofexpr

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterTypeidofexpr" ):
                listener.enterTypeidofexpr(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitTypeidofexpr" ):
                listener.exitTypeidofexpr(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitTypeidofexpr" ):
                return visitor.visitTypeidofexpr(self)
            else:
                return visitor.visitChildren(self)
    def typeidofexpr(self):
        """Parse the `typeidofexpr` rule: matches a single `Typeid_` token."""
        localctx = CPP14Parser.TypeidofexprContext(self, self._ctx, self.state)
        self.enterRule(localctx, 32, self.RULE_typeidofexpr)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 643
            self.match(CPP14Parser.Typeid_)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class TypeidofthetypeidContext(ParserRuleContext):
        """Parse-tree node for the `typeidofthetypeid` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # Accessor for the matched `typeid` keyword token.
        def Typeid_(self):
            return self.getToken(CPP14Parser.Typeid_, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_typeidofthetypeid

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterTypeidofthetypeid" ):
                listener.enterTypeidofthetypeid(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitTypeidofthetypeid" ):
                listener.exitTypeidofthetypeid(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitTypeidofthetypeid" ):
                return visitor.visitTypeidofthetypeid(self)
            else:
                return visitor.visitChildren(self)
    def typeidofthetypeid(self):
        """Parse the `typeidofthetypeid` rule: matches a single `Typeid_` token."""
        localctx = CPP14Parser.TypeidofthetypeidContext(self, self._ctx, self.state)
        self.enterRule(localctx, 34, self.RULE_typeidofthetypeid)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 645
            self.match(CPP14Parser.Typeid_)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ExpressionlistContext(ParserRuleContext):
        """Parse-tree node for the `expressionlist` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # Accessor for the single initializerlist child.
        def initializerlist(self):
            return self.getTypedRuleContext(CPP14Parser.InitializerlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_expressionlist

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterExpressionlist" ):
                listener.enterExpressionlist(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitExpressionlist" ):
                listener.exitExpressionlist(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitExpressionlist" ):
                return visitor.visitExpressionlist(self)
            else:
                return visitor.visitChildren(self)
    def expressionlist(self):
        """Parse the `expressionlist` rule: delegates to `initializerlist`."""
        localctx = CPP14Parser.ExpressionlistContext(self, self._ctx, self.state)
        self.enterRule(localctx, 36, self.RULE_expressionlist)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 647
            self.initializerlist(0)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PseudodestructornameContext(ParserRuleContext):
        """Parse-tree node for the `pseudodestructorname` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # With no argument returns all thetypename children; with an index,
        # the i-th one (some alternatives of this rule match it twice).
        def thetypename(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(CPP14Parser.ThetypenameContext)
            else:
                return self.getTypedRuleContext(CPP14Parser.ThetypenameContext,i)

        def Doublecolon(self):
            return self.getToken(CPP14Parser.Doublecolon, 0)
        def Tilde(self):
            return self.getToken(CPP14Parser.Tilde, 0)
        def nestednamespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)
        def Template(self):
            return self.getToken(CPP14Parser.Template, 0)
        def simpletemplateid(self):
            return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)
        def decltypespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.DecltypespecifierContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_pseudodestructorname

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPseudodestructorname" ):
                listener.enterPseudodestructorname(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPseudodestructorname" ):
                listener.exitPseudodestructorname(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitPseudodestructorname" ):
                return visitor.visitPseudodestructorname(self)
            else:
                return visitor.visitChildren(self)
    def pseudodestructorname(self):
        """Parse the `pseudodestructorname` rule.

        Alternatives (decision 32): qualified ``thetypename :: ~ thetypename``,
        ``nestednamespecifier template simpletemplateid :: ~ thetypename``,
        plain ``~ thetypename`` with an optional nested-name-specifier, and
        ``~ decltypespecifier``.
        """
        localctx = CPP14Parser.PseudodestructornameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 38, self.RULE_pseudodestructorname)
        self._la = 0 # Token type
        try:
            self.state = 671
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,32,self._ctx)
            if la_ == 1:
                # nestednamespecifier? thetypename :: ~ thetypename
                self.enterOuterAlt(localctx, 1)
                self.state = 650
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,30,self._ctx)
                if la_ == 1:
                    self.state = 649
                    self.nestednamespecifier(0)

                self.state = 652
                self.thetypename()
                self.state = 653
                self.match(CPP14Parser.Doublecolon)
                self.state = 654
                self.match(CPP14Parser.Tilde)
                self.state = 655
                self.thetypename()
                pass

            elif la_ == 2:
                # nestednamespecifier template simpletemplateid :: ~ thetypename
                self.enterOuterAlt(localctx, 2)
                self.state = 657
                self.nestednamespecifier(0)
                self.state = 658
                self.match(CPP14Parser.Template)
                self.state = 659
                self.simpletemplateid()
                self.state = 660
                self.match(CPP14Parser.Doublecolon)
                self.state = 661
                self.match(CPP14Parser.Tilde)
                self.state = 662
                self.thetypename()
                pass

            elif la_ == 3:
                # nestednamespecifier? ~ thetypename
                self.enterOuterAlt(localctx, 3)
                self.state = 665
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Decltype or _la==CPP14Parser.Doublecolon or _la==CPP14Parser.Identifier:
                    self.state = 664
                    self.nestednamespecifier(0)

                self.state = 667
                self.match(CPP14Parser.Tilde)
                self.state = 668
                self.thetypename()
                pass

            elif la_ == 4:
                # ~ decltypespecifier
                self.enterOuterAlt(localctx, 4)
                self.state = 669
                self.match(CPP14Parser.Tilde)
                self.state = 670
                self.decltypespecifier()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class UnaryexpressionContext(ParserRuleContext):
        """Parse-tree node for the `unaryexpression` rule (ANTLR-generated).

        Each accessor returns the matched token/child context, or None when
        the corresponding alternative was not taken.
        """

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def postfixexpression(self):
            return self.getTypedRuleContext(CPP14Parser.PostfixexpressionContext,0)
        def PlusPlus(self):
            return self.getToken(CPP14Parser.PlusPlus, 0)
        def castexpression(self):
            return self.getTypedRuleContext(CPP14Parser.CastexpressionContext,0)
        def MinusMinus(self):
            return self.getToken(CPP14Parser.MinusMinus, 0)
        def unaryoperator(self):
            return self.getTypedRuleContext(CPP14Parser.UnaryoperatorContext,0)
        def Sizeof(self):
            return self.getToken(CPP14Parser.Sizeof, 0)
        def unaryexpression(self):
            return self.getTypedRuleContext(CPP14Parser.UnaryexpressionContext,0)
        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)
        def thetypeid(self):
            return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)
        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)
        def Ellipsis(self):
            return self.getToken(CPP14Parser.Ellipsis, 0)
        def Identifier(self):
            return self.getToken(CPP14Parser.Identifier, 0)
        def Alignof(self):
            return self.getToken(CPP14Parser.Alignof, 0)
        def noexceptexpression(self):
            return self.getTypedRuleContext(CPP14Parser.NoexceptexpressionContext,0)
        def newexpression(self):
            return self.getTypedRuleContext(CPP14Parser.NewexpressionContext,0)
        def deleteexpression(self):
            return self.getTypedRuleContext(CPP14Parser.DeleteexpressionContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_unaryexpression

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterUnaryexpression" ):
                listener.enterUnaryexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitUnaryexpression" ):
                listener.exitUnaryexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitUnaryexpression" ):
                return visitor.visitUnaryexpression(self)
            else:
                return visitor.visitChildren(self)
    def unaryexpression(self):
        """Parse the `unaryexpression` rule.

        Alternatives (decision 33): postfixexpression; prefix ``++``/``--``
        on a castexpression; unaryoperator castexpression; the three
        ``sizeof`` forms (expression, ``( thetypeid )``, pack
        ``... ( Identifier )``); ``alignof ( thetypeid )``; and the
        noexcept/new/delete expression rules.
        """
        localctx = CPP14Parser.UnaryexpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 40, self.RULE_unaryexpression)
        try:
            self.state = 701
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,33,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 673
                self.postfixexpression(0)
                pass

            elif la_ == 2:
                # ++ castexpression
                self.enterOuterAlt(localctx, 2)
                self.state = 674
                self.match(CPP14Parser.PlusPlus)
                self.state = 675
                self.castexpression()
                pass

            elif la_ == 3:
                # -- castexpression
                self.enterOuterAlt(localctx, 3)
                self.state = 676
                self.match(CPP14Parser.MinusMinus)
                self.state = 677
                self.castexpression()
                pass

            elif la_ == 4:
                # unaryoperator castexpression
                self.enterOuterAlt(localctx, 4)
                self.state = 678
                self.unaryoperator()
                self.state = 679
                self.castexpression()
                pass

            elif la_ == 5:
                # sizeof unaryexpression
                self.enterOuterAlt(localctx, 5)
                self.state = 681
                self.match(CPP14Parser.Sizeof)
                self.state = 682
                self.unaryexpression()
                pass

            elif la_ == 6:
                # sizeof ( thetypeid )
                self.enterOuterAlt(localctx, 6)
                self.state = 683
                self.match(CPP14Parser.Sizeof)
                self.state = 684
                self.match(CPP14Parser.LeftParen)
                self.state = 685
                self.thetypeid()
                self.state = 686
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 7:
                # sizeof ... ( Identifier )  — parameter pack
                self.enterOuterAlt(localctx, 7)
                self.state = 688
                self.match(CPP14Parser.Sizeof)
                self.state = 689
                self.match(CPP14Parser.Ellipsis)
                self.state = 690
                self.match(CPP14Parser.LeftParen)
                self.state = 691
                self.match(CPP14Parser.Identifier)
                self.state = 692
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 8:
                # alignof ( thetypeid )
                self.enterOuterAlt(localctx, 8)
                self.state = 693
                self.match(CPP14Parser.Alignof)
                self.state = 694
                self.match(CPP14Parser.LeftParen)
                self.state = 695
                self.thetypeid()
                self.state = 696
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 9:
                self.enterOuterAlt(localctx, 9)
                self.state = 698
                self.noexceptexpression()
                pass

            elif la_ == 10:
                self.enterOuterAlt(localctx, 10)
                self.state = 699
                self.newexpression()
                pass

            elif la_ == 11:
                self.enterOuterAlt(localctx, 11)
                self.state = 700
                self.deleteexpression()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class UnaryoperatorContext(ParserRuleContext):
        """Parse-tree node for the `unaryoperator` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # Accessors for whichever operator token was matched (None otherwise).
        def Or(self):
            return self.getToken(CPP14Parser.Or, 0)
        def Star(self):
            return self.getToken(CPP14Parser.Star, 0)
        def And(self):
            return self.getToken(CPP14Parser.And, 0)
        def Plus(self):
            return self.getToken(CPP14Parser.Plus, 0)
        def Tilde(self):
            return self.getToken(CPP14Parser.Tilde, 0)
        def Minus(self):
            return self.getToken(CPP14Parser.Minus, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_unaryoperator

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterUnaryoperator" ):
                listener.enterUnaryoperator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitUnaryoperator" ):
                listener.exitUnaryoperator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitUnaryoperator" ):
                return visitor.visitUnaryoperator(self)
            else:
                return visitor.visitChildren(self)
    def unaryoperator(self):
        """Parse the `unaryoperator` rule.

        Consumes exactly one token from the operator set (Plus, Minus, Star,
        And, Or, Tilde, or the implicit literal tokens T__0/T__1); otherwise
        attempts single-token inline recovery.
        """
        localctx = CPP14Parser.UnaryoperatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 42, self.RULE_unaryoperator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 703
            _la = self._input.LA(1)
            # Bitmask membership test over the unary-operator token set.
            if not(_la==CPP14Parser.T__0 or _la==CPP14Parser.T__1 or ((((_la - 90)) & ~0x3f) == 0 and ((1 << (_la - 90)) & ((1 << (CPP14Parser.Plus - 90)) | (1 << (CPP14Parser.Minus - 90)) | (1 << (CPP14Parser.Star - 90)) | (1 << (CPP14Parser.And - 90)) | (1 << (CPP14Parser.Or - 90)) | (1 << (CPP14Parser.Tilde - 90)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NewexpressionContext(ParserRuleContext):
        """Parse-tree node for the `newexpression` rule (ANTLR-generated).

        Each accessor returns the matched token/child context, or None when
        the corresponding optional element or alternative was not taken.
        """

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def New(self):
            return self.getToken(CPP14Parser.New, 0)
        def newtypeid(self):
            return self.getTypedRuleContext(CPP14Parser.NewtypeidContext,0)
        def Doublecolon(self):
            return self.getToken(CPP14Parser.Doublecolon, 0)
        def newplacement(self):
            return self.getTypedRuleContext(CPP14Parser.NewplacementContext,0)
        def newinitializer(self):
            return self.getTypedRuleContext(CPP14Parser.NewinitializerContext,0)
        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)
        def thetypeid(self):
            return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)
        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_newexpression

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNewexpression" ):
                listener.enterNewexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNewexpression" ):
                listener.exitNewexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNewexpression" ):
                return visitor.visitNewexpression(self)
            else:
                return visitor.visitChildren(self)
    def newexpression(self):
        """Parse the `newexpression` rule.

        Two alternatives (decision 40), both: optional ``::``, ``new``,
        optional newplacement — then either a ``newtypeid`` or a
        parenthesized ``thetypeid`` — followed by an optional
        ``newinitializer``.
        """
        localctx = CPP14Parser.NewexpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 44, self.RULE_newexpression)
        self._la = 0 # Token type
        try:
            self.state = 729
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,40,self._ctx)
            if la_ == 1:
                # ::? new newplacement? newtypeid newinitializer?
                self.enterOuterAlt(localctx, 1)
                self.state = 706
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Doublecolon:
                    self.state = 705
                    self.match(CPP14Parser.Doublecolon)

                self.state = 708
                self.match(CPP14Parser.New)
                self.state = 710
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.LeftParen:
                    self.state = 709
                    self.newplacement()

                self.state = 712
                self.newtypeid()
                self.state = 714
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
                if la_ == 1:
                    self.state = 713
                    self.newinitializer()

                pass

            elif la_ == 2:
                # ::? new newplacement? ( thetypeid ) newinitializer?
                self.enterOuterAlt(localctx, 2)
                self.state = 717
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Doublecolon:
                    self.state = 716
                    self.match(CPP14Parser.Doublecolon)

                self.state = 719
                self.match(CPP14Parser.New)
                self.state = 721
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,38,self._ctx)
                if la_ == 1:
                    self.state = 720
                    self.newplacement()

                self.state = 723
                self.match(CPP14Parser.LeftParen)
                self.state = 724
                self.thetypeid()
                self.state = 725
                self.match(CPP14Parser.RightParen)
                self.state = 727
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,39,self._ctx)
                if la_ == 1:
                    self.state = 726
                    self.newinitializer()

                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NewplacementContext(ParserRuleContext):
        """Parse-tree node for the `newplacement` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)
        def expressionlist(self):
            return self.getTypedRuleContext(CPP14Parser.ExpressionlistContext,0)
        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_newplacement

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNewplacement" ):
                listener.enterNewplacement(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNewplacement" ):
                listener.exitNewplacement(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNewplacement" ):
                return visitor.visitNewplacement(self)
            else:
                return visitor.visitChildren(self)
    def newplacement(self):
        """Parse the `newplacement` rule: ``( expressionlist )``."""
        localctx = CPP14Parser.NewplacementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 46, self.RULE_newplacement)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 731
            self.match(CPP14Parser.LeftParen)
            self.state = 732
            self.expressionlist()
            self.state = 733
            self.match(CPP14Parser.RightParen)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NewtypeidContext(ParserRuleContext):
        """Parse-tree node for the `newtypeid` rule (ANTLR-generated)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def typespecifierseq(self):
            return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)
        # Optional child: None when no newdeclarator was matched.
        def newdeclarator(self):
            return self.getTypedRuleContext(CPP14Parser.NewdeclaratorContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_newtypeid

        # Listener/visitor dispatch; hasattr guards tolerate partial implementations.
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNewtypeid" ):
                listener.enterNewtypeid(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNewtypeid" ):
                listener.exitNewtypeid(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNewtypeid" ):
                return visitor.visitNewtypeid(self)
            else:
                return visitor.visitChildren(self)
    def newtypeid(self):
        """Parse `newtypeid : typespecifierseq newdeclarator?` and return its context node."""

        localctx = CPP14Parser.NewtypeidContext(self, self._ctx, self.state)
        self.enterRule(localctx, 48, self.RULE_newtypeid)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 735
            self.typespecifierseq()
            self.state = 737
            self._errHandler.sync(self)
            # Adaptive prediction (decision 41) chooses whether the optional
            # newdeclarator is present.
            la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
            if la_ == 1:
                self.state = 736
                self.newdeclarator()

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NewdeclaratorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `newdeclarator` rule:
        ptroperator newdeclarator? | noptrnewdeclarator."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def ptroperator(self):
            # Accessor for the pointer-operator subtree (alternative 1).
            return self.getTypedRuleContext(CPP14Parser.PtroperatorContext,0)

        def newdeclarator(self):
            # Accessor for the optional recursive new-declarator subtree.
            return self.getTypedRuleContext(CPP14Parser.NewdeclaratorContext,0)

        def noptrnewdeclarator(self):
            # Accessor for the no-pointer new-declarator subtree (alternative 2).
            return self.getTypedRuleContext(CPP14Parser.NoptrnewdeclaratorContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_newdeclarator

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNewdeclarator" ):
                listener.enterNewdeclarator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNewdeclarator" ):
                listener.exitNewdeclarator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNewdeclarator" ):
                return visitor.visitNewdeclarator(self)
            else:
                return visitor.visitChildren(self)
    def newdeclarator(self):
        """Parse `newdeclarator : ptroperator newdeclarator? | noptrnewdeclarator`.

        The alternative is chosen from the current lookahead token; an
        unexpected token raises NoViableAltException (caught and reported).
        """

        localctx = CPP14Parser.NewdeclaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 50, self.RULE_newdeclarator)
        try:
            self.state = 744
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # Alternative 1: tokens that can begin a ptroperator.
            if token in [CPP14Parser.T__2, CPP14Parser.Decltype, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
                self.enterOuterAlt(localctx, 1)
                self.state = 739
                self.ptroperator()
                self.state = 741
                self._errHandler.sync(self)
                # Decision 42: optional recursive newdeclarator after the ptroperator.
                la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
                if la_ == 1:
                    self.state = 740
                    self.newdeclarator()

                pass
            elif token in [CPP14Parser.LeftBracket]:
                # Alternative 2: array-form declarator.
                self.enterOuterAlt(localctx, 2)
                self.state = 743
                self.noptrnewdeclarator(0)
                pass
            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NoptrnewdeclaratorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `noptrnewdeclarator` rule ('[' expression ']' plus repeated
        '[' constantexpression ']' suffixes, each with an optional
        attributespecifierseq)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def LeftBracket(self):
            return self.getToken(CPP14Parser.LeftBracket, 0)

        def expression(self):
            return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

        def RightBracket(self):
            return self.getToken(CPP14Parser.RightBracket, 0)

        def attributespecifierseq(self):
            # Optional attribute sequence following ']'.
            return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

        def noptrnewdeclarator(self):
            # Left-recursive child for chained '[...]' suffixes.
            return self.getTypedRuleContext(CPP14Parser.NoptrnewdeclaratorContext,0)

        def constantexpression(self):
            return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_noptrnewdeclarator

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNoptrnewdeclarator" ):
                listener.enterNoptrnewdeclarator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNoptrnewdeclarator" ):
                listener.exitNoptrnewdeclarator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNoptrnewdeclarator" ):
                return visitor.visitNoptrnewdeclarator(self)
            else:
                return visitor.visitChildren(self)
    def noptrnewdeclarator(self, _p:int=0):
        """Parse the left-recursive `noptrnewdeclarator` rule.

        Base: '[' expression ']' attributespecifierseq?; then loops consuming
        '[' constantexpression ']' attributespecifierseq? suffixes while the
        precedence predicate (_p) allows. Uses ANTLR's recursion-context
        machinery rather than enterRule/exitRule.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.NoptrnewdeclaratorContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 52
        self.enterRecursionRule(localctx, 52, self.RULE_noptrnewdeclarator, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 747
            self.match(CPP14Parser.LeftBracket)
            self.state = 748
            self.expression(0)
            self.state = 749
            self.match(CPP14Parser.RightBracket)
            self.state = 751
            self._errHandler.sync(self)
            # Decision 44: optional attributespecifierseq after the first ']'.
            la_ = self._interp.adaptivePredict(self._input,44,self._ctx)
            if la_ == 1:
                self.state = 750
                self.attributespecifierseq(0)

            self._ctx.stop = self._input.LT(-1)
            self.state = 762
            self._errHandler.sync(self)
            # Decision 46 drives the suffix loop (_alt==1 means "take another suffix").
            _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.NoptrnewdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrnewdeclarator)
                    self.state = 753
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 754
                    self.match(CPP14Parser.LeftBracket)
                    self.state = 755
                    self.constantexpression()
                    self.state = 756
                    self.match(CPP14Parser.RightBracket)
                    self.state = 758
                    self._errHandler.sync(self)
                    # Decision 45: optional attributespecifierseq after this ']'.
                    la_ = self._interp.adaptivePredict(self._input,45,self._ctx)
                    if la_ == 1:
                        self.state = 757
                        self.attributespecifierseq(0)

                self.state = 764
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,46,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            # Recursion rules unwind with unrollRecursionContexts, not exitRule.
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class NewinitializerContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `newinitializer` rule:
        '(' expressionlist? ')' | bracedinitlist."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)

        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def expressionlist(self):
            # Optional argument list inside the parentheses (None if empty).
            return self.getTypedRuleContext(CPP14Parser.ExpressionlistContext,0)

        def bracedinitlist(self):
            # Brace-form initializer (alternative 2).
            return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_newinitializer

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNewinitializer" ):
                listener.enterNewinitializer(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNewinitializer" ):
                listener.exitNewinitializer(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNewinitializer" ):
                return visitor.visitNewinitializer(self)
            else:
                return visitor.visitChildren(self)
    def newinitializer(self):
        """Parse `newinitializer : '(' expressionlist? ')' | bracedinitlist`.

        The expressionlist is consumed only when the lookahead token is in the
        (machine-generated) FIRST-set bitmask below; an unexpected opening
        token raises NoViableAltException.
        """

        localctx = CPP14Parser.NewinitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 54, self.RULE_newinitializer)
        self._la = 0 # Token type
        try:
            self.state = 771
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CPP14Parser.LeftParen]:
                self.enterOuterAlt(localctx, 1)
                self.state = 765
                self.match(CPP14Parser.LeftParen)
                self.state = 767
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Generated FIRST-set membership test for expressionlist,
                # encoded as 64-bit token bitmasks.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                    self.state = 766
                    self.expressionlist()

                self.state = 769
                self.match(CPP14Parser.RightParen)
                pass
            elif token in [CPP14Parser.LeftBrace]:
                self.enterOuterAlt(localctx, 2)
                self.state = 770
                self.bracedinitlist()
                pass
            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class DeleteexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `deleteexpression` rule:
        Doublecolon? Delete ('[' ']')? castexpression."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def Delete(self):
            return self.getToken(CPP14Parser.Delete, 0)

        def castexpression(self):
            # Operand being deleted.
            return self.getTypedRuleContext(CPP14Parser.CastexpressionContext,0)

        def Doublecolon(self):
            # Optional leading '::'.
            return self.getToken(CPP14Parser.Doublecolon, 0)

        def LeftBracket(self):
            # Present only for the array form `delete[]`.
            return self.getToken(CPP14Parser.LeftBracket, 0)

        def RightBracket(self):
            return self.getToken(CPP14Parser.RightBracket, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_deleteexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterDeleteexpression" ):
                listener.enterDeleteexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitDeleteexpression" ):
                listener.exitDeleteexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitDeleteexpression" ):
                return visitor.visitDeleteexpression(self)
            else:
                return visitor.visitChildren(self)
    def deleteexpression(self):
        """Parse `deleteexpression : '::'? 'delete' castexpression
                                   | '::'? 'delete' '[' ']' castexpression`.

        Decision 51 distinguishes the scalar and array forms; within each,
        the leading '::' is consumed if present.
        """

        localctx = CPP14Parser.DeleteexpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 56, self.RULE_deleteexpression)
        self._la = 0 # Token type
        try:
            self.state = 785
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
            if la_ == 1:
                # Scalar form: '::'? delete castexpression
                self.enterOuterAlt(localctx, 1)
                self.state = 774
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Doublecolon:
                    self.state = 773
                    self.match(CPP14Parser.Doublecolon)

                self.state = 776
                self.match(CPP14Parser.Delete)
                self.state = 777
                self.castexpression()
                pass

            elif la_ == 2:
                # Array form: '::'? delete '[' ']' castexpression
                self.enterOuterAlt(localctx, 2)
                self.state = 779
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Doublecolon:
                    self.state = 778
                    self.match(CPP14Parser.Doublecolon)

                self.state = 781
                self.match(CPP14Parser.Delete)
                self.state = 782
                self.match(CPP14Parser.LeftBracket)
                self.state = 783
                self.match(CPP14Parser.RightBracket)
                self.state = 784
                self.castexpression()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class NoexceptexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `noexceptexpression` rule:
        Noexcept LeftParen expression RightParen."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def Noexcept(self):
            return self.getToken(CPP14Parser.Noexcept, 0)

        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)

        def expression(self):
            return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_noexceptexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterNoexceptexpression" ):
                listener.enterNoexceptexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitNoexceptexpression" ):
                listener.exitNoexceptexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitNoexceptexpression" ):
                return visitor.visitNoexceptexpression(self)
            else:
                return visitor.visitChildren(self)
    def noexceptexpression(self):
        """Parse `noexceptexpression : 'noexcept' '(' expression ')'` and return its context node."""

        localctx = CPP14Parser.NoexceptexpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 58, self.RULE_noexceptexpression)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 787
            self.match(CPP14Parser.Noexcept)
            self.state = 788
            self.match(CPP14Parser.LeftParen)
            self.state = 789
            self.expression(0)
            self.state = 790
            self.match(CPP14Parser.RightParen)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class CastexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `castexpression` rule:
        unaryexpression | '(' thetypeid ')' castexpression."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def unaryexpression(self):
            # Alternative 1: plain unary expression.
            return self.getTypedRuleContext(CPP14Parser.UnaryexpressionContext,0)

        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)

        def thetypeid(self):
            # Target type of a C-style cast (alternative 2).
            return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)

        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def castexpression(self):
            # Recursive operand of the cast.
            return self.getTypedRuleContext(CPP14Parser.CastexpressionContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_castexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterCastexpression" ):
                listener.enterCastexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitCastexpression" ):
                listener.exitCastexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitCastexpression" ):
                return visitor.visitCastexpression(self)
            else:
                return visitor.visitChildren(self)
    def castexpression(self):
        """Parse `castexpression : unaryexpression | '(' thetypeid ')' castexpression`.

        Decision 52 resolves the classic C-style-cast ambiguity between a
        parenthesized expression and a cast.
        """

        localctx = CPP14Parser.CastexpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 60, self.RULE_castexpression)
        try:
            self.state = 798
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,52,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 792
                self.unaryexpression()
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 793
                self.match(CPP14Parser.LeftParen)
                self.state = 794
                self.thetypeid()
                self.state = 795
                self.match(CPP14Parser.RightParen)
                self.state = 796
                self.castexpression()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PmexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `pmexpression` (pointer-to-member) rule: castexpression
        (('.*' | '->*') castexpression)*."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def castexpression(self):
            return self.getTypedRuleContext(CPP14Parser.CastexpressionContext,0)

        def pmexpression(self):
            # Left-recursive operand.
            return self.getTypedRuleContext(CPP14Parser.PmexpressionContext,0)

        def DotStar(self):
            return self.getToken(CPP14Parser.DotStar, 0)

        def ArrowStar(self):
            return self.getToken(CPP14Parser.ArrowStar, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_pmexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPmexpression" ):
                listener.enterPmexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPmexpression" ):
                listener.exitPmexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitPmexpression" ):
                return visitor.visitPmexpression(self)
            else:
                return visitor.visitChildren(self)
    def pmexpression(self, _p:int=0):
        """Parse the left-recursive pointer-to-member expression rule.

        Base operand is a castexpression; the loop then folds in `.*`
        (precedence 2) and `->*` (precedence 1) operators while the
        precedence predicate against _p holds.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.PmexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 62
        self.enterRecursionRule(localctx, 62, self.RULE_pmexpression, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 801
            self.castexpression()
            self._ctx.stop = self._input.LT(-1)
            self.state = 811
            self._errHandler.sync(self)
            # Decision 54 drives the operator loop; decision 53 picks the operator.
            _alt = self._interp.adaptivePredict(self._input,54,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 809
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,53,self._ctx)
                    if la_ == 1:
                        # '.*' operator
                        localctx = CPP14Parser.PmexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_pmexpression)
                        self.state = 803
                        if not self.precpred(self._ctx, 2):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                        self.state = 804
                        self.match(CPP14Parser.DotStar)
                        self.state = 805
                        self.castexpression()
                        pass

                    elif la_ == 2:
                        # '->*' operator
                        localctx = CPP14Parser.PmexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_pmexpression)
                        self.state = 806
                        if not self.precpred(self._ctx, 1):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                        self.state = 807
                        self.match(CPP14Parser.ArrowStar)
                        self.state = 808
                        self.castexpression()
                        pass

                self.state = 813
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,54,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class MultiplicativeexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `multiplicativeexpression` rule: pmexpression
        (('*' | '/' | '%') pmexpression)*."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def pmexpression(self):
            return self.getTypedRuleContext(CPP14Parser.PmexpressionContext,0)

        def multiplicativeexpression(self):
            # Left-recursive operand.
            return self.getTypedRuleContext(CPP14Parser.MultiplicativeexpressionContext,0)

        def Star(self):
            return self.getToken(CPP14Parser.Star, 0)

        def Div(self):
            return self.getToken(CPP14Parser.Div, 0)

        def Mod(self):
            return self.getToken(CPP14Parser.Mod, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_multiplicativeexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterMultiplicativeexpression" ):
                listener.enterMultiplicativeexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitMultiplicativeexpression" ):
                listener.exitMultiplicativeexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitMultiplicativeexpression" ):
                return visitor.visitMultiplicativeexpression(self)
            else:
                return visitor.visitChildren(self)
    def multiplicativeexpression(self, _p:int=0):
        """Parse the left-recursive multiplicative-expression rule.

        Base operand is a pmexpression; the loop folds in '*' (precedence 3),
        '/' (precedence 2) and '%' (precedence 1) operators while the
        precedence predicate against _p holds.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.MultiplicativeexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 64
        self.enterRecursionRule(localctx, 64, self.RULE_multiplicativeexpression, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 815
            self.pmexpression(0)
            self._ctx.stop = self._input.LT(-1)
            self.state = 828
            self._errHandler.sync(self)
            # Decision 56 drives the operator loop; decision 55 picks the operator.
            _alt = self._interp.adaptivePredict(self._input,56,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 826
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,55,self._ctx)
                    if la_ == 1:
                        # '*' operator
                        localctx = CPP14Parser.MultiplicativeexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_multiplicativeexpression)
                        self.state = 817
                        if not self.precpred(self._ctx, 3):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                        self.state = 818
                        self.match(CPP14Parser.Star)
                        self.state = 819
                        self.pmexpression(0)
                        pass

                    elif la_ == 2:
                        # '/' operator
                        localctx = CPP14Parser.MultiplicativeexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_multiplicativeexpression)
                        self.state = 820
                        if not self.precpred(self._ctx, 2):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                        self.state = 821
                        self.match(CPP14Parser.Div)
                        self.state = 822
                        self.pmexpression(0)
                        pass

                    elif la_ == 3:
                        # '%' operator
                        localctx = CPP14Parser.MultiplicativeexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_multiplicativeexpression)
                        self.state = 823
                        if not self.precpred(self._ctx, 1):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                        self.state = 824
                        self.match(CPP14Parser.Mod)
                        self.state = 825
                        self.pmexpression(0)
                        pass

                self.state = 830
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,56,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class AdditiveexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `additiveexpression` rule: multiplicativeexpression
        (('+' | '-') multiplicativeexpression)*."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def multiplicativeexpression(self):
            return self.getTypedRuleContext(CPP14Parser.MultiplicativeexpressionContext,0)

        def additiveexpression(self):
            # Left-recursive operand.
            return self.getTypedRuleContext(CPP14Parser.AdditiveexpressionContext,0)

        def Plus(self):
            return self.getToken(CPP14Parser.Plus, 0)

        def Minus(self):
            return self.getToken(CPP14Parser.Minus, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_additiveexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterAdditiveexpression" ):
                listener.enterAdditiveexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitAdditiveexpression" ):
                listener.exitAdditiveexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitAdditiveexpression" ):
                return visitor.visitAdditiveexpression(self)
            else:
                return visitor.visitChildren(self)
    def additiveexpression(self, _p:int=0):
        """Parse the left-recursive additive-expression rule.

        Base operand is a multiplicativeexpression; the loop folds in '+'
        (precedence 2) and '-' (precedence 1) operators while the precedence
        predicate against _p holds.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.AdditiveexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 66
        self.enterRecursionRule(localctx, 66, self.RULE_additiveexpression, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 832
            self.multiplicativeexpression(0)
            self._ctx.stop = self._input.LT(-1)
            self.state = 842
            self._errHandler.sync(self)
            # Decision 58 drives the operator loop; decision 57 picks the operator.
            _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 840
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
                    if la_ == 1:
                        # '+' operator
                        localctx = CPP14Parser.AdditiveexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_additiveexpression)
                        self.state = 834
                        if not self.precpred(self._ctx, 2):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                        self.state = 835
                        self.match(CPP14Parser.Plus)
                        self.state = 836
                        self.multiplicativeexpression(0)
                        pass

                    elif la_ == 2:
                        # '-' operator
                        localctx = CPP14Parser.AdditiveexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_additiveexpression)
                        self.state = 837
                        if not self.precpred(self._ctx, 1):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                        self.state = 838
                        self.match(CPP14Parser.Minus)
                        self.state = 839
                        self.multiplicativeexpression(0)
                        pass

                self.state = 844
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,58,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class ShiftexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `shiftexpression` rule: additiveexpression
        (shiftoperator additiveexpression)*."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def additiveexpression(self):
            return self.getTypedRuleContext(CPP14Parser.AdditiveexpressionContext,0)

        def shiftexpression(self):
            # Left-recursive operand.
            return self.getTypedRuleContext(CPP14Parser.ShiftexpressionContext,0)

        def shiftoperator(self):
            # The '<<' or '>>' operator subtree.
            return self.getTypedRuleContext(CPP14Parser.ShiftoperatorContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_shiftexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterShiftexpression" ):
                listener.enterShiftexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitShiftexpression" ):
                listener.exitShiftexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitShiftexpression" ):
                return visitor.visitShiftexpression(self)
            else:
                return visitor.visitChildren(self)
    def shiftexpression(self, _p:int=0):
        """Parse the left-recursive shift-expression rule.

        Base operand is an additiveexpression; the loop folds in
        shiftoperator-separated operands (single alternative, precedence 1)
        while the precedence predicate against _p holds.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.ShiftexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 68
        self.enterRecursionRule(localctx, 68, self.RULE_shiftexpression, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 846
            self.additiveexpression(0)
            self._ctx.stop = self._input.LT(-1)
            self.state = 854
            self._errHandler.sync(self)
            # Decision 59 drives the operator loop.
            _alt = self._interp.adaptivePredict(self._input,59,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.ShiftexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_shiftexpression)
                    self.state = 848
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 849
                    self.shiftoperator()
                    self.state = 850
                    self.additiveexpression(0)

                self.state = 856
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,59,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class ShiftoperatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the `shiftoperator` rule:
        RightShift | LeftShift."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def RightShift(self):
            return self.getToken(CPP14Parser.RightShift, 0)

        def LeftShift(self):
            return self.getToken(CPP14Parser.LeftShift, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_shiftoperator

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterShiftoperator" ):
                listener.enterShiftoperator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitShiftoperator" ):
                listener.exitShiftoperator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitShiftoperator" ):
                return visitor.visitShiftoperator(self)
            else:
                return visitor.visitChildren(self)
    def shiftoperator(self):
        """Parse `shiftoperator : '<<' | '>>'`: consume one shift token or recover inline."""

        localctx = CPP14Parser.ShiftoperatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 70, self.RULE_shiftoperator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 857
            _la = self._input.LA(1)
            # Token-set match: accept either shift operator, otherwise
            # attempt single-token inline recovery.
            if not(_la==CPP14Parser.LeftShift or _la==CPP14Parser.RightShift):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class RelationalexpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the left-recursive
        `relationalexpression` rule: shiftexpression
        (('<' | '>' | '<=' | '>=') shiftexpression)*."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser  # owning CPP14Parser instance

        def shiftexpression(self):
            return self.getTypedRuleContext(CPP14Parser.ShiftexpressionContext,0)

        def relationalexpression(self):
            # Left-recursive operand.
            return self.getTypedRuleContext(CPP14Parser.RelationalexpressionContext,0)

        def Less(self):
            return self.getToken(CPP14Parser.Less, 0)

        def Greater(self):
            return self.getToken(CPP14Parser.Greater, 0)

        def LessEqual(self):
            return self.getToken(CPP14Parser.LessEqual, 0)

        def GreaterEqual(self):
            return self.getToken(CPP14Parser.GreaterEqual, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_relationalexpression

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterRelationalexpression" ):
                listener.enterRelationalexpression(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitRelationalexpression" ):
                listener.exitRelationalexpression(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitRelationalexpression" ):
                return visitor.visitRelationalexpression(self)
            else:
                return visitor.visitChildren(self)
    def relationalexpression(self, _p:int=0):
        """Parse the left-recursive relational-expression rule.

        Base operand is a shiftexpression; the loop folds in '<'
        (precedence 4), '>' (precedence 3), '<=' (precedence 2) and '>='
        (precedence 1) operators while the precedence predicate against _p
        holds.
        """

        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.RelationalexpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 72
        self.enterRecursionRule(localctx, 72, self.RULE_relationalexpression, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 860
            self.shiftexpression(0)
            self._ctx.stop = self._input.LT(-1)
            self.state = 876
            self._errHandler.sync(self)
            # Decision 61 drives the operator loop; decision 60 picks the operator.
            _alt = self._interp.adaptivePredict(self._input,61,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 874
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,60,self._ctx)
                    if la_ == 1:
                        # '<' operator
                        localctx = CPP14Parser.RelationalexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_relationalexpression)
                        self.state = 862
                        if not self.precpred(self._ctx, 4):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
                        self.state = 863
                        self.match(CPP14Parser.Less)
                        self.state = 864
                        self.shiftexpression(0)
                        pass

                    elif la_ == 2:
                        # '>' operator
                        localctx = CPP14Parser.RelationalexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_relationalexpression)
                        self.state = 865
                        if not self.precpred(self._ctx, 3):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                        self.state = 866
                        self.match(CPP14Parser.Greater)
                        self.state = 867
                        self.shiftexpression(0)
                        pass

                    elif la_ == 3:
                        # '<=' operator
                        localctx = CPP14Parser.RelationalexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_relationalexpression)
                        self.state = 868
                        if not self.precpred(self._ctx, 2):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                        self.state = 869
                        self.match(CPP14Parser.LessEqual)
                        self.state = 870
                        self.shiftexpression(0)
                        pass

                    elif la_ == 4:
                        # '>=' operator
                        localctx = CPP14Parser.RelationalexpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_relationalexpression)
                        self.state = 871
                        if not self.precpred(self._ctx, 1):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                        self.state = 872
                        self.match(CPP14Parser.GreaterEqual)
                        self.state = 873
                        self.shiftexpression(0)
                        pass

                self.state = 878
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,61,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
class EqualityexpressionContext(ParserRuleContext):
    """Parse-tree node for 'equalityexpression'; exposes typed child
    accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def relationalexpression(self):
        return self.getTypedRuleContext(CPP14Parser.RelationalexpressionContext,0)

    def equalityexpression(self):
        return self.getTypedRuleContext(CPP14Parser.EqualityexpressionContext,0)

    def Equal(self):
        return self.getToken(CPP14Parser.Equal, 0)

    def NotEqual(self):
        return self.getToken(CPP14Parser.NotEqual, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_equalityexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEqualityexpression" ):
            listener.enterEqualityexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEqualityexpression" ):
            listener.exitEqualityexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEqualityexpression" ):
            return visitor.visitEqualityexpression(self)
        else:
            return visitor.visitChildren(self)
def equalityexpression(self, _p:int=0):
    """Parse the left-recursive 'equalityexpression' rule:
    relationalexpression followed by any number of
    (== | !=) relationalexpression tails.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.EqualityexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 74
    self.enterRecursionRule(localctx, 74, self.RULE_equalityexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 880
        self.relationalexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 890
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,63,self._ctx)
        # Consume operator/operand tails while the ATN predicts another one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 888
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,62,self._ctx)
                if la_ == 1:
                    # '==' relationalexpression (precedence level 2)
                    localctx = CPP14Parser.EqualityexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_equalityexpression)
                    self.state = 882
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 883
                    self.match(CPP14Parser.Equal)
                    self.state = 884
                    self.relationalexpression(0)
                    pass
                elif la_ == 2:
                    # '!=' relationalexpression (precedence level 1)
                    localctx = CPP14Parser.EqualityexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_equalityexpression)
                    self.state = 885
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 886
                    self.match(CPP14Parser.NotEqual)
                    self.state = 887
                    self.relationalexpression(0)
                    pass

            self.state = 892
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,63,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class AndexpressionContext(ParserRuleContext):
    """Parse-tree node for 'andexpression' (bitwise '&'); exposes typed
    child accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def equalityexpression(self):
        return self.getTypedRuleContext(CPP14Parser.EqualityexpressionContext,0)

    def andexpression(self):
        return self.getTypedRuleContext(CPP14Parser.AndexpressionContext,0)

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_andexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAndexpression" ):
            listener.enterAndexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAndexpression" ):
            listener.exitAndexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAndexpression" ):
            return visitor.visitAndexpression(self)
        else:
            return visitor.visitChildren(self)
def andexpression(self, _p:int=0):
    """Parse the left-recursive 'andexpression' rule:
    equalityexpression ('&' equalityexpression)*.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.AndexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 76
    self.enterRecursionRule(localctx, 76, self.RULE_andexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 894
        self.equalityexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 901
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,64,self._ctx)
        # Single recursive alternative: '&' equalityexpression, repeated.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.AndexpressionContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_andexpression)
                self.state = 896
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 897
                self.match(CPP14Parser.And)
                self.state = 898
                self.equalityexpression(0)
            self.state = 903
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,64,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ExclusiveorexpressionContext(ParserRuleContext):
    """Parse-tree node for 'exclusiveorexpression' (bitwise '^'); exposes
    typed child accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def andexpression(self):
        return self.getTypedRuleContext(CPP14Parser.AndexpressionContext,0)

    def exclusiveorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ExclusiveorexpressionContext,0)

    def Caret(self):
        return self.getToken(CPP14Parser.Caret, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_exclusiveorexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExclusiveorexpression" ):
            listener.enterExclusiveorexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExclusiveorexpression" ):
            listener.exitExclusiveorexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExclusiveorexpression" ):
            return visitor.visitExclusiveorexpression(self)
        else:
            return visitor.visitChildren(self)
def exclusiveorexpression(self, _p:int=0):
    """Parse the left-recursive 'exclusiveorexpression' rule:
    andexpression ('^' andexpression)*.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.ExclusiveorexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 78
    self.enterRecursionRule(localctx, 78, self.RULE_exclusiveorexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 905
        self.andexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 912
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,65,self._ctx)
        # Single recursive alternative: '^' andexpression, repeated.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.ExclusiveorexpressionContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_exclusiveorexpression)
                self.state = 907
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 908
                self.match(CPP14Parser.Caret)
                self.state = 909
                self.andexpression(0)
            self.state = 914
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,65,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class InclusiveorexpressionContext(ParserRuleContext):
    """Parse-tree node for 'inclusiveorexpression' (bitwise '|'); exposes
    typed child accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def exclusiveorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ExclusiveorexpressionContext,0)

    def inclusiveorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.InclusiveorexpressionContext,0)

    def Or(self):
        return self.getToken(CPP14Parser.Or, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_inclusiveorexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInclusiveorexpression" ):
            listener.enterInclusiveorexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInclusiveorexpression" ):
            listener.exitInclusiveorexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInclusiveorexpression" ):
            return visitor.visitInclusiveorexpression(self)
        else:
            return visitor.visitChildren(self)
def inclusiveorexpression(self, _p:int=0):
    """Parse the left-recursive 'inclusiveorexpression' rule:
    exclusiveorexpression ('|' exclusiveorexpression)*.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.InclusiveorexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 80
    self.enterRecursionRule(localctx, 80, self.RULE_inclusiveorexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 916
        self.exclusiveorexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 923
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,66,self._ctx)
        # Single recursive alternative: '|' exclusiveorexpression, repeated.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.InclusiveorexpressionContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_inclusiveorexpression)
                self.state = 918
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 919
                self.match(CPP14Parser.Or)
                self.state = 920
                self.exclusiveorexpression(0)
            self.state = 925
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,66,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class LogicalandexpressionContext(ParserRuleContext):
    """Parse-tree node for 'logicalandexpression'; exposes typed child
    accessors and listener/visitor dispatch.

    No token accessors are generated here: the operator tokens used by the
    matching rule method are the implicit T__2 / T__3 literals.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def inclusiveorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.InclusiveorexpressionContext,0)

    def logicalandexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LogicalandexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_logicalandexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLogicalandexpression" ):
            listener.enterLogicalandexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLogicalandexpression" ):
            listener.exitLogicalandexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLogicalandexpression" ):
            return visitor.visitLogicalandexpression(self)
        else:
            return visitor.visitChildren(self)
def logicalandexpression(self, _p:int=0):
    """Parse the left-recursive 'logicalandexpression' rule:
    inclusiveorexpression followed by any number of operator tails
    (two operator spellings: implicit tokens T__2 and T__3 —
    presumably '&&' and its alternative spelling; confirm in the grammar).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.LogicalandexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 82
    self.enterRecursionRule(localctx, 82, self.RULE_logicalandexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 927
        self.inclusiveorexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 937
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,68,self._ctx)
        # Consume operator/operand tails while the ATN predicts another one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 935
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,67,self._ctx)
                if la_ == 1:
                    # T__2 inclusiveorexpression (precedence level 2)
                    localctx = CPP14Parser.LogicalandexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_logicalandexpression)
                    self.state = 929
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 930
                    self.match(CPP14Parser.T__2)
                    self.state = 931
                    self.inclusiveorexpression(0)
                    pass
                elif la_ == 2:
                    # T__3 inclusiveorexpression (precedence level 1)
                    localctx = CPP14Parser.LogicalandexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_logicalandexpression)
                    self.state = 932
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 933
                    self.match(CPP14Parser.T__3)
                    self.state = 934
                    self.inclusiveorexpression(0)
                    pass

            self.state = 939
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,68,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class LogicalorexpressionContext(ParserRuleContext):
    """Parse-tree node for 'logicalorexpression'; exposes typed child
    accessors and listener/visitor dispatch.

    No token accessors are generated here: the operator tokens used by the
    matching rule method are the implicit T__4 / T__5 literals.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def logicalandexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LogicalandexpressionContext,0)

    def logicalorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LogicalorexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_logicalorexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLogicalorexpression" ):
            listener.enterLogicalorexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLogicalorexpression" ):
            listener.exitLogicalorexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLogicalorexpression" ):
            return visitor.visitLogicalorexpression(self)
        else:
            return visitor.visitChildren(self)
def logicalorexpression(self, _p:int=0):
    """Parse the left-recursive 'logicalorexpression' rule:
    logicalandexpression followed by any number of operator tails
    (two operator spellings: implicit tokens T__4 and T__5 —
    presumably '||' and its alternative spelling; confirm in the grammar).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.LogicalorexpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 84
    self.enterRecursionRule(localctx, 84, self.RULE_logicalorexpression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 941
        self.logicalandexpression(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 951
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,70,self._ctx)
        # Consume operator/operand tails while the ATN predicts another one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 949
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,69,self._ctx)
                if la_ == 1:
                    # T__4 logicalandexpression (precedence level 2)
                    localctx = CPP14Parser.LogicalorexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_logicalorexpression)
                    self.state = 943
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 944
                    self.match(CPP14Parser.T__4)
                    self.state = 945
                    self.logicalandexpression(0)
                    pass
                elif la_ == 2:
                    # T__5 logicalandexpression (precedence level 1)
                    localctx = CPP14Parser.LogicalorexpressionContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_logicalorexpression)
                    self.state = 946
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 947
                    self.match(CPP14Parser.T__5)
                    self.state = 948
                    self.logicalandexpression(0)
                    pass

            self.state = 953
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,70,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ConditionalexpressionContext(ParserRuleContext):
    """Parse-tree node for 'conditionalexpression' (ternary '?:'); exposes
    typed child accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def logicalorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LogicalorexpressionContext,0)

    def Question(self):
        return self.getToken(CPP14Parser.Question, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

    def Colon(self):
        return self.getToken(CPP14Parser.Colon, 0)

    def assignmentexpression(self):
        return self.getTypedRuleContext(CPP14Parser.AssignmentexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_conditionalexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterConditionalexpression" ):
            listener.enterConditionalexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitConditionalexpression" ):
            listener.exitConditionalexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitConditionalexpression" ):
            return visitor.visitConditionalexpression(self)
        else:
            return visitor.visitChildren(self)
def conditionalexpression(self):
    """Parse 'conditionalexpression':
    logicalorexpression, or
    logicalorexpression '?' expression ':' assignmentexpression.
    """
    localctx = CPP14Parser.ConditionalexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 86, self.RULE_conditionalexpression)
    try:
        self.state = 961
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
        if la_ == 1:
            # Plain logical-or expression, no ternary.
            self.enterOuterAlt(localctx, 1)
            self.state = 954
            self.logicalorexpression(0)
            pass

        elif la_ == 2:
            # Ternary: cond '?' expr ':' assignmentexpression.
            self.enterOuterAlt(localctx, 2)
            self.state = 955
            self.logicalorexpression(0)
            self.state = 956
            self.match(CPP14Parser.Question)
            self.state = 957
            self.expression(0)
            self.state = 958
            self.match(CPP14Parser.Colon)
            self.state = 959
            self.assignmentexpression()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AssignmentexpressionContext(ParserRuleContext):
    """Parse-tree node for 'assignmentexpression'; exposes typed child
    accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def conditionalexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConditionalexpressionContext,0)

    def logicalorexpression(self):
        return self.getTypedRuleContext(CPP14Parser.LogicalorexpressionContext,0)

    def assignmentoperator(self):
        return self.getTypedRuleContext(CPP14Parser.AssignmentoperatorContext,0)

    def initializerclause(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerclauseContext,0)

    def throwexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ThrowexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_assignmentexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAssignmentexpression" ):
            listener.enterAssignmentexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAssignmentexpression" ):
            listener.exitAssignmentexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAssignmentexpression" ):
            return visitor.visitAssignmentexpression(self)
        else:
            return visitor.visitChildren(self)
def assignmentexpression(self):
    """Parse 'assignmentexpression':
    conditionalexpression, or
    logicalorexpression assignmentoperator initializerclause, or
    throwexpression.
    """
    localctx = CPP14Parser.AssignmentexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 88, self.RULE_assignmentexpression)
    try:
        self.state = 969
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,72,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 963
            self.conditionalexpression()
            pass

        elif la_ == 2:
            # lhs op rhs assignment form.
            self.enterOuterAlt(localctx, 2)
            self.state = 964
            self.logicalorexpression(0)
            self.state = 965
            self.assignmentoperator()
            self.state = 966
            self.initializerclause()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 968
            self.throwexpression()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AssignmentoperatorContext(ParserRuleContext):
    """Parse-tree node for 'assignmentoperator' — one of the eleven C++
    assignment operator tokens; exposes token accessors and
    listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def StarAssign(self):
        return self.getToken(CPP14Parser.StarAssign, 0)

    def DivAssign(self):
        return self.getToken(CPP14Parser.DivAssign, 0)

    def ModAssign(self):
        return self.getToken(CPP14Parser.ModAssign, 0)

    def PlusAssign(self):
        return self.getToken(CPP14Parser.PlusAssign, 0)

    def MinusAssign(self):
        return self.getToken(CPP14Parser.MinusAssign, 0)

    def RightShiftAssign(self):
        return self.getToken(CPP14Parser.RightShiftAssign, 0)

    def LeftShiftAssign(self):
        return self.getToken(CPP14Parser.LeftShiftAssign, 0)

    def AndAssign(self):
        return self.getToken(CPP14Parser.AndAssign, 0)

    def XorAssign(self):
        return self.getToken(CPP14Parser.XorAssign, 0)

    def OrAssign(self):
        return self.getToken(CPP14Parser.OrAssign, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_assignmentoperator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAssignmentoperator" ):
            listener.enterAssignmentoperator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAssignmentoperator" ):
            listener.exitAssignmentoperator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAssignmentoperator" ):
            return visitor.visitAssignmentoperator(self)
        else:
            return visitor.visitChildren(self)
def assignmentoperator(self):
    """Parse 'assignmentoperator': match exactly one token from the
    assignment-operator set (=, +=, -=, *=, /=, %=, ^=, &=, |=, <<=, >>=),
    tested via a precomputed bitmask over token types offset by 100.
    """
    localctx = CPP14Parser.AssignmentoperatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 90, self.RULE_assignmentoperator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 971
        _la = self._input.LA(1)
        # Bitmask membership test: lookahead must be one of the eleven
        # assignment-operator token types.
        if not(((((_la - 100)) & ~0x3f) == 0 and ((1 << (_la - 100)) & ((1 << (CPP14Parser.Assign - 100)) | (1 << (CPP14Parser.PlusAssign - 100)) | (1 << (CPP14Parser.MinusAssign - 100)) | (1 << (CPP14Parser.StarAssign - 100)) | (1 << (CPP14Parser.DivAssign - 100)) | (1 << (CPP14Parser.ModAssign - 100)) | (1 << (CPP14Parser.XorAssign - 100)) | (1 << (CPP14Parser.AndAssign - 100)) | (1 << (CPP14Parser.OrAssign - 100)) | (1 << (CPP14Parser.LeftShiftAssign - 100)) | (1 << (CPP14Parser.RightShiftAssign - 100)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExpressionContext(ParserRuleContext):
    """Parse-tree node for 'expression' (comma expression); exposes typed
    child accessors and listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def assignmentexpression(self):
        return self.getTypedRuleContext(CPP14Parser.AssignmentexpressionContext,0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_expression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExpression" ):
            listener.enterExpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExpression" ):
            listener.exitExpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExpression" ):
            return visitor.visitExpression(self)
        else:
            return visitor.visitChildren(self)
def expression(self, _p:int=0):
    """Parse the left-recursive 'expression' rule:
    assignmentexpression (',' assignmentexpression)*.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.ExpressionContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 92
    self.enterRecursionRule(localctx, 92, self.RULE_expression, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 974
        self.assignmentexpression()
        self._ctx.stop = self._input.LT(-1)
        self.state = 981
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
        # Single recursive alternative: ',' assignmentexpression, repeated.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.ExpressionContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                self.state = 976
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 977
                self.match(CPP14Parser.Comma)
                self.state = 978
                self.assignmentexpression()
            self.state = 983
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,73,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ConstantexpressionContext(ParserRuleContext):
    """Parse-tree node for 'constantexpression' — a thin wrapper around a
    single conditionalexpression child."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def conditionalexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConditionalexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_constantexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterConstantexpression" ):
            listener.enterConstantexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitConstantexpression" ):
            listener.exitConstantexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitConstantexpression" ):
            return visitor.visitConstantexpression(self)
        else:
            return visitor.visitChildren(self)
def constantexpression(self):
    """Parse 'constantexpression': delegates to conditionalexpression."""
    localctx = CPP14Parser.ConstantexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 94, self.RULE_constantexpression)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 984
        self.conditionalexpression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementContext(ParserRuleContext):
    """Parse-tree node for 'statement'; one accessor per possible
    statement-kind child, plus the optional attributespecifierseq."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def labeledstatement(self):
        return self.getTypedRuleContext(CPP14Parser.LabeledstatementContext,0)

    def expressionstatement(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionstatementContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def selectionstatement(self):
        return self.getTypedRuleContext(CPP14Parser.SelectionstatementContext,0)

    def iterationstatement(self):
        return self.getTypedRuleContext(CPP14Parser.IterationstatementContext,0)

    def jumpstatement(self):
        return self.getTypedRuleContext(CPP14Parser.JumpstatementContext,0)

    def declarationstatement(self):
        return self.getTypedRuleContext(CPP14Parser.DeclarationstatementContext,0)

    def tryblock(self):
        return self.getTypedRuleContext(CPP14Parser.TryblockContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_statement

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStatement" ):
            listener.enterStatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStatement" ):
            listener.exitStatement(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitStatement" ):
            return visitor.visitStatement(self)
        else:
            return visitor.visitChildren(self)
def statement(self):
    """Parse 'statement': one of eight alternatives (labeled, expression,
    compound, selection, iteration, jump, declaration, try-block), most of
    which accept an optional leading attributespecifierseq.
    """
    localctx = CPP14Parser.StatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 96, self.RULE_statement)
    self._la = 0 # Token type
    try:
        self.state = 1012
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,80,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 986
            self.labeledstatement()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            # Optional attributespecifierseq before the expression statement.
            self.state = 988
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,74,self._ctx)
            if la_ == 1:
                self.state = 987
                self.attributespecifierseq(0)

            self.state = 990
            self.expressionstatement()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            # Optional attributespecifierseq (starts with 'alignas' or '[').
            self.state = 992
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 991
                self.attributespecifierseq(0)

            self.state = 994
            self.compoundstatement()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 996
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 995
                self.attributespecifierseq(0)

            self.state = 998
            self.selectionstatement()
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1000
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 999
                self.attributespecifierseq(0)

            self.state = 1002
            self.iterationstatement()
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1004
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1003
                self.attributespecifierseq(0)

            self.state = 1006
            self.jumpstatement()
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1007
            self.declarationstatement()
            pass

        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1009
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1008
                self.attributespecifierseq(0)

            self.state = 1011
            self.tryblock()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LabeledstatementContext(ParserRuleContext):
    """Parse-tree node for 'labeledstatement' (identifier label,
    'case' label, or 'default' label); exposes typed child accessors and
    listener/visitor dispatch."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Colon(self):
        return self.getToken(CPP14Parser.Colon, 0)

    def statement(self):
        return self.getTypedRuleContext(CPP14Parser.StatementContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def Case(self):
        return self.getToken(CPP14Parser.Case, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def Default(self):
        return self.getToken(CPP14Parser.Default, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_labeledstatement

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLabeledstatement" ):
            listener.enterLabeledstatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLabeledstatement" ):
            listener.exitLabeledstatement(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLabeledstatement" ):
            return visitor.visitLabeledstatement(self)
        else:
            return visitor.visitChildren(self)
def labeledstatement(self):
    """Parse rule `labeledstatement`.

    Three alternatives, chosen by ALL(*) adaptive prediction (decision 84):
      1. attributespecifierseq? Identifier ':' statement
      2. attributespecifierseq? 'case' constantexpression ':' statement
      3. attributespecifierseq? 'default' ':' statement

    Returns the populated LabeledstatementContext (with `exception` set on
    a recognition error after reporting/recovery).
    """
    localctx = CPP14Parser.LabeledstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 98, self.RULE_labeledstatement)
    self._la = 0 # Token type
    try:
        self.state = 1034
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,84,self._ctx)
        if la_ == 1:
            # Alt 1: identifier label.
            self.enterOuterAlt(localctx, 1)
            self.state = 1015
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # 'alignas' or '[' starts the optional attribute-specifier-seq.
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1014
                self.attributespecifierseq(0)

            self.state = 1017
            self.match(CPP14Parser.Identifier)
            self.state = 1018
            self.match(CPP14Parser.Colon)
            self.state = 1019
            self.statement()
            pass

        elif la_ == 2:
            # Alt 2: 'case' label with constant expression.
            self.enterOuterAlt(localctx, 2)
            self.state = 1021
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1020
                self.attributespecifierseq(0)

            self.state = 1023
            self.match(CPP14Parser.Case)
            self.state = 1024
            self.constantexpression()
            self.state = 1025
            self.match(CPP14Parser.Colon)
            self.state = 1026
            self.statement()
            pass

        elif la_ == 3:
            # Alt 3: 'default' label.
            self.enterOuterAlt(localctx, 3)
            self.state = 1029
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1028
                self.attributespecifierseq(0)

            self.state = 1031
            self.match(CPP14Parser.Default)
            self.state = 1032
            self.match(CPP14Parser.Colon)
            self.state = 1033
            self.statement()
            pass

    except RecognitionException as re:
        # Standard ANTLR error handling: record, report, recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExpressionstatementContext(ParserRuleContext):
    """Parse-tree node for the `expressionstatement` rule: an optional
    expression terminated by ';'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_expressionstatement

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterExpressionstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitExpressionstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitExpressionstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def expressionstatement(self):
    """Parse rule `expressionstatement`: expression? ';'.

    The expression is parsed only when the lookahead token is in the
    FIRST set of `expression` (the generated three-word bitmask below);
    otherwise the statement is just a bare ';'.
    """
    localctx = CPP14Parser.ExpressionstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 100, self.RULE_expressionstatement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1037
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated membership test: token-type bitset split into three
        # 64-bit words (offsets 0, 64, 128) covering FIRST(expression).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
            self.state = 1036
            self.expression(0)

        self.state = 1039
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CompoundstatementContext(ParserRuleContext):
    """Parse-tree node for the `compoundstatement` rule: '{' statementseq? '}'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LeftBrace(self):
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def statementseq(self):
        return self.getTypedRuleContext(CPP14Parser.StatementseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_compoundstatement

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterCompoundstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitCompoundstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitCompoundstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def compoundstatement(self):
    """Parse rule `compoundstatement`: '{' statementseq? '}'.

    The inner statement sequence is parsed only when the lookahead token
    belongs to FIRST(statementseq) — the generated three-word bitmask
    below; an empty block `{}` skips straight to the closing brace.
    """
    localctx = CPP14Parser.CompoundstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 102, self.RULE_compoundstatement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1041
        self.match(CPP14Parser.LeftBrace)
        self.state = 1043
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated membership test over FIRST(statementseq), split into
        # three 64-bit words (token-type offsets 0, 64, 128).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Asm) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Break) | (1 << CPP14Parser.Case) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Continue) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Default) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Do) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.For) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Goto) | (1 << CPP14Parser.If) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Namespace) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Return) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.Struct - 64)) | (1 << (CPP14Parser.Switch - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Thread_local - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Try - 64)) | (1 << (CPP14Parser.Typedef - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Union - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Using - 64)) | (1 << (CPP14Parser.Virtual - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Volatile - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.While - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Semi - 128)) | (1 << (CPP14Parser.Ellipsis - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
            self.state = 1042
            self.statementseq(0)

        self.state = 1045
        self.match(CPP14Parser.RightBrace)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementseqContext(ParserRuleContext):
    """Parse-tree node for the (left-recursive) `statementseq` rule:
    one statement, or a statementseq followed by a statement."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def statement(self):
        return self.getTypedRuleContext(CPP14Parser.StatementContext, 0)

    def statementseq(self):
        return self.getTypedRuleContext(CPP14Parser.StatementseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_statementseq

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterStatementseq
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitStatementseq
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitStatementseq
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def statementseq(self, _p:int=0):
    """Parse the left-recursive rule `statementseq`: one `statement`
    followed by zero or more further statements.

    ANTLR rewrites the left recursion into a precedence-climbing loop;
    `_p` is the standard minimum-precedence parameter for that scheme.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.StatementseqContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 104
    self.enterRecursionRule(localctx, 104, self.RULE_statementseq, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1048
        self.statement()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1054
        self._errHandler.sync(self)
        # Decision 87: keep looping while prediction says another
        # statement follows (alt 1); alt 2 / invalid means stop.
        _alt = self._interp.adaptivePredict(self._input,87,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # New recursion context for the trailing statement.
                localctx = CPP14Parser.StatementseqContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_statementseq)
                self.state = 1050
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1051
                self.statement()
            self.state = 1056
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,87,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Left-recursive rules unwind via unrollRecursionContexts, not exitRule.
        self.unrollRecursionContexts(_parentctx)
    return localctx
class SelectionstatementContext(ParserRuleContext):
    """Parse-tree node for the `selectionstatement` rule: `if`,
    `if`/`else`, or `switch`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- terminal accessors -------------------------------------------------
    def If(self):
        return self.getToken(CPP14Parser.If, 0)

    def Else(self):
        return self.getToken(CPP14Parser.Else, 0)

    def Switch(self):
        return self.getToken(CPP14Parser.Switch, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    # --- sub-rule accessors -------------------------------------------------
    def condition(self):
        return self.getTypedRuleContext(CPP14Parser.ConditionContext, 0)

    def statement(self, i:int=None):
        # With no index, return every child statement (up to two for
        # if/else); with an index, return that one child.
        if i is None:
            return self.getTypedRuleContexts(CPP14Parser.StatementContext)
        return self.getTypedRuleContext(CPP14Parser.StatementContext, i)

    def getRuleIndex(self):
        return CPP14Parser.RULE_selectionstatement

    # --- listener / visitor dispatch (EAFP form of the hasattr check) -------
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterSelectionstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitSelectionstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitSelectionstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def selectionstatement(self):
    """Parse rule `selectionstatement`.

    Three alternatives, chosen by adaptive prediction (decision 88) —
    the dangling-else ambiguity is resolved by the prediction, which
    prefers the `if ... else` alternative when an `else` follows:
      1. 'if' '(' condition ')' statement
      2. 'if' '(' condition ')' statement 'else' statement
      3. 'switch' '(' condition ')' statement
    """
    localctx = CPP14Parser.SelectionstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 106, self.RULE_selectionstatement)
    try:
        self.state = 1077
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,88,self._ctx)
        if la_ == 1:
            # Alt 1: if without else.
            self.enterOuterAlt(localctx, 1)
            self.state = 1057
            self.match(CPP14Parser.If)
            self.state = 1058
            self.match(CPP14Parser.LeftParen)
            self.state = 1059
            self.condition()
            self.state = 1060
            self.match(CPP14Parser.RightParen)
            self.state = 1061
            self.statement()
            pass

        elif la_ == 2:
            # Alt 2: if/else.
            self.enterOuterAlt(localctx, 2)
            self.state = 1063
            self.match(CPP14Parser.If)
            self.state = 1064
            self.match(CPP14Parser.LeftParen)
            self.state = 1065
            self.condition()
            self.state = 1066
            self.match(CPP14Parser.RightParen)
            self.state = 1067
            self.statement()
            self.state = 1068
            self.match(CPP14Parser.Else)
            self.state = 1069
            self.statement()
            pass

        elif la_ == 3:
            # Alt 3: switch.
            self.enterOuterAlt(localctx, 3)
            self.state = 1071
            self.match(CPP14Parser.Switch)
            self.state = 1072
            self.match(CPP14Parser.LeftParen)
            self.state = 1073
            self.condition()
            self.state = 1074
            self.match(CPP14Parser.RightParen)
            self.state = 1075
            self.statement()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ConditionContext(ParserRuleContext):
    """Parse-tree node for the `condition` rule: a plain expression, or a
    declaration with `=` initializer-clause or braced-init-list."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- sub-rule / terminal accessors --------------------------------------
    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def declspecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext, 0)

    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext, 0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def initializerclause(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerclauseContext, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def bracedinitlist(self):
        return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_condition

    # --- listener / visitor dispatch (EAFP form of the hasattr check) -------
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterCondition
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitCondition
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitCondition
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def condition(self):
    """Parse rule `condition`.

    Three alternatives, chosen by adaptive prediction (decision 91):
      1. expression
      2. attributespecifierseq? declspecifierseq declarator '=' initializerclause
      3. attributespecifierseq? declspecifierseq declarator bracedinitlist
    """
    localctx = CPP14Parser.ConditionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 108, self.RULE_condition)
    self._la = 0 # Token type
    try:
        self.state = 1095
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,91,self._ctx)
        if la_ == 1:
            # Alt 1: plain expression condition.
            self.enterOuterAlt(localctx, 1)
            self.state = 1079
            self.expression(0)
            pass

        elif la_ == 2:
            # Alt 2: declaration condition with '=' initializer.
            self.enterOuterAlt(localctx, 2)
            self.state = 1081
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # 'alignas' or '[' starts the optional attribute-specifier-seq.
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1080
                self.attributespecifierseq(0)

            self.state = 1083
            self.declspecifierseq()
            self.state = 1084
            self.declarator()
            self.state = 1085
            self.match(CPP14Parser.Assign)
            self.state = 1086
            self.initializerclause()
            pass

        elif la_ == 3:
            # Alt 3: declaration condition with braced-init-list.
            self.enterOuterAlt(localctx, 3)
            self.state = 1089
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1088
                self.attributespecifierseq(0)

            self.state = 1091
            self.declspecifierseq()
            self.state = 1092
            self.declarator()
            self.state = 1093
            self.bracedinitlist()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IterationstatementContext(ParserRuleContext):
    """Parse-tree node for the `iterationstatement` rule: `while`,
    `do`/`while`, classic `for`, or range-based `for`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- terminal accessors -------------------------------------------------
    def While(self):
        return self.getToken(CPP14Parser.While, 0)

    def Do(self):
        return self.getToken(CPP14Parser.Do, 0)

    def For(self):
        return self.getToken(CPP14Parser.For, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def Colon(self):
        return self.getToken(CPP14Parser.Colon, 0)

    # --- sub-rule accessors -------------------------------------------------
    def condition(self):
        return self.getTypedRuleContext(CPP14Parser.ConditionContext, 0)

    def statement(self):
        return self.getTypedRuleContext(CPP14Parser.StatementContext, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def forinitstatement(self):
        return self.getTypedRuleContext(CPP14Parser.ForinitstatementContext, 0)

    def forrangedeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.ForrangedeclarationContext, 0)

    def forrangeinitializer(self):
        return self.getTypedRuleContext(CPP14Parser.ForrangeinitializerContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_iterationstatement

    # --- listener / visitor dispatch (EAFP form of the hasattr check) -------
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterIterationstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitIterationstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitIterationstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def iterationstatement(self):
    """Parse rule `iterationstatement`.

    Four alternatives, chosen by adaptive prediction (decision 94):
      1. 'while' '(' condition ')' statement
      2. 'do' statement 'while' '(' expression ')' ';'
      3. 'for' '(' forinitstatement condition? ';' expression? ')' statement
      4. 'for' '(' forrangedeclaration ':' forrangeinitializer ')' statement
    """
    localctx = CPP14Parser.IterationstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 110, self.RULE_iterationstatement)
    self._la = 0 # Token type
    try:
        self.state = 1132
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,94,self._ctx)
        if la_ == 1:
            # Alt 1: while loop.
            self.enterOuterAlt(localctx, 1)
            self.state = 1097
            self.match(CPP14Parser.While)
            self.state = 1098
            self.match(CPP14Parser.LeftParen)
            self.state = 1099
            self.condition()
            self.state = 1100
            self.match(CPP14Parser.RightParen)
            self.state = 1101
            self.statement()
            pass

        elif la_ == 2:
            # Alt 2: do/while loop.
            self.enterOuterAlt(localctx, 2)
            self.state = 1103
            self.match(CPP14Parser.Do)
            self.state = 1104
            self.statement()
            self.state = 1105
            self.match(CPP14Parser.While)
            self.state = 1106
            self.match(CPP14Parser.LeftParen)
            self.state = 1107
            self.expression(0)
            self.state = 1108
            self.match(CPP14Parser.RightParen)
            self.state = 1109
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 3:
            # Alt 3: classic three-part for loop.
            self.enterOuterAlt(localctx, 3)
            self.state = 1111
            self.match(CPP14Parser.For)
            self.state = 1112
            self.match(CPP14Parser.LeftParen)
            self.state = 1113
            self.forinitstatement()
            self.state = 1115
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional loop condition: parsed only when the lookahead is in
            # FIRST(condition) (generated three-word bitmask).
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof) | (1 << CPP14Parser.Static))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.Struct - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Thread_local - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typedef - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Union - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Virtual - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Volatile - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 1114
                self.condition()

            self.state = 1117
            self.match(CPP14Parser.Semi)
            self.state = 1119
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional increment expression: parsed only when the lookahead
            # is in FIRST(expression) (generated three-word bitmask).
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 1118
                self.expression(0)

            self.state = 1121
            self.match(CPP14Parser.RightParen)
            self.state = 1122
            self.statement()
            pass

        elif la_ == 4:
            # Alt 4: range-based for loop.
            self.enterOuterAlt(localctx, 4)
            self.state = 1124
            self.match(CPP14Parser.For)
            self.state = 1125
            self.match(CPP14Parser.LeftParen)
            self.state = 1126
            self.forrangedeclaration()
            self.state = 1127
            self.match(CPP14Parser.Colon)
            self.state = 1128
            self.forrangeinitializer()
            self.state = 1129
            self.match(CPP14Parser.RightParen)
            self.state = 1130
            self.statement()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ForinitstatementContext(ParserRuleContext):
    """Parse-tree node for the `forinitstatement` rule: the init part of a
    classic for loop — either an expression statement or a simple
    declaration."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expressionstatement(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionstatementContext, 0)

    def simpledeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.SimpledeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_forinitstatement

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterForinitstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitForinitstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitForinitstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def forinitstatement(self):
    """Parse rule `forinitstatement`.

    Two alternatives, disambiguated by adaptive prediction (decision 95):
      1. expressionstatement
      2. simpledeclaration
    """
    localctx = CPP14Parser.ForinitstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 112, self.RULE_forinitstatement)
    try:
        self.state = 1136
        self._errHandler.sync(self)
        # Expression vs. declaration cannot be decided with one lookahead
        # token in C++; full adaptive prediction resolves it.
        la_ = self._interp.adaptivePredict(self._input,95,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1134
            self.expressionstatement()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1135
            self.simpledeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ForrangedeclarationContext(ParserRuleContext):
    """Parse-tree node for the `forrangedeclaration` rule: the declared
    loop variable of a range-based for."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declspecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext, 0)

    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_forrangedeclaration

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterForrangedeclaration
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitForrangedeclaration
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitForrangedeclaration
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def forrangedeclaration(self):
    """Parse rule `forrangedeclaration`:
    attributespecifierseq? declspecifierseq declarator
    (the loop-variable declaration of a range-based for).
    """
    localctx = CPP14Parser.ForrangedeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 114, self.RULE_forrangedeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1139
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # 'alignas' or '[' starts the optional attribute-specifier-seq.
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            self.state = 1138
            self.attributespecifierseq(0)

        self.state = 1141
        self.declspecifierseq()
        self.state = 1142
        self.declarator()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ForrangeinitializerContext(ParserRuleContext):
    """Parse-tree node for the `forrangeinitializer` rule: the range part
    of a range-based for — an expression or a braced-init-list."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def bracedinitlist(self):
        return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_forrangeinitializer

    # Listener / visitor dispatch (EAFP form of the hasattr check).
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterForrangeinitializer
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitForrangeinitializer
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitForrangeinitializer
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def forrangeinitializer(self):
    """Parse rule `forrangeinitializer`.

    Two alternatives, selected by single-token lookahead:
      1. expression        (lookahead in FIRST(expression) — list below)
      2. bracedinitlist    (lookahead is '{')
    Raises NoViableAltException (reported/recovered by the error handler)
    when the lookahead fits neither alternative.
    """
    localctx = CPP14Parser.ForrangeinitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 116, self.RULE_forrangeinitializer)
    try:
        self.state = 1146
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.T__0, CPP14Parser.T__1, CPP14Parser.Alignof, CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Const_cast, CPP14Parser.Decltype, CPP14Parser.Delete, CPP14Parser.Double, CPP14Parser.Dynamic_cast, CPP14Parser.BFalse, CPP14Parser.Float, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.New, CPP14Parser.Noexcept, CPP14Parser.Nullptr, CPP14Parser.Operator, CPP14Parser.Reinterpret_cast, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Sizeof, CPP14Parser.Static_cast, CPP14Parser.This, CPP14Parser.Throw, CPP14Parser.BTrue, CPP14Parser.Typeid_, CPP14Parser.Typename_, CPP14Parser.Unsigned, CPP14Parser.Void, CPP14Parser.Wchar, CPP14Parser.LeftParen, CPP14Parser.LeftBracket, CPP14Parser.Plus, CPP14Parser.Minus, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Or, CPP14Parser.Tilde, CPP14Parser.PlusPlus, CPP14Parser.MinusMinus, CPP14Parser.Doublecolon, CPP14Parser.Identifier, CPP14Parser.Integerliteral, CPP14Parser.Characterliteral, CPP14Parser.Floatingliteral, CPP14Parser.Stringliteral, CPP14Parser.Userdefinedintegerliteral, CPP14Parser.Userdefinedfloatingliteral, CPP14Parser.Userdefinedstringliteral, CPP14Parser.Userdefinedcharacterliteral]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1144
            self.expression(0)
            pass
        elif token in [CPP14Parser.LeftBrace]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1145
            self.bracedinitlist()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class JumpstatementContext(ParserRuleContext):
    """Parse-tree node for the `jumpstatement` rule: `break`, `continue`,
    `return` (with optional expression or braced-init-list), or `goto`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- terminal accessors -------------------------------------------------
    def Break(self):
        return self.getToken(CPP14Parser.Break, 0)

    def Continue(self):
        return self.getToken(CPP14Parser.Continue, 0)

    def Return(self):
        return self.getToken(CPP14Parser.Return, 0)

    def Goto(self):
        return self.getToken(CPP14Parser.Goto, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    # --- sub-rule accessors -------------------------------------------------
    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext, 0)

    def bracedinitlist(self):
        return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_jumpstatement

    # --- listener / visitor dispatch (EAFP form of the hasattr check) -------
    def enterRule(self, listener:ParseTreeListener):
        try:
            hook = listener.enterJumpstatement
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitJumpstatement
        except AttributeError:
            return
        hook(self)

    def accept(self, visitor:ParseTreeVisitor):
        try:
            visit = visitor.visitJumpstatement
        except AttributeError:
            return visitor.visitChildren(self)
        return visit(self)
def jumpstatement(self):
    # Parse the `jumpstatement` rule:
    #   'break' ';' | 'continue' ';' | 'return' expression? ';'
    #   | 'return' bracedinitlist ';' | 'goto' Identifier ';'
    # NOTE: ANTLR-generated code; numeric `self.state` values index the parser ATN.
    localctx = CPP14Parser.JumpstatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 118, self.RULE_jumpstatement)
    self._la = 0 # Token type
    try:
        self.state = 1164
        self._errHandler.sync(self)
        # Adaptive LL(*) prediction picks one of the five alternatives.
        la_ = self._interp.adaptivePredict(self._input,99,self._ctx)
        if la_ == 1:
            # 'break' ';'
            self.enterOuterAlt(localctx, 1)
            self.state = 1148
            self.match(CPP14Parser.Break)
            self.state = 1149
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 2:
            # 'continue' ';'
            self.enterOuterAlt(localctx, 2)
            self.state = 1150
            self.match(CPP14Parser.Continue)
            self.state = 1151
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 3:
            # 'return' expression? ';'
            self.enterOuterAlt(localctx, 3)
            self.state = 1152
            self.match(CPP14Parser.Return)
            self.state = 1154
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bit-set membership test: parse the optional expression only when
            # the lookahead token can start an expression.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 1153
                self.expression(0)

            self.state = 1156
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 4:
            # 'return' bracedinitlist ';'
            self.enterOuterAlt(localctx, 4)
            self.state = 1157
            self.match(CPP14Parser.Return)
            self.state = 1158
            self.bracedinitlist()
            self.state = 1159
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 5:
            # 'goto' Identifier ';'
            self.enterOuterAlt(localctx, 5)
            self.state = 1161
            self.match(CPP14Parser.Goto)
            self.state = 1162
            self.match(CPP14Parser.Identifier)
            self.state = 1163
            self.match(CPP14Parser.Semi)
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the error on the context and resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclarationstatementContext(ParserRuleContext):
    """Parse-tree node for the `declarationstatement` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def blockdeclaration(self):
        # Sole child: the wrapped blockdeclaration context.
        return self.getTypedRuleContext(CPP14Parser.BlockdeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declarationstatement

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterDeclarationstatement"):
            listener.enterDeclarationstatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclarationstatement"):
            listener.exitDeclarationstatement(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitDeclarationstatement"):
            return visitor.visitChildren(self)
        return visitor.visitDeclarationstatement(self)
def declarationstatement(self):
    """Parse the `declarationstatement` rule: a single blockdeclaration."""
    ctx = CPP14Parser.DeclarationstatementContext(self, self._ctx, self.state)
    self.enterRule(ctx, 120, self.RULE_declarationstatement)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1166  # ATN state before matching the sub-rule
        self.blockdeclaration()
    except RecognitionException as err:
        # Record the failure on the context and let the handler resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class DeclarationseqContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `declarationseq` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declaration(self):
        return self.getTypedRuleContext(CPP14Parser.DeclarationContext, 0)

    def declarationseq(self):
        # Recursive child produced by the left-recursion rewrite.
        return self.getTypedRuleContext(CPP14Parser.DeclarationseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declarationseq

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterDeclarationseq"):
            listener.enterDeclarationseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclarationseq"):
            listener.exitDeclarationseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitDeclarationseq"):
            return visitor.visitChildren(self)
        return visitor.visitDeclarationseq(self)
def declarationseq(self, _p:int=0):
    # Parse `declarationseq`: one declaration followed by zero or more
    # declarations, implemented via ANTLR's left-recursion rewrite
    # (enterRecursionRule / pushNewRecursionContext loop).
    # `_p` is the precedence argument used by the `precpred` predicate.
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.DeclarationseqContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 122
    self.enterRecursionRule(localctx, 122, self.RULE_declarationseq, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        # Primary alternative: a single declaration.
        self.state = 1169
        self.declaration()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1175
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,100,self._ctx)
        # Loop while prediction says another declaration follows.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # Wrap the sequence parsed so far in a new recursion context.
                localctx = CPP14Parser.DeclarationseqContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_declarationseq)
                self.state = 1171
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1172
                self.declaration()
            self.state = 1177
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,100,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Must unroll the recursion contexts instead of a plain exitRule.
        self.unrollRecursionContexts(_parentctx)
    return localctx
class DeclarationContext(ParserRuleContext):
    """Parse-tree node for the `declaration` rule (nine alternatives)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Sub-rule accessors, one per alternative; unused ones return None.
    def blockdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.BlockdeclarationContext, 0)

    def functiondefinition(self):
        return self.getTypedRuleContext(CPP14Parser.FunctiondefinitionContext, 0)

    def templatedeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.TemplatedeclarationContext, 0)

    def explicitinstantiation(self):
        return self.getTypedRuleContext(CPP14Parser.ExplicitinstantiationContext, 0)

    def explicitspecialization(self):
        return self.getTypedRuleContext(CPP14Parser.ExplicitspecializationContext, 0)

    def linkagespecification(self):
        return self.getTypedRuleContext(CPP14Parser.LinkagespecificationContext, 0)

    def namespacedefinition(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacedefinitionContext, 0)

    def emptydeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.EmptydeclarationContext, 0)

    def attributedeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.AttributedeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterDeclaration"):
            listener.enterDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclaration"):
            listener.exitDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitDeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitDeclaration(self)
def declaration(self):
    # Parse the `declaration` rule by adaptive prediction over its nine
    # alternatives (blockdeclaration | functiondefinition | templatedeclaration
    # | explicitinstantiation | explicitspecialization | linkagespecification
    # | namespacedefinition | emptydeclaration | attributedeclaration).
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.DeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 124, self.RULE_declaration)
    try:
        self.state = 1187
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,101,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1178
            self.blockdeclaration()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1179
            self.functiondefinition()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1180
            self.templatedeclaration()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1181
            self.explicitinstantiation()
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1182
            self.explicitspecialization()
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1183
            self.linkagespecification()
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1184
            self.namespacedefinition()
            pass

        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1185
            self.emptydeclaration()
            pass

        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 1186
            self.attributedeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BlockdeclarationContext(ParserRuleContext):
    """Parse-tree node for the `blockdeclaration` rule (eight alternatives)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Sub-rule accessors, one per alternative; unused ones return None.
    def simpledeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.SimpledeclarationContext, 0)

    def asmdefinition(self):
        return self.getTypedRuleContext(CPP14Parser.AsmdefinitionContext, 0)

    def namespacealiasdefinition(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacealiasdefinitionContext, 0)

    def usingdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.UsingdeclarationContext, 0)

    def usingdirective(self):
        return self.getTypedRuleContext(CPP14Parser.UsingdirectiveContext, 0)

    def static_assertdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.Static_assertdeclarationContext, 0)

    def aliasdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.AliasdeclarationContext, 0)

    def opaqueenumdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.OpaqueenumdeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_blockdeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterBlockdeclaration"):
            listener.enterBlockdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitBlockdeclaration"):
            listener.exitBlockdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitBlockdeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitBlockdeclaration(self)
def blockdeclaration(self):
    # Parse the `blockdeclaration` rule by adaptive prediction over its eight
    # alternatives (simpledeclaration | asmdefinition | namespacealiasdefinition
    # | usingdeclaration | usingdirective | static_assertdeclaration
    # | aliasdeclaration | opaqueenumdeclaration).
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.BlockdeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 126, self.RULE_blockdeclaration)
    try:
        self.state = 1197
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,102,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1189
            self.simpledeclaration()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1190
            self.asmdefinition()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1191
            self.namespacealiasdefinition()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1192
            self.usingdeclaration()
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1193
            self.usingdirective()
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1194
            self.static_assertdeclaration()
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1195
            self.aliasdeclaration()
            pass

        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1196
            self.opaqueenumdeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AliasdeclarationContext(ParserRuleContext):
    """Parse-tree node for `aliasdeclaration`: using Identifier attrs? = thetypeid ;"""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Token accessors: each returns the matched terminal, or None if absent.
    def Using(self):
        return self.getToken(CPP14Parser.Using, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def attributespecifierseq(self):
        # Optional attribute sequence; None when not present.
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_aliasdeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterAliasdeclaration"):
            listener.enterAliasdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitAliasdeclaration"):
            listener.exitAliasdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitAliasdeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitAliasdeclaration(self)
def aliasdeclaration(self):
    # Parse `aliasdeclaration`:
    #   'using' Identifier attributespecifierseq? '=' thetypeid ';'
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.AliasdeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 128, self.RULE_aliasdeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1199
        self.match(CPP14Parser.Using)
        self.state = 1200
        self.match(CPP14Parser.Identifier)
        self.state = 1202
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional attributespecifierseq: only Alignas or '[' can start one.
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            self.state = 1201
            self.attributespecifierseq(0)

        self.state = 1204
        self.match(CPP14Parser.Assign)
        self.state = 1205
        self.thetypeid()
        self.state = 1206
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SimpledeclarationContext(ParserRuleContext):
    """Parse-tree node for `simpledeclaration`: specifiers, declarators, ';'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def declspecifierseq(self):
        # Optional specifier sequence; None when not present.
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext, 0)

    def initdeclaratorlist(self):
        return self.getTypedRuleContext(CPP14Parser.InitdeclaratorlistContext, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_simpledeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterSimpledeclaration"):
            listener.enterSimpledeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitSimpledeclaration"):
            listener.exitSimpledeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitSimpledeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitSimpledeclaration(self)
def simpledeclaration(self):
    # Parse `simpledeclaration`:
    #   declspecifierseq? initdeclaratorlist? ';'
    #   | attributespecifierseq declspecifierseq? initdeclaratorlist ';'
    # The alternative is chosen by the lookahead token; nested optional parts
    # are resolved by adaptive prediction and a bit-set membership test.
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.SimpledeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 130, self.RULE_simpledeclaration)
    self._la = 0 # Token type
    try:
        self.state = 1222
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alt 1: anything that can begin a decl-specifier or declarator.
        if token in [CPP14Parser.T__2, CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Class, CPP14Parser.Const, CPP14Parser.Constexpr, CPP14Parser.Decltype, CPP14Parser.Double, CPP14Parser.Enum, CPP14Parser.Explicit, CPP14Parser.Extern, CPP14Parser.Float, CPP14Parser.Friend, CPP14Parser.Inline, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.Mutable, CPP14Parser.Operator, CPP14Parser.Register, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Static, CPP14Parser.Struct, CPP14Parser.Thread_local, CPP14Parser.Typedef, CPP14Parser.Typename_, CPP14Parser.Union, CPP14Parser.Unsigned, CPP14Parser.Virtual, CPP14Parser.Void, CPP14Parser.Volatile, CPP14Parser.Wchar, CPP14Parser.LeftParen, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Tilde, CPP14Parser.Doublecolon, CPP14Parser.Semi, CPP14Parser.Ellipsis, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1209
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,104,self._ctx)
            if la_ == 1:
                # Optional declspecifierseq present.
                self.state = 1208
                self.declspecifierseq()

            self.state = 1212
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional initdeclaratorlist: test whether the lookahead can start one.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Operator))) != 0) or ((((_la - 84)) & ~0x3f) == 0 and ((1 << (_la - 84)) & ((1 << (CPP14Parser.LeftParen - 84)) | (1 << (CPP14Parser.Star - 84)) | (1 << (CPP14Parser.And - 84)) | (1 << (CPP14Parser.Tilde - 84)) | (1 << (CPP14Parser.Doublecolon - 84)) | (1 << (CPP14Parser.Ellipsis - 84)) | (1 << (CPP14Parser.Identifier - 84)))) != 0):
                self.state = 1211
                self.initdeclaratorlist(0)

            self.state = 1214
            self.match(CPP14Parser.Semi)
            pass
        # Alt 2: declaration opening with an attribute specifier.
        elif token in [CPP14Parser.Alignas, CPP14Parser.LeftBracket]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1215
            self.attributespecifierseq(0)
            self.state = 1217
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,106,self._ctx)
            if la_ == 1:
                # Optional declspecifierseq present.
                self.state = 1216
                self.declspecifierseq()

            self.state = 1219
            self.initdeclaratorlist(0)
            self.state = 1220
            self.match(CPP14Parser.Semi)
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Static_assertdeclarationContext(ParserRuleContext):
    """Parse-tree node for `static_assertdeclaration`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Token accessors: each returns the matched terminal, or None if absent.
    def Static_assert(self):
        return self.getToken(CPP14Parser.Static_assert, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext, 0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def Stringliteral(self):
        return self.getToken(CPP14Parser.Stringliteral, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_static_assertdeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterStatic_assertdeclaration"):
            listener.enterStatic_assertdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitStatic_assertdeclaration"):
            listener.exitStatic_assertdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitStatic_assertdeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitStatic_assertdeclaration(self)
def static_assertdeclaration(self):
    """Parse: 'static_assert' '(' constantexpression ',' Stringliteral ')' ';'."""
    ctx = CPP14Parser.Static_assertdeclarationContext(self, self._ctx, self.state)
    self.enterRule(ctx, 132, self.RULE_static_assertdeclaration)
    try:
        self.enterOuterAlt(ctx, 1)
        # Fixed token sequence; each ATN state precedes its match.
        self.state = 1224
        self.match(CPP14Parser.Static_assert)
        self.state = 1225
        self.match(CPP14Parser.LeftParen)
        self.state = 1226
        self.constantexpression()
        self.state = 1227
        self.match(CPP14Parser.Comma)
        self.state = 1228
        self.match(CPP14Parser.Stringliteral)
        self.state = 1229
        self.match(CPP14Parser.RightParen)
        self.state = 1230
        self.match(CPP14Parser.Semi)
    except RecognitionException as err:
        # Record the failure on the context and let the handler resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class EmptydeclarationContext(ParserRuleContext):
    """Parse-tree node for `emptydeclaration`: a bare ';'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_emptydeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterEmptydeclaration"):
            listener.enterEmptydeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitEmptydeclaration"):
            listener.exitEmptydeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitEmptydeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitEmptydeclaration(self)
def emptydeclaration(self):
    """Parse `emptydeclaration`: consume a single ';'."""
    ctx = CPP14Parser.EmptydeclarationContext(self, self._ctx, self.state)
    self.enterRule(ctx, 134, self.RULE_emptydeclaration)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1232  # ATN state before consuming ';'
        self.match(CPP14Parser.Semi)
    except RecognitionException as err:
        # Record the failure on the context and let the handler resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class AttributedeclarationContext(ParserRuleContext):
    """Parse-tree node for `attributedeclaration`: attributespecifierseq ';'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_attributedeclaration

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterAttributedeclaration"):
            listener.enterAttributedeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitAttributedeclaration"):
            listener.exitAttributedeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitAttributedeclaration"):
            return visitor.visitChildren(self)
        return visitor.visitAttributedeclaration(self)
def attributedeclaration(self):
    """Parse `attributedeclaration`: attributespecifierseq followed by ';'."""
    ctx = CPP14Parser.AttributedeclarationContext(self, self._ctx, self.state)
    self.enterRule(ctx, 136, self.RULE_attributedeclaration)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1234  # ATN state before the attribute sequence
        self.attributespecifierseq(0)
        self.state = 1235
        self.match(CPP14Parser.Semi)
    except RecognitionException as err:
        # Record the failure on the context and let the handler resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class DeclspecifierContext(ParserRuleContext):
    """Parse-tree node for the `declspecifier` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Sub-rule and token accessors; unused ones return None.
    def storageclassspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.StorageclassspecifierContext, 0)

    def typespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierContext, 0)

    def functionspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.FunctionspecifierContext, 0)

    def Friend(self):
        return self.getToken(CPP14Parser.Friend, 0)

    def Typedef(self):
        return self.getToken(CPP14Parser.Typedef, 0)

    def Constexpr(self):
        return self.getToken(CPP14Parser.Constexpr, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declspecifier

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterDeclspecifier"):
            listener.enterDeclspecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclspecifier"):
            listener.exitDeclspecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitDeclspecifier"):
            return visitor.visitChildren(self)
        return visitor.visitDeclspecifier(self)
def declspecifier(self):
    # Parse `declspecifier`:
    #   storageclassspecifier | typespecifier | functionspecifier
    #   | 'friend' | 'typedef' | 'constexpr'
    # The alternative is chosen directly from the lookahead token.
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.DeclspecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 138, self.RULE_declspecifier)
    try:
        self.state = 1243
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Extern, CPP14Parser.Mutable, CPP14Parser.Register, CPP14Parser.Static, CPP14Parser.Thread_local]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1237
            self.storageclassspecifier()
            pass
        elif token in [CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Class, CPP14Parser.Const, CPP14Parser.Decltype, CPP14Parser.Double, CPP14Parser.Enum, CPP14Parser.Float, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Struct, CPP14Parser.Typename_, CPP14Parser.Union, CPP14Parser.Unsigned, CPP14Parser.Void, CPP14Parser.Volatile, CPP14Parser.Wchar, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1238
            self.typespecifier()
            pass
        elif token in [CPP14Parser.Explicit, CPP14Parser.Inline, CPP14Parser.Virtual]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1239
            self.functionspecifier()
            pass
        elif token in [CPP14Parser.Friend]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1240
            self.match(CPP14Parser.Friend)
            pass
        elif token in [CPP14Parser.Typedef]:
            self.enterOuterAlt(localctx, 5)
            self.state = 1241
            self.match(CPP14Parser.Typedef)
            pass
        elif token in [CPP14Parser.Constexpr]:
            self.enterOuterAlt(localctx, 6)
            self.state = 1242
            self.match(CPP14Parser.Constexpr)
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclspecifierseqContext(ParserRuleContext):
    """Parse-tree node for the `declspecifierseq` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierContext, 0)

    def attributespecifierseq(self):
        # Optional trailing attribute sequence; None when not present.
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext, 0)

    def declspecifierseq(self):
        # Recursive tail of the specifier sequence; None when not present.
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declspecifierseq

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterDeclspecifierseq"):
            listener.enterDeclspecifierseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclspecifierseq"):
            listener.exitDeclspecifierseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitDeclspecifierseq"):
            return visitor.visitChildren(self)
        return visitor.visitDeclspecifierseq(self)
def declspecifierseq(self):
    # Parse `declspecifierseq`:
    #   declspecifier attributespecifierseq? | declspecifier declspecifierseq
    # Both the outer alternative and the inner optional part are resolved by
    # adaptive prediction (decisions 110 and 109 respectively).
    # NOTE: ANTLR-generated code; `self.state` values index the parser ATN.
    localctx = CPP14Parser.DeclspecifierseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 140, self.RULE_declspecifierseq)
    try:
        self.state = 1252
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,110,self._ctx)
        if la_ == 1:
            # Single specifier, optionally followed by attributes.
            self.enterOuterAlt(localctx, 1)
            self.state = 1245
            self.declspecifier()
            self.state = 1247
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,109,self._ctx)
            if la_ == 1:
                self.state = 1246
                self.attributespecifierseq(0)

            pass

        elif la_ == 2:
            # Specifier followed recursively by the rest of the sequence.
            self.enterOuterAlt(localctx, 2)
            self.state = 1249
            self.declspecifier()
            self.state = 1250
            self.declspecifierseq()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StorageclassspecifierContext(ParserRuleContext):
    """Parse-tree node for `storageclassspecifier` (one storage-class keyword)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Token accessors: each returns the matched terminal, or None if absent.
    def Register(self):
        return self.getToken(CPP14Parser.Register, 0)

    def Static(self):
        return self.getToken(CPP14Parser.Static, 0)

    def Thread_local(self):
        return self.getToken(CPP14Parser.Thread_local, 0)

    def Extern(self):
        return self.getToken(CPP14Parser.Extern, 0)

    def Mutable(self):
        return self.getToken(CPP14Parser.Mutable, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_storageclassspecifier

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterStorageclassspecifier"):
            listener.enterStorageclassspecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitStorageclassspecifier"):
            listener.exitStorageclassspecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitStorageclassspecifier"):
            return visitor.visitChildren(self)
        return visitor.visitStorageclassspecifier(self)
def storageclassspecifier(self):
    # Parse `storageclassspecifier`: one of
    #   'extern' | 'mutable' | 'register' | 'static' | 'thread_local'
    # NOTE: ANTLR-generated code; the condition is a bit-set membership test
    # over the storage-class token types (offset by 35 to fit a 64-bit mask).
    localctx = CPP14Parser.StorageclassspecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 142, self.RULE_storageclassspecifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1254
        _la = self._input.LA(1)
        if not(((((_la - 35)) & ~0x3f) == 0 and ((1 << (_la - 35)) & ((1 << (CPP14Parser.Extern - 35)) | (1 << (CPP14Parser.Mutable - 35)) | (1 << (CPP14Parser.Register - 35)) | (1 << (CPP14Parser.Static - 35)) | (1 << (CPP14Parser.Thread_local - 35)))) != 0)):
            # Lookahead is not a storage-class keyword: attempt inline recovery.
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionspecifierContext(ParserRuleContext):
    """Parse-tree node for `functionspecifier` (inline/virtual/explicit)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Token accessors: each returns the matched terminal, or None if absent.
    def Inline(self):
        return self.getToken(CPP14Parser.Inline, 0)

    def Virtual(self):
        return self.getToken(CPP14Parser.Virtual, 0)

    def Explicit(self):
        return self.getToken(CPP14Parser.Explicit, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_functionspecifier

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterFunctionspecifier"):
            listener.enterFunctionspecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitFunctionspecifier"):
            listener.exitFunctionspecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitFunctionspecifier"):
            return visitor.visitChildren(self)
        return visitor.visitFunctionspecifier(self)
def functionspecifier(self):
    # Parse `functionspecifier`: one of 'inline' | 'virtual' | 'explicit'.
    # NOTE: ANTLR-generated code; the condition is a bit-set membership test
    # over the three function-specifier token types (offset by 33).
    localctx = CPP14Parser.FunctionspecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 144, self.RULE_functionspecifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1256
        _la = self._input.LA(1)
        if not(((((_la - 33)) & ~0x3f) == 0 and ((1 << (_la - 33)) & ((1 << (CPP14Parser.Explicit - 33)) | (1 << (CPP14Parser.Inline - 33)) | (1 << (CPP14Parser.Virtual - 33)))) != 0)):
            # Lookahead is not a function specifier: attempt inline recovery.
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypedefnameContext(ParserRuleContext):
    """Parse-tree node for `typedefname`: a single Identifier."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typedefname

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the rule-specific hook.
        if hasattr(listener, "enterTypedefname"):
            listener.enterTypedefname(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitTypedefname"):
            listener.exitTypedefname(self)

    def accept(self, visitor:ParseTreeVisitor):
        # Fall back to the generic child visit when no specific visitor exists.
        if not hasattr(visitor, "visitTypedefname"):
            return visitor.visitChildren(self)
        return visitor.visitTypedefname(self)
def typedefname(self):
localctx = CPP14Parser.TypedefnameContext(self, self._ctx, self.state)
self.enterRule(localctx, 146, self.RULE_typedefname)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1258
self.match(CPP14Parser.Identifier)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TypespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'typespecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def trailingtypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingtypespecifierContext,0)

    def classspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.ClassspecifierContext,0)

    def enumspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.EnumspecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypespecifier" ):
            listener.enterTypespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypespecifier" ):
            listener.exitTypespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTypespecifier" ):
            return visitor.visitTypespecifier(self)
        else:
            return visitor.visitChildren(self)


def typespecifier(self):
    """Parse 'typespecifier': trailingtypespecifier | classspecifier | enumspecifier.

    Alternative choice is made by adaptive ATN prediction (decision 111).
    """
    localctx = CPP14Parser.TypespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 148, self.RULE_typespecifier)
    try:
        self.state = 1263
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,111,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1260
            self.trailingtypespecifier()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1261
            self.classspecifier()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1262
            self.enumspecifier()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TrailingtypespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'trailingtypespecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def simpletypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletypespecifierContext,0)

    def elaboratedtypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.ElaboratedtypespecifierContext,0)

    def typenamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TypenamespecifierContext,0)

    def cvqualifier(self):
        return self.getTypedRuleContext(CPP14Parser.CvqualifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_trailingtypespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTrailingtypespecifier" ):
            listener.enterTrailingtypespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTrailingtypespecifier" ):
            listener.exitTrailingtypespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTrailingtypespecifier" ):
            return visitor.visitTrailingtypespecifier(self)
        else:
            return visitor.visitChildren(self)


def trailingtypespecifier(self):
    """Parse 'trailingtypespecifier':
    simpletypespecifier | elaboratedtypespecifier | typenamespecifier | cvqualifier.

    The alternative is chosen by a plain one-token lookahead switch
    (no adaptive prediction); an unexpected token raises NoViableAltException.
    """
    localctx = CPP14Parser.TrailingtypespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 150, self.RULE_trailingtypespecifier)
    try:
        self.state = 1269
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Decltype, CPP14Parser.Double, CPP14Parser.Float, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Unsigned, CPP14Parser.Void, CPP14Parser.Wchar, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1265
            self.simpletypespecifier()
            pass
        elif token in [CPP14Parser.Class, CPP14Parser.Enum, CPP14Parser.Struct, CPP14Parser.Union]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1266
            self.elaboratedtypespecifier()
            pass
        elif token in [CPP14Parser.Typename_]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1267
            self.typenamespecifier()
            pass
        elif token in [CPP14Parser.Const, CPP14Parser.Volatile]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1268
            self.cvqualifier()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypespecifierseqContext(ParserRuleContext):
    """Parse-tree node for the 'typespecifierseq' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def typespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typespecifierseq

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypespecifierseq" ):
            listener.enterTypespecifierseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypespecifierseq" ):
            listener.exitTypespecifierseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTypespecifierseq" ):
            return visitor.visitTypespecifierseq(self)
        else:
            return visitor.visitChildren(self)


def typespecifierseq(self):
    """Parse 'typespecifierseq':
    typespecifier attributespecifierseq?  |  typespecifier typespecifierseq.

    Right-recursive formulation; decision 114 selects the alternative and
    decision 113 the optional attribute sequence.
    """
    localctx = CPP14Parser.TypespecifierseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 152, self.RULE_typespecifierseq)
    try:
        self.state = 1278
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,114,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1271
            self.typespecifier()
            self.state = 1273
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,113,self._ctx)
            if la_ == 1:
                self.state = 1272
                self.attributespecifierseq(0)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1275
            self.typespecifier()
            self.state = 1276
            self.typespecifierseq()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TrailingtypespecifierseqContext(ParserRuleContext):
    """Parse-tree node for the 'trailingtypespecifierseq' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def trailingtypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingtypespecifierContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def trailingtypespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingtypespecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_trailingtypespecifierseq

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTrailingtypespecifierseq" ):
            listener.enterTrailingtypespecifierseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTrailingtypespecifierseq" ):
            listener.exitTrailingtypespecifierseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTrailingtypespecifierseq" ):
            return visitor.visitTrailingtypespecifierseq(self)
        else:
            return visitor.visitChildren(self)


def trailingtypespecifierseq(self):
    """Parse 'trailingtypespecifierseq':
    trailingtypespecifier attributespecifierseq?  |
    trailingtypespecifier trailingtypespecifierseq.

    Mirrors typespecifierseq; decisions 116 (alternative) and 115 (optional
    attributes) are resolved by adaptive prediction.
    """
    localctx = CPP14Parser.TrailingtypespecifierseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 154, self.RULE_trailingtypespecifierseq)
    try:
        self.state = 1287
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,116,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1280
            self.trailingtypespecifier()
            self.state = 1282
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,115,self._ctx)
            if la_ == 1:
                self.state = 1281
                self.attributespecifierseq(0)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1284
            self.trailingtypespecifier()
            self.state = 1285
            self.trailingtypespecifierseq()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SimpletypespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'simpletypespecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def thetypename(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypenameContext,0)

    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)

    def Char(self):
        return self.getToken(CPP14Parser.Char, 0)

    def Char16(self):
        return self.getToken(CPP14Parser.Char16, 0)

    def Char32(self):
        return self.getToken(CPP14Parser.Char32, 0)

    def Wchar(self):
        return self.getToken(CPP14Parser.Wchar, 0)

    def Bool(self):
        return self.getToken(CPP14Parser.Bool, 0)

    def Short(self):
        return self.getToken(CPP14Parser.Short, 0)

    def Int(self):
        return self.getToken(CPP14Parser.Int, 0)

    def Long(self):
        return self.getToken(CPP14Parser.Long, 0)

    def Signed(self):
        return self.getToken(CPP14Parser.Signed, 0)

    def Unsigned(self):
        return self.getToken(CPP14Parser.Unsigned, 0)

    def Float(self):
        return self.getToken(CPP14Parser.Float, 0)

    def Double(self):
        return self.getToken(CPP14Parser.Double, 0)

    def Void(self):
        return self.getToken(CPP14Parser.Void, 0)

    def Auto(self):
        return self.getToken(CPP14Parser.Auto, 0)

    def decltypespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.DecltypespecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_simpletypespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterSimpletypespecifier" ):
            listener.enterSimpletypespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitSimpletypespecifier" ):
            listener.exitSimpletypespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitSimpletypespecifier" ):
            return visitor.visitSimpletypespecifier(self)
        else:
            return visitor.visitChildren(self)


def simpletypespecifier(self):
    """Parse 'simpletypespecifier'.

    17 alternatives selected by adaptive prediction (decision 118):
    a (possibly nested-name-qualified) type name, a qualified template-id,
    one of the builtin type keywords (char..auto), or a decltypespecifier.
    """
    localctx = CPP14Parser.SimpletypespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 156, self.RULE_simpletypespecifier)
    try:
        self.state = 1312
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,118,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            # Optional nestednamespecifier prefix (decision 117).
            self.state = 1290
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,117,self._ctx)
            if la_ == 1:
                self.state = 1289
                self.nestednamespecifier(0)

            self.state = 1292
            self.thetypename()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1293
            self.nestednamespecifier(0)
            self.state = 1294
            self.match(CPP14Parser.Template)
            self.state = 1295
            self.simpletemplateid()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1297
            self.match(CPP14Parser.Char)
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1298
            self.match(CPP14Parser.Char16)
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1299
            self.match(CPP14Parser.Char32)
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1300
            self.match(CPP14Parser.Wchar)
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1301
            self.match(CPP14Parser.Bool)
            pass

        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1302
            self.match(CPP14Parser.Short)
            pass

        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 1303
            self.match(CPP14Parser.Int)
            pass

        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 1304
            self.match(CPP14Parser.Long)
            pass

        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 1305
            self.match(CPP14Parser.Signed)
            pass

        elif la_ == 12:
            self.enterOuterAlt(localctx, 12)
            self.state = 1306
            self.match(CPP14Parser.Unsigned)
            pass

        elif la_ == 13:
            self.enterOuterAlt(localctx, 13)
            self.state = 1307
            self.match(CPP14Parser.Float)
            pass

        elif la_ == 14:
            self.enterOuterAlt(localctx, 14)
            self.state = 1308
            self.match(CPP14Parser.Double)
            pass

        elif la_ == 15:
            self.enterOuterAlt(localctx, 15)
            self.state = 1309
            self.match(CPP14Parser.Void)
            pass

        elif la_ == 16:
            self.enterOuterAlt(localctx, 16)
            self.state = 1310
            self.match(CPP14Parser.Auto)
            pass

        elif la_ == 17:
            self.enterOuterAlt(localctx, 17)
            self.state = 1311
            self.decltypespecifier()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ThetypenameContext(ParserRuleContext):
    """Parse-tree node for the 'thetypename' grammar rule (ANTLR-generated).

    Named 'thetypename' rather than 'typename' to avoid clashing with the
    C++ keyword token in the grammar.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classname(self):
        return self.getTypedRuleContext(CPP14Parser.ClassnameContext,0)

    def enumname(self):
        return self.getTypedRuleContext(CPP14Parser.EnumnameContext,0)

    def typedefname(self):
        return self.getTypedRuleContext(CPP14Parser.TypedefnameContext,0)

    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_thetypename

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterThetypename" ):
            listener.enterThetypename(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitThetypename" ):
            listener.exitThetypename(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitThetypename" ):
            return visitor.visitThetypename(self)
        else:
            return visitor.visitChildren(self)


def thetypename(self):
    """Parse 'thetypename': classname | enumname | typedefname | simpletemplateid.

    Decision 119 disambiguates; all four alternatives can begin with an
    Identifier, so adaptive prediction is required.
    """
    localctx = CPP14Parser.ThetypenameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 158, self.RULE_thetypename)
    try:
        self.state = 1318
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,119,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1314
            self.classname()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1315
            self.enumname()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1316
            self.typedefname()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1317
            self.simpletemplateid()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DecltypespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'decltypespecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Decltype(self):
        return self.getToken(CPP14Parser.Decltype, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def expression(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def Auto(self):
        return self.getToken(CPP14Parser.Auto, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_decltypespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDecltypespecifier" ):
            listener.enterDecltypespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDecltypespecifier" ):
            listener.exitDecltypespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitDecltypespecifier" ):
            return visitor.visitDecltypespecifier(self)
        else:
            return visitor.visitChildren(self)


def decltypespecifier(self):
    """Parse 'decltypespecifier': 'decltype(' expression ')' | 'decltype(auto)'.

    Decision 120 distinguishes the expression form from the literal
    'decltype(auto)' form.
    """
    localctx = CPP14Parser.DecltypespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 160, self.RULE_decltypespecifier)
    try:
        self.state = 1329
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,120,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1320
            self.match(CPP14Parser.Decltype)
            self.state = 1321
            self.match(CPP14Parser.LeftParen)
            self.state = 1322
            self.expression(0)
            self.state = 1323
            self.match(CPP14Parser.RightParen)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1325
            self.match(CPP14Parser.Decltype)
            self.state = 1326
            self.match(CPP14Parser.LeftParen)
            self.state = 1327
            self.match(CPP14Parser.Auto)
            self.state = 1328
            self.match(CPP14Parser.RightParen)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElaboratedtypespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'elaboratedtypespecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classkey(self):
        return self.getTypedRuleContext(CPP14Parser.ClasskeyContext,0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def Enum(self):
        return self.getToken(CPP14Parser.Enum, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_elaboratedtypespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElaboratedtypespecifier" ):
            listener.enterElaboratedtypespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElaboratedtypespecifier" ):
            listener.exitElaboratedtypespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitElaboratedtypespecifier" ):
            return visitor.visitElaboratedtypespecifier(self)
        else:
            return visitor.visitChildren(self)


def elaboratedtypespecifier(self):
    """Parse 'elaboratedtypespecifier'.

    Four alternatives (decision 125):
      1. classkey attributespecifierseq? nestednamespecifier? Identifier
      2. classkey simpletemplateid
      3. classkey nestednamespecifier Template? simpletemplateid
      4. Enum nestednamespecifier? Identifier
    """
    localctx = CPP14Parser.ElaboratedtypespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 162, self.RULE_elaboratedtypespecifier)
    self._la = 0 # Token type
    try:
        self.state = 1355
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,125,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1331
            self.classkey()
            # Optional attribute sequence: starts with 'alignas' or '[['.
            self.state = 1333
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1332
                self.attributespecifierseq(0)

            self.state = 1336
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,122,self._ctx)
            if la_ == 1:
                self.state = 1335
                self.nestednamespecifier(0)

            self.state = 1338
            self.match(CPP14Parser.Identifier)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1340
            self.classkey()
            self.state = 1341
            self.simpletemplateid()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1343
            self.classkey()
            self.state = 1344
            self.nestednamespecifier(0)
            self.state = 1346
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Template:
                self.state = 1345
                self.match(CPP14Parser.Template)

            self.state = 1348
            self.simpletemplateid()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1350
            self.match(CPP14Parser.Enum)
            self.state = 1352
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,124,self._ctx)
            if la_ == 1:
                self.state = 1351
                self.nestednamespecifier(0)

            self.state = 1354
            self.match(CPP14Parser.Identifier)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumnameContext(ParserRuleContext):
    """Parse-tree node for the 'enumname' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumname

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumname" ):
            listener.enterEnumname(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumname" ):
            listener.exitEnumname(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumname" ):
            return visitor.visitEnumname(self)
        else:
            return visitor.visitChildren(self)


def enumname(self):
    """Parse 'enumname': a single Identifier token."""
    localctx = CPP14Parser.EnumnameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 164, self.RULE_enumname)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1357
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumspecifierContext(ParserRuleContext):
    """Parse-tree node for the 'enumspecifier' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def enumhead(self):
        return self.getTypedRuleContext(CPP14Parser.EnumheadContext,0)

    def LeftBrace(self):
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def enumeratorlist(self):
        return self.getTypedRuleContext(CPP14Parser.EnumeratorlistContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumspecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumspecifier" ):
            listener.enterEnumspecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumspecifier" ):
            listener.exitEnumspecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumspecifier" ):
            return visitor.visitEnumspecifier(self)
        else:
            return visitor.visitChildren(self)


def enumspecifier(self):
    """Parse 'enumspecifier':
    enumhead '{' enumeratorlist? '}'  |  enumhead '{' enumeratorlist ',' '}'.

    Decision 127 separates the two forms (the second allows a trailing
    comma after the enumerator list).
    """
    localctx = CPP14Parser.EnumspecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 166, self.RULE_enumspecifier)
    self._la = 0 # Token type
    try:
        self.state = 1372
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,127,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1359
            self.enumhead()
            self.state = 1360
            self.match(CPP14Parser.LeftBrace)
            # Enumerator list is optional; present iff next token is an Identifier.
            self.state = 1362
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Identifier:
                self.state = 1361
                self.enumeratorlist(0)

            self.state = 1364
            self.match(CPP14Parser.RightBrace)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1366
            self.enumhead()
            self.state = 1367
            self.match(CPP14Parser.LeftBrace)
            self.state = 1368
            self.enumeratorlist(0)
            self.state = 1369
            self.match(CPP14Parser.Comma)
            self.state = 1370
            self.match(CPP14Parser.RightBrace)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumheadContext(ParserRuleContext):
    """Parse-tree node for the 'enumhead' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def enumkey(self):
        return self.getTypedRuleContext(CPP14Parser.EnumkeyContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def enumbase(self):
        return self.getTypedRuleContext(CPP14Parser.EnumbaseContext,0)

    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumhead

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumhead" ):
            listener.enterEnumhead(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumhead" ):
            listener.exitEnumhead(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumhead" ):
            return visitor.visitEnumhead(self)
        else:
            return visitor.visitChildren(self)


def enumhead(self):
    """Parse 'enumhead'.

    Two alternatives (decision 133):
      1. enumkey attributespecifierseq? Identifier? enumbase?
      2. enumkey attributespecifierseq? nestednamespecifier Identifier enumbase?
    """
    localctx = CPP14Parser.EnumheadContext(self, self._ctx, self.state)
    self.enterRule(localctx, 168, self.RULE_enumhead)
    self._la = 0 # Token type
    try:
        self.state = 1393
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,133,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1374
            self.enumkey()
            self.state = 1376
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1375
                self.attributespecifierseq(0)

            self.state = 1379
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Identifier:
                self.state = 1378
                self.match(CPP14Parser.Identifier)

            # Optional underlying-type clause (': typespecifierseq').
            self.state = 1382
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Colon:
                self.state = 1381
                self.enumbase()

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1384
            self.enumkey()
            self.state = 1386
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1385
                self.attributespecifierseq(0)

            self.state = 1388
            self.nestednamespecifier(0)
            self.state = 1389
            self.match(CPP14Parser.Identifier)
            self.state = 1391
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Colon:
                self.state = 1390
                self.enumbase()

            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OpaqueenumdeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'opaqueenumdeclaration' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def enumkey(self):
        return self.getTypedRuleContext(CPP14Parser.EnumkeyContext,0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def enumbase(self):
        return self.getTypedRuleContext(CPP14Parser.EnumbaseContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_opaqueenumdeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOpaqueenumdeclaration" ):
            listener.enterOpaqueenumdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOpaqueenumdeclaration" ):
            listener.exitOpaqueenumdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitOpaqueenumdeclaration" ):
            return visitor.visitOpaqueenumdeclaration(self)
        else:
            return visitor.visitChildren(self)


def opaqueenumdeclaration(self):
    """Parse 'opaqueenumdeclaration':
    enumkey attributespecifierseq? Identifier enumbase? ';'
    (a forward declaration of an enumeration).
    """
    localctx = CPP14Parser.OpaqueenumdeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 170, self.RULE_opaqueenumdeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1395
        self.enumkey()
        self.state = 1397
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            self.state = 1396
            self.attributespecifierseq(0)

        self.state = 1399
        self.match(CPP14Parser.Identifier)
        # Optional underlying-type clause (': typespecifierseq').
        self.state = 1401
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CPP14Parser.Colon:
            self.state = 1400
            self.enumbase()

        self.state = 1403
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumkeyContext(ParserRuleContext):
    """Parse-tree node for the 'enumkey' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Enum(self):
        return self.getToken(CPP14Parser.Enum, 0)

    def Class(self):
        return self.getToken(CPP14Parser.Class, 0)

    def Struct(self):
        return self.getToken(CPP14Parser.Struct, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumkey

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumkey" ):
            listener.enterEnumkey(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumkey" ):
            listener.exitEnumkey(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumkey" ):
            return visitor.visitEnumkey(self)
        else:
            return visitor.visitChildren(self)


def enumkey(self):
    """Parse 'enumkey': 'enum' | 'enum class' | 'enum struct' (decision 136)."""
    localctx = CPP14Parser.EnumkeyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 172, self.RULE_enumkey)
    try:
        self.state = 1410
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,136,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1405
            self.match(CPP14Parser.Enum)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1406
            self.match(CPP14Parser.Enum)
            self.state = 1407
            self.match(CPP14Parser.Class)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1408
            self.match(CPP14Parser.Enum)
            self.state = 1409
            self.match(CPP14Parser.Struct)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumbaseContext(ParserRuleContext):
    """Parse-tree node for the 'enumbase' grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Colon(self):
        return self.getToken(CPP14Parser.Colon, 0)

    def typespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumbase

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumbase" ):
            listener.enterEnumbase(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumbase" ):
            listener.exitEnumbase(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumbase" ):
            return visitor.visitEnumbase(self)
        else:
            return visitor.visitChildren(self)


def enumbase(self):
    """Parse 'enumbase': ':' typespecifierseq (the enum's underlying type)."""
    localctx = CPP14Parser.EnumbaseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 174, self.RULE_enumbase)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1412
        self.match(CPP14Parser.Colon)
        self.state = 1413
        self.typespecifierseq()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumeratorlistContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def enumeratordefinition(self):
return self.getTypedRuleContext(CPP14Parser.EnumeratordefinitionContext,0)
def enumeratorlist(self):
return self.getTypedRuleContext(CPP14Parser.EnumeratorlistContext,0)
def Comma(self):
return self.getToken(CPP14Parser.Comma, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_enumeratorlist
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEnumeratorlist" ):
listener.enterEnumeratorlist(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEnumeratorlist" ):
listener.exitEnumeratorlist(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitEnumeratorlist" ):
return visitor.visitEnumeratorlist(self)
else:
return visitor.visitChildren(self)
def enumeratorlist(self, _p:int=0):
    """Parse the left-recursive `enumeratorlist` rule:
    enumeratordefinition (',' enumeratordefinition)*.

    ANTLR rewrites the left recursion into a loop driven by
    `adaptivePredict` (decision 137); `_p` is the precedence level
    threaded through `precpred`.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.EnumeratorlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 176
    self.enterRecursionRule(localctx, 176, self.RULE_enumeratorlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1416
        self.enumeratordefinition()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1423
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,137,self._ctx)
        # Loop once per ',' enumeratordefinition continuation predicted by the ATN.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # Wrap the list parsed so far in a fresh recursion context.
                localctx = CPP14Parser.EnumeratorlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_enumeratorlist)
                self.state = 1418
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1419
                self.match(CPP14Parser.Comma)
                self.state = 1420
                self.enumeratordefinition()
            self.state = 1425
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,137,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion rules unroll instead of the plain exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class EnumeratordefinitionContext(ParserRuleContext):
    """Parse-tree node for the `enumeratordefinition` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def enumerator(self):
        # The enumerator name child.
        return self.getTypedRuleContext(CPP14Parser.EnumeratorContext,0)

    def Assign(self):
        # '=' token when an initializer is present, else None.
        return self.getToken(CPP14Parser.Assign, 0)

    def constantexpression(self):
        # Initializer expression child, or None.
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumeratordefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumeratordefinition" ):
            listener.enterEnumeratordefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumeratordefinition" ):
            listener.exitEnumeratordefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumeratordefinition" ):
            return visitor.visitEnumeratordefinition(self)
        else:
            return visitor.visitChildren(self)
def enumeratordefinition(self):
    """Parse the `enumeratordefinition` rule:
    enumerator | enumerator '=' constantexpression.

    Alternative choice is made by `adaptivePredict` (decision 138).
    """
    localctx = CPP14Parser.EnumeratordefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 178, self.RULE_enumeratordefinition)
    try:
        self.state = 1431
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,138,self._ctx)
        if la_ == 1:
            # Alt 1: bare enumerator.
            self.enterOuterAlt(localctx, 1)
            self.state = 1426
            self.enumerator()
            pass
        elif la_ == 2:
            # Alt 2: enumerator '=' constantexpression.
            self.enterOuterAlt(localctx, 2)
            self.state = 1427
            self.enumerator()
            self.state = 1428
            self.match(CPP14Parser.Assign)
            self.state = 1429
            self.constantexpression()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumeratorContext(ParserRuleContext):
    """Parse-tree node for the `enumerator` rule: a single Identifier (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_enumerator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterEnumerator" ):
            listener.enterEnumerator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEnumerator" ):
            listener.exitEnumerator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitEnumerator" ):
            return visitor.visitEnumerator(self)
        else:
            return visitor.visitChildren(self)
def enumerator(self):
    """Parse the `enumerator` rule: a single Identifier token."""
    localctx = CPP14Parser.EnumeratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 180, self.RULE_enumerator)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1433
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespacenameContext(ParserRuleContext):
    """Parse-tree node for the `namespacename` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def originalnamespacename(self):
        # Present when alt 1 (original namespace name) matched.
        return self.getTypedRuleContext(CPP14Parser.OriginalnamespacenameContext,0)

    def namespacealias(self):
        # Present when alt 2 (namespace alias) matched.
        return self.getTypedRuleContext(CPP14Parser.NamespacealiasContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namespacename

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamespacename" ):
            listener.enterNamespacename(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamespacename" ):
            listener.exitNamespacename(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamespacename" ):
            return visitor.visitNamespacename(self)
        else:
            return visitor.visitChildren(self)
def namespacename(self):
    """Parse the `namespacename` rule:
    originalnamespacename | namespacealias (decision 139).
    """
    localctx = CPP14Parser.NamespacenameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 182, self.RULE_namespacename)
    try:
        self.state = 1437
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,139,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1435
            self.originalnamespacename()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1436
            self.namespacealias()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OriginalnamespacenameContext(ParserRuleContext):
    """Parse-tree node for the `originalnamespacename` rule: an Identifier (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_originalnamespacename

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOriginalnamespacename" ):
            listener.enterOriginalnamespacename(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOriginalnamespacename" ):
            listener.exitOriginalnamespacename(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitOriginalnamespacename" ):
            return visitor.visitOriginalnamespacename(self)
        else:
            return visitor.visitChildren(self)
def originalnamespacename(self):
    """Parse the `originalnamespacename` rule: a single Identifier token."""
    localctx = CPP14Parser.OriginalnamespacenameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 184, self.RULE_originalnamespacename)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1439
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespacedefinitionContext(ParserRuleContext):
    """Parse-tree node for the `namespacedefinition` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def namednamespacedefinition(self):
        # Present when alt 1 matched.
        return self.getTypedRuleContext(CPP14Parser.NamednamespacedefinitionContext,0)

    def unnamednamespacedefinition(self):
        # Present when alt 2 matched.
        return self.getTypedRuleContext(CPP14Parser.UnnamednamespacedefinitionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namespacedefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamespacedefinition" ):
            listener.enterNamespacedefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamespacedefinition" ):
            listener.exitNamespacedefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamespacedefinition" ):
            return visitor.visitNamespacedefinition(self)
        else:
            return visitor.visitChildren(self)
def namespacedefinition(self):
    """Parse the `namespacedefinition` rule:
    namednamespacedefinition | unnamednamespacedefinition (decision 140).
    """
    localctx = CPP14Parser.NamespacedefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 186, self.RULE_namespacedefinition)
    try:
        self.state = 1443
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,140,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1441
            self.namednamespacedefinition()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1442
            self.unnamednamespacedefinition()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamednamespacedefinitionContext(ParserRuleContext):
    """Parse-tree node for the `namednamespacedefinition` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def originalnamespacedefinition(self):
        # Present when alt 1 matched.
        return self.getTypedRuleContext(CPP14Parser.OriginalnamespacedefinitionContext,0)

    def extensionnamespacedefinition(self):
        # Present when alt 2 matched.
        return self.getTypedRuleContext(CPP14Parser.ExtensionnamespacedefinitionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namednamespacedefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamednamespacedefinition" ):
            listener.enterNamednamespacedefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamednamespacedefinition" ):
            listener.exitNamednamespacedefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamednamespacedefinition" ):
            return visitor.visitNamednamespacedefinition(self)
        else:
            return visitor.visitChildren(self)
def namednamespacedefinition(self):
    """Parse the `namednamespacedefinition` rule:
    originalnamespacedefinition | extensionnamespacedefinition (decision 141).
    """
    localctx = CPP14Parser.NamednamespacedefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 188, self.RULE_namednamespacedefinition)
    try:
        self.state = 1447
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,141,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1445
            self.originalnamespacedefinition()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1446
            self.extensionnamespacedefinition()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OriginalnamespacedefinitionContext(ParserRuleContext):
    """Parse-tree node for the `originalnamespacedefinition` rule:
    inline? 'namespace' Identifier '{' namespacebody '}' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Namespace(self):
        return self.getToken(CPP14Parser.Namespace, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def LeftBrace(self):
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def namespacebody(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacebodyContext,0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def Inline(self):
        # Optional 'inline' keyword; None when not present.
        return self.getToken(CPP14Parser.Inline, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_originalnamespacedefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOriginalnamespacedefinition" ):
            listener.enterOriginalnamespacedefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOriginalnamespacedefinition" ):
            listener.exitOriginalnamespacedefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitOriginalnamespacedefinition" ):
            return visitor.visitOriginalnamespacedefinition(self)
        else:
            return visitor.visitChildren(self)
def originalnamespacedefinition(self):
    """Parse the `originalnamespacedefinition` rule:
    Inline? Namespace Identifier '{' namespacebody '}'.
    """
    localctx = CPP14Parser.OriginalnamespacedefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 190, self.RULE_originalnamespacedefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1450
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading 'inline'.
        if _la==CPP14Parser.Inline:
            self.state = 1449
            self.match(CPP14Parser.Inline)
        self.state = 1452
        self.match(CPP14Parser.Namespace)
        self.state = 1453
        self.match(CPP14Parser.Identifier)
        self.state = 1454
        self.match(CPP14Parser.LeftBrace)
        self.state = 1455
        self.namespacebody()
        self.state = 1456
        self.match(CPP14Parser.RightBrace)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExtensionnamespacedefinitionContext(ParserRuleContext):
    """Parse-tree node for the `extensionnamespacedefinition` rule:
    inline? 'namespace' originalnamespacename '{' namespacebody '}' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Namespace(self):
        return self.getToken(CPP14Parser.Namespace, 0)

    def originalnamespacename(self):
        return self.getTypedRuleContext(CPP14Parser.OriginalnamespacenameContext,0)

    def LeftBrace(self):
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def namespacebody(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacebodyContext,0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def Inline(self):
        # Optional 'inline' keyword; None when not present.
        return self.getToken(CPP14Parser.Inline, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_extensionnamespacedefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExtensionnamespacedefinition" ):
            listener.enterExtensionnamespacedefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExtensionnamespacedefinition" ):
            listener.exitExtensionnamespacedefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExtensionnamespacedefinition" ):
            return visitor.visitExtensionnamespacedefinition(self)
        else:
            return visitor.visitChildren(self)
def extensionnamespacedefinition(self):
    """Parse the `extensionnamespacedefinition` rule:
    Inline? Namespace originalnamespacename '{' namespacebody '}'.
    """
    localctx = CPP14Parser.ExtensionnamespacedefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 192, self.RULE_extensionnamespacedefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1459
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading 'inline'.
        if _la==CPP14Parser.Inline:
            self.state = 1458
            self.match(CPP14Parser.Inline)
        self.state = 1461
        self.match(CPP14Parser.Namespace)
        self.state = 1462
        self.originalnamespacename()
        self.state = 1463
        self.match(CPP14Parser.LeftBrace)
        self.state = 1464
        self.namespacebody()
        self.state = 1465
        self.match(CPP14Parser.RightBrace)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UnnamednamespacedefinitionContext(ParserRuleContext):
    """Parse-tree node for the `unnamednamespacedefinition` rule:
    inline? 'namespace' '{' namespacebody '}' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Namespace(self):
        return self.getToken(CPP14Parser.Namespace, 0)

    def LeftBrace(self):
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def namespacebody(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacebodyContext,0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def Inline(self):
        # Optional 'inline' keyword; None when not present.
        return self.getToken(CPP14Parser.Inline, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_unnamednamespacedefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterUnnamednamespacedefinition" ):
            listener.enterUnnamednamespacedefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitUnnamednamespacedefinition" ):
            listener.exitUnnamednamespacedefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitUnnamednamespacedefinition" ):
            return visitor.visitUnnamednamespacedefinition(self)
        else:
            return visitor.visitChildren(self)
def unnamednamespacedefinition(self):
    """Parse the `unnamednamespacedefinition` rule:
    Inline? Namespace '{' namespacebody '}'.
    """
    localctx = CPP14Parser.UnnamednamespacedefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 194, self.RULE_unnamednamespacedefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1468
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading 'inline'.
        if _la==CPP14Parser.Inline:
            self.state = 1467
            self.match(CPP14Parser.Inline)
        self.state = 1470
        self.match(CPP14Parser.Namespace)
        self.state = 1471
        self.match(CPP14Parser.LeftBrace)
        self.state = 1472
        self.namespacebody()
        self.state = 1473
        self.match(CPP14Parser.RightBrace)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespacebodyContext(ParserRuleContext):
    """Parse-tree node for the `namespacebody` rule: an optional declarationseq (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declarationseq(self):
        # Declaration sequence child; None for an empty namespace body.
        return self.getTypedRuleContext(CPP14Parser.DeclarationseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namespacebody

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamespacebody" ):
            listener.enterNamespacebody(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamespacebody" ):
            listener.exitNamespacebody(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamespacebody" ):
            return visitor.visitNamespacebody(self)
        else:
            return visitor.visitChildren(self)
def namespacebody(self):
    """Parse the `namespacebody` rule: declarationseq?.

    The long bitset expression below is the generated FOLLOW/FIRST-set
    test for whether a declaration can start at the lookahead token;
    it encodes token types in three 64-bit words.
    """
    localctx = CPP14Parser.NamespacebodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 196, self.RULE_namespacebody)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1476
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Machine-generated FIRST-set membership test for `declarationseq`.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Asm) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Namespace) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
            self.state = 1475
            self.declarationseq(0)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespacealiasContext(ParserRuleContext):
    """Parse-tree node for the `namespacealias` rule: an Identifier (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namespacealias

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamespacealias" ):
            listener.enterNamespacealias(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamespacealias" ):
            listener.exitNamespacealias(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamespacealias" ):
            return visitor.visitNamespacealias(self)
        else:
            return visitor.visitChildren(self)
def namespacealias(self):
    """Parse the `namespacealias` rule: a single Identifier token."""
    localctx = CPP14Parser.NamespacealiasContext(self, self._ctx, self.state)
    self.enterRule(localctx, 198, self.RULE_namespacealias)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1478
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespacealiasdefinitionContext(ParserRuleContext):
    """Parse-tree node for the `namespacealiasdefinition` rule:
    'namespace' Identifier '=' qualifiednamespacespecifier ';' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Namespace(self):
        return self.getToken(CPP14Parser.Namespace, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def qualifiednamespacespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.QualifiednamespacespecifierContext,0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_namespacealiasdefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNamespacealiasdefinition" ):
            listener.enterNamespacealiasdefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNamespacealiasdefinition" ):
            listener.exitNamespacealiasdefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNamespacealiasdefinition" ):
            return visitor.visitNamespacealiasdefinition(self)
        else:
            return visitor.visitChildren(self)
def namespacealiasdefinition(self):
    """Parse the `namespacealiasdefinition` rule:
    Namespace Identifier '=' qualifiednamespacespecifier ';'.
    """
    localctx = CPP14Parser.NamespacealiasdefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 200, self.RULE_namespacealiasdefinition)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1480
        self.match(CPP14Parser.Namespace)
        self.state = 1481
        self.match(CPP14Parser.Identifier)
        self.state = 1482
        self.match(CPP14Parser.Assign)
        self.state = 1483
        self.qualifiednamespacespecifier()
        self.state = 1484
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class QualifiednamespacespecifierContext(ParserRuleContext):
    """Parse-tree node for the `qualifiednamespacespecifier` rule:
    nestednamespecifier? namespacename (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def namespacename(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacenameContext,0)

    def nestednamespecifier(self):
        # Optional qualifying prefix; None when unqualified.
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_qualifiednamespacespecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterQualifiednamespacespecifier" ):
            listener.enterQualifiednamespacespecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitQualifiednamespacespecifier" ):
            listener.exitQualifiednamespacespecifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitQualifiednamespacespecifier" ):
            return visitor.visitQualifiednamespacespecifier(self)
        else:
            return visitor.visitChildren(self)
def qualifiednamespacespecifier(self):
    """Parse the `qualifiednamespacespecifier` rule:
    nestednamespecifier? namespacename (optional prefix decided by decision 146).
    """
    localctx = CPP14Parser.QualifiednamespacespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 202, self.RULE_qualifiednamespacespecifier)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1487
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,146,self._ctx)
        if la_ == 1:
            # Optional qualifying nested-name-specifier precedes the name.
            self.state = 1486
            self.nestednamespecifier(0)
        self.state = 1489
        self.namespacename()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UsingdeclarationContext(ParserRuleContext):
    """Parse-tree node for the `usingdeclaration` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Using(self):
        return self.getToken(CPP14Parser.Using, 0)

    def nestednamespecifier(self):
        # Qualifying prefix (alt 1 only).
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def unqualifiedid(self):
        return self.getTypedRuleContext(CPP14Parser.UnqualifiedidContext,0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def Typename_(self):
        # Optional 'typename' keyword (alt 1 only); None when absent.
        return self.getToken(CPP14Parser.Typename_, 0)

    def Doublecolon(self):
        # '::' of the global-scope form (alt 2 only); None otherwise.
        return self.getToken(CPP14Parser.Doublecolon, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_usingdeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterUsingdeclaration" ):
            listener.enterUsingdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitUsingdeclaration" ):
            listener.exitUsingdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitUsingdeclaration" ):
            return visitor.visitUsingdeclaration(self)
        else:
            return visitor.visitChildren(self)
def usingdeclaration(self):
    """Parse the `usingdeclaration` rule:
    Using Typename_? nestednamespecifier unqualifiedid ';'
    | Using '::' unqualifiedid ';'
    (alternative selected by decision 148).
    """
    localctx = CPP14Parser.UsingdeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 204, self.RULE_usingdeclaration)
    self._la = 0 # Token type
    try:
        self.state = 1504
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,148,self._ctx)
        if la_ == 1:
            # Alt 1: qualified using-declaration, optional 'typename'.
            self.enterOuterAlt(localctx, 1)
            self.state = 1491
            self.match(CPP14Parser.Using)
            self.state = 1493
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Typename_:
                self.state = 1492
                self.match(CPP14Parser.Typename_)
            self.state = 1495
            self.nestednamespecifier(0)
            self.state = 1496
            self.unqualifiedid()
            self.state = 1497
            self.match(CPP14Parser.Semi)
            pass
        elif la_ == 2:
            # Alt 2: global-scope form `using :: unqualifiedid ;`.
            self.enterOuterAlt(localctx, 2)
            self.state = 1499
            self.match(CPP14Parser.Using)
            self.state = 1500
            self.match(CPP14Parser.Doublecolon)
            self.state = 1501
            self.unqualifiedid()
            self.state = 1502
            self.match(CPP14Parser.Semi)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UsingdirectiveContext(ParserRuleContext):
    """Parse-tree node for the `usingdirective` rule:
    attributespecifierseq? 'using' 'namespace' nestednamespecifier? namespacename ';' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Using(self):
        return self.getToken(CPP14Parser.Using, 0)

    def Namespace(self):
        return self.getToken(CPP14Parser.Namespace, 0)

    def namespacename(self):
        return self.getTypedRuleContext(CPP14Parser.NamespacenameContext,0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def attributespecifierseq(self):
        # Optional leading attribute specifiers; None when absent.
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def nestednamespecifier(self):
        # Optional qualifying prefix; None when absent.
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_usingdirective

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterUsingdirective" ):
            listener.enterUsingdirective(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitUsingdirective" ):
            listener.exitUsingdirective(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitUsingdirective" ):
            return visitor.visitUsingdirective(self)
        else:
            return visitor.visitChildren(self)
def usingdirective(self):
    """Parse the `usingdirective` rule:
    attributespecifierseq? Using Namespace nestednamespecifier? namespacename ';'.
    """
    localctx = CPP14Parser.UsingdirectiveContext(self, self._ctx, self.state)
    self.enterRule(localctx, 206, self.RULE_usingdirective)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1507
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional attribute-specifier-seq (starts with 'alignas' or '[').
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            self.state = 1506
            self.attributespecifierseq(0)
        self.state = 1509
        self.match(CPP14Parser.Using)
        self.state = 1510
        self.match(CPP14Parser.Namespace)
        self.state = 1512
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,150,self._ctx)
        if la_ == 1:
            # Optional qualifying nested-name-specifier (decision 150).
            self.state = 1511
            self.nestednamespecifier(0)
        self.state = 1514
        self.namespacename()
        self.state = 1515
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AsmdefinitionContext(ParserRuleContext):
    """Parse-tree node for the `asmdefinition` rule:
    'asm' '(' Stringliteral ')' ';' (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Asm(self):
        return self.getToken(CPP14Parser.Asm, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def Stringliteral(self):
        return self.getToken(CPP14Parser.Stringliteral, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_asmdefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAsmdefinition" ):
            listener.enterAsmdefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAsmdefinition" ):
            listener.exitAsmdefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAsmdefinition" ):
            return visitor.visitAsmdefinition(self)
        else:
            return visitor.visitChildren(self)
def asmdefinition(self):
    """Parse the `asmdefinition` rule: Asm '(' Stringliteral ')' ';'."""
    localctx = CPP14Parser.AsmdefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 208, self.RULE_asmdefinition)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1517
        self.match(CPP14Parser.Asm)
        self.state = 1518
        self.match(CPP14Parser.LeftParen)
        self.state = 1519
        self.match(CPP14Parser.Stringliteral)
        self.state = 1520
        self.match(CPP14Parser.RightParen)
        self.state = 1521
        self.match(CPP14Parser.Semi)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LinkagespecificationContext(ParserRuleContext):
    """Parse-tree node for the `linkagespecification` rule:
    'extern' Stringliteral ('{' declarationseq? '}' | declaration) (ANTLR-generated).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Extern(self):
        return self.getToken(CPP14Parser.Extern, 0)

    def Stringliteral(self):
        return self.getToken(CPP14Parser.Stringliteral, 0)

    def LeftBrace(self):
        # '{' of the braced form; None for the single-declaration form.
        return self.getToken(CPP14Parser.LeftBrace, 0)

    def RightBrace(self):
        return self.getToken(CPP14Parser.RightBrace, 0)

    def declarationseq(self):
        # Declarations inside the braced form; None when empty or not braced.
        return self.getTypedRuleContext(CPP14Parser.DeclarationseqContext,0)

    def declaration(self):
        # Single declaration of the unbraced form; None otherwise.
        return self.getTypedRuleContext(CPP14Parser.DeclarationContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_linkagespecification

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLinkagespecification" ):
            listener.enterLinkagespecification(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLinkagespecification" ):
            listener.exitLinkagespecification(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLinkagespecification" ):
            return visitor.visitLinkagespecification(self)
        else:
            return visitor.visitChildren(self)
def linkagespecification(self):
    """Parse rule `linkagespecification`:
        'extern' Stringliteral '{' declarationseq? '}'
      | 'extern' Stringliteral declaration

    ANTLR-generated (rule index 210, decision 152); do not hand-edit.
    """
    localctx = CPP14Parser.LinkagespecificationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 210, self.RULE_linkagespecification)
    self._la = 0 # Token type
    try:
        self.state = 1533
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,152,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1523
            self.match(CPP14Parser.Extern)
            self.state = 1524
            self.match(CPP14Parser.Stringliteral)
            self.state = 1525
            self.match(CPP14Parser.LeftBrace)
            self.state = 1527
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated FOLLOW-set membership test (bitmask over token types):
            # only descend into declarationseq when the lookahead can start one.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Asm) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Namespace) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
                self.state = 1526
                self.declarationseq(0)
            self.state = 1529
            self.match(CPP14Parser.RightBrace)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1530
            self.match(CPP14Parser.Extern)
            self.state = 1531
            self.match(CPP14Parser.Stringliteral)
            self.state = 1532
            self.declaration()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributespecifierseqContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `attributespecifierseq` rule
    (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def attributespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierContext,0)
    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributespecifierseq
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributespecifierseq" ):
            listener.enterAttributespecifierseq(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributespecifierseq" ):
            listener.exitAttributespecifierseq(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributespecifierseq" ):
            return visitor.visitAttributespecifierseq(self)
        else:
            return visitor.visitChildren(self)
def attributespecifierseq(self, _p:int=0):
    """Parse rule `attributespecifierseq`: one or more attributespecifier,
    implemented as ANTLR left-recursion (rule index 212, decision 153).

    `_p` is the precedence threshold used by the left-recursion machinery.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.AttributespecifierseqContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 212
    self.enterRecursionRule(localctx, 212, self.RULE_attributespecifierseq, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1536
        self.attributespecifier()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1542
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,153,self._ctx)
        # Loop while the adaptive predictor says another specifier follows.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.AttributespecifierseqContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_attributespecifierseq)
                self.state = 1538
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1539
                self.attributespecifier()
            self.state = 1544
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,153,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class AttributespecifierContext(ParserRuleContext):
    """Parse-tree node for the `attributespecifier` rule (ANTLR-generated).
    Bracket accessors take an index because '[[' / ']]' are two tokens each."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def LeftBracket(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.LeftBracket)
        else:
            return self.getToken(CPP14Parser.LeftBracket, i)
    def attributelist(self):
        return self.getTypedRuleContext(CPP14Parser.AttributelistContext,0)
    def RightBracket(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.RightBracket)
        else:
            return self.getToken(CPP14Parser.RightBracket, i)
    def alignmentspecifier(self):
        return self.getTypedRuleContext(CPP14Parser.AlignmentspecifierContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributespecifier
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributespecifier" ):
            listener.enterAttributespecifier(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributespecifier" ):
            listener.exitAttributespecifier(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributespecifier" ):
            return visitor.visitAttributespecifier(self)
        else:
            return visitor.visitChildren(self)
def attributespecifier(self):
    """Parse rule `attributespecifier`:
        '[' '[' attributelist ']' ']'
      | alignmentspecifier

    ANTLR-generated (rule index 214); alternative chosen on LA(1).
    """
    localctx = CPP14Parser.AttributespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 214, self.RULE_attributespecifier)
    try:
        self.state = 1552
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.LeftBracket]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1545
            self.match(CPP14Parser.LeftBracket)
            self.state = 1546
            self.match(CPP14Parser.LeftBracket)
            self.state = 1547
            self.attributelist(0)
            self.state = 1548
            self.match(CPP14Parser.RightBracket)
            self.state = 1549
            self.match(CPP14Parser.RightBracket)
            pass
        elif token in [CPP14Parser.Alignas]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1551
            self.alignmentspecifier()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AlignmentspecifierContext(ParserRuleContext):
    """Parse-tree node for the `alignmentspecifier` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def Alignas(self):
        return self.getToken(CPP14Parser.Alignas, 0)
    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)
    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)
    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)
    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)
    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_alignmentspecifier
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAlignmentspecifier" ):
            listener.enterAlignmentspecifier(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAlignmentspecifier" ):
            listener.exitAlignmentspecifier(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAlignmentspecifier" ):
            return visitor.visitAlignmentspecifier(self)
        else:
            return visitor.visitChildren(self)
def alignmentspecifier(self):
    """Parse rule `alignmentspecifier`:
        'alignas' '(' thetypeid '...'? ')'
      | 'alignas' '(' constantexpression '...'? ')'

    ANTLR-generated (rule index 216, decision 157); do not hand-edit.
    """
    localctx = CPP14Parser.AlignmentspecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 216, self.RULE_alignmentspecifier)
    self._la = 0 # Token type
    try:
        self.state = 1570
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,157,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1554
            self.match(CPP14Parser.Alignas)
            self.state = 1555
            self.match(CPP14Parser.LeftParen)
            self.state = 1556
            self.thetypeid()
            self.state = 1558
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Ellipsis:
                # Optional pack-expansion ellipsis.
                self.state = 1557
                self.match(CPP14Parser.Ellipsis)
            self.state = 1560
            self.match(CPP14Parser.RightParen)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1562
            self.match(CPP14Parser.Alignas)
            self.state = 1563
            self.match(CPP14Parser.LeftParen)
            self.state = 1564
            self.constantexpression()
            self.state = 1566
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Ellipsis:
                self.state = 1565
                self.match(CPP14Parser.Ellipsis)
            self.state = 1568
            self.match(CPP14Parser.RightParen)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributelistContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `attributelist` rule
    (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def attribute(self):
        return self.getTypedRuleContext(CPP14Parser.AttributeContext,0)
    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)
    def attributelist(self):
        return self.getTypedRuleContext(CPP14Parser.AttributelistContext,0)
    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributelist
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributelist" ):
            listener.enterAttributelist(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributelist" ):
            listener.exitAttributelist(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributelist" ):
            return visitor.visitAttributelist(self)
        else:
            return visitor.visitChildren(self)
def attributelist(self, _p:int=0):
    """Parse rule `attributelist`: comma-separated attributes, each
    optionally followed by '...', with an optionally-empty head element;
    implemented as ANTLR left-recursion (rule index 218, decisions 158-162).

    `_p` is the precedence threshold used by the left-recursion machinery.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.AttributelistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 218
    self.enterRecursionRule(localctx, 218, self.RULE_attributelist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        # Head element: either an optional bare attribute, or attribute '...'.
        self.state = 1579
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,159,self._ctx)
        if la_ == 1:
            self.state = 1574
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,158,self._ctx)
            if la_ == 1:
                self.state = 1573
                self.attribute()
            pass
        elif la_ == 2:
            self.state = 1576
            self.attribute()
            self.state = 1577
            self.match(CPP14Parser.Ellipsis)
            pass
        self._ctx.stop = self._input.LT(-1)
        # Tail: ',' attribute?  |  ',' attribute '...'
        self.state = 1593
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,162,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 1591
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,161,self._ctx)
                if la_ == 1:
                    localctx = CPP14Parser.AttributelistContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_attributelist)
                    self.state = 1581
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 1582
                    self.match(CPP14Parser.Comma)
                    self.state = 1584
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,160,self._ctx)
                    if la_ == 1:
                        self.state = 1583
                        self.attribute()
                    pass
                elif la_ == 2:
                    localctx = CPP14Parser.AttributelistContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_attributelist)
                    self.state = 1586
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 1587
                    self.match(CPP14Parser.Comma)
                    self.state = 1588
                    self.attribute()
                    self.state = 1589
                    self.match(CPP14Parser.Ellipsis)
                    pass
            self.state = 1595
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,162,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class AttributeContext(ParserRuleContext):
    """Parse-tree node for the `attribute` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def attributetoken(self):
        return self.getTypedRuleContext(CPP14Parser.AttributetokenContext,0)
    def attributeargumentclause(self):
        return self.getTypedRuleContext(CPP14Parser.AttributeargumentclauseContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attribute
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttribute" ):
            listener.enterAttribute(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttribute" ):
            listener.exitAttribute(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttribute" ):
            return visitor.visitAttribute(self)
        else:
            return visitor.visitChildren(self)
def attribute(self):
    """Parse rule `attribute`: attributetoken attributeargumentclause?.

    ANTLR-generated (rule index 220, decision 163).
    """
    localctx = CPP14Parser.AttributeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 220, self.RULE_attribute)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1596
        self.attributetoken()
        self.state = 1598
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,163,self._ctx)
        if la_ == 1:
            # Optional '(' balancedtokenseq ')' argument clause.
            self.state = 1597
            self.attributeargumentclause()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributetokenContext(ParserRuleContext):
    """Parse-tree node for the `attributetoken` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)
    def attributescopedtoken(self):
        return self.getTypedRuleContext(CPP14Parser.AttributescopedtokenContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributetoken
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributetoken" ):
            listener.enterAttributetoken(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributetoken" ):
            listener.exitAttributetoken(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributetoken" ):
            return visitor.visitAttributetoken(self)
        else:
            return visitor.visitChildren(self)
def attributetoken(self):
    """Parse rule `attributetoken`: Identifier | attributescopedtoken.

    ANTLR-generated (rule index 222, decision 164); the predictor
    disambiguates a bare identifier from `ns::ident`.
    """
    localctx = CPP14Parser.AttributetokenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 222, self.RULE_attributetoken)
    try:
        self.state = 1602
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,164,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1600
            self.match(CPP14Parser.Identifier)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1601
            self.attributescopedtoken()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributescopedtokenContext(ParserRuleContext):
    """Parse-tree node for the `attributescopedtoken` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def attributenamespace(self):
        return self.getTypedRuleContext(CPP14Parser.AttributenamespaceContext,0)
    def Doublecolon(self):
        return self.getToken(CPP14Parser.Doublecolon, 0)
    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributescopedtoken
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributescopedtoken" ):
            listener.enterAttributescopedtoken(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributescopedtoken" ):
            listener.exitAttributescopedtoken(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributescopedtoken" ):
            return visitor.visitAttributescopedtoken(self)
        else:
            return visitor.visitChildren(self)
def attributescopedtoken(self):
    """Parse rule `attributescopedtoken`: attributenamespace '::' Identifier.

    ANTLR-generated (rule index 224).
    """
    localctx = CPP14Parser.AttributescopedtokenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 224, self.RULE_attributescopedtoken)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1604
        self.attributenamespace()
        self.state = 1605
        self.match(CPP14Parser.Doublecolon)
        self.state = 1606
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributenamespaceContext(ParserRuleContext):
    """Parse-tree node for the `attributenamespace` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributenamespace
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributenamespace" ):
            listener.enterAttributenamespace(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributenamespace" ):
            listener.exitAttributenamespace(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributenamespace" ):
            return visitor.visitAttributenamespace(self)
        else:
            return visitor.visitChildren(self)
def attributenamespace(self):
    """Parse rule `attributenamespace`: a single Identifier.

    ANTLR-generated (rule index 226).
    """
    localctx = CPP14Parser.AttributenamespaceContext(self, self._ctx, self.state)
    self.enterRule(localctx, 226, self.RULE_attributenamespace)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1608
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributeargumentclauseContext(ParserRuleContext):
    """Parse-tree node for the `attributeargumentclause` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)
    def balancedtokenseq(self):
        return self.getTypedRuleContext(CPP14Parser.BalancedtokenseqContext,0)
    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_attributeargumentclause
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttributeargumentclause" ):
            listener.enterAttributeargumentclause(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttributeargumentclause" ):
            listener.exitAttributeargumentclause(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAttributeargumentclause" ):
            return visitor.visitAttributeargumentclause(self)
        else:
            return visitor.visitChildren(self)
def attributeargumentclause(self):
    """Parse rule `attributeargumentclause`: '(' balancedtokenseq ')'.

    ANTLR-generated (rule index 228).
    """
    localctx = CPP14Parser.AttributeargumentclauseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 228, self.RULE_attributeargumentclause)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1610
        self.match(CPP14Parser.LeftParen)
        self.state = 1611
        self.balancedtokenseq(0)
        self.state = 1612
        self.match(CPP14Parser.RightParen)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BalancedtokenseqContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `balancedtokenseq` rule
    (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def balancedtoken(self):
        return self.getTypedRuleContext(CPP14Parser.BalancedtokenContext,0)
    def balancedtokenseq(self):
        return self.getTypedRuleContext(CPP14Parser.BalancedtokenseqContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_balancedtokenseq
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBalancedtokenseq" ):
            listener.enterBalancedtokenseq(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBalancedtokenseq" ):
            listener.exitBalancedtokenseq(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBalancedtokenseq" ):
            return visitor.visitBalancedtokenseq(self)
        else:
            return visitor.visitChildren(self)
def balancedtokenseq(self, _p:int=0):
    """Parse rule `balancedtokenseq`: zero or more balancedtoken,
    implemented as ANTLR left-recursion (rule index 230, decisions 165-166).

    `_p` is the precedence threshold used by the left-recursion machinery.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.BalancedtokenseqContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 230
    self.enterRecursionRule(localctx, 230, self.RULE_balancedtokenseq, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        # The head token itself is optional (the sequence may be empty).
        self.state = 1616
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,165,self._ctx)
        if la_ == 1:
            self.state = 1615
            self.balancedtoken()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1622
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,166,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.BalancedtokenseqContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_balancedtokenseq)
                self.state = 1618
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1619
                self.balancedtoken()
            self.state = 1623
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,166,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class BalancedtokenContext(ParserRuleContext):
    """Parse-tree node for the `balancedtoken` rule (ANTLR-generated).
    Paired-delimiter accessors take an index since both halves may repeat."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def LeftParen(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.LeftParen)
        else:
            return self.getToken(CPP14Parser.LeftParen, i)
    def balancedtokenseq(self):
        return self.getTypedRuleContext(CPP14Parser.BalancedtokenseqContext,0)
    def RightParen(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.RightParen)
        else:
            return self.getToken(CPP14Parser.RightParen, i)
    def LeftBracket(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.LeftBracket)
        else:
            return self.getToken(CPP14Parser.LeftBracket, i)
    def RightBracket(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.RightBracket)
        else:
            return self.getToken(CPP14Parser.RightBracket, i)
    def LeftBrace(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.LeftBrace)
        else:
            return self.getToken(CPP14Parser.LeftBrace, i)
    def RightBrace(self, i:int=None):
        if i is None:
            return self.getTokens(CPP14Parser.RightBrace)
        else:
            return self.getToken(CPP14Parser.RightBrace, i)
    def getRuleIndex(self):
        return CPP14Parser.RULE_balancedtoken
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBalancedtoken" ):
            listener.enterBalancedtoken(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBalancedtoken" ):
            listener.exitBalancedtoken(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBalancedtoken" ):
            return visitor.visitBalancedtoken(self)
        else:
            return visitor.visitChildren(self)
def balancedtoken(self):
    """Parse rule `balancedtoken`:
        '(' balancedtokenseq ')'
      | '[' balancedtokenseq ']'
      | '{' balancedtokenseq '}'
      | one or more tokens other than the three delimiter pairs

    ANTLR-generated (rule index 232); alternative chosen on LA(1).
    """
    localctx = CPP14Parser.BalancedtokenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 232, self.RULE_balancedtoken)
    self._la = 0 # Token type
    try:
        self.state = 1642
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.LeftParen]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1625
            self.match(CPP14Parser.LeftParen)
            self.state = 1626
            self.balancedtokenseq(0)
            self.state = 1627
            self.match(CPP14Parser.RightParen)
            pass
        elif token in [CPP14Parser.LeftBracket]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1629
            self.match(CPP14Parser.LeftBracket)
            self.state = 1630
            self.balancedtokenseq(0)
            self.state = 1631
            self.match(CPP14Parser.RightBracket)
            pass
        elif token in [CPP14Parser.LeftBrace]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1633
            self.match(CPP14Parser.LeftBrace)
            self.state = 1634
            self.balancedtokenseq(0)
            self.state = 1635
            self.match(CPP14Parser.RightBrace)
            pass
        # Generated exhaustive list: every token type except the six delimiters.
        elif token in [CPP14Parser.T__0, CPP14Parser.T__1, CPP14Parser.T__2, CPP14Parser.T__3, CPP14Parser.T__4, CPP14Parser.T__5, CPP14Parser.MultiLineMacro, CPP14Parser.Directive, CPP14Parser.Alignas, CPP14Parser.Alignof, CPP14Parser.Asm, CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Break, CPP14Parser.Case, CPP14Parser.Catch, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Class, CPP14Parser.Const, CPP14Parser.Constexpr, CPP14Parser.Const_cast, CPP14Parser.Continue, CPP14Parser.Decltype, CPP14Parser.Default, CPP14Parser.Delete, CPP14Parser.Do, CPP14Parser.Double, CPP14Parser.Dynamic_cast, CPP14Parser.Else, CPP14Parser.Enum, CPP14Parser.Explicit, CPP14Parser.Export, CPP14Parser.Extern, CPP14Parser.BFalse, CPP14Parser.Final, CPP14Parser.Float, CPP14Parser.For, CPP14Parser.Friend, CPP14Parser.Goto, CPP14Parser.If, CPP14Parser.Inline, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.Mutable, CPP14Parser.Namespace, CPP14Parser.New, CPP14Parser.Noexcept, CPP14Parser.Nullptr, CPP14Parser.Operator, CPP14Parser.Override, CPP14Parser.Private, CPP14Parser.Protected, CPP14Parser.Public, CPP14Parser.Register, CPP14Parser.Reinterpret_cast, CPP14Parser.Return, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Sizeof, CPP14Parser.Static, CPP14Parser.Static_assert, CPP14Parser.Static_cast, CPP14Parser.Struct, CPP14Parser.Switch, CPP14Parser.Template, CPP14Parser.This, CPP14Parser.Thread_local, CPP14Parser.Throw, CPP14Parser.BTrue, CPP14Parser.Try, CPP14Parser.Typedef, CPP14Parser.Typeid_, CPP14Parser.Typename_, CPP14Parser.Union, CPP14Parser.Unsigned, CPP14Parser.Using, CPP14Parser.Virtual, CPP14Parser.Void, CPP14Parser.Volatile, CPP14Parser.Wchar, CPP14Parser.While, CPP14Parser.Plus, CPP14Parser.Minus, CPP14Parser.Star, CPP14Parser.Div, CPP14Parser.Mod, CPP14Parser.Caret, CPP14Parser.And, CPP14Parser.Or, CPP14Parser.Tilde, CPP14Parser.Not, CPP14Parser.Assign, CPP14Parser.Less, CPP14Parser.Greater, CPP14Parser.PlusAssign, CPP14Parser.MinusAssign, 
        CPP14Parser.StarAssign, CPP14Parser.DivAssign, CPP14Parser.ModAssign, CPP14Parser.XorAssign, CPP14Parser.AndAssign, CPP14Parser.OrAssign, CPP14Parser.LeftShift, CPP14Parser.RightShift, CPP14Parser.LeftShiftAssign, CPP14Parser.RightShiftAssign, CPP14Parser.Equal, CPP14Parser.NotEqual, CPP14Parser.LessEqual, CPP14Parser.GreaterEqual, CPP14Parser.AndAnd, CPP14Parser.OrOr, CPP14Parser.PlusPlus, CPP14Parser.MinusMinus, CPP14Parser.Comma, CPP14Parser.ArrowStar, CPP14Parser.Arrow, CPP14Parser.Question, CPP14Parser.Colon, CPP14Parser.Doublecolon, CPP14Parser.Semi, CPP14Parser.Dot, CPP14Parser.DotStar, CPP14Parser.Ellipsis, CPP14Parser.Identifier, CPP14Parser.Integerliteral, CPP14Parser.Decimalliteral, CPP14Parser.Octalliteral, CPP14Parser.Hexadecimalliteral, CPP14Parser.Binaryliteral, CPP14Parser.Integersuffix, CPP14Parser.Characterliteral, CPP14Parser.Floatingliteral, CPP14Parser.Stringliteral, CPP14Parser.Userdefinedintegerliteral, CPP14Parser.Userdefinedfloatingliteral, CPP14Parser.Userdefinedstringliteral, CPP14Parser.Userdefinedcharacterliteral, CPP14Parser.Whitespace, CPP14Parser.Newline, CPP14Parser.BlockComment, CPP14Parser.LineComment]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1638
            self._errHandler.sync(self)
            _alt = 1
            # 1+ loop: consume any token that is NOT a paren/bracket/brace.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 1637
                    _la = self._input.LA(1)
                    # Inverted-set match: recover inline if a delimiter (or EOF) appears.
                    if _la <= 0 or ((((_la - 84)) & ~0x3f) == 0 and ((1 << (_la - 84)) & ((1 << (CPP14Parser.LeftParen - 84)) | (1 << (CPP14Parser.RightParen - 84)) | (1 << (CPP14Parser.LeftBracket - 84)) | (1 << (CPP14Parser.RightBracket - 84)) | (1 << (CPP14Parser.LeftBrace - 84)) | (1 << (CPP14Parser.RightBrace - 84)))) != 0):
                        self._errHandler.recoverInline(self)
                    else:
                        self._errHandler.reportMatch(self)
                        self.consume()
                else:
                    raise NoViableAltException(self)
                self.state = 1640
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,167,self._ctx)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitdeclaratorlistContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `initdeclaratorlist` rule
    (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def initdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.InitdeclaratorContext,0)
    def initdeclaratorlist(self):
        return self.getTypedRuleContext(CPP14Parser.InitdeclaratorlistContext,0)
    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_initdeclaratorlist
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInitdeclaratorlist" ):
            listener.enterInitdeclaratorlist(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInitdeclaratorlist" ):
            listener.exitInitdeclaratorlist(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInitdeclaratorlist" ):
            return visitor.visitInitdeclaratorlist(self)
        else:
            return visitor.visitChildren(self)
def initdeclaratorlist(self, _p:int=0):
    """Parse rule `initdeclaratorlist`: initdeclarator (',' initdeclarator)*,
    implemented as ANTLR left-recursion (rule index 234, decision 169).

    `_p` is the precedence threshold used by the left-recursion machinery.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.InitdeclaratorlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 234
    self.enterRecursionRule(localctx, 234, self.RULE_initdeclaratorlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1645
        self.initdeclarator()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1652
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,169,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.InitdeclaratorlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_initdeclaratorlist)
                self.state = 1647
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1648
                self.match(CPP14Parser.Comma)
                self.state = 1649
                self.initdeclarator()
            self.state = 1654
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,169,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class InitdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `initdeclarator` rule (ANTLR-generated)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext,0)
    def initializer(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerContext,0)
    def getRuleIndex(self):
        return CPP14Parser.RULE_initdeclarator
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInitdeclarator" ):
            listener.enterInitdeclarator(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInitdeclarator" ):
            listener.exitInitdeclarator(self)
    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInitdeclarator" ):
            return visitor.visitInitdeclarator(self)
        else:
            return visitor.visitChildren(self)
def initdeclarator(self):
    """Parse the `initdeclarator` rule: declarator initializer?.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.InitdeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 236, self.RULE_initdeclarator)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1655
        self.declarator()
        self.state = 1657
        self._errHandler.sync(self)
        # Decision 170: is an initializer present?
        la_ = self._interp.adaptivePredict(self._input,170,self._ctx)
        if la_ == 1:
            self.state = 1656
            self.initializer()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `declarator` rule (ANTLR-generated).

    A declarator is either a `ptrdeclarator`, or a `noptrdeclarator`
    followed by `parametersandqualifiers` and a `trailingreturntype`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrdeclaratorContext,0)

    def noptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrdeclaratorContext,0)

    def parametersandqualifiers(self):
        return self.getTypedRuleContext(CPP14Parser.ParametersandqualifiersContext,0)

    def trailingreturntype(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingreturntypeContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclarator" ):
            listener.enterDeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclarator" ):
            listener.exitDeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitDeclarator" ):
            return visitor.visitDeclarator(self)
        else:
            return visitor.visitChildren(self)
def declarator(self):
    """Parse the `declarator` rule.

    Alt 1: ptrdeclarator.
    Alt 2: noptrdeclarator parametersandqualifiers trailingreturntype.
    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.DeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 238, self.RULE_declarator)
    try:
        self.state = 1664
        self._errHandler.sync(self)
        # Decision 171 chooses between the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,171,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1659
            self.ptrdeclarator()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1660
            self.noptrdeclarator(0)
            self.state = 1661
            self.parametersandqualifiers()
            self.state = 1662
            self.trailingreturntype()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PtrdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `ptrdeclarator` rule (ANTLR-generated).

    Either a `noptrdeclarator`, or a `ptroperator` followed by a nested
    `ptrdeclarator`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def noptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrdeclaratorContext,0)

    def ptroperator(self):
        return self.getTypedRuleContext(CPP14Parser.PtroperatorContext,0)

    def ptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_ptrdeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPtrdeclarator" ):
            listener.enterPtrdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPtrdeclarator" ):
            listener.exitPtrdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitPtrdeclarator" ):
            return visitor.visitPtrdeclarator(self)
        else:
            return visitor.visitChildren(self)
def ptrdeclarator(self):
    """Parse the `ptrdeclarator` rule.

    Alt 1: noptrdeclarator.
    Alt 2: ptroperator ptrdeclarator (right recursion for chained
    pointer/reference operators).
    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.PtrdeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 240, self.RULE_ptrdeclarator)
    try:
        self.state = 1670
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,172,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1666
            self.noptrdeclarator(0)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1667
            self.ptroperator()
            self.state = 1668
            self.ptrdeclarator()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NoptrdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `noptrdeclarator` rule
    (ANTLR-generated).

    Children cover the rule's alternatives: a `declaratorid` (with optional
    attributes), a parenthesized `ptrdeclarator`, or a recursive
    `noptrdeclarator` extended by `parametersandqualifiers` or an array
    `[constantexpression?]` suffix.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declaratorid(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratoridContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def ptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrdeclaratorContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def noptrdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrdeclaratorContext,0)

    def parametersandqualifiers(self):
        return self.getTypedRuleContext(CPP14Parser.ParametersandqualifiersContext,0)

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_noptrdeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNoptrdeclarator" ):
            listener.enterNoptrdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNoptrdeclarator" ):
            listener.exitNoptrdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNoptrdeclarator" ):
            return visitor.visitNoptrdeclarator(self)
        else:
            return visitor.visitChildren(self)
def noptrdeclarator(self, _p:int=0):
    """Parse the left-recursive `noptrdeclarator` rule.

    Base alternatives: declaratorid attributespecifierseq?  |
    '(' ptrdeclarator ')'.  Suffix loop (via recursion contexts):
    parametersandqualifiers (prec 3) or '[' constantexpression? ']'
    attributespecifierseq? (prec 2).
    Auto-generated by ANTLR; the large bitset test is the generated
    FOLLOW-set membership check for an optional constantexpression —
    do not edit by hand.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.NoptrdeclaratorContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 242
    self.enterRecursionRule(localctx, 242, self.RULE_noptrdeclarator, _p)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1681
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Base case 1: declaratorid with optional attribute sequence.
        if token in [CPP14Parser.Decltype, CPP14Parser.Operator, CPP14Parser.Tilde, CPP14Parser.Doublecolon, CPP14Parser.Ellipsis, CPP14Parser.Identifier]:
            self.state = 1673
            self.declaratorid()
            self.state = 1675
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,173,self._ctx)
            if la_ == 1:
                self.state = 1674
                self.attributespecifierseq(0)
            pass
        # Base case 2: parenthesized ptrdeclarator.
        elif token in [CPP14Parser.LeftParen]:
            self.state = 1677
            self.match(CPP14Parser.LeftParen)
            self.state = 1678
            self.ptrdeclarator()
            self.state = 1679
            self.match(CPP14Parser.RightParen)
            pass
        else:
            raise NoViableAltException(self)
        self._ctx.stop = self._input.LT(-1)
        self.state = 1696
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,178,self._ctx)
        # Suffix loop: function-parameter or array suffixes on the declarator.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 1694
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,177,self._ctx)
                if la_ == 1:
                    # noptrdeclarator parametersandqualifiers
                    localctx = CPP14Parser.NoptrdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrdeclarator)
                    self.state = 1683
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 1684
                    self.parametersandqualifiers()
                    pass
                elif la_ == 2:
                    # noptrdeclarator '[' constantexpression? ']' attributespecifierseq?
                    localctx = CPP14Parser.NoptrdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrdeclarator)
                    self.state = 1685
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 1686
                    self.match(CPP14Parser.LeftBracket)
                    self.state = 1688
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    # Generated bitset membership test: does the lookahead start a constantexpression?
                    if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                        self.state = 1687
                        self.constantexpression()
                    self.state = 1690
                    self.match(CPP14Parser.RightBracket)
                    self.state = 1692
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,176,self._ctx)
                    if la_ == 1:
                        self.state = 1691
                        self.attributespecifierseq(0)
                    pass
            self.state = 1698
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,178,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ParametersandqualifiersContext(ParserRuleContext):
    """Parse-tree node for the `parametersandqualifiers` rule
    (ANTLR-generated): a parenthesized parameter-declaration-clause with
    optional cv-qualifiers, ref-qualifier, exception specification and
    attribute sequence.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def parameterdeclarationclause(self):
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationclauseContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def cvqualifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.CvqualifierseqContext,0)

    def refqualifier(self):
        return self.getTypedRuleContext(CPP14Parser.RefqualifierContext,0)

    def exceptionspecification(self):
        return self.getTypedRuleContext(CPP14Parser.ExceptionspecificationContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_parametersandqualifiers

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParametersandqualifiers" ):
            listener.enterParametersandqualifiers(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParametersandqualifiers" ):
            listener.exitParametersandqualifiers(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitParametersandqualifiers" ):
            return visitor.visitParametersandqualifiers(self)
        else:
            return visitor.visitChildren(self)
def parametersandqualifiers(self):
    """Parse the `parametersandqualifiers` rule:
    '(' parameterdeclarationclause ')' cvqualifierseq? refqualifier?
    exceptionspecification? attributespecifierseq?.

    Auto-generated by ANTLR; decisions 179-182 each gate one optional
    trailing element — do not edit by hand.
    """
    localctx = CPP14Parser.ParametersandqualifiersContext(self, self._ctx, self.state)
    self.enterRule(localctx, 244, self.RULE_parametersandqualifiers)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1699
        self.match(CPP14Parser.LeftParen)
        self.state = 1700
        self.parameterdeclarationclause()
        self.state = 1701
        self.match(CPP14Parser.RightParen)
        self.state = 1703
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,179,self._ctx)
        if la_ == 1:
            self.state = 1702
            self.cvqualifierseq()
        self.state = 1706
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,180,self._ctx)
        if la_ == 1:
            self.state = 1705
            self.refqualifier()
        self.state = 1709
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,181,self._ctx)
        if la_ == 1:
            self.state = 1708
            self.exceptionspecification()
        self.state = 1712
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,182,self._ctx)
        if la_ == 1:
            self.state = 1711
            self.attributespecifierseq(0)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TrailingreturntypeContext(ParserRuleContext):
    """Parse-tree node for the `trailingreturntype` rule (ANTLR-generated):
    '->' trailingtypespecifierseq abstractdeclarator?.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Arrow(self):
        return self.getToken(CPP14Parser.Arrow, 0)

    def trailingtypespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingtypespecifierseqContext,0)

    def abstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.AbstractdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_trailingreturntype

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTrailingreturntype" ):
            listener.enterTrailingreturntype(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTrailingreturntype" ):
            listener.exitTrailingreturntype(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTrailingreturntype" ):
            return visitor.visitTrailingreturntype(self)
        else:
            return visitor.visitChildren(self)
def trailingreturntype(self):
    """Parse the `trailingreturntype` rule:
    Arrow trailingtypespecifierseq abstractdeclarator?.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.TrailingreturntypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 246, self.RULE_trailingreturntype)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1714
        self.match(CPP14Parser.Arrow)
        self.state = 1715
        self.trailingtypespecifierseq()
        self.state = 1717
        self._errHandler.sync(self)
        # Decision 183: optional abstract declarator after the type.
        la_ = self._interp.adaptivePredict(self._input,183,self._ctx)
        if la_ == 1:
            self.state = 1716
            self.abstractdeclarator()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PtroperatorContext(ParserRuleContext):
    """Parse-tree node for the `ptroperator` rule (ANTLR-generated).

    Covers '*' / '&' (and the T__2 literal alternative) with optional
    attributes and cv-qualifiers, plus the member-pointer form
    nestednamespecifier '*'.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Star(self):
        return self.getToken(CPP14Parser.Star, 0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def cvqualifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.CvqualifierseqContext,0)

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_ptroperator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPtroperator" ):
            listener.enterPtroperator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPtroperator" ):
            listener.exitPtroperator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitPtroperator" ):
            return visitor.visitPtroperator(self)
        else:
            return visitor.visitChildren(self)
def ptroperator(self):
    """Parse the `ptroperator` rule, dispatching on the lookahead token.

    Alt 1: '*' attributespecifierseq? cvqualifierseq?
    Alt 2: '&' attributespecifierseq?
    Alt 3: T__2 (grammar literal token) attributespecifierseq?
    Alt 4: nestednamespecifier '*' attributespecifierseq? cvqualifierseq?
    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.PtroperatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 248, self.RULE_ptroperator)
    try:
        self.state = 1742
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Star]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1719
            self.match(CPP14Parser.Star)
            self.state = 1721
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,184,self._ctx)
            if la_ == 1:
                self.state = 1720
                self.attributespecifierseq(0)
            self.state = 1724
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,185,self._ctx)
            if la_ == 1:
                self.state = 1723
                self.cvqualifierseq()
            pass
        elif token in [CPP14Parser.And]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1726
            self.match(CPP14Parser.And)
            self.state = 1728
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,186,self._ctx)
            if la_ == 1:
                self.state = 1727
                self.attributespecifierseq(0)
            pass
        elif token in [CPP14Parser.T__2]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1730
            self.match(CPP14Parser.T__2)
            self.state = 1732
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,187,self._ctx)
            if la_ == 1:
                self.state = 1731
                self.attributespecifierseq(0)
            pass
        elif token in [CPP14Parser.Decltype, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            # Member-pointer form: nested-name-specifier followed by '*'.
            self.enterOuterAlt(localctx, 4)
            self.state = 1734
            self.nestednamespecifier(0)
            self.state = 1735
            self.match(CPP14Parser.Star)
            self.state = 1737
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,188,self._ctx)
            if la_ == 1:
                self.state = 1736
                self.attributespecifierseq(0)
            self.state = 1740
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,189,self._ctx)
            if la_ == 1:
                self.state = 1739
                self.cvqualifierseq()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CvqualifierseqContext(ParserRuleContext):
    """Parse-tree node for the `cvqualifierseq` rule (ANTLR-generated):
    cvqualifier cvqualifierseq? (right-recursive list of qualifiers).
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def cvqualifier(self):
        return self.getTypedRuleContext(CPP14Parser.CvqualifierContext,0)

    def cvqualifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.CvqualifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_cvqualifierseq

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCvqualifierseq" ):
            listener.enterCvqualifierseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCvqualifierseq" ):
            listener.exitCvqualifierseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCvqualifierseq" ):
            return visitor.visitCvqualifierseq(self)
        else:
            return visitor.visitChildren(self)
def cvqualifierseq(self):
    """Parse the `cvqualifierseq` rule: cvqualifier cvqualifierseq?
    (right recursion collects a run of cv-qualifiers).

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.CvqualifierseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 250, self.RULE_cvqualifierseq)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1744
        self.cvqualifier()
        self.state = 1746
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,191,self._ctx)
        if la_ == 1:
            self.state = 1745
            self.cvqualifierseq()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CvqualifierContext(ParserRuleContext):
    """Parse-tree node for the `cvqualifier` rule (ANTLR-generated):
    a single Const or Volatile token.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Const(self):
        return self.getToken(CPP14Parser.Const, 0)

    def Volatile(self):
        return self.getToken(CPP14Parser.Volatile, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_cvqualifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCvqualifier" ):
            listener.enterCvqualifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCvqualifier" ):
            listener.exitCvqualifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCvqualifier" ):
            return visitor.visitCvqualifier(self)
        else:
            return visitor.visitChildren(self)
def cvqualifier(self):
    """Parse the `cvqualifier` rule: consume one Const or Volatile token,
    recovering inline on any other token.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.CvqualifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 252, self.RULE_cvqualifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1748
        _la = self._input.LA(1)
        if not(_la==CPP14Parser.Const or _la==CPP14Parser.Volatile):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class RefqualifierContext(ParserRuleContext):
    """Parse-tree node for the `refqualifier` rule (ANTLR-generated):
    matches the And token or the T__2 grammar-literal token.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_refqualifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterRefqualifier" ):
            listener.enterRefqualifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitRefqualifier" ):
            listener.exitRefqualifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitRefqualifier" ):
            return visitor.visitRefqualifier(self)
        else:
            return visitor.visitChildren(self)
def refqualifier(self):
    """Parse the `refqualifier` rule: consume one T__2 or And token,
    recovering inline on any other token.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.RefqualifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 254, self.RULE_refqualifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1750
        _la = self._input.LA(1)
        if not(_la==CPP14Parser.T__2 or _la==CPP14Parser.And):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclaratoridContext(ParserRuleContext):
    """Parse-tree node for the `declaratorid` rule (ANTLR-generated):
    an optional Ellipsis followed by an idexpression.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def idexpression(self):
        return self.getTypedRuleContext(CPP14Parser.IdexpressionContext,0)

    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_declaratorid

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclaratorid" ):
            listener.enterDeclaratorid(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclaratorid" ):
            listener.exitDeclaratorid(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitDeclaratorid" ):
            return visitor.visitDeclaratorid(self)
        else:
            return visitor.visitChildren(self)
def declaratorid(self):
    """Parse the `declaratorid` rule: Ellipsis? idexpression.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.DeclaratoridContext(self, self._ctx, self.state)
    self.enterRule(localctx, 256, self.RULE_declaratorid)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1753
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading '...' (pack declarator).
        if _la==CPP14Parser.Ellipsis:
            self.state = 1752
            self.match(CPP14Parser.Ellipsis)
        self.state = 1755
        self.idexpression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ThetypeidContext(ParserRuleContext):
    """Parse-tree node for the `thetypeid` rule (ANTLR-generated):
    typespecifierseq abstractdeclarator?.  (Named `thetypeid` because
    `typeid` is reserved in the target grammar's symbol space.)
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)

    def abstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.AbstractdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_thetypeid

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterThetypeid" ):
            listener.enterThetypeid(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitThetypeid" ):
            listener.exitThetypeid(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitThetypeid" ):
            return visitor.visitThetypeid(self)
        else:
            return visitor.visitChildren(self)
def thetypeid(self):
    """Parse the `thetypeid` rule: typespecifierseq abstractdeclarator?.

    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.ThetypeidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 258, self.RULE_thetypeid)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1757
        self.typespecifierseq()
        self.state = 1759
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,193,self._ctx)
        if la_ == 1:
            self.state = 1758
            self.abstractdeclarator()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AbstractdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `abstractdeclarator` rule (ANTLR-generated).

    Alternatives: a `ptrabstractdeclarator`; an optional
    `noptrabstractdeclarator` with parameters and a trailing return type;
    or an `abstractpackdeclarator`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrabstractdeclaratorContext,0)

    def parametersandqualifiers(self):
        return self.getTypedRuleContext(CPP14Parser.ParametersandqualifiersContext,0)

    def trailingreturntype(self):
        return self.getTypedRuleContext(CPP14Parser.TrailingreturntypeContext,0)

    def noptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrabstractdeclaratorContext,0)

    def abstractpackdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.AbstractpackdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_abstractdeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAbstractdeclarator" ):
            listener.enterAbstractdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAbstractdeclarator" ):
            listener.exitAbstractdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAbstractdeclarator" ):
            return visitor.visitAbstractdeclarator(self)
        else:
            return visitor.visitChildren(self)
def abstractdeclarator(self):
    """Parse the `abstractdeclarator` rule.

    Alt 1: ptrabstractdeclarator.
    Alt 2: noptrabstractdeclarator? parametersandqualifiers
           trailingreturntype.
    Alt 3: abstractpackdeclarator.
    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.AbstractdeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 260, self.RULE_abstractdeclarator)
    try:
        self.state = 1769
        self._errHandler.sync(self)
        # Decision 195 selects the top-level alternative.
        la_ = self._interp.adaptivePredict(self._input,195,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1761
            self.ptrabstractdeclarator()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1763
            self._errHandler.sync(self)
            # Decision 194: optional noptrabstractdeclarator prefix.
            la_ = self._interp.adaptivePredict(self._input,194,self._ctx)
            if la_ == 1:
                self.state = 1762
                self.noptrabstractdeclarator(0)
            self.state = 1765
            self.parametersandqualifiers()
            self.state = 1766
            self.trailingreturntype()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1768
            self.abstractpackdeclarator()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PtrabstractdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the `ptrabstractdeclarator` rule
    (ANTLR-generated): a `noptrabstractdeclarator`, or a `ptroperator`
    followed by an optional nested `ptrabstractdeclarator`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def noptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrabstractdeclaratorContext,0)

    def ptroperator(self):
        return self.getTypedRuleContext(CPP14Parser.PtroperatorContext,0)

    def ptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrabstractdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_ptrabstractdeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPtrabstractdeclarator" ):
            listener.enterPtrabstractdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPtrabstractdeclarator" ):
            listener.exitPtrabstractdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitPtrabstractdeclarator" ):
            return visitor.visitPtrabstractdeclarator(self)
        else:
            return visitor.visitChildren(self)
def ptrabstractdeclarator(self):
    """Parse the `ptrabstractdeclarator` rule, dispatching on lookahead.

    Alt 1 (starts with '(' or '['): noptrabstractdeclarator.
    Alt 2 (starts with a ptroperator token): ptroperator
          ptrabstractdeclarator?.
    Auto-generated by ANTLR — do not edit by hand.
    """
    localctx = CPP14Parser.PtrabstractdeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 262, self.RULE_ptrabstractdeclarator)
    try:
        self.state = 1776
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.LeftParen, CPP14Parser.LeftBracket]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1771
            self.noptrabstractdeclarator(0)
            pass
        elif token in [CPP14Parser.T__2, CPP14Parser.Decltype, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1772
            self.ptroperator()
            self.state = 1774
            self._errHandler.sync(self)
            # Decision 196: optional recursive tail after the operator.
            la_ = self._interp.adaptivePredict(self._input,196,self._ctx)
            if la_ == 1:
                self.state = 1773
                self.ptrabstractdeclarator()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NoptrabstractdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the left-recursive `noptrabstractdeclarator`
    rule (ANTLR-generated).

    Children cover: `parametersandqualifiers`, an array suffix
    '[' constantexpression? ']' with optional attributes, a parenthesized
    `ptrabstractdeclarator`, and the recursive `noptrabstractdeclarator`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def parametersandqualifiers(self):
        return self.getTypedRuleContext(CPP14Parser.ParametersandqualifiersContext,0)

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def ptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.PtrabstractdeclaratorContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def noptrabstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrabstractdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_noptrabstractdeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNoptrabstractdeclarator" ):
            listener.enterNoptrabstractdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNoptrabstractdeclarator" ):
            listener.exitNoptrabstractdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNoptrabstractdeclarator" ):
            return visitor.visitNoptrabstractdeclarator(self)
        else:
            return visitor.visitChildren(self)
def noptrabstractdeclarator(self, _p:int=0):
    """Parse the left-recursive 'noptrabstractdeclarator' rule.

    ``_p`` is the precedence threshold used by ANTLR's left-recursion
    elimination: recursive suffix alternatives are only taken while the
    corresponding ``precpred`` check passes.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.NoptrabstractdeclaratorContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 264
    self.enterRecursionRule(localctx, 264, self.RULE_noptrabstractdeclarator, _p)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1792
        self._errHandler.sync(self)
        # Choose the primary (non-recursive) alternative by adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,200,self._ctx)
        if la_ == 1:
            self.state = 1779
            self.parametersandqualifiers()
            pass

        elif la_ == 2:
            # '[' constantexpression? ']' attributespecifierseq?
            self.state = 1780
            self.match(CPP14Parser.LeftBracket)
            self.state = 1782
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated token-set bitmask test: parse the optional constant
            # expression only when the lookahead token can start one.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 1781
                self.constantexpression()

            self.state = 1784
            self.match(CPP14Parser.RightBracket)
            self.state = 1786
            self._errHandler.sync(self)
            # Optional trailing attribute-specifier-seq.
            la_ = self._interp.adaptivePredict(self._input,199,self._ctx)
            if la_ == 1:
                self.state = 1785
                self.attributespecifierseq(0)

            pass

        elif la_ == 3:
            # '(' ptrabstractdeclarator ')'
            self.state = 1788
            self.match(CPP14Parser.LeftParen)
            self.state = 1789
            self.ptrabstractdeclarator()
            self.state = 1790
            self.match(CPP14Parser.RightParen)
            pass

        self._ctx.stop = self._input.LT(-1)
        self.state = 1807
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,204,self._ctx)
        # Consume left-recursive suffixes while prediction keeps choosing one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 1805
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,203,self._ctx)
                if la_ == 1:
                    # Suffix: parametersandqualifiers.
                    localctx = CPP14Parser.NoptrabstractdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrabstractdeclarator)
                    self.state = 1794
                    # Precedence guard for this left-recursive alternative.
                    if not self.precpred(self._ctx, 5):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
                    self.state = 1795
                    self.parametersandqualifiers()
                    pass

                elif la_ == 2:
                    # Suffix: '[' constantexpression? ']' attributespecifierseq?
                    localctx = CPP14Parser.NoptrabstractdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrabstractdeclarator)
                    self.state = 1796
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 1797
                    self.match(CPP14Parser.LeftBracket)
                    self.state = 1799
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    # Same generated token-set test as in the primary alternative.
                    if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                        self.state = 1798
                        self.constantexpression()

                    self.state = 1801
                    self.match(CPP14Parser.RightBracket)
                    self.state = 1803
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,202,self._ctx)
                    if la_ == 1:
                        self.state = 1802
                        self.attributespecifierseq(0)

                    pass

            self.state = 1809
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,204,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion-rule variant of exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class AbstractpackdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the 'abstractpackdeclarator' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def noptrabstractpackdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.NoptrabstractpackdeclaratorContext,0)

    def ptroperator(self):
        return self.getTypedRuleContext(CPP14Parser.PtroperatorContext,0)

    def abstractpackdeclarator(self):
        # Self-referencing child: 'ptroperator abstractpackdeclarator' alternative.
        return self.getTypedRuleContext(CPP14Parser.AbstractpackdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_abstractpackdeclarator

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAbstractpackdeclarator" ):
            listener.enterAbstractpackdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAbstractpackdeclarator" ):
            listener.exitAbstractpackdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAbstractpackdeclarator" ):
            return visitor.visitAbstractpackdeclarator(self)
        else:
            return visitor.visitChildren(self)
def abstractpackdeclarator(self):
    """Parse the 'abstractpackdeclarator' rule.

    Dispatches on the next token: an Ellipsis starts the
    noptrabstractpackdeclarator alternative; a pointer-operator token starts
    the 'ptroperator abstractpackdeclarator' alternative.
    """
    localctx = CPP14Parser.AbstractpackdeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 266, self.RULE_abstractpackdeclarator)
    try:
        self.state = 1814
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Ellipsis]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1810
            self.noptrabstractpackdeclarator(0)
            pass
        elif token in [CPP14Parser.T__2, CPP14Parser.Decltype, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Doublecolon, CPP14Parser.Identifier]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1811
            self.ptroperator()
            self.state = 1812
            self.abstractpackdeclarator()
            pass
        else:
            # Lookahead matches no alternative of this rule.
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NoptrabstractpackdeclaratorContext(ParserRuleContext):
    """Parse-tree node for the 'noptrabstractpackdeclarator' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def noptrabstractpackdeclarator(self):
        # Self-referencing child: the rule is left-recursive.
        return self.getTypedRuleContext(CPP14Parser.NoptrabstractpackdeclaratorContext,0)

    def parametersandqualifiers(self):
        return self.getTypedRuleContext(CPP14Parser.ParametersandqualifiersContext,0)

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_noptrabstractpackdeclarator

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNoptrabstractpackdeclarator" ):
            listener.enterNoptrabstractpackdeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNoptrabstractpackdeclarator" ):
            listener.exitNoptrabstractpackdeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNoptrabstractpackdeclarator" ):
            return visitor.visitNoptrabstractpackdeclarator(self)
        else:
            return visitor.visitChildren(self)
def noptrabstractpackdeclarator(self, _p:int=0):
    """Parse the left-recursive 'noptrabstractpackdeclarator' rule.

    Primary alternative is a single '...' token; the loop then consumes
    left-recursive suffixes ('(...)' parameters or '[...]' array bounds),
    gated by ``precpred`` per ANTLR's left-recursion elimination (``_p`` is
    the precedence threshold).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.NoptrabstractpackdeclaratorContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 268
    self.enterRecursionRule(localctx, 268, self.RULE_noptrabstractpackdeclarator, _p)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1817
        self.match(CPP14Parser.Ellipsis)
        self._ctx.stop = self._input.LT(-1)
        self.state = 1832
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,209,self._ctx)
        # Consume left-recursive suffixes while prediction keeps choosing one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 1830
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,208,self._ctx)
                if la_ == 1:
                    # Suffix: parametersandqualifiers.
                    localctx = CPP14Parser.NoptrabstractpackdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrabstractpackdeclarator)
                    self.state = 1819
                    # Precedence guard for this left-recursive alternative.
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 1820
                    self.parametersandqualifiers()
                    pass

                elif la_ == 2:
                    # Suffix: '[' constantexpression? ']' attributespecifierseq?
                    localctx = CPP14Parser.NoptrabstractpackdeclaratorContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_noptrabstractpackdeclarator)
                    self.state = 1821
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 1822
                    self.match(CPP14Parser.LeftBracket)
                    self.state = 1824
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    # Generated token-set bitmask test: parse the optional
                    # constant expression only when the lookahead can start one.
                    if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                        self.state = 1823
                        self.constantexpression()

                    self.state = 1826
                    self.match(CPP14Parser.RightBracket)
                    self.state = 1828
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,207,self._ctx)
                    if la_ == 1:
                        self.state = 1827
                        self.attributespecifierseq(0)

                    pass

            self.state = 1834
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,209,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion-rule variant of exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ParameterdeclarationclauseContext(ParserRuleContext):
    """Parse-tree node for the 'parameterdeclarationclause' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def parameterdeclarationlist(self):
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationlistContext,0)

    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_parameterdeclarationclause

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameterdeclarationclause" ):
            listener.enterParameterdeclarationclause(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameterdeclarationclause" ):
            listener.exitParameterdeclarationclause(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitParameterdeclarationclause" ):
            return visitor.visitParameterdeclarationclause(self)
        else:
            return visitor.visitChildren(self)
def parameterdeclarationclause(self):
    """Parse the 'parameterdeclarationclause' rule.

    Alt 1: optional parameterdeclarationlist followed by an optional '...';
    Alt 2: parameterdeclarationlist ',' '...'.
    """
    localctx = CPP14Parser.ParameterdeclarationclauseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 270, self.RULE_parameterdeclarationclause)
    self._la = 0 # Token type
    try:
        self.state = 1845
        self._errHandler.sync(self)
        # Choose between the two alternatives by adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,212,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1836
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated token-set bitmask test: parse the optional parameter
            # list only when the lookahead can start a declaration specifier.
            if ((((_la - 9)) & ~0x3f) == 0 and ((1 << (_la - 9)) & ((1 << (CPP14Parser.Alignas - 9)) | (1 << (CPP14Parser.Auto - 9)) | (1 << (CPP14Parser.Bool - 9)) | (1 << (CPP14Parser.Char - 9)) | (1 << (CPP14Parser.Char16 - 9)) | (1 << (CPP14Parser.Char32 - 9)) | (1 << (CPP14Parser.Class - 9)) | (1 << (CPP14Parser.Const - 9)) | (1 << (CPP14Parser.Constexpr - 9)) | (1 << (CPP14Parser.Decltype - 9)) | (1 << (CPP14Parser.Double - 9)) | (1 << (CPP14Parser.Enum - 9)) | (1 << (CPP14Parser.Explicit - 9)) | (1 << (CPP14Parser.Extern - 9)) | (1 << (CPP14Parser.Float - 9)) | (1 << (CPP14Parser.Friend - 9)) | (1 << (CPP14Parser.Inline - 9)) | (1 << (CPP14Parser.Int - 9)) | (1 << (CPP14Parser.Long - 9)) | (1 << (CPP14Parser.Mutable - 9)) | (1 << (CPP14Parser.Register - 9)) | (1 << (CPP14Parser.Short - 9)) | (1 << (CPP14Parser.Signed - 9)) | (1 << (CPP14Parser.Static - 9)) | (1 << (CPP14Parser.Struct - 9)) | (1 << (CPP14Parser.Thread_local - 9)))) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CPP14Parser.Typedef - 73)) | (1 << (CPP14Parser.Typename_ - 73)) | (1 << (CPP14Parser.Union - 73)) | (1 << (CPP14Parser.Unsigned - 73)) | (1 << (CPP14Parser.Virtual - 73)) | (1 << (CPP14Parser.Void - 73)) | (1 << (CPP14Parser.Volatile - 73)) | (1 << (CPP14Parser.Wchar - 73)) | (1 << (CPP14Parser.LeftBracket - 73)) | (1 << (CPP14Parser.Doublecolon - 73)) | (1 << (CPP14Parser.Identifier - 73)))) != 0):
                self.state = 1835
                self.parameterdeclarationlist(0)

            self.state = 1839
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional trailing '...'.
            if _la==CPP14Parser.Ellipsis:
                self.state = 1838
                self.match(CPP14Parser.Ellipsis)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1841
            self.parameterdeclarationlist(0)
            self.state = 1842
            self.match(CPP14Parser.Comma)
            self.state = 1843
            self.match(CPP14Parser.Ellipsis)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ParameterdeclarationlistContext(ParserRuleContext):
    """Parse-tree node for the 'parameterdeclarationlist' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def parameterdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationContext,0)

    def parameterdeclarationlist(self):
        # Self-referencing child: the rule is left-recursive (comma list).
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationlistContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_parameterdeclarationlist

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameterdeclarationlist" ):
            listener.enterParameterdeclarationlist(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameterdeclarationlist" ):
            listener.exitParameterdeclarationlist(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitParameterdeclarationlist" ):
            return visitor.visitParameterdeclarationlist(self)
        else:
            return visitor.visitChildren(self)
def parameterdeclarationlist(self, _p:int=0):
    """Parse the left-recursive 'parameterdeclarationlist' rule.

    Parses one parameterdeclaration, then loops over ',' parameterdeclaration
    suffixes under ANTLR's left-recursion scheme (``_p`` is the precedence
    threshold).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.ParameterdeclarationlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 272
    self.enterRecursionRule(localctx, 272, self.RULE_parameterdeclarationlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1848
        self.parameterdeclaration()
        self._ctx.stop = self._input.LT(-1)
        self.state = 1855
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,213,self._ctx)
        # Consume ',' parameterdeclaration suffixes while prediction chooses one.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = CPP14Parser.ParameterdeclarationlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_parameterdeclarationlist)
                self.state = 1850
                # Precedence guard for this left-recursive alternative.
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 1851
                self.match(CPP14Parser.Comma)
                self.state = 1852
                self.parameterdeclaration()
            self.state = 1857
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,213,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion-rule variant of exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ParameterdeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'parameterdeclaration' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def declspecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext,0)

    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def initializerclause(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerclauseContext,0)

    def abstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.AbstractdeclaratorContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_parameterdeclaration

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameterdeclaration" ):
            listener.enterParameterdeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameterdeclaration" ):
            listener.exitParameterdeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitParameterdeclaration" ):
            return visitor.visitParameterdeclaration(self)
        else:
            return visitor.visitChildren(self)
def parameterdeclaration(self):
    """Parse the 'parameterdeclaration' rule.

    Four alternatives, each starting with an optional attributespecifierseq
    and a declspecifierseq:
      1) named declarator; 2) named declarator '=' initializerclause;
      3) optional abstractdeclarator; 4) optional abstractdeclarator
         '=' initializerclause.
    """
    localctx = CPP14Parser.ParameterdeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 274, self.RULE_parameterdeclaration)
    self._la = 0 # Token type
    try:
        self.state = 1889
        self._errHandler.sync(self)
        # Choose among the four alternatives by adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,220,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1859
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1858
                self.attributespecifierseq(0)

            self.state = 1861
            self.declspecifierseq()
            self.state = 1862
            self.declarator()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1865
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1864
                self.attributespecifierseq(0)

            self.state = 1867
            self.declspecifierseq()
            self.state = 1868
            self.declarator()
            self.state = 1869
            self.match(CPP14Parser.Assign)
            self.state = 1870
            self.initializerclause()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1873
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1872
                self.attributespecifierseq(0)

            self.state = 1875
            self.declspecifierseq()
            self.state = 1877
            self._errHandler.sync(self)
            # Optional abstractdeclarator, decided by prediction.
            la_ = self._interp.adaptivePredict(self._input,217,self._ctx)
            if la_ == 1:
                self.state = 1876
                self.abstractdeclarator()

            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1880
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 1879
                self.attributespecifierseq(0)

            self.state = 1882
            self.declspecifierseq()
            self.state = 1884
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated token-set test: parse the optional abstractdeclarator
            # only when the lookahead can start one.
            if _la==CPP14Parser.T__2 or _la==CPP14Parser.Decltype or ((((_la - 84)) & ~0x3f) == 0 and ((1 << (_la - 84)) & ((1 << (CPP14Parser.LeftParen - 84)) | (1 << (CPP14Parser.LeftBracket - 84)) | (1 << (CPP14Parser.Star - 84)) | (1 << (CPP14Parser.And - 84)) | (1 << (CPP14Parser.Doublecolon - 84)) | (1 << (CPP14Parser.Ellipsis - 84)) | (1 << (CPP14Parser.Identifier - 84)))) != 0):
                self.state = 1883
                self.abstractdeclarator()

            self.state = 1886
            self.match(CPP14Parser.Assign)
            self.state = 1887
            self.initializerclause()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctiondefinitionContext(ParserRuleContext):
    """Parse-tree node for the 'functiondefinition' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext,0)

    def functionbody(self):
        return self.getTypedRuleContext(CPP14Parser.FunctionbodyContext,0)

    def attributespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def declspecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext,0)

    def virtspecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.VirtspecifierseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_functiondefinition

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctiondefinition" ):
            listener.enterFunctiondefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctiondefinition" ):
            listener.exitFunctiondefinition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitFunctiondefinition" ):
            return visitor.visitFunctiondefinition(self)
        else:
            return visitor.visitChildren(self)
def functiondefinition(self):
    """Parse the 'functiondefinition' rule.

    Sequence: attributespecifierseq? declspecifierseq? declarator
    virtspecifierseq? functionbody — each optional element is gated by a
    lookahead or prediction check.
    """
    localctx = CPP14Parser.FunctiondefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 276, self.RULE_functiondefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1892
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional attribute-specifier-seq (starts with 'alignas' or '[').
        if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
            self.state = 1891
            self.attributespecifierseq(0)

        self.state = 1895
        self._errHandler.sync(self)
        # Optional decl-specifier-seq, decided by adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,222,self._ctx)
        if la_ == 1:
            self.state = 1894
            self.declspecifierseq()

        self.state = 1897
        self.declarator()
        self.state = 1899
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional virt-specifier-seq ('final' / 'override').
        if _la==CPP14Parser.Final or _la==CPP14Parser.Override:
            self.state = 1898
            self.virtspecifierseq(0)

        self.state = 1901
        self.functionbody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionbodyContext(ParserRuleContext):
    """Parse-tree node for the 'functionbody' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def ctorinitializer(self):
        return self.getTypedRuleContext(CPP14Parser.CtorinitializerContext,0)

    def functiontryblock(self):
        return self.getTypedRuleContext(CPP14Parser.FunctiontryblockContext,0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def Default(self):
        return self.getToken(CPP14Parser.Default, 0)

    def Semi(self):
        return self.getToken(CPP14Parser.Semi, 0)

    def Delete(self):
        return self.getToken(CPP14Parser.Delete, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_functionbody

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctionbody" ):
            listener.enterFunctionbody(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctionbody" ):
            listener.exitFunctionbody(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitFunctionbody" ):
            return visitor.visitFunctionbody(self)
        else:
            return visitor.visitChildren(self)
def functionbody(self):
    """Parse the 'functionbody' rule.

    Alternatives: 1) ctorinitializer? compoundstatement;
    2) functiontryblock; 3) '= default ;'; 4) '= delete ;'.
    """
    localctx = CPP14Parser.FunctionbodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 278, self.RULE_functionbody)
    self._la = 0 # Token type
    try:
        self.state = 1914
        self._errHandler.sync(self)
        # Choose among the four alternatives by adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,225,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1904
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional constructor-initializer (starts with ':').
            if _la==CPP14Parser.Colon:
                self.state = 1903
                self.ctorinitializer()

            self.state = 1906
            self.compoundstatement()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1907
            self.functiontryblock()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1908
            self.match(CPP14Parser.Assign)
            self.state = 1909
            self.match(CPP14Parser.Default)
            self.state = 1910
            self.match(CPP14Parser.Semi)
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1911
            self.match(CPP14Parser.Assign)
            self.state = 1912
            self.match(CPP14Parser.Delete)
            self.state = 1913
            self.match(CPP14Parser.Semi)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitializerContext(ParserRuleContext):
    """Parse-tree node for the 'initializer' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def braceorequalinitializer(self):
        return self.getTypedRuleContext(CPP14Parser.BraceorequalinitializerContext,0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def expressionlist(self):
        return self.getTypedRuleContext(CPP14Parser.ExpressionlistContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_initializer

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInitializer" ):
            listener.enterInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInitializer" ):
            listener.exitInitializer(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInitializer" ):
            return visitor.visitInitializer(self)
        else:
            return visitor.visitChildren(self)
def initializer(self):
    """Parse the 'initializer' rule.

    Dispatches on the next token: '{' or '=' starts a
    braceorequalinitializer; '(' starts '(' expressionlist ')'.
    """
    localctx = CPP14Parser.InitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 280, self.RULE_initializer)
    try:
        self.state = 1921
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.LeftBrace, CPP14Parser.Assign]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1916
            self.braceorequalinitializer()
            pass
        elif token in [CPP14Parser.LeftParen]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1917
            self.match(CPP14Parser.LeftParen)
            self.state = 1918
            self.expressionlist()
            self.state = 1919
            self.match(CPP14Parser.RightParen)
            pass
        else:
            # Lookahead matches no alternative of this rule.
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BraceorequalinitializerContext(ParserRuleContext):
    """Parse-tree node for the 'braceorequalinitializer' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- accessors for children the rule's alternatives can produce ---
    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def initializerclause(self):
        return self.getTypedRuleContext(CPP14Parser.InitializerclauseContext,0)

    def bracedinitlist(self):
        return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_braceorequalinitializer

    # Listener/visitor dispatch: forwarded only when the handler defines the hook.
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBraceorequalinitializer" ):
            listener.enterBraceorequalinitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBraceorequalinitializer" ):
            listener.exitBraceorequalinitializer(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBraceorequalinitializer" ):
            return visitor.visitBraceorequalinitializer(self)
        else:
            return visitor.visitChildren(self)
    def braceorequalinitializer(self):
        """Parse `braceorequalinitializer` : '=' initializerclause | bracedinitlist.

        ANTLR-generated; state numbers mirror the serialized ATN.
        """
        localctx = CPP14Parser.BraceorequalinitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 282, self.RULE_braceorequalinitializer)
        try:
            self.state = 1926
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CPP14Parser.Assign]:
                # Alt 1: '=' initializerclause
                self.enterOuterAlt(localctx, 1)
                self.state = 1923
                self.match(CPP14Parser.Assign)
                self.state = 1924
                self.initializerclause()
                pass
            elif token in [CPP14Parser.LeftBrace]:
                # Alt 2: braced-init-list
                self.enterOuterAlt(localctx, 2)
                self.state = 1925
                self.bracedinitlist()
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class InitializerclauseContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def assignmentexpression(self):
return self.getTypedRuleContext(CPP14Parser.AssignmentexpressionContext,0)
def bracedinitlist(self):
return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_initializerclause
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInitializerclause" ):
listener.enterInitializerclause(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInitializerclause" ):
listener.exitInitializerclause(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitInitializerclause" ):
return visitor.visitInitializerclause(self)
else:
return visitor.visitChildren(self)
    def initializerclause(self):
        """Parse `initializerclause` : assignmentexpression | bracedinitlist.

        Alt 1 is selected for any token that can begin an assignment
        expression (the large lookahead set below); alt 2 only for '{'.
        ANTLR-generated; do not edit state numbers by hand.
        """
        localctx = CPP14Parser.InitializerclauseContext(self, self._ctx, self.state)
        self.enterRule(localctx, 284, self.RULE_initializerclause)
        try:
            self.state = 1930
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CPP14Parser.T__0, CPP14Parser.T__1, CPP14Parser.Alignof, CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Const_cast, CPP14Parser.Decltype, CPP14Parser.Delete, CPP14Parser.Double, CPP14Parser.Dynamic_cast, CPP14Parser.BFalse, CPP14Parser.Float, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.New, CPP14Parser.Noexcept, CPP14Parser.Nullptr, CPP14Parser.Operator, CPP14Parser.Reinterpret_cast, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Sizeof, CPP14Parser.Static_cast, CPP14Parser.This, CPP14Parser.Throw, CPP14Parser.BTrue, CPP14Parser.Typeid_, CPP14Parser.Typename_, CPP14Parser.Unsigned, CPP14Parser.Void, CPP14Parser.Wchar, CPP14Parser.LeftParen, CPP14Parser.LeftBracket, CPP14Parser.Plus, CPP14Parser.Minus, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Or, CPP14Parser.Tilde, CPP14Parser.PlusPlus, CPP14Parser.MinusMinus, CPP14Parser.Doublecolon, CPP14Parser.Identifier, CPP14Parser.Integerliteral, CPP14Parser.Characterliteral, CPP14Parser.Floatingliteral, CPP14Parser.Stringliteral, CPP14Parser.Userdefinedintegerliteral, CPP14Parser.Userdefinedfloatingliteral, CPP14Parser.Userdefinedstringliteral, CPP14Parser.Userdefinedcharacterliteral]:
                self.enterOuterAlt(localctx, 1)
                self.state = 1928
                self.assignmentexpression()
                pass
            elif token in [CPP14Parser.LeftBrace]:
                self.enterOuterAlt(localctx, 2)
                self.state = 1929
                self.bracedinitlist()
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class InitializerlistContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def initializerclause(self):
return self.getTypedRuleContext(CPP14Parser.InitializerclauseContext,0)
def Ellipsis(self):
return self.getToken(CPP14Parser.Ellipsis, 0)
def initializerlist(self):
return self.getTypedRuleContext(CPP14Parser.InitializerlistContext,0)
def Comma(self):
return self.getToken(CPP14Parser.Comma, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_initializerlist
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInitializerlist" ):
listener.enterInitializerlist(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInitializerlist" ):
listener.exitInitializerlist(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitInitializerlist" ):
return visitor.visitInitializerlist(self)
else:
return visitor.visitChildren(self)
    def initializerlist(self, _p:int=0):
        """Parse the left-recursive `initializerlist` rule.

        Effectively: initializerclause '...'? (',' initializerclause '...'?)*
        implemented with ANTLR's recursion-context machinery; `_p` is the
        precedence threshold used by `precpred`. Generated code — the state
        numbers and context push/unroll order must not be changed by hand.
        """
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.InitializerlistContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 286
        self.enterRecursionRule(localctx, 286, self.RULE_initializerlist, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            # Head: first initializerclause with optional trailing '...'.
            self.state = 1933
            self.initializerclause()
            self.state = 1935
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,229,self._ctx)
            if la_ == 1:
                self.state = 1934
                self.match(CPP14Parser.Ellipsis)

            self._ctx.stop = self._input.LT(-1)
            # Tail: zero or more ',' initializerclause '...'? repetitions.
            self.state = 1945
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,231,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.InitializerlistContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_initializerlist)
                    self.state = 1937
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 1938
                    self.match(CPP14Parser.Comma)
                    self.state = 1939
                    self.initializerclause()
                    self.state = 1941
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,230,self._ctx)
                    if la_ == 1:
                        self.state = 1940
                        self.match(CPP14Parser.Ellipsis)

                self.state = 1947
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,231,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
class BracedinitlistContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def LeftBrace(self):
return self.getToken(CPP14Parser.LeftBrace, 0)
def initializerlist(self):
return self.getTypedRuleContext(CPP14Parser.InitializerlistContext,0)
def RightBrace(self):
return self.getToken(CPP14Parser.RightBrace, 0)
def Comma(self):
return self.getToken(CPP14Parser.Comma, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_bracedinitlist
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBracedinitlist" ):
listener.enterBracedinitlist(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBracedinitlist" ):
listener.exitBracedinitlist(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitBracedinitlist" ):
return visitor.visitBracedinitlist(self)
else:
return visitor.visitChildren(self)
    def bracedinitlist(self):
        """Parse `bracedinitlist` : '{' initializerlist ','? '}' | '{' '}'.

        ANTLR-generated; state numbers mirror the serialized ATN.
        """
        localctx = CPP14Parser.BracedinitlistContext(self, self._ctx, self.state)
        self.enterRule(localctx, 288, self.RULE_bracedinitlist)
        self._la = 0 # Token type
        try:
            self.state = 1957
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,233,self._ctx)
            if la_ == 1:
                # Alt 1: non-empty list with optional trailing comma.
                self.enterOuterAlt(localctx, 1)
                self.state = 1948
                self.match(CPP14Parser.LeftBrace)
                self.state = 1949
                self.initializerlist(0)
                self.state = 1951
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Comma:
                    self.state = 1950
                    self.match(CPP14Parser.Comma)

                self.state = 1953
                self.match(CPP14Parser.RightBrace)
                pass
            elif la_ == 2:
                # Alt 2: empty braces.
                self.enterOuterAlt(localctx, 2)
                self.state = 1955
                self.match(CPP14Parser.LeftBrace)
                self.state = 1956
                self.match(CPP14Parser.RightBrace)
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClassnameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Identifier(self):
return self.getToken(CPP14Parser.Identifier, 0)
def simpletemplateid(self):
return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_classname
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClassname" ):
listener.enterClassname(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClassname" ):
listener.exitClassname(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClassname" ):
return visitor.visitClassname(self)
else:
return visitor.visitChildren(self)
    def classname(self):
        """Parse `classname` : Identifier | simpletemplateid.

        Adaptive prediction (decision 234) disambiguates a plain identifier
        from a template-id. ANTLR-generated.
        """
        localctx = CPP14Parser.ClassnameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 290, self.RULE_classname)
        try:
            self.state = 1961
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,234,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 1959
                self.match(CPP14Parser.Identifier)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 1960
                self.simpletemplateid()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClassspecifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def classhead(self):
return self.getTypedRuleContext(CPP14Parser.ClassheadContext,0)
def LeftBrace(self):
return self.getToken(CPP14Parser.LeftBrace, 0)
def RightBrace(self):
return self.getToken(CPP14Parser.RightBrace, 0)
def memberspecification(self):
return self.getTypedRuleContext(CPP14Parser.MemberspecificationContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_classspecifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClassspecifier" ):
listener.enterClassspecifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClassspecifier" ):
listener.exitClassspecifier(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClassspecifier" ):
return visitor.visitClassspecifier(self)
else:
return visitor.visitChildren(self)
    def classspecifier(self):
        """Parse `classspecifier` : classhead '{' memberspecification? '}'.

        The large bitmask test below is the generated FOLLOW-set check that
        decides whether a member specification is present before '}'.
        ANTLR-generated; do not edit state numbers or the bitmask by hand.
        """
        localctx = CPP14Parser.ClassspecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 292, self.RULE_classspecifier)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1963
            self.classhead()
            self.state = 1964
            self.match(CPP14Parser.LeftBrace)
            self.state = 1966
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Private) | (1 << CPP14Parser.Protected) | (1 << CPP14Parser.Public) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Colon - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
                self.state = 1965
                self.memberspecification()

            self.state = 1968
            self.match(CPP14Parser.RightBrace)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClassheadContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def classkey(self):
return self.getTypedRuleContext(CPP14Parser.ClasskeyContext,0)
def classheadname(self):
return self.getTypedRuleContext(CPP14Parser.ClassheadnameContext,0)
def attributespecifierseq(self):
return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)
def classvirtspecifier(self):
return self.getTypedRuleContext(CPP14Parser.ClassvirtspecifierContext,0)
def baseclause(self):
return self.getTypedRuleContext(CPP14Parser.BaseclauseContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_classhead
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClasshead" ):
listener.enterClasshead(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClasshead" ):
listener.exitClasshead(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClasshead" ):
return visitor.visitClasshead(self)
else:
return visitor.visitChildren(self)
    def classhead(self):
        """Parse the `classhead` rule.

        Two alternatives (chosen by adaptive prediction, decision 241):
            1. classkey attributespecifierseq? classheadname
               classvirtspecifier? baseclause?      (named class head)
            2. classkey attributespecifierseq? baseclause?  (unnamed)

        ANTLR-generated; state numbers mirror the serialized ATN.
        """
        localctx = CPP14Parser.ClassheadContext(self, self._ctx, self.state)
        self.enterRule(localctx, 294, self.RULE_classhead)
        self._la = 0 # Token type
        try:
            self.state = 1988
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,241,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 1970
                self.classkey()
                self.state = 1972
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Attribute sequences begin with 'alignas' or '[['.
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 1971
                    self.attributespecifierseq(0)

                self.state = 1974
                self.classheadname()
                self.state = 1976
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Final:
                    self.state = 1975
                    self.classvirtspecifier()

                self.state = 1979
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Colon:
                    self.state = 1978
                    self.baseclause()

                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 1981
                self.classkey()
                self.state = 1983
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 1982
                    self.attributespecifierseq(0)

                self.state = 1986
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Colon:
                    self.state = 1985
                    self.baseclause()

                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClassheadnameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def classname(self):
return self.getTypedRuleContext(CPP14Parser.ClassnameContext,0)
def nestednamespecifier(self):
return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_classheadname
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClassheadname" ):
listener.enterClassheadname(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClassheadname" ):
listener.exitClassheadname(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClassheadname" ):
return visitor.visitClassheadname(self)
else:
return visitor.visitChildren(self)
    def classheadname(self):
        """Parse `classheadname` : nestednamespecifier? classname.

        ANTLR-generated; decision 242 predicts the optional qualifier.
        """
        localctx = CPP14Parser.ClassheadnameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 296, self.RULE_classheadname)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1991
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,242,self._ctx)
            if la_ == 1:
                self.state = 1990
                self.nestednamespecifier(0)

            self.state = 1993
            self.classname()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClassvirtspecifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Final(self):
return self.getToken(CPP14Parser.Final, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_classvirtspecifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClassvirtspecifier" ):
listener.enterClassvirtspecifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClassvirtspecifier" ):
listener.exitClassvirtspecifier(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClassvirtspecifier" ):
return visitor.visitClassvirtspecifier(self)
else:
return visitor.visitChildren(self)
    def classvirtspecifier(self):
        """Parse `classvirtspecifier` : 'final'. ANTLR-generated."""
        localctx = CPP14Parser.ClassvirtspecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 298, self.RULE_classvirtspecifier)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1995
            self.match(CPP14Parser.Final)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ClasskeyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Class(self):
return self.getToken(CPP14Parser.Class, 0)
def Struct(self):
return self.getToken(CPP14Parser.Struct, 0)
def Union(self):
return self.getToken(CPP14Parser.Union, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_classkey
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterClasskey" ):
listener.enterClasskey(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitClasskey" ):
listener.exitClasskey(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitClasskey" ):
return visitor.visitClasskey(self)
else:
return visitor.visitChildren(self)
    def classkey(self):
        """Parse `classkey` : 'class' | 'struct' | 'union'.

        Uses the generated token-set bitmask; a non-member token triggers
        single-token inline recovery. ANTLR-generated.
        """
        localctx = CPP14Parser.ClasskeyContext(self, self._ctx, self.state)
        self.enterRule(localctx, 300, self.RULE_classkey)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1997
            _la = self._input.LA(1)
            if not(((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (CPP14Parser.Class - 20)) | (1 << (CPP14Parser.Struct - 20)) | (1 << (CPP14Parser.Union - 20)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class MemberspecificationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def memberdeclaration(self):
return self.getTypedRuleContext(CPP14Parser.MemberdeclarationContext,0)
def memberspecification(self):
return self.getTypedRuleContext(CPP14Parser.MemberspecificationContext,0)
def accessspecifier(self):
return self.getTypedRuleContext(CPP14Parser.AccessspecifierContext,0)
def Colon(self):
return self.getToken(CPP14Parser.Colon, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_memberspecification
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterMemberspecification" ):
listener.enterMemberspecification(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitMemberspecification" ):
listener.exitMemberspecification(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitMemberspecification" ):
return visitor.visitMemberspecification(self)
else:
return visitor.visitChildren(self)
    def memberspecification(self):
        """Parse the `memberspecification` rule.

        Alternatives:
            1. memberdeclaration memberspecification?
            2. accessspecifier ':' memberspecification?

        The large bitmask tests are generated lookahead-set checks for the
        optional recursive tail. ANTLR-generated; do not edit by hand.
        """
        localctx = CPP14Parser.MemberspecificationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 302, self.RULE_memberspecification)
        self._la = 0 # Token type
        try:
            self.state = 2008
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CPP14Parser.T__2, CPP14Parser.Alignas, CPP14Parser.Auto, CPP14Parser.Bool, CPP14Parser.Char, CPP14Parser.Char16, CPP14Parser.Char32, CPP14Parser.Class, CPP14Parser.Const, CPP14Parser.Constexpr, CPP14Parser.Decltype, CPP14Parser.Double, CPP14Parser.Enum, CPP14Parser.Explicit, CPP14Parser.Extern, CPP14Parser.Float, CPP14Parser.Friend, CPP14Parser.Inline, CPP14Parser.Int, CPP14Parser.Long, CPP14Parser.Mutable, CPP14Parser.Operator, CPP14Parser.Register, CPP14Parser.Short, CPP14Parser.Signed, CPP14Parser.Static, CPP14Parser.Static_assert, CPP14Parser.Struct, CPP14Parser.Template, CPP14Parser.Thread_local, CPP14Parser.Typedef, CPP14Parser.Typename_, CPP14Parser.Union, CPP14Parser.Unsigned, CPP14Parser.Using, CPP14Parser.Virtual, CPP14Parser.Void, CPP14Parser.Volatile, CPP14Parser.Wchar, CPP14Parser.LeftParen, CPP14Parser.LeftBracket, CPP14Parser.Star, CPP14Parser.And, CPP14Parser.Tilde, CPP14Parser.Colon, CPP14Parser.Doublecolon, CPP14Parser.Semi, CPP14Parser.Ellipsis, CPP14Parser.Identifier]:
                self.enterOuterAlt(localctx, 1)
                self.state = 1999
                self.memberdeclaration()
                self.state = 2001
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Private) | (1 << CPP14Parser.Protected) | (1 << CPP14Parser.Public) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Colon - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
                    self.state = 2000
                    self.memberspecification()

                pass
            elif token in [CPP14Parser.Private, CPP14Parser.Protected, CPP14Parser.Public]:
                self.enterOuterAlt(localctx, 2)
                self.state = 2003
                self.accessspecifier()
                self.state = 2004
                self.match(CPP14Parser.Colon)
                self.state = 2006
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Constexpr) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.Explicit) | (1 << CPP14Parser.Extern) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Friend) | (1 << CPP14Parser.Inline) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.Mutable) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Private) | (1 << CPP14Parser.Protected) | (1 << CPP14Parser.Public) | (1 << CPP14Parser.Register) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Static) | (1 << CPP14Parser.Static_assert))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (CPP14Parser.Struct - 65)) | (1 << (CPP14Parser.Template - 65)) | (1 << (CPP14Parser.Thread_local - 65)) | (1 << (CPP14Parser.Typedef - 65)) | (1 << (CPP14Parser.Typename_ - 65)) | (1 << (CPP14Parser.Union - 65)) | (1 << (CPP14Parser.Unsigned - 65)) | (1 << (CPP14Parser.Using - 65)) | (1 << (CPP14Parser.Virtual - 65)) | (1 << (CPP14Parser.Void - 65)) | (1 << (CPP14Parser.Volatile - 65)) | (1 << (CPP14Parser.Wchar - 65)) | (1 << (CPP14Parser.LeftParen - 65)) | (1 << (CPP14Parser.LeftBracket - 65)) | (1 << (CPP14Parser.Star - 65)) | (1 << (CPP14Parser.And - 65)) | (1 << (CPP14Parser.Tilde - 65)) | (1 << (CPP14Parser.Colon - 65)) | (1 << (CPP14Parser.Doublecolon - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (CPP14Parser.Semi - 129)) | (1 << (CPP14Parser.Ellipsis - 129)) | (1 << (CPP14Parser.Identifier - 129)))) != 0):
                    self.state = 2005
                    self.memberspecification()

                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class MemberdeclarationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Semi(self):
return self.getToken(CPP14Parser.Semi, 0)
def attributespecifierseq(self):
return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)
def declspecifierseq(self):
return self.getTypedRuleContext(CPP14Parser.DeclspecifierseqContext,0)
def memberdeclaratorlist(self):
return self.getTypedRuleContext(CPP14Parser.MemberdeclaratorlistContext,0)
def functiondefinition(self):
return self.getTypedRuleContext(CPP14Parser.FunctiondefinitionContext,0)
def usingdeclaration(self):
return self.getTypedRuleContext(CPP14Parser.UsingdeclarationContext,0)
def static_assertdeclaration(self):
return self.getTypedRuleContext(CPP14Parser.Static_assertdeclarationContext,0)
def templatedeclaration(self):
return self.getTypedRuleContext(CPP14Parser.TemplatedeclarationContext,0)
def aliasdeclaration(self):
return self.getTypedRuleContext(CPP14Parser.AliasdeclarationContext,0)
def emptydeclaration(self):
return self.getTypedRuleContext(CPP14Parser.EmptydeclarationContext,0)
def getRuleIndex(self):
return CPP14Parser.RULE_memberdeclaration
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterMemberdeclaration" ):
listener.enterMemberdeclaration(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitMemberdeclaration" ):
listener.exitMemberdeclaration(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitMemberdeclaration" ):
return visitor.visitMemberdeclaration(self)
else:
return visitor.visitChildren(self)
    def memberdeclaration(self):
        """Parse the `memberdeclaration` rule.

        Seven alternatives (chosen by adaptive prediction, decision 249):
            1. attributespecifierseq? declspecifierseq?
               memberdeclaratorlist? ';'
            2. functiondefinition
            3. usingdeclaration
            4. static_assertdeclaration
            5. templatedeclaration
            6. aliasdeclaration
            7. emptydeclaration

        ANTLR-generated; state numbers and the lookahead bitmask must not
        be edited by hand.
        """
        localctx = CPP14Parser.MemberdeclarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 304, self.RULE_memberdeclaration)
        self._la = 0 # Token type
        try:
            self.state = 2026
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,249,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2011
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,246,self._ctx)
                if la_ == 1:
                    self.state = 2010
                    self.attributespecifierseq(0)

                self.state = 2014
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,247,self._ctx)
                if la_ == 1:
                    self.state = 2013
                    self.declspecifierseq()

                self.state = 2017
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__2) | (1 << CPP14Parser.Alignas) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Operator))) != 0) or ((((_la - 84)) & ~0x3f) == 0 and ((1 << (_la - 84)) & ((1 << (CPP14Parser.LeftParen - 84)) | (1 << (CPP14Parser.LeftBracket - 84)) | (1 << (CPP14Parser.Star - 84)) | (1 << (CPP14Parser.And - 84)) | (1 << (CPP14Parser.Tilde - 84)) | (1 << (CPP14Parser.Colon - 84)) | (1 << (CPP14Parser.Doublecolon - 84)) | (1 << (CPP14Parser.Ellipsis - 84)) | (1 << (CPP14Parser.Identifier - 84)))) != 0):
                    self.state = 2016
                    self.memberdeclaratorlist(0)

                self.state = 2019
                self.match(CPP14Parser.Semi)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2020
                self.functiondefinition()
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 2021
                self.usingdeclaration()
                pass
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 2022
                self.static_assertdeclaration()
                pass
            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 2023
                self.templatedeclaration()
                pass
            elif la_ == 6:
                self.enterOuterAlt(localctx, 6)
                self.state = 2024
                self.aliasdeclaration()
                pass
            elif la_ == 7:
                self.enterOuterAlt(localctx, 7)
                self.state = 2025
                self.emptydeclaration()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class MemberdeclaratorlistContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def memberdeclarator(self):
return self.getTypedRuleContext(CPP14Parser.MemberdeclaratorContext,0)
def memberdeclaratorlist(self):
return self.getTypedRuleContext(CPP14Parser.MemberdeclaratorlistContext,0)
def Comma(self):
return self.getToken(CPP14Parser.Comma, 0)
def getRuleIndex(self):
return CPP14Parser.RULE_memberdeclaratorlist
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterMemberdeclaratorlist" ):
listener.enterMemberdeclaratorlist(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitMemberdeclaratorlist" ):
listener.exitMemberdeclaratorlist(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitMemberdeclaratorlist" ):
return visitor.visitMemberdeclaratorlist(self)
else:
return visitor.visitChildren(self)
    def memberdeclaratorlist(self, _p:int=0):
        """Parse the left-recursive `memberdeclaratorlist` rule.

        Effectively: memberdeclarator (',' memberdeclarator)* implemented
        with ANTLR's recursion-context machinery; `_p` is the precedence
        threshold for `precpred`. Generated code — do not edit by hand.
        """
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.MemberdeclaratorlistContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 306
        self.enterRecursionRule(localctx, 306, self.RULE_memberdeclaratorlist, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2029
            self.memberdeclarator()
            self._ctx.stop = self._input.LT(-1)
            # Tail: zero or more ',' memberdeclarator repetitions.
            self.state = 2036
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,250,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.MemberdeclaratorlistContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_memberdeclaratorlist)
                    self.state = 2031
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 2032
                    self.match(CPP14Parser.Comma)
                    self.state = 2033
                    self.memberdeclarator()
                self.state = 2038
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,250,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class MemberdeclaratorContext(ParserRuleContext):
        # Parse-tree context for the 'memberdeclarator' rule: a declarator with
        # optional virt-specifiers / pure-specifier / initializer, or a
        # bit-field ('identifier? attributes? : constantexpression').
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def declarator(self):
            return self.getTypedRuleContext(CPP14Parser.DeclaratorContext,0)

        def virtspecifierseq(self):
            return self.getTypedRuleContext(CPP14Parser.VirtspecifierseqContext,0)

        def purespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.PurespecifierContext,0)

        def braceorequalinitializer(self):
            return self.getTypedRuleContext(CPP14Parser.BraceorequalinitializerContext,0)

        def Colon(self):
            return self.getToken(CPP14Parser.Colon, 0)

        def constantexpression(self):
            # Bit-field width expression (after ':').
            return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

        def Identifier(self):
            return self.getToken(CPP14Parser.Identifier, 0)

        def attributespecifierseq(self):
            return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_memberdeclarator

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterMemberdeclarator" ):
                listener.enterMemberdeclarator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitMemberdeclarator" ):
                listener.exitMemberdeclarator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitMemberdeclarator" ):
                return visitor.visitMemberdeclarator(self)
            else:
                return visitor.visitChildren(self)
    def memberdeclarator(self):
        # Parse the 'memberdeclarator' rule (rule index 308).  Three
        # alternatives, chosen by adaptive prediction (decision 256):
        #   1) declarator virtspecifierseq? purespecifier?
        #   2) declarator braceorequalinitializer?
        #   3) identifier? attributespecifierseq? ':' constantexpression
        # Generated by ANTLR — state numbers index the serialized ATN.
        localctx = CPP14Parser.MemberdeclaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 308, self.RULE_memberdeclarator)
        self._la = 0 # Token type
        try:
            self.state = 2058
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,256,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2039
                self.declarator()
                self.state = 2041
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,251,self._ctx)
                if la_ == 1:
                    self.state = 2040
                    self.virtspecifierseq(0)

                self.state = 2044
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,252,self._ctx)
                if la_ == 1:
                    self.state = 2043
                    self.purespecifier()

                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2046
                self.declarator()
                self.state = 2048
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,253,self._ctx)
                if la_ == 1:
                    self.state = 2047
                    self.braceorequalinitializer()

                pass

            elif la_ == 3:
                # Bit-field member.
                self.enterOuterAlt(localctx, 3)
                self.state = 2051
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Identifier:
                    self.state = 2050
                    self.match(CPP14Parser.Identifier)

                self.state = 2054
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 2053
                    self.attributespecifierseq(0)

                self.state = 2056
                self.match(CPP14Parser.Colon)
                self.state = 2057
                self.constantexpression()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class VirtspecifierseqContext(ParserRuleContext):
        # Parse-tree context for the left-recursive 'virtspecifierseq' rule:
        # one or more virt-specifiers ('override' / 'final').  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def virtspecifier(self):
            return self.getTypedRuleContext(CPP14Parser.VirtspecifierContext,0)

        def virtspecifierseq(self):
            # Left-recursive child for the repetition alternative.
            return self.getTypedRuleContext(CPP14Parser.VirtspecifierseqContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_virtspecifierseq

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterVirtspecifierseq" ):
                listener.enterVirtspecifierseq(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitVirtspecifierseq" ):
                listener.exitVirtspecifierseq(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitVirtspecifierseq" ):
                return visitor.visitVirtspecifierseq(self)
            else:
                return visitor.visitChildren(self)
    def virtspecifierseq(self, _p:int=0):
        # Parse the left-recursive 'virtspecifierseq' rule: virtspecifier+.
        # _p is the minimum precedence for the precedence-climbing loop.
        # Generated by ANTLR — state numbers index the serialized ATN.
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.VirtspecifierseqContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 310
        self.enterRecursionRule(localctx, 310, self.RULE_virtspecifierseq, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2061
            self.virtspecifier()
            self._ctx.stop = self._input.LT(-1)
            self.state = 2067
            self._errHandler.sync(self)
            # Decision 257: does another virt-specifier follow?
            _alt = self._interp.adaptivePredict(self._input,257,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.VirtspecifierseqContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_virtspecifierseq)
                    self.state = 2063
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 2064
                    self.virtspecifier()
                self.state = 2069
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,257,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class VirtspecifierContext(ParserRuleContext):
        # Parse-tree context for the 'virtspecifier' rule: either 'override'
        # or 'final'.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def Override(self):
            return self.getToken(CPP14Parser.Override, 0)

        def Final(self):
            return self.getToken(CPP14Parser.Final, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_virtspecifier

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterVirtspecifier" ):
                listener.enterVirtspecifier(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitVirtspecifier" ):
                listener.exitVirtspecifier(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitVirtspecifier" ):
                return visitor.visitVirtspecifier(self)
            else:
                return visitor.visitChildren(self)
    def virtspecifier(self):
        # Parse the 'virtspecifier' rule (rule index 312): match a single
        # 'final' or 'override' token.  Generated by ANTLR.
        localctx = CPP14Parser.VirtspecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 312, self.RULE_virtspecifier)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2070
            _la = self._input.LA(1)
            if not(_la==CPP14Parser.Final or _la==CPP14Parser.Override):
                # Neither keyword present — attempt single-token recovery.
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PurespecifierContext(ParserRuleContext):
        # Parse-tree context for the 'purespecifier' rule ('= 0' marking a
        # pure virtual member function).  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            # 'val' holds the matched Octalliteral token; the rule's embedded
            # action checks that its text is exactly "0".
            self.val = None # Token

        def Assign(self):
            return self.getToken(CPP14Parser.Assign, 0)

        def Octalliteral(self):
            return self.getToken(CPP14Parser.Octalliteral, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_purespecifier

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPurespecifier" ):
                listener.enterPurespecifier(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPurespecifier" ):
                listener.exitPurespecifier(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitPurespecifier" ):
                return visitor.visitPurespecifier(self)
            else:
                return visitor.visitChildren(self)
def purespecifier(self):
localctx = CPP14Parser.PurespecifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 314, self.RULE_purespecifier)
try:
self.enterOuterAlt(localctx, 1)
self.state = 2072
self.match(CPP14Parser.Assign)
self.state = 2073
localctx.val = self.match(CPP14Parser.Octalliteral)
if (None if localctx.val is None else localctx.val.text).compareTo("0")!=0: raise InputMismatchException(this)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
    class BaseclauseContext(ParserRuleContext):
        # Parse-tree context for the 'baseclause' rule:
        # ':' basespecifierlist (a class's base-class list).  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def Colon(self):
            return self.getToken(CPP14Parser.Colon, 0)

        def basespecifierlist(self):
            return self.getTypedRuleContext(CPP14Parser.BasespecifierlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_baseclause

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterBaseclause" ):
                listener.enterBaseclause(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitBaseclause" ):
                listener.exitBaseclause(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitBaseclause" ):
                return visitor.visitBaseclause(self)
            else:
                return visitor.visitChildren(self)
    def baseclause(self):
        # Parse the 'baseclause' rule (rule index 316): ':' basespecifierlist.
        # Generated by ANTLR.
        localctx = CPP14Parser.BaseclauseContext(self, self._ctx, self.state)
        self.enterRule(localctx, 316, self.RULE_baseclause)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2076
            self.match(CPP14Parser.Colon)
            self.state = 2077
            self.basespecifierlist(0)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class BasespecifierlistContext(ParserRuleContext):
        # Parse-tree context for the left-recursive 'basespecifierlist' rule:
        # basespecifier '...'? (',' basespecifier '...'?)*.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def basespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.BasespecifierContext,0)

        def Ellipsis(self):
            # Pack-expansion marker after a base specifier.
            return self.getToken(CPP14Parser.Ellipsis, 0)

        def basespecifierlist(self):
            # Left-recursive child for the comma-separated-list alternative.
            return self.getTypedRuleContext(CPP14Parser.BasespecifierlistContext,0)

        def Comma(self):
            return self.getToken(CPP14Parser.Comma, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_basespecifierlist

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterBasespecifierlist" ):
                listener.enterBasespecifierlist(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitBasespecifierlist" ):
                listener.exitBasespecifierlist(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitBasespecifierlist" ):
                return visitor.visitBasespecifierlist(self)
            else:
                return visitor.visitChildren(self)
    def basespecifierlist(self, _p:int=0):
        # Parse the left-recursive 'basespecifierlist' rule:
        #   basespecifier '...'? (',' basespecifier '...'?)*
        # _p is the minimum precedence for the precedence-climbing loop.
        # Generated by ANTLR — state numbers index the serialized ATN.
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CPP14Parser.BasespecifierlistContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 318
        self.enterRecursionRule(localctx, 318, self.RULE_basespecifierlist, _p)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2080
            self.basespecifier()
            self.state = 2082
            self._errHandler.sync(self)
            # Decision 258: optional pack-expansion '...' after the first base.
            la_ = self._interp.adaptivePredict(self._input,258,self._ctx)
            if la_ == 1:
                self.state = 2081
                self.match(CPP14Parser.Ellipsis)

            self._ctx.stop = self._input.LT(-1)
            self.state = 2092
            self._errHandler.sync(self)
            # Decision 260: does another ',' basespecifier repetition follow?
            _alt = self._interp.adaptivePredict(self._input,260,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    localctx = CPP14Parser.BasespecifierlistContext(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_basespecifierlist)
                    self.state = 2084
                    if not self.precpred(self._ctx, 1):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                    self.state = 2085
                    self.match(CPP14Parser.Comma)
                    self.state = 2086
                    self.basespecifier()
                    self.state = 2088
                    self._errHandler.sync(self)
                    # Decision 259: optional '...' after this base specifier.
                    la_ = self._interp.adaptivePredict(self._input,259,self._ctx)
                    if la_ == 1:
                        self.state = 2087
                        self.match(CPP14Parser.Ellipsis)

                self.state = 2094
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,260,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
    class BasespecifierContext(ParserRuleContext):
        # Parse-tree context for the 'basespecifier' rule: a single base class
        # with optional attributes, 'virtual', and access specifier.
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def basetypespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.BasetypespecifierContext,0)

        def attributespecifierseq(self):
            return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

        def Virtual(self):
            return self.getToken(CPP14Parser.Virtual, 0)

        def accessspecifier(self):
            return self.getTypedRuleContext(CPP14Parser.AccessspecifierContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_basespecifier

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterBasespecifier" ):
                listener.enterBasespecifier(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitBasespecifier" ):
                listener.exitBasespecifier(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitBasespecifier" ):
                return visitor.visitBasespecifier(self)
            else:
                return visitor.visitChildren(self)
    def basespecifier(self):
        # Parse the 'basespecifier' rule (rule index 320).  Three alternatives,
        # chosen by adaptive prediction (decision 266):
        #   1) attributes? basetypespecifier
        #   2) attributes? 'virtual' accessspecifier? basetypespecifier
        #   3) attributes? accessspecifier 'virtual'? basetypespecifier
        # Generated by ANTLR — state numbers index the serialized ATN.
        localctx = CPP14Parser.BasespecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 320, self.RULE_basespecifier)
        self._la = 0 # Token type
        try:
            self.state = 2116
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,266,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2096
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Attributes start with 'alignas' or '[['.
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 2095
                    self.attributespecifierseq(0)

                self.state = 2098
                self.basetypespecifier()
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2100
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 2099
                    self.attributespecifierseq(0)

                self.state = 2102
                self.match(CPP14Parser.Virtual)
                self.state = 2104
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional access specifier ('private' / 'protected' / 'public').
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.Private) | (1 << CPP14Parser.Protected) | (1 << CPP14Parser.Public))) != 0):
                    self.state = 2103
                    self.accessspecifier()

                self.state = 2106
                self.basetypespecifier()
                pass

            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 2108
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                    self.state = 2107
                    self.attributespecifierseq(0)

                self.state = 2110
                self.accessspecifier()
                self.state = 2112
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Virtual:
                    self.state = 2111
                    self.match(CPP14Parser.Virtual)

                self.state = 2114
                self.basetypespecifier()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ClassordecltypeContext(ParserRuleContext):
        # Parse-tree context for the 'classordecltype' rule: either a
        # (possibly qualified) class name or a decltype specifier.
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def classname(self):
            return self.getTypedRuleContext(CPP14Parser.ClassnameContext,0)

        def nestednamespecifier(self):
            # Optional qualifier ('ns::') preceding the class name.
            return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext,0)

        def decltypespecifier(self):
            return self.getTypedRuleContext(CPP14Parser.DecltypespecifierContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_classordecltype

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterClassordecltype" ):
                listener.enterClassordecltype(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitClassordecltype" ):
                listener.exitClassordecltype(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitClassordecltype" ):
                return visitor.visitClassordecltype(self)
            else:
                return visitor.visitChildren(self)
    def classordecltype(self):
        # Parse the 'classordecltype' rule (rule index 322).  Two alternatives
        # (decision 268):
        #   1) nestednamespecifier? classname
        #   2) decltypespecifier
        # Generated by ANTLR.
        localctx = CPP14Parser.ClassordecltypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 322, self.RULE_classordecltype)
        try:
            self.state = 2123
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,268,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2119
                self._errHandler.sync(self)
                # Decision 267: optional nested-name qualifier.
                la_ = self._interp.adaptivePredict(self._input,267,self._ctx)
                if la_ == 1:
                    self.state = 2118
                    self.nestednamespecifier(0)

                self.state = 2121
                self.classname()
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2122
                self.decltypespecifier()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class BasetypespecifierContext(ParserRuleContext):
        # Parse-tree context for the 'basetypespecifier' rule, which simply
        # wraps a classordecltype.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def classordecltype(self):
            return self.getTypedRuleContext(CPP14Parser.ClassordecltypeContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_basetypespecifier

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterBasetypespecifier" ):
                listener.enterBasetypespecifier(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitBasetypespecifier" ):
                listener.exitBasetypespecifier(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitBasetypespecifier" ):
                return visitor.visitBasetypespecifier(self)
            else:
                return visitor.visitChildren(self)
    def basetypespecifier(self):
        # Parse the 'basetypespecifier' rule (rule index 324): delegates
        # directly to classordecltype.  Generated by ANTLR.
        localctx = CPP14Parser.BasetypespecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 324, self.RULE_basetypespecifier)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2125
            self.classordecltype()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class AccessspecifierContext(ParserRuleContext):
        # Parse-tree context for the 'accessspecifier' rule: one of 'private',
        # 'protected', or 'public'.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def Private(self):
            return self.getToken(CPP14Parser.Private, 0)

        def Protected(self):
            return self.getToken(CPP14Parser.Protected, 0)

        def Public(self):
            return self.getToken(CPP14Parser.Public, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_accessspecifier

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterAccessspecifier" ):
                listener.enterAccessspecifier(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitAccessspecifier" ):
                listener.exitAccessspecifier(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitAccessspecifier" ):
                return visitor.visitAccessspecifier(self)
            else:
                return visitor.visitChildren(self)
    def accessspecifier(self):
        # Parse the 'accessspecifier' rule (rule index 326): match exactly one
        # of 'private' / 'protected' / 'public' via a token-set bit mask.
        # Generated by ANTLR.
        localctx = CPP14Parser.AccessspecifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 326, self.RULE_accessspecifier)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2127
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.Private) | (1 << CPP14Parser.Protected) | (1 << CPP14Parser.Public))) != 0)):
                # No access keyword present — attempt single-token recovery.
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ConversionfunctionidContext(ParserRuleContext):
        # Parse-tree context for the 'conversionfunctionid' rule:
        # 'operator' conversiontypeid (a conversion-operator name).
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def Operator(self):
            return self.getToken(CPP14Parser.Operator, 0)

        def conversiontypeid(self):
            return self.getTypedRuleContext(CPP14Parser.ConversiontypeidContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_conversionfunctionid

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterConversionfunctionid" ):
                listener.enterConversionfunctionid(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitConversionfunctionid" ):
                listener.exitConversionfunctionid(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitConversionfunctionid" ):
                return visitor.visitConversionfunctionid(self)
            else:
                return visitor.visitChildren(self)
    def conversionfunctionid(self):
        # Parse the 'conversionfunctionid' rule (rule index 328):
        # 'operator' conversiontypeid.  Generated by ANTLR.
        localctx = CPP14Parser.ConversionfunctionidContext(self, self._ctx, self.state)
        self.enterRule(localctx, 328, self.RULE_conversionfunctionid)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2129
            self.match(CPP14Parser.Operator)
            self.state = 2130
            self.conversiontypeid()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ConversiontypeidContext(ParserRuleContext):
        # Parse-tree context for the 'conversiontypeid' rule:
        # typespecifierseq conversiondeclarator?.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def typespecifierseq(self):
            return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)

        def conversiondeclarator(self):
            return self.getTypedRuleContext(CPP14Parser.ConversiondeclaratorContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_conversiontypeid

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterConversiontypeid" ):
                listener.enterConversiontypeid(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitConversiontypeid" ):
                listener.exitConversiontypeid(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitConversiontypeid" ):
                return visitor.visitConversiontypeid(self)
            else:
                return visitor.visitChildren(self)
    def conversiontypeid(self):
        # Parse the 'conversiontypeid' rule (rule index 330):
        # typespecifierseq conversiondeclarator?.  Generated by ANTLR.
        localctx = CPP14Parser.ConversiontypeidContext(self, self._ctx, self.state)
        self.enterRule(localctx, 330, self.RULE_conversiontypeid)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2132
            self.typespecifierseq()
            self.state = 2134
            self._errHandler.sync(self)
            # Decision 269: optional trailing conversion declarator (ptr ops).
            la_ = self._interp.adaptivePredict(self._input,269,self._ctx)
            if la_ == 1:
                self.state = 2133
                self.conversiondeclarator()

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ConversiondeclaratorContext(ParserRuleContext):
        # Parse-tree context for the 'conversiondeclarator' rule:
        # ptroperator conversiondeclarator? (right-recursive chain of pointer
        # operators).  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def ptroperator(self):
            return self.getTypedRuleContext(CPP14Parser.PtroperatorContext,0)

        def conversiondeclarator(self):
            return self.getTypedRuleContext(CPP14Parser.ConversiondeclaratorContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_conversiondeclarator

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterConversiondeclarator" ):
                listener.enterConversiondeclarator(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitConversiondeclarator" ):
                listener.exitConversiondeclarator(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitConversiondeclarator" ):
                return visitor.visitConversiondeclarator(self)
            else:
                return visitor.visitChildren(self)
    def conversiondeclarator(self):
        # Parse the 'conversiondeclarator' rule (rule index 332):
        # ptroperator conversiondeclarator? — recursion builds the chain of
        # pointer/reference operators.  Generated by ANTLR.
        localctx = CPP14Parser.ConversiondeclaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 332, self.RULE_conversiondeclarator)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2136
            self.ptroperator()
            self.state = 2138
            self._errHandler.sync(self)
            # Decision 270: does another pointer operator follow?
            la_ = self._interp.adaptivePredict(self._input,270,self._ctx)
            if la_ == 1:
                self.state = 2137
                self.conversiondeclarator()

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class CtorinitializerContext(ParserRuleContext):
        # Parse-tree context for the 'ctorinitializer' rule:
        # ':' meminitializerlist (a constructor's member-initializer list).
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def Colon(self):
            return self.getToken(CPP14Parser.Colon, 0)

        def meminitializerlist(self):
            return self.getTypedRuleContext(CPP14Parser.MeminitializerlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_ctorinitializer

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterCtorinitializer" ):
                listener.enterCtorinitializer(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitCtorinitializer" ):
                listener.exitCtorinitializer(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitCtorinitializer" ):
                return visitor.visitCtorinitializer(self)
            else:
                return visitor.visitChildren(self)
    def ctorinitializer(self):
        # Parse the 'ctorinitializer' rule (rule index 334):
        # ':' meminitializerlist.  Generated by ANTLR.
        localctx = CPP14Parser.CtorinitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 334, self.RULE_ctorinitializer)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 2140
            self.match(CPP14Parser.Colon)
            self.state = 2141
            self.meminitializerlist()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class MeminitializerlistContext(ParserRuleContext):
        # Parse-tree context for the right-recursive 'meminitializerlist' rule:
        # meminitializer '...'? (',' meminitializerlist)?.  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def meminitializer(self):
            return self.getTypedRuleContext(CPP14Parser.MeminitializerContext,0)

        def Ellipsis(self):
            # Pack-expansion marker after a member initializer.
            return self.getToken(CPP14Parser.Ellipsis, 0)

        def Comma(self):
            return self.getToken(CPP14Parser.Comma, 0)

        def meminitializerlist(self):
            # Right-recursive child for the rest of the list.
            return self.getTypedRuleContext(CPP14Parser.MeminitializerlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_meminitializerlist

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterMeminitializerlist" ):
                listener.enterMeminitializerlist(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitMeminitializerlist" ):
                listener.exitMeminitializerlist(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitMeminitializerlist" ):
                return visitor.visitMeminitializerlist(self)
            else:
                return visitor.visitChildren(self)
    def meminitializerlist(self):
        # Parse the 'meminitializerlist' rule (rule index 336).  Two
        # alternatives (decision 273):
        #   1) meminitializer '...'?                      (last element)
        #   2) meminitializer '...'? ',' meminitializerlist  (recursion)
        # Generated by ANTLR — state numbers index the serialized ATN.
        localctx = CPP14Parser.MeminitializerlistContext(self, self._ctx, self.state)
        self.enterRule(localctx, 336, self.RULE_meminitializerlist)
        self._la = 0 # Token type
        try:
            self.state = 2154
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,273,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2143
                self.meminitializer()
                self.state = 2145
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Ellipsis:
                    self.state = 2144
                    self.match(CPP14Parser.Ellipsis)

                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2147
                self.meminitializer()
                self.state = 2149
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CPP14Parser.Ellipsis:
                    self.state = 2148
                    self.match(CPP14Parser.Ellipsis)

                self.state = 2151
                self.match(CPP14Parser.Comma)
                self.state = 2152
                self.meminitializerlist()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class MeminitializerContext(ParserRuleContext):
        # Parse-tree context for the 'meminitializer' rule: a member or base
        # initializer — 'id ( expressionlist? )' or 'id bracedinitlist'.
        # Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def meminitializerid(self):
            return self.getTypedRuleContext(CPP14Parser.MeminitializeridContext,0)

        def LeftParen(self):
            return self.getToken(CPP14Parser.LeftParen, 0)

        def RightParen(self):
            return self.getToken(CPP14Parser.RightParen, 0)

        def expressionlist(self):
            return self.getTypedRuleContext(CPP14Parser.ExpressionlistContext,0)

        def bracedinitlist(self):
            return self.getTypedRuleContext(CPP14Parser.BracedinitlistContext,0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_meminitializer

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterMeminitializer" ):
                listener.enterMeminitializer(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitMeminitializer" ):
                listener.exitMeminitializer(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitMeminitializer" ):
                return visitor.visitMeminitializer(self)
            else:
                return visitor.visitChildren(self)
    def meminitializer(self):
        # Parse the 'meminitializer' rule (rule index 338).  Two alternatives
        # (decision 275):
        #   1) meminitializerid '(' expressionlist? ')'
        #   2) meminitializerid bracedinitlist
        # The large bit-mask below is the ANTLR-computed FIRST set of
        # 'expressionlist', split into three 64-bit words by token index.
        # Generated by ANTLR — do not edit the mask by hand.
        localctx = CPP14Parser.MeminitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 338, self.RULE_meminitializer)
        self._la = 0 # Token type
        try:
            self.state = 2166
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,275,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 2156
                self.meminitializerid()
                self.state = 2157
                self.match(CPP14Parser.LeftParen)
                self.state = 2159
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.Throw - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.LeftBrace - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                    self.state = 2158
                    self.expressionlist()

                self.state = 2161
                self.match(CPP14Parser.RightParen)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 2163
                self.meminitializerid()
                self.state = 2164
                self.bracedinitlist()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class MeminitializeridContext(ParserRuleContext):
        # Parse-tree context for the 'meminitializerid' rule: the target of a
        # member initializer — a class-or-decltype (base class) or a plain
        # identifier (data member).  Generated by ANTLR.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def classordecltype(self):
            return self.getTypedRuleContext(CPP14Parser.ClassordecltypeContext,0)

        def Identifier(self):
            return self.getToken(CPP14Parser.Identifier, 0)

        def getRuleIndex(self):
            return CPP14Parser.RULE_meminitializerid

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterMeminitializerid" ):
                listener.enterMeminitializerid(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitMeminitializerid" ):
                listener.exitMeminitializerid(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitMeminitializerid" ):
                return visitor.visitMeminitializerid(self)
            else:
                return visitor.visitChildren(self)
def meminitializerid(self):
    """Parse the 'meminitializerid' rule.

    Two alternatives (chosen by adaptive prediction, decision 276):
      1. classordecltype
      2. a bare Identifier
    Returns the populated MeminitializeridContext.
    """
    localctx = CPP14Parser.MeminitializeridContext(self, self._ctx, self.state)
    self.enterRule(localctx, 340, self.RULE_meminitializerid)
    try:
        self.state = 2170
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,276,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2168
            self.classordecltype()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2169
            self.match(CPP14Parser.Identifier)
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OperatorfunctionidContext(ParserRuleContext):
    """Parse-tree node for the 'operatorfunctionid' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Operator(self):
        # Terminal accessor.
        return self.getToken(CPP14Parser.Operator, 0)

    def theoperator(self):
        # Child-rule accessor.
        return self.getTypedRuleContext(CPP14Parser.TheoperatorContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_operatorfunctionid

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterOperatorfunctionid"):
            listener.enterOperatorfunctionid(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitOperatorfunctionid"):
            listener.exitOperatorfunctionid(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitOperatorfunctionid"):
            return visitor.visitOperatorfunctionid(self)
        return visitor.visitChildren(self)
def operatorfunctionid(self):
    """Parse the 'operatorfunctionid' rule: the keyword 'operator'
    followed by an operator symbol (theoperator)."""
    localctx = CPP14Parser.OperatorfunctionidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 342, self.RULE_operatorfunctionid)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2172
        self.match(CPP14Parser.Operator)
        self.state = 2173
        self.theoperator()
    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LiteraloperatoridContext(ParserRuleContext):
    """Parse-tree node for the 'literaloperatorid' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Terminal accessors.
    def Operator(self):
        return self.getToken(CPP14Parser.Operator, 0)

    def Stringliteral(self):
        return self.getToken(CPP14Parser.Stringliteral, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Userdefinedstringliteral(self):
        return self.getToken(CPP14Parser.Userdefinedstringliteral, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_literaloperatorid

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterLiteraloperatorid"):
            listener.enterLiteraloperatorid(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitLiteraloperatorid"):
            listener.exitLiteraloperatorid(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitLiteraloperatorid"):
            return visitor.visitLiteraloperatorid(self)
        return visitor.visitChildren(self)
def literaloperatorid(self):
    """Parse the 'literaloperatorid' rule.

    Two alternatives (adaptive prediction, decision 277):
      1. 'operator' Stringliteral Identifier
      2. 'operator' Userdefinedstringliteral
    """
    localctx = CPP14Parser.LiteraloperatoridContext(self, self._ctx, self.state)
    self.enterRule(localctx, 344, self.RULE_literaloperatorid)
    try:
        self.state = 2180
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,277,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2175
            self.match(CPP14Parser.Operator)
            self.state = 2176
            self.match(CPP14Parser.Stringliteral)
            self.state = 2177
            self.match(CPP14Parser.Identifier)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2178
            self.match(CPP14Parser.Operator)
            self.state = 2179
            self.match(CPP14Parser.Userdefinedstringliteral)
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TemplatedeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'templatedeclaration' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Terminal accessors.
    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    # Child-rule accessors.
    def templateparameterlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateparameterlistContext, 0)

    def declaration(self):
        return self.getTypedRuleContext(CPP14Parser.DeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templatedeclaration

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplatedeclaration"):
            listener.enterTemplatedeclaration(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplatedeclaration"):
            listener.exitTemplatedeclaration(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplatedeclaration"):
            return visitor.visitTemplatedeclaration(self)
        return visitor.visitChildren(self)
def templatedeclaration(self):
    """Parse the 'templatedeclaration' rule:
    'template' '<' templateparameterlist '>' declaration."""
    localctx = CPP14Parser.TemplatedeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 346, self.RULE_templatedeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2182
        self.match(CPP14Parser.Template)
        self.state = 2183
        self.match(CPP14Parser.Less)
        self.state = 2184
        # templateparameterlist is left-recursive; 0 is the starting precedence.
        self.templateparameterlist(0)
        self.state = 2185
        self.match(CPP14Parser.Greater)
        self.state = 2186
        self.declaration()
    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TemplateparameterlistContext(ParserRuleContext):
    """Parse-tree node for the 'templateparameterlist' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def templateparameter(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateparameterContext, 0)

    def templateparameterlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateparameterlistContext, 0)

    # Terminal accessor.
    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templateparameterlist

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplateparameterlist"):
            listener.enterTemplateparameterlist(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplateparameterlist"):
            listener.exitTemplateparameterlist(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplateparameterlist"):
            return visitor.visitTemplateparameterlist(self)
        return visitor.visitChildren(self)
def templateparameterlist(self, _p:int=0):
    """Parse the left-recursive 'templateparameterlist' rule:
    templateparameter (',' templateparameter)*.

    _p is the precedence level used by ANTLR's left-recursion elimination.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.TemplateparameterlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 348
    self.enterRecursionRule(localctx, 348, self.RULE_templateparameterlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2189
        self.templateparameter()
        self._ctx.stop = self._input.LT(-1)
        self.state = 2196
        self._errHandler.sync(self)
        # Decision 278: keep consuming ',' templateparameter repetitions.
        _alt = self._interp.adaptivePredict(self._input,278,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # Wrap the list parsed so far in a new recursion context.
                localctx = CPP14Parser.TemplateparameterlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_templateparameterlist)
                self.state = 2191
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 2192
                self.match(CPP14Parser.Comma)
                self.state = 2193
                self.templateparameter()
            self.state = 2198
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,278,self._ctx)

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class TemplateparameterContext(ParserRuleContext):
    """Parse-tree node for the 'templateparameter' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def typeparameter(self):
        return self.getTypedRuleContext(CPP14Parser.TypeparameterContext, 0)

    def parameterdeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.ParameterdeclarationContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templateparameter

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplateparameter"):
            listener.enterTemplateparameter(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplateparameter"):
            listener.exitTemplateparameter(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplateparameter"):
            return visitor.visitTemplateparameter(self)
        return visitor.visitChildren(self)
def templateparameter(self):
    """Parse the 'templateparameter' rule.

    Two alternatives (adaptive prediction, decision 279):
      1. typeparameter
      2. parameterdeclaration
    """
    localctx = CPP14Parser.TemplateparameterContext(self, self._ctx, self.state)
    self.enterRule(localctx, 350, self.RULE_templateparameter)
    try:
        self.state = 2201
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,279,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2199
            self.typeparameter()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2200
            self.parameterdeclaration()
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeparameterContext(ParserRuleContext):
    """Parse-tree node for the 'typeparameter' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Terminal accessors.
    def Class(self):
        return self.getToken(CPP14Parser.Class, 0)

    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def Typename_(self):
        return self.getToken(CPP14Parser.Typename_, 0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    # Child-rule accessors.
    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext, 0)

    def templateparameterlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateparameterlistContext, 0)

    def idexpression(self):
        return self.getTypedRuleContext(CPP14Parser.IdexpressionContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typeparameter

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTypeparameter"):
            listener.enterTypeparameter(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTypeparameter"):
            listener.exitTypeparameter(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTypeparameter"):
            return visitor.visitTypeparameter(self)
        return visitor.visitChildren(self)
def typeparameter(self):
    """Parse the 'typeparameter' rule.

    Six alternatives (adaptive prediction, decision 289):
      1. 'class' '...'? Identifier?
      2. 'class' Identifier? '=' thetypeid
      3. 'typename' '...'? Identifier?
      4. 'typename' Identifier? '=' thetypeid
      5. 'template' '<' templateparameterlist '>' 'class' '...'? Identifier?
      6. 'template' '<' templateparameterlist '>' 'class' Identifier? '=' idexpression
    """
    localctx = CPP14Parser.TypeparameterContext(self, self._ctx, self.state)
    self.enterRule(localctx, 352, self.RULE_typeparameter)
    self._la = 0 # Token type
    try:
        self.state = 2251
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,289,self._ctx)
        if la_ == 1:
            # Alt 1: 'class' '...'? Identifier?
            self.enterOuterAlt(localctx, 1)
            self.state = 2203
            self.match(CPP14Parser.Class)
            self.state = 2205
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,280,self._ctx)
            if la_ == 1:
                self.state = 2204
                self.match(CPP14Parser.Ellipsis)

            self.state = 2208
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,281,self._ctx)
            if la_ == 1:
                self.state = 2207
                self.match(CPP14Parser.Identifier)

            pass

        elif la_ == 2:
            # Alt 2: 'class' Identifier? '=' thetypeid
            self.enterOuterAlt(localctx, 2)
            self.state = 2210
            self.match(CPP14Parser.Class)
            self.state = 2212
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Identifier:
                self.state = 2211
                self.match(CPP14Parser.Identifier)

            self.state = 2214
            self.match(CPP14Parser.Assign)
            self.state = 2215
            self.thetypeid()
            pass

        elif la_ == 3:
            # Alt 3: 'typename' '...'? Identifier?
            self.enterOuterAlt(localctx, 3)
            self.state = 2216
            self.match(CPP14Parser.Typename_)
            self.state = 2218
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,283,self._ctx)
            if la_ == 1:
                self.state = 2217
                self.match(CPP14Parser.Ellipsis)

            self.state = 2221
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,284,self._ctx)
            if la_ == 1:
                self.state = 2220
                self.match(CPP14Parser.Identifier)

            pass

        elif la_ == 4:
            # Alt 4: 'typename' Identifier? '=' thetypeid
            self.enterOuterAlt(localctx, 4)
            self.state = 2223
            self.match(CPP14Parser.Typename_)
            self.state = 2225
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Identifier:
                self.state = 2224
                self.match(CPP14Parser.Identifier)

            self.state = 2227
            self.match(CPP14Parser.Assign)
            self.state = 2228
            self.thetypeid()
            pass

        elif la_ == 5:
            # Alt 5: 'template' '<' templateparameterlist '>' 'class' '...'? Identifier?
            self.enterOuterAlt(localctx, 5)
            self.state = 2229
            self.match(CPP14Parser.Template)
            self.state = 2230
            self.match(CPP14Parser.Less)
            self.state = 2231
            self.templateparameterlist(0)
            self.state = 2232
            self.match(CPP14Parser.Greater)
            self.state = 2233
            self.match(CPP14Parser.Class)
            self.state = 2235
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,286,self._ctx)
            if la_ == 1:
                self.state = 2234
                self.match(CPP14Parser.Ellipsis)

            self.state = 2238
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,287,self._ctx)
            if la_ == 1:
                self.state = 2237
                self.match(CPP14Parser.Identifier)

            pass

        elif la_ == 6:
            # Alt 6: 'template' '<' templateparameterlist '>' 'class' Identifier? '=' idexpression
            self.enterOuterAlt(localctx, 6)
            self.state = 2240
            self.match(CPP14Parser.Template)
            self.state = 2241
            self.match(CPP14Parser.Less)
            self.state = 2242
            self.templateparameterlist(0)
            self.state = 2243
            self.match(CPP14Parser.Greater)
            self.state = 2244
            self.match(CPP14Parser.Class)
            self.state = 2246
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Identifier:
                self.state = 2245
                self.match(CPP14Parser.Identifier)

            self.state = 2248
            self.match(CPP14Parser.Assign)
            self.state = 2249
            self.idexpression()
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SimpletemplateidContext(ParserRuleContext):
    """Parse-tree node for the 'simpletemplateid' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def templatename(self):
        return self.getTypedRuleContext(CPP14Parser.TemplatenameContext, 0)

    def templateargumentlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateargumentlistContext, 0)

    # Terminal accessors.
    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_simpletemplateid

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterSimpletemplateid"):
            listener.enterSimpletemplateid(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitSimpletemplateid"):
            listener.exitSimpletemplateid(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitSimpletemplateid"):
            return visitor.visitSimpletemplateid(self)
        return visitor.visitChildren(self)
def simpletemplateid(self):
    """Parse the 'simpletemplateid' rule:
    templatename '<' templateargumentlist? '>'.

    The large bitmask test below is the generated FIRST-set check deciding
    whether an optional templateargumentlist is present.
    """
    localctx = CPP14Parser.SimpletemplateidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 354, self.RULE_simpletemplateid)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2253
        self.templatename()
        self.state = 2254
        self.match(CPP14Parser.Less)
        self.state = 2256
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated FIRST-set membership test (three 64-bit words of token bits).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.Struct - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Union - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Volatile - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
            self.state = 2255
            self.templateargumentlist(0)

        self.state = 2258
        self.match(CPP14Parser.Greater)
    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TemplateidContext(ParserRuleContext):
    """Parse-tree node for the 'templateid' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext, 0)

    def operatorfunctionid(self):
        return self.getTypedRuleContext(CPP14Parser.OperatorfunctionidContext, 0)

    def templateargumentlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateargumentlistContext, 0)

    def literaloperatorid(self):
        return self.getTypedRuleContext(CPP14Parser.LiteraloperatoridContext, 0)

    # Terminal accessors.
    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templateid

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplateid"):
            listener.enterTemplateid(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplateid"):
            listener.exitTemplateid(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplateid"):
            return visitor.visitTemplateid(self)
        return visitor.visitChildren(self)
def templateid(self):
    """Parse the 'templateid' rule.

    Three alternatives (adaptive prediction, decision 293):
      1. simpletemplateid
      2. operatorfunctionid '<' templateargumentlist? '>'
      3. literaloperatorid '<' templateargumentlist? '>'
    The large bitmask tests are generated FIRST-set checks for the optional
    templateargumentlist.
    """
    localctx = CPP14Parser.TemplateidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 356, self.RULE_templateid)
    self._la = 0 # Token type
    try:
        self.state = 2275
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,293,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2260
            self.simpletemplateid()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2261
            self.operatorfunctionid()
            self.state = 2262
            self.match(CPP14Parser.Less)
            self.state = 2264
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated FIRST-set membership test (three 64-bit words of token bits).
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.Struct - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Union - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Volatile - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 2263
                self.templateargumentlist(0)

            self.state = 2266
            self.match(CPP14Parser.Greater)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 2268
            self.literaloperatorid()
            self.state = 2269
            self.match(CPP14Parser.Less)
            self.state = 2271
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Same generated FIRST-set test as alternative 2.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CPP14Parser.T__0) | (1 << CPP14Parser.T__1) | (1 << CPP14Parser.Alignof) | (1 << CPP14Parser.Auto) | (1 << CPP14Parser.Bool) | (1 << CPP14Parser.Char) | (1 << CPP14Parser.Char16) | (1 << CPP14Parser.Char32) | (1 << CPP14Parser.Class) | (1 << CPP14Parser.Const) | (1 << CPP14Parser.Const_cast) | (1 << CPP14Parser.Decltype) | (1 << CPP14Parser.Delete) | (1 << CPP14Parser.Double) | (1 << CPP14Parser.Dynamic_cast) | (1 << CPP14Parser.Enum) | (1 << CPP14Parser.BFalse) | (1 << CPP14Parser.Float) | (1 << CPP14Parser.Int) | (1 << CPP14Parser.Long) | (1 << CPP14Parser.New) | (1 << CPP14Parser.Noexcept) | (1 << CPP14Parser.Nullptr) | (1 << CPP14Parser.Operator) | (1 << CPP14Parser.Reinterpret_cast) | (1 << CPP14Parser.Short) | (1 << CPP14Parser.Signed) | (1 << CPP14Parser.Sizeof))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CPP14Parser.Static_cast - 64)) | (1 << (CPP14Parser.Struct - 64)) | (1 << (CPP14Parser.This - 64)) | (1 << (CPP14Parser.BTrue - 64)) | (1 << (CPP14Parser.Typeid_ - 64)) | (1 << (CPP14Parser.Typename_ - 64)) | (1 << (CPP14Parser.Union - 64)) | (1 << (CPP14Parser.Unsigned - 64)) | (1 << (CPP14Parser.Void - 64)) | (1 << (CPP14Parser.Volatile - 64)) | (1 << (CPP14Parser.Wchar - 64)) | (1 << (CPP14Parser.LeftParen - 64)) | (1 << (CPP14Parser.LeftBracket - 64)) | (1 << (CPP14Parser.Plus - 64)) | (1 << (CPP14Parser.Minus - 64)) | (1 << (CPP14Parser.Star - 64)) | (1 << (CPP14Parser.And - 64)) | (1 << (CPP14Parser.Or - 64)) | (1 << (CPP14Parser.Tilde - 64)) | (1 << (CPP14Parser.PlusPlus - 64)) | (1 << (CPP14Parser.MinusMinus - 64)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (CPP14Parser.Doublecolon - 128)) | (1 << (CPP14Parser.Identifier - 128)) | (1 << (CPP14Parser.Integerliteral - 128)) | (1 << (CPP14Parser.Characterliteral - 128)) | (1 << (CPP14Parser.Floatingliteral - 128)) | (1 << (CPP14Parser.Stringliteral - 128)) | (1 << (CPP14Parser.Userdefinedintegerliteral - 128)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 128)) | (1 << (CPP14Parser.Userdefinedstringliteral - 128)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 128)))) != 0):
                self.state = 2270
                self.templateargumentlist(0)

            self.state = 2273
            self.match(CPP14Parser.Greater)
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TemplatenameContext(ParserRuleContext):
    """Parse-tree node for the 'templatename' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Identifier(self):
        # Terminal accessor.
        return self.getToken(CPP14Parser.Identifier, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templatename

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplatename"):
            listener.enterTemplatename(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplatename"):
            listener.exitTemplatename(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplatename"):
            return visitor.visitTemplatename(self)
        return visitor.visitChildren(self)
def templatename(self):
    """Parse the 'templatename' rule: a single Identifier token."""
    localctx = CPP14Parser.TemplatenameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 358, self.RULE_templatename)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2277
        self.match(CPP14Parser.Identifier)
    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TemplateargumentlistContext(ParserRuleContext):
    """Parse-tree node for the 'templateargumentlist' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def templateargument(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateargumentContext, 0)

    def templateargumentlist(self):
        return self.getTypedRuleContext(CPP14Parser.TemplateargumentlistContext, 0)

    # Terminal accessors.
    def Ellipsis(self):
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templateargumentlist

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplateargumentlist"):
            listener.enterTemplateargumentlist(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplateargumentlist"):
            listener.exitTemplateargumentlist(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplateargumentlist"):
            return visitor.visitTemplateargumentlist(self)
        return visitor.visitChildren(self)
def templateargumentlist(self, _p:int=0):
    """Parse the left-recursive 'templateargumentlist' rule:
    templateargument '...'? (',' templateargument '...'?)*.

    _p is the precedence level used by ANTLR's left-recursion elimination.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.TemplateargumentlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 360
    self.enterRecursionRule(localctx, 360, self.RULE_templateargumentlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2280
        self.templateargument()
        self.state = 2282
        self._errHandler.sync(self)
        # Decision 294: optional pack-expansion '...' after the first argument.
        la_ = self._interp.adaptivePredict(self._input,294,self._ctx)
        if la_ == 1:
            self.state = 2281
            self.match(CPP14Parser.Ellipsis)

        self._ctx.stop = self._input.LT(-1)
        self.state = 2292
        self._errHandler.sync(self)
        # Decision 296: keep consuming ',' templateargument repetitions.
        _alt = self._interp.adaptivePredict(self._input,296,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # Wrap the list parsed so far in a new recursion context.
                localctx = CPP14Parser.TemplateargumentlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_templateargumentlist)
                self.state = 2284
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 2285
                self.match(CPP14Parser.Comma)
                self.state = 2286
                self.templateargument()
                self.state = 2288
                self._errHandler.sync(self)
                # Decision 295: optional pack-expansion '...' after this argument.
                la_ = self._interp.adaptivePredict(self._input,295,self._ctx)
                if la_ == 1:
                    self.state = 2287
                    self.match(CPP14Parser.Ellipsis)

            self.state = 2294
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,296,self._ctx)

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class TemplateargumentContext(ParserRuleContext):
    """Parse-tree node for the 'templateargument' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Child-rule accessors.
    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext, 0)

    def constantexpression(self):
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext, 0)

    def idexpression(self):
        return self.getTypedRuleContext(CPP14Parser.IdexpressionContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_templateargument

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTemplateargument"):
            listener.enterTemplateargument(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTemplateargument"):
            listener.exitTemplateargument(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTemplateargument"):
            return visitor.visitTemplateargument(self)
        return visitor.visitChildren(self)
def templateargument(self):
    """Parse the 'templateargument' rule.

    Three alternatives (adaptive prediction, decision 297):
      1. thetypeid
      2. constantexpression
      3. idexpression
    """
    localctx = CPP14Parser.TemplateargumentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 362, self.RULE_templateargument)
    try:
        self.state = 2298
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,297,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2295
            self.thetypeid()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2296
            self.constantexpression()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 2297
            self.idexpression()
            pass

    except RecognitionException as re:
        # Standard ANTLR recovery: record the exception, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypenamespecifierContext(ParserRuleContext):
    """Parse-tree node for the 'typenamespecifier' rule (auto-generated shape)."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Terminal accessors.
    def Typename_(self):
        return self.getToken(CPP14Parser.Typename_, 0)

    def Identifier(self):
        return self.getToken(CPP14Parser.Identifier, 0)

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    # Child-rule accessors.
    def nestednamespecifier(self):
        return self.getTypedRuleContext(CPP14Parser.NestednamespecifierContext, 0)

    def simpletemplateid(self):
        return self.getTypedRuleContext(CPP14Parser.SimpletemplateidContext, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typenamespecifier

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTypenamespecifier"):
            listener.enterTypenamespecifier(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTypenamespecifier"):
            listener.exitTypenamespecifier(self)

    def accept(self, visitor: ParseTreeVisitor):
        # Dispatch to a rule-specific visit method when the visitor defines one.
        if hasattr(visitor, "visitTypenamespecifier"):
            return visitor.visitTypenamespecifier(self)
        return visitor.visitChildren(self)
def typenamespecifier(self):
    """Parse rule ``typenamespecifier``.

    Alternatives (chosen by adaptive prediction, decision 299):
      1: 'typename' nestednamespecifier Identifier
      2: 'typename' nestednamespecifier 'template'? simpletemplateid
    """
    localctx = CPP14Parser.TypenamespecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 364, self.RULE_typenamespecifier)
    self._la = 0 # Token type
    try:
        self.state = 2311
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,299,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2300
            self.match(CPP14Parser.Typename_)
            self.state = 2301
            self.nestednamespecifier(0)
            self.state = 2302
            self.match(CPP14Parser.Identifier)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2304
            self.match(CPP14Parser.Typename_)
            self.state = 2305
            self.nestednamespecifier(0)
            self.state = 2307
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional 'template' keyword before the simple-template-id.
            if _la==CPP14Parser.Template:
                self.state = 2306
                self.match(CPP14Parser.Template)

            self.state = 2309
            self.simpletemplateid()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExplicitinstantiationContext(ParserRuleContext):
    """Parse-tree node for the ``explicitinstantiation`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def declaration(self):
        return self.getTypedRuleContext(CPP14Parser.DeclarationContext,0)

    def Extern(self):
        # Optional 'extern' keyword (may be absent).
        return self.getToken(CPP14Parser.Extern, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_explicitinstantiation

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExplicitinstantiation" ):
            listener.enterExplicitinstantiation(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExplicitinstantiation" ):
            listener.exitExplicitinstantiation(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExplicitinstantiation" ):
            return visitor.visitExplicitinstantiation(self)
        else:
            return visitor.visitChildren(self)
def explicitinstantiation(self):
    """Parse rule ``explicitinstantiation``: 'extern'? 'template' declaration."""
    localctx = CPP14Parser.ExplicitinstantiationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 366, self.RULE_explicitinstantiation)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2314
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading 'extern'.
        if _la==CPP14Parser.Extern:
            self.state = 2313
            self.match(CPP14Parser.Extern)

        self.state = 2316
        self.match(CPP14Parser.Template)
        self.state = 2317
        self.declaration()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExplicitspecializationContext(ParserRuleContext):
    """Parse-tree node for the ``explicitspecialization`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Template(self):
        return self.getToken(CPP14Parser.Template, 0)

    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    def declaration(self):
        return self.getTypedRuleContext(CPP14Parser.DeclarationContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_explicitspecialization

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExplicitspecialization" ):
            listener.enterExplicitspecialization(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExplicitspecialization" ):
            listener.exitExplicitspecialization(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExplicitspecialization" ):
            return visitor.visitExplicitspecialization(self)
        else:
            return visitor.visitChildren(self)
def explicitspecialization(self):
    """Parse rule ``explicitspecialization``: 'template' '<' '>' declaration."""
    localctx = CPP14Parser.ExplicitspecializationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 368, self.RULE_explicitspecialization)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2319
        self.match(CPP14Parser.Template)
        self.state = 2320
        self.match(CPP14Parser.Less)
        self.state = 2321
        self.match(CPP14Parser.Greater)
        self.state = 2322
        self.declaration()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TryblockContext(ParserRuleContext):
    """Parse-tree node for the ``tryblock`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Try(self):
        return self.getToken(CPP14Parser.Try, 0)

    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def handlerseq(self):
        return self.getTypedRuleContext(CPP14Parser.HandlerseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_tryblock

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTryblock" ):
            listener.enterTryblock(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTryblock" ):
            listener.exitTryblock(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTryblock" ):
            return visitor.visitTryblock(self)
        else:
            return visitor.visitChildren(self)
def tryblock(self):
    """Parse rule ``tryblock``: 'try' compoundstatement handlerseq."""
    localctx = CPP14Parser.TryblockContext(self, self._ctx, self.state)
    self.enterRule(localctx, 370, self.RULE_tryblock)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2324
        self.match(CPP14Parser.Try)
        self.state = 2325
        self.compoundstatement()
        self.state = 2326
        self.handlerseq()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctiontryblockContext(ParserRuleContext):
    """Parse-tree node for the ``functiontryblock`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Try(self):
        return self.getToken(CPP14Parser.Try, 0)

    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def handlerseq(self):
        return self.getTypedRuleContext(CPP14Parser.HandlerseqContext,0)

    def ctorinitializer(self):
        # Optional constructor initializer list (may be absent).
        return self.getTypedRuleContext(CPP14Parser.CtorinitializerContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_functiontryblock

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctiontryblock" ):
            listener.enterFunctiontryblock(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctiontryblock" ):
            listener.exitFunctiontryblock(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitFunctiontryblock" ):
            return visitor.visitFunctiontryblock(self)
        else:
            return visitor.visitChildren(self)
def functiontryblock(self):
    """Parse rule ``functiontryblock``: 'try' ctorinitializer? compoundstatement handlerseq."""
    localctx = CPP14Parser.FunctiontryblockContext(self, self._ctx, self.state)
    self.enterRule(localctx, 372, self.RULE_functiontryblock)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2328
        self.match(CPP14Parser.Try)
        self.state = 2330
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # A ':' introduces an optional constructor initializer.
        if _la==CPP14Parser.Colon:
            self.state = 2329
            self.ctorinitializer()

        self.state = 2332
        self.compoundstatement()
        self.state = 2333
        self.handlerseq()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HandlerseqContext(ParserRuleContext):
    """Parse-tree node for the ``handlerseq`` rule (ANTLR-generated).

    Right-recursive: one handler followed by an optional nested handlerseq.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def handler(self):
        return self.getTypedRuleContext(CPP14Parser.HandlerContext,0)

    def handlerseq(self):
        return self.getTypedRuleContext(CPP14Parser.HandlerseqContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_handlerseq

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterHandlerseq" ):
            listener.enterHandlerseq(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitHandlerseq" ):
            listener.exitHandlerseq(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitHandlerseq" ):
            return visitor.visitHandlerseq(self)
        else:
            return visitor.visitChildren(self)
def handlerseq(self):
    """Parse rule ``handlerseq``: handler handlerseq?.

    Trailing recursion is guarded by adaptive prediction (decision 302).
    """
    localctx = CPP14Parser.HandlerseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 374, self.RULE_handlerseq)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2335
        self.handler()
        self.state = 2337
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,302,self._ctx)
        if la_ == 1:
            self.state = 2336
            self.handlerseq()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HandlerContext(ParserRuleContext):
    """Parse-tree node for the ``handler`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Catch(self):
        return self.getToken(CPP14Parser.Catch, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def exceptiondeclaration(self):
        return self.getTypedRuleContext(CPP14Parser.ExceptiondeclarationContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def compoundstatement(self):
        return self.getTypedRuleContext(CPP14Parser.CompoundstatementContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_handler

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterHandler" ):
            listener.enterHandler(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitHandler" ):
            listener.exitHandler(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitHandler" ):
            return visitor.visitHandler(self)
        else:
            return visitor.visitChildren(self)
def handler(self):
    """Parse rule ``handler``: 'catch' '(' exceptiondeclaration ')' compoundstatement."""
    localctx = CPP14Parser.HandlerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 376, self.RULE_handler)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2339
        self.match(CPP14Parser.Catch)
        self.state = 2340
        self.match(CPP14Parser.LeftParen)
        self.state = 2341
        self.exceptiondeclaration()
        self.state = 2342
        self.match(CPP14Parser.RightParen)
        self.state = 2343
        self.compoundstatement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExceptiondeclarationContext(ParserRuleContext):
    """Parse-tree node for the ``exceptiondeclaration`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typespecifierseq(self):
        return self.getTypedRuleContext(CPP14Parser.TypespecifierseqContext,0)

    def declarator(self):
        return self.getTypedRuleContext(CPP14Parser.DeclaratorContext,0)

    def attributespecifierseq(self):
        # Optional attribute sequence (may be absent).
        return self.getTypedRuleContext(CPP14Parser.AttributespecifierseqContext,0)

    def abstractdeclarator(self):
        return self.getTypedRuleContext(CPP14Parser.AbstractdeclaratorContext,0)

    def Ellipsis(self):
        # '...' catch-all handler form.
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_exceptiondeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExceptiondeclaration" ):
            listener.enterExceptiondeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExceptiondeclaration" ):
            listener.exitExceptiondeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExceptiondeclaration" ):
            return visitor.visitExceptiondeclaration(self)
        else:
            return visitor.visitChildren(self)
def exceptiondeclaration(self):
    """Parse rule ``exceptiondeclaration``.

    Alternatives (chosen by adaptive prediction, decision 306):
      1: attributespecifierseq? typespecifierseq declarator
      2: attributespecifierseq? typespecifierseq abstractdeclarator?
      3: '...'
    """
    localctx = CPP14Parser.ExceptiondeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 378, self.RULE_exceptiondeclaration)
    self._la = 0 # Token type
    try:
        self.state = 2359
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,306,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2346
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Attributes start with 'alignas' or '['.
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 2345
                self.attributespecifierseq(0)

            self.state = 2348
            self.typespecifierseq()
            self.state = 2349
            self.declarator()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2352
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CPP14Parser.Alignas or _la==CPP14Parser.LeftBracket:
                self.state = 2351
                self.attributespecifierseq(0)

            self.state = 2354
            self.typespecifierseq()
            self.state = 2356
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated FOLLOW-set bitmask test: tokens that can begin an
            # abstract declarator (offset by 84 into a 64-bit word).
            if _la==CPP14Parser.T__2 or _la==CPP14Parser.Decltype or ((((_la - 84)) & ~0x3f) == 0 and ((1 << (_la - 84)) & ((1 << (CPP14Parser.LeftParen - 84)) | (1 << (CPP14Parser.LeftBracket - 84)) | (1 << (CPP14Parser.Star - 84)) | (1 << (CPP14Parser.And - 84)) | (1 << (CPP14Parser.Doublecolon - 84)) | (1 << (CPP14Parser.Ellipsis - 84)) | (1 << (CPP14Parser.Identifier - 84)))) != 0):
                self.state = 2355
                self.abstractdeclarator()

            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 2358
            self.match(CPP14Parser.Ellipsis)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ThrowexpressionContext(ParserRuleContext):
    """Parse-tree node for the ``throwexpression`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Throw(self):
        return self.getToken(CPP14Parser.Throw, 0)

    def assignmentexpression(self):
        # Optional thrown operand (absent for a bare rethrow 'throw;').
        return self.getTypedRuleContext(CPP14Parser.AssignmentexpressionContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_throwexpression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterThrowexpression" ):
            listener.enterThrowexpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitThrowexpression" ):
            listener.exitThrowexpression(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitThrowexpression" ):
            return visitor.visitThrowexpression(self)
        else:
            return visitor.visitChildren(self)
def throwexpression(self):
    """Parse rule ``throwexpression``: 'throw' assignmentexpression?.

    The optional operand is guarded by adaptive prediction (decision 307).
    """
    localctx = CPP14Parser.ThrowexpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 380, self.RULE_throwexpression)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2361
        self.match(CPP14Parser.Throw)
        self.state = 2363
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,307,self._ctx)
        if la_ == 1:
            self.state = 2362
            self.assignmentexpression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExceptionspecificationContext(ParserRuleContext):
    """Parse-tree node for the ``exceptionspecification`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def dynamicexceptionspecification(self):
        return self.getTypedRuleContext(CPP14Parser.DynamicexceptionspecificationContext,0)

    def noexceptspecification(self):
        return self.getTypedRuleContext(CPP14Parser.NoexceptspecificationContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_exceptionspecification

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExceptionspecification" ):
            listener.enterExceptionspecification(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExceptionspecification" ):
            listener.exitExceptionspecification(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExceptionspecification" ):
            return visitor.visitExceptionspecification(self)
        else:
            return visitor.visitChildren(self)
def exceptionspecification(self):
    """Parse rule ``exceptionspecification``: dynamicexceptionspecification | noexceptspecification.

    Selected by one token of lookahead ('throw' vs 'noexcept').
    """
    localctx = CPP14Parser.ExceptionspecificationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 382, self.RULE_exceptionspecification)
    try:
        self.state = 2367
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CPP14Parser.Throw]:
            self.enterOuterAlt(localctx, 1)
            self.state = 2365
            self.dynamicexceptionspecification()
            pass
        elif token in [CPP14Parser.Noexcept]:
            self.enterOuterAlt(localctx, 2)
            self.state = 2366
            self.noexceptspecification()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DynamicexceptionspecificationContext(ParserRuleContext):
    """Parse-tree node for the ``dynamicexceptionspecification`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Throw(self):
        return self.getToken(CPP14Parser.Throw, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def typeidlist(self):
        # Optional type-id list inside the parentheses (may be absent).
        return self.getTypedRuleContext(CPP14Parser.TypeidlistContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_dynamicexceptionspecification

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDynamicexceptionspecification" ):
            listener.enterDynamicexceptionspecification(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDynamicexceptionspecification" ):
            listener.exitDynamicexceptionspecification(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitDynamicexceptionspecification" ):
            return visitor.visitDynamicexceptionspecification(self)
        else:
            return visitor.visitChildren(self)
def dynamicexceptionspecification(self):
    """Parse rule ``dynamicexceptionspecification``: 'throw' '(' typeidlist? ')'."""
    localctx = CPP14Parser.DynamicexceptionspecificationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 384, self.RULE_dynamicexceptionspecification)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2369
        self.match(CPP14Parser.Throw)
        self.state = 2370
        self.match(CPP14Parser.LeftParen)
        self.state = 2372
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated FIRST-set bitmask test: tokens that can begin a type-id
        # (two 64-bit words, offset by 12 and 76 respectively).
        if ((((_la - 12)) & ~0x3f) == 0 and ((1 << (_la - 12)) & ((1 << (CPP14Parser.Auto - 12)) | (1 << (CPP14Parser.Bool - 12)) | (1 << (CPP14Parser.Char - 12)) | (1 << (CPP14Parser.Char16 - 12)) | (1 << (CPP14Parser.Char32 - 12)) | (1 << (CPP14Parser.Class - 12)) | (1 << (CPP14Parser.Const - 12)) | (1 << (CPP14Parser.Decltype - 12)) | (1 << (CPP14Parser.Double - 12)) | (1 << (CPP14Parser.Enum - 12)) | (1 << (CPP14Parser.Float - 12)) | (1 << (CPP14Parser.Int - 12)) | (1 << (CPP14Parser.Long - 12)) | (1 << (CPP14Parser.Short - 12)) | (1 << (CPP14Parser.Signed - 12)) | (1 << (CPP14Parser.Struct - 12)) | (1 << (CPP14Parser.Typename_ - 12)))) != 0) or ((((_la - 76)) & ~0x3f) == 0 and ((1 << (_la - 76)) & ((1 << (CPP14Parser.Union - 76)) | (1 << (CPP14Parser.Unsigned - 76)) | (1 << (CPP14Parser.Void - 76)) | (1 << (CPP14Parser.Volatile - 76)) | (1 << (CPP14Parser.Wchar - 76)) | (1 << (CPP14Parser.Doublecolon - 76)) | (1 << (CPP14Parser.Identifier - 76)))) != 0):
            self.state = 2371
            self.typeidlist(0)

        self.state = 2374
        self.match(CPP14Parser.RightParen)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeidlistContext(ParserRuleContext):
    """Parse-tree node for the left-recursive ``typeidlist`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def thetypeid(self):
        return self.getTypedRuleContext(CPP14Parser.ThetypeidContext,0)

    def Ellipsis(self):
        # Optional pack-expansion '...' after a type-id (may be absent).
        return self.getToken(CPP14Parser.Ellipsis, 0)

    def typeidlist(self):
        # Left-recursive child list (present on the ',' alternative).
        return self.getTypedRuleContext(CPP14Parser.TypeidlistContext,0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_typeidlist

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypeidlist" ):
            listener.enterTypeidlist(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypeidlist" ):
            listener.exitTypeidlist(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTypeidlist" ):
            return visitor.visitTypeidlist(self)
        else:
            return visitor.visitChildren(self)
def typeidlist(self, _p:int=0):
    """Parse left-recursive rule ``typeidlist``:
    thetypeid '...'? ( ',' thetypeid '...'? )*.

    ANTLR rewrites the left recursion into a precedence loop: the seed
    alternative is parsed first, then the loop repeatedly wraps the
    previous context in a new one while prediction (decision 312) keeps
    choosing to continue.

    :param _p: minimum precedence for continuing the recursion
               (checked by ``precpred``).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = CPP14Parser.TypeidlistContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 386
    self.enterRecursionRule(localctx, 386, self.RULE_typeidlist, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        # Seed: thetypeid with optional '...'.
        self.state = 2377
        self.thetypeid()
        self.state = 2379
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,310,self._ctx)
        if la_ == 1:
            self.state = 2378
            self.match(CPP14Parser.Ellipsis)

        self._ctx.stop = self._input.LT(-1)
        self.state = 2389
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,312,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                # Wrap the list parsed so far in a new context and append
                # ',' thetypeid '...'?.
                localctx = CPP14Parser.TypeidlistContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_typeidlist)
                self.state = 2381
                if not self.precpred(self._ctx, 1):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
                self.state = 2382
                self.match(CPP14Parser.Comma)
                self.state = 2383
                self.thetypeid()
                self.state = 2385
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,311,self._ctx)
                if la_ == 1:
                    self.state = 2384
                    self.match(CPP14Parser.Ellipsis)

            self.state = 2391
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,312,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Left-recursive rules unroll instead of calling exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class NoexceptspecificationContext(ParserRuleContext):
    """Parse-tree node for the ``noexceptspecification`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Noexcept(self):
        return self.getToken(CPP14Parser.Noexcept, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def constantexpression(self):
        # Present only for the 'noexcept(expr)' form.
        return self.getTypedRuleContext(CPP14Parser.ConstantexpressionContext,0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_noexceptspecification

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNoexceptspecification" ):
            listener.enterNoexceptspecification(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNoexceptspecification" ):
            listener.exitNoexceptspecification(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNoexceptspecification" ):
            return visitor.visitNoexceptspecification(self)
        else:
            return visitor.visitChildren(self)
def noexceptspecification(self):
    """Parse rule ``noexceptspecification``.

    Alternatives (chosen by adaptive prediction, decision 313):
      1: 'noexcept' '(' constantexpression ')'
      2: 'noexcept'
    """
    localctx = CPP14Parser.NoexceptspecificationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 388, self.RULE_noexceptspecification)
    try:
        self.state = 2398
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,313,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2392
            self.match(CPP14Parser.Noexcept)
            self.state = 2393
            self.match(CPP14Parser.LeftParen)
            self.state = 2394
            self.constantexpression()
            self.state = 2395
            self.match(CPP14Parser.RightParen)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2397
            self.match(CPP14Parser.Noexcept)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TheoperatorContext(ParserRuleContext):
    """Parse-tree node for the ``theoperator`` rule (ANTLR-generated).

    One accessor per overloadable-operator token that can appear in an
    operator-function-id; at most a few are non-None for any given parse.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def New(self):
        return self.getToken(CPP14Parser.New, 0)

    def Delete(self):
        return self.getToken(CPP14Parser.Delete, 0)

    def LeftBracket(self):
        return self.getToken(CPP14Parser.LeftBracket, 0)

    def RightBracket(self):
        return self.getToken(CPP14Parser.RightBracket, 0)

    def Plus(self):
        return self.getToken(CPP14Parser.Plus, 0)

    def Minus(self):
        return self.getToken(CPP14Parser.Minus, 0)

    def Star(self):
        return self.getToken(CPP14Parser.Star, 0)

    def Div(self):
        return self.getToken(CPP14Parser.Div, 0)

    def Mod(self):
        return self.getToken(CPP14Parser.Mod, 0)

    def Caret(self):
        return self.getToken(CPP14Parser.Caret, 0)

    def And(self):
        return self.getToken(CPP14Parser.And, 0)

    def Or(self):
        return self.getToken(CPP14Parser.Or, 0)

    def Tilde(self):
        return self.getToken(CPP14Parser.Tilde, 0)

    def Assign(self):
        return self.getToken(CPP14Parser.Assign, 0)

    def Less(self):
        return self.getToken(CPP14Parser.Less, 0)

    def Greater(self):
        return self.getToken(CPP14Parser.Greater, 0)

    def PlusAssign(self):
        return self.getToken(CPP14Parser.PlusAssign, 0)

    def MinusAssign(self):
        return self.getToken(CPP14Parser.MinusAssign, 0)

    def StarAssign(self):
        return self.getToken(CPP14Parser.StarAssign, 0)

    def DivAssign(self):
        return self.getToken(CPP14Parser.DivAssign, 0)

    def ModAssign(self):
        return self.getToken(CPP14Parser.ModAssign, 0)

    def XorAssign(self):
        return self.getToken(CPP14Parser.XorAssign, 0)

    def AndAssign(self):
        return self.getToken(CPP14Parser.AndAssign, 0)

    def OrAssign(self):
        return self.getToken(CPP14Parser.OrAssign, 0)

    def LeftShift(self):
        return self.getToken(CPP14Parser.LeftShift, 0)

    def RightShift(self):
        return self.getToken(CPP14Parser.RightShift, 0)

    def RightShiftAssign(self):
        return self.getToken(CPP14Parser.RightShiftAssign, 0)

    def LeftShiftAssign(self):
        return self.getToken(CPP14Parser.LeftShiftAssign, 0)

    def Equal(self):
        return self.getToken(CPP14Parser.Equal, 0)

    def NotEqual(self):
        return self.getToken(CPP14Parser.NotEqual, 0)

    def LessEqual(self):
        return self.getToken(CPP14Parser.LessEqual, 0)

    def GreaterEqual(self):
        return self.getToken(CPP14Parser.GreaterEqual, 0)

    def PlusPlus(self):
        return self.getToken(CPP14Parser.PlusPlus, 0)

    def MinusMinus(self):
        return self.getToken(CPP14Parser.MinusMinus, 0)

    def Comma(self):
        return self.getToken(CPP14Parser.Comma, 0)

    def ArrowStar(self):
        return self.getToken(CPP14Parser.ArrowStar, 0)

    def Arrow(self):
        return self.getToken(CPP14Parser.Arrow, 0)

    def LeftParen(self):
        return self.getToken(CPP14Parser.LeftParen, 0)

    def RightParen(self):
        return self.getToken(CPP14Parser.RightParen, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_theoperator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTheoperator" ):
            listener.enterTheoperator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTheoperator" ):
            listener.exitTheoperator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTheoperator" ):
            return visitor.visitTheoperator(self)
        else:
            return visitor.visitChildren(self)
def theoperator(self):
    """Parse rule ``theoperator``: one of the 45 overloadable-operator forms.

    Each alternative (chosen by adaptive prediction, decision 314) matches
    one operator spelling: 'new'/'delete' (optionally with '[]'), the
    arithmetic/bitwise/comparison/assignment/shift operators, '++'/'--',
    ',', '->*', '->', '()' and '[]'. Alternatives 14, 15 and 35-38 match
    unnamed literal tokens (T__0..T__5) defined inline in the grammar —
    their spellings are not visible from this file.
    """
    localctx = CPP14Parser.TheoperatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 390, self.RULE_theoperator)
    try:
        self.state = 2451
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,314,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 2400
            self.match(CPP14Parser.New)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 2401
            self.match(CPP14Parser.Delete)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 2402
            self.match(CPP14Parser.New)
            self.state = 2403
            self.match(CPP14Parser.LeftBracket)
            self.state = 2404
            self.match(CPP14Parser.RightBracket)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 2405
            self.match(CPP14Parser.Delete)
            self.state = 2406
            self.match(CPP14Parser.LeftBracket)
            self.state = 2407
            self.match(CPP14Parser.RightBracket)
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 2408
            self.match(CPP14Parser.Plus)
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 2409
            self.match(CPP14Parser.Minus)
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 2410
            self.match(CPP14Parser.Star)
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 2411
            self.match(CPP14Parser.Div)
            pass
        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 2412
            self.match(CPP14Parser.Mod)
            pass
        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 2413
            self.match(CPP14Parser.Caret)
            pass
        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 2414
            self.match(CPP14Parser.And)
            pass
        elif la_ == 12:
            self.enterOuterAlt(localctx, 12)
            self.state = 2415
            self.match(CPP14Parser.Or)
            pass
        elif la_ == 13:
            self.enterOuterAlt(localctx, 13)
            self.state = 2416
            self.match(CPP14Parser.Tilde)
            pass
        elif la_ == 14:
            self.enterOuterAlt(localctx, 14)
            self.state = 2417
            self.match(CPP14Parser.T__0)
            pass
        elif la_ == 15:
            self.enterOuterAlt(localctx, 15)
            self.state = 2418
            self.match(CPP14Parser.T__1)
            pass
        elif la_ == 16:
            self.enterOuterAlt(localctx, 16)
            self.state = 2419
            self.match(CPP14Parser.Assign)
            pass
        elif la_ == 17:
            self.enterOuterAlt(localctx, 17)
            self.state = 2420
            self.match(CPP14Parser.Less)
            pass
        elif la_ == 18:
            self.enterOuterAlt(localctx, 18)
            self.state = 2421
            self.match(CPP14Parser.Greater)
            pass
        elif la_ == 19:
            self.enterOuterAlt(localctx, 19)
            self.state = 2422
            self.match(CPP14Parser.PlusAssign)
            pass
        elif la_ == 20:
            self.enterOuterAlt(localctx, 20)
            self.state = 2423
            self.match(CPP14Parser.MinusAssign)
            pass
        elif la_ == 21:
            self.enterOuterAlt(localctx, 21)
            self.state = 2424
            self.match(CPP14Parser.StarAssign)
            pass
        elif la_ == 22:
            self.enterOuterAlt(localctx, 22)
            self.state = 2425
            self.match(CPP14Parser.DivAssign)
            pass
        elif la_ == 23:
            self.enterOuterAlt(localctx, 23)
            self.state = 2426
            self.match(CPP14Parser.ModAssign)
            pass
        elif la_ == 24:
            self.enterOuterAlt(localctx, 24)
            self.state = 2427
            self.match(CPP14Parser.XorAssign)
            pass
        elif la_ == 25:
            self.enterOuterAlt(localctx, 25)
            self.state = 2428
            self.match(CPP14Parser.AndAssign)
            pass
        elif la_ == 26:
            self.enterOuterAlt(localctx, 26)
            self.state = 2429
            self.match(CPP14Parser.OrAssign)
            pass
        elif la_ == 27:
            self.enterOuterAlt(localctx, 27)
            self.state = 2430
            self.match(CPP14Parser.LeftShift)
            pass
        elif la_ == 28:
            self.enterOuterAlt(localctx, 28)
            self.state = 2431
            self.match(CPP14Parser.RightShift)
            pass
        elif la_ == 29:
            self.enterOuterAlt(localctx, 29)
            self.state = 2432
            self.match(CPP14Parser.RightShiftAssign)
            pass
        elif la_ == 30:
            self.enterOuterAlt(localctx, 30)
            self.state = 2433
            self.match(CPP14Parser.LeftShiftAssign)
            pass
        elif la_ == 31:
            self.enterOuterAlt(localctx, 31)
            self.state = 2434
            self.match(CPP14Parser.Equal)
            pass
        elif la_ == 32:
            self.enterOuterAlt(localctx, 32)
            self.state = 2435
            self.match(CPP14Parser.NotEqual)
            pass
        elif la_ == 33:
            self.enterOuterAlt(localctx, 33)
            self.state = 2436
            self.match(CPP14Parser.LessEqual)
            pass
        elif la_ == 34:
            self.enterOuterAlt(localctx, 34)
            self.state = 2437
            self.match(CPP14Parser.GreaterEqual)
            pass
        elif la_ == 35:
            self.enterOuterAlt(localctx, 35)
            self.state = 2438
            self.match(CPP14Parser.T__2)
            pass
        elif la_ == 36:
            self.enterOuterAlt(localctx, 36)
            self.state = 2439
            self.match(CPP14Parser.T__3)
            pass
        elif la_ == 37:
            self.enterOuterAlt(localctx, 37)
            self.state = 2440
            self.match(CPP14Parser.T__4)
            pass
        elif la_ == 38:
            self.enterOuterAlt(localctx, 38)
            self.state = 2441
            self.match(CPP14Parser.T__5)
            pass
        elif la_ == 39:
            self.enterOuterAlt(localctx, 39)
            self.state = 2442
            self.match(CPP14Parser.PlusPlus)
            pass
        elif la_ == 40:
            self.enterOuterAlt(localctx, 40)
            self.state = 2443
            self.match(CPP14Parser.MinusMinus)
            pass
        elif la_ == 41:
            self.enterOuterAlt(localctx, 41)
            self.state = 2444
            self.match(CPP14Parser.Comma)
            pass
        elif la_ == 42:
            self.enterOuterAlt(localctx, 42)
            self.state = 2445
            self.match(CPP14Parser.ArrowStar)
            pass
        elif la_ == 43:
            self.enterOuterAlt(localctx, 43)
            self.state = 2446
            self.match(CPP14Parser.Arrow)
            pass
        elif la_ == 44:
            self.enterOuterAlt(localctx, 44)
            self.state = 2447
            self.match(CPP14Parser.LeftParen)
            self.state = 2448
            self.match(CPP14Parser.RightParen)
            pass
        elif la_ == 45:
            self.enterOuterAlt(localctx, 45)
            self.state = 2449
            self.match(CPP14Parser.LeftBracket)
            self.state = 2450
            self.match(CPP14Parser.RightBracket)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
# --- Generated by ANTLR4 from the CPP14 grammar: `literal` rule. ---
class LiteralContext(ParserRuleContext):
    """Parse-tree context for the `literal` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Integerliteral(self):
        # Terminal accessor: first Integerliteral token under this context.
        return self.getToken(CPP14Parser.Integerliteral, 0)

    def Characterliteral(self):
        return self.getToken(CPP14Parser.Characterliteral, 0)

    def Floatingliteral(self):
        return self.getToken(CPP14Parser.Floatingliteral, 0)

    def Stringliteral(self):
        return self.getToken(CPP14Parser.Stringliteral, 0)

    def booleanliteral(self):
        # Sub-rule accessor: nested booleanliteral context, if this
        # alternative was taken.
        return self.getTypedRuleContext(CPP14Parser.BooleanliteralContext,0)

    def pointerliteral(self):
        return self.getTypedRuleContext(CPP14Parser.PointerliteralContext,0)

    def userdefinedliteral(self):
        return self.getTypedRuleContext(CPP14Parser.UserdefinedliteralContext,0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_literal

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLiteral" ):
            listener.enterLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLiteral" ):
            listener.exitLiteral(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLiteral" ):
            return visitor.visitLiteral(self)
        else:
            return visitor.visitChildren(self)


def literal(self):
    """Parse the `literal` rule by dispatching on the lookahead token type."""
    localctx = CPP14Parser.LiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 392, self.RULE_literal)
    try:
        self.state = 2460
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Each branch corresponds to one alternative of the grammar rule;
        # `self.state` values are ATN state numbers emitted by ANTLR.
        if token in [CPP14Parser.Integerliteral]:
            self.enterOuterAlt(localctx, 1)
            self.state = 2453
            self.match(CPP14Parser.Integerliteral)
            pass
        elif token in [CPP14Parser.Characterliteral]:
            self.enterOuterAlt(localctx, 2)
            self.state = 2454
            self.match(CPP14Parser.Characterliteral)
            pass
        elif token in [CPP14Parser.Floatingliteral]:
            self.enterOuterAlt(localctx, 3)
            self.state = 2455
            self.match(CPP14Parser.Floatingliteral)
            pass
        elif token in [CPP14Parser.Stringliteral]:
            self.enterOuterAlt(localctx, 4)
            self.state = 2456
            self.match(CPP14Parser.Stringliteral)
            pass
        elif token in [CPP14Parser.BFalse, CPP14Parser.BTrue]:
            self.enterOuterAlt(localctx, 5)
            self.state = 2457
            self.booleanliteral()
            pass
        elif token in [CPP14Parser.Nullptr]:
            self.enterOuterAlt(localctx, 6)
            self.state = 2458
            self.pointerliteral()
            pass
        elif token in [CPP14Parser.Userdefinedintegerliteral, CPP14Parser.Userdefinedfloatingliteral, CPP14Parser.Userdefinedstringliteral, CPP14Parser.Userdefinedcharacterliteral]:
            self.enterOuterAlt(localctx, 7)
            self.state = 2459
            self.userdefinedliteral()
            pass
        else:
            # Lookahead matches no literal alternative.
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
# --- Generated by ANTLR4 from the CPP14 grammar: `booleanliteral` rule. ---
class BooleanliteralContext(ParserRuleContext):
    """Parse-tree context for the `booleanliteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def BFalse(self):
        return self.getToken(CPP14Parser.BFalse, 0)

    def BTrue(self):
        return self.getToken(CPP14Parser.BTrue, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_booleanliteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBooleanliteral" ):
            listener.enterBooleanliteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBooleanliteral" ):
            listener.exitBooleanliteral(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBooleanliteral" ):
            return visitor.visitBooleanliteral(self)
        else:
            return visitor.visitChildren(self)


def booleanliteral(self):
    """Parse the `booleanliteral` rule: a single `false` or `true` token."""
    localctx = CPP14Parser.BooleanliteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 394, self.RULE_booleanliteral)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2462
        _la = self._input.LA(1)
        # Accept exactly BFalse or BTrue; anything else is a syntax error.
        if not(_la==CPP14Parser.BFalse or _la==CPP14Parser.BTrue):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
# --- Generated by ANTLR4 from the CPP14 grammar: `pointerliteral` rule. ---
class PointerliteralContext(ParserRuleContext):
    """Parse-tree context for the `pointerliteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Nullptr(self):
        return self.getToken(CPP14Parser.Nullptr, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_pointerliteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPointerliteral" ):
            listener.enterPointerliteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPointerliteral" ):
            listener.exitPointerliteral(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitPointerliteral" ):
            return visitor.visitPointerliteral(self)
        else:
            return visitor.visitChildren(self)


def pointerliteral(self):
    """Parse the `pointerliteral` rule: the single `nullptr` token."""
    localctx = CPP14Parser.PointerliteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 396, self.RULE_pointerliteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2464
        self.match(CPP14Parser.Nullptr)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
# --- Generated by ANTLR4 from the CPP14 grammar: `userdefinedliteral` rule. ---
class UserdefinedliteralContext(ParserRuleContext):
    """Parse-tree context for the `userdefinedliteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Userdefinedintegerliteral(self):
        return self.getToken(CPP14Parser.Userdefinedintegerliteral, 0)

    def Userdefinedfloatingliteral(self):
        return self.getToken(CPP14Parser.Userdefinedfloatingliteral, 0)

    def Userdefinedstringliteral(self):
        return self.getToken(CPP14Parser.Userdefinedstringliteral, 0)

    def Userdefinedcharacterliteral(self):
        return self.getToken(CPP14Parser.Userdefinedcharacterliteral, 0)

    def getRuleIndex(self):
        return CPP14Parser.RULE_userdefinedliteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterUserdefinedliteral" ):
            listener.enterUserdefinedliteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitUserdefinedliteral" ):
            listener.exitUserdefinedliteral(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitUserdefinedliteral" ):
            return visitor.visitUserdefinedliteral(self)
        else:
            return visitor.visitChildren(self)


def userdefinedliteral(self):
    """Parse the `userdefinedliteral` rule: any of the four UDL token kinds."""
    localctx = CPP14Parser.UserdefinedliteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 398, self.RULE_userdefinedliteral)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2466
        _la = self._input.LA(1)
        # ANTLR-generated bitset membership test: token types are shifted
        # down by 143 and tested against a 64-bit mask covering the four
        # user-defined-literal token kinds.
        if not(((((_la - 143)) & ~0x3f) == 0 and ((1 << (_la - 143)) & ((1 << (CPP14Parser.Userdefinedintegerliteral - 143)) | (1 << (CPP14Parser.Userdefinedfloatingliteral - 143)) | (1 << (CPP14Parser.Userdefinedstringliteral - 143)) | (1 << (CPP14Parser.Userdefinedcharacterliteral - 143)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    """Evaluate the semantic predicate `predIndex` for rule `ruleIndex`.

    ANTLR calls this from the adaptive-prediction machinery; the mapping
    from rule index to handler is built lazily on first use and cached
    on the instance in `self._predicates`.
    """
    if self._predicates == None:
        self._predicates = dict()
        self._predicates[5] = self.nestednamespecifier_sempred
        self._predicates[10] = self.capturelist_sempred
        self._predicates[15] = self.postfixexpression_sempred
        self._predicates[26] = self.noptrnewdeclarator_sempred
        self._predicates[31] = self.pmexpression_sempred
        self._predicates[32] = self.multiplicativeexpression_sempred
        self._predicates[33] = self.additiveexpression_sempred
        self._predicates[34] = self.shiftexpression_sempred
        self._predicates[36] = self.relationalexpression_sempred
        self._predicates[37] = self.equalityexpression_sempred
        self._predicates[38] = self.andexpression_sempred
        self._predicates[39] = self.exclusiveorexpression_sempred
        self._predicates[40] = self.inclusiveorexpression_sempred
        self._predicates[41] = self.logicalandexpression_sempred
        self._predicates[42] = self.logicalorexpression_sempred
        self._predicates[46] = self.expression_sempred
        self._predicates[52] = self.statementseq_sempred
        self._predicates[61] = self.declarationseq_sempred
        self._predicates[88] = self.enumeratorlist_sempred
        self._predicates[106] = self.attributespecifierseq_sempred
        self._predicates[109] = self.attributelist_sempred
        self._predicates[115] = self.balancedtokenseq_sempred
        self._predicates[117] = self.initdeclaratorlist_sempred
        self._predicates[121] = self.noptrdeclarator_sempred
        self._predicates[132] = self.noptrabstractdeclarator_sempred
        self._predicates[134] = self.noptrabstractpackdeclarator_sempred
        self._predicates[136] = self.parameterdeclarationlist_sempred
        self._predicates[143] = self.initializerlist_sempred
        self._predicates[153] = self.memberdeclaratorlist_sempred
        self._predicates[155] = self.virtspecifierseq_sempred
        self._predicates[159] = self.basespecifierlist_sempred
        self._predicates[174] = self.templateparameterlist_sempred
        self._predicates[180] = self.templateargumentlist_sempred
        self._predicates[193] = self.typeidlist_sempred
    pred = self._predicates.get(ruleIndex, None)
    if pred is None:
        raise Exception("No predicate with index:" + str(ruleIndex))
    else:
        return pred(localctx, predIndex)
# --- Per-rule semantic-predicate handlers (ANTLR-generated). ---
# Each handler implements the precedence predicates of one left-recursive
# rule: `self.precpred(self._ctx, p)` returns True when the current
# recursion context allows continuing the loop at precedence level `p`.
# An unmatched `predIndex` falls through and returns None implicitly.
def nestednamespecifier_sempred(self, localctx:NestednamespecifierContext, predIndex:int):
    if predIndex == 0:
        return self.precpred(self._ctx, 2)
    if predIndex == 1:
        return self.precpred(self._ctx, 1)

def capturelist_sempred(self, localctx:CapturelistContext, predIndex:int):
    if predIndex == 2:
        return self.precpred(self._ctx, 1)

def postfixexpression_sempred(self, localctx:PostfixexpressionContext, predIndex:int):
    if predIndex == 3:
        return self.precpred(self._ctx, 19)
    if predIndex == 4:
        return self.precpred(self._ctx, 18)
    if predIndex == 5:
        return self.precpred(self._ctx, 17)
    if predIndex == 6:
        return self.precpred(self._ctx, 12)
    if predIndex == 7:
        return self.precpred(self._ctx, 11)
    if predIndex == 8:
        return self.precpred(self._ctx, 10)
    if predIndex == 9:
        return self.precpred(self._ctx, 9)
    if predIndex == 10:
        return self.precpred(self._ctx, 8)
    if predIndex == 11:
        return self.precpred(self._ctx, 7)

def noptrnewdeclarator_sempred(self, localctx:NoptrnewdeclaratorContext, predIndex:int):
    if predIndex == 12:
        return self.precpred(self._ctx, 1)

def pmexpression_sempred(self, localctx:PmexpressionContext, predIndex:int):
    if predIndex == 13:
        return self.precpred(self._ctx, 2)
    if predIndex == 14:
        return self.precpred(self._ctx, 1)

def multiplicativeexpression_sempred(self, localctx:MultiplicativeexpressionContext, predIndex:int):
    if predIndex == 15:
        return self.precpred(self._ctx, 3)
    if predIndex == 16:
        return self.precpred(self._ctx, 2)
    if predIndex == 17:
        return self.precpred(self._ctx, 1)

def additiveexpression_sempred(self, localctx:AdditiveexpressionContext, predIndex:int):
    if predIndex == 18:
        return self.precpred(self._ctx, 2)
    if predIndex == 19:
        return self.precpred(self._ctx, 1)

def shiftexpression_sempred(self, localctx:ShiftexpressionContext, predIndex:int):
    if predIndex == 20:
        return self.precpred(self._ctx, 1)

def relationalexpression_sempred(self, localctx:RelationalexpressionContext, predIndex:int):
    if predIndex == 21:
        return self.precpred(self._ctx, 4)
    if predIndex == 22:
        return self.precpred(self._ctx, 3)
    if predIndex == 23:
        return self.precpred(self._ctx, 2)
    if predIndex == 24:
        return self.precpred(self._ctx, 1)

def equalityexpression_sempred(self, localctx:EqualityexpressionContext, predIndex:int):
    if predIndex == 25:
        return self.precpred(self._ctx, 2)
    if predIndex == 26:
        return self.precpred(self._ctx, 1)

def andexpression_sempred(self, localctx:AndexpressionContext, predIndex:int):
    if predIndex == 27:
        return self.precpred(self._ctx, 1)

def exclusiveorexpression_sempred(self, localctx:ExclusiveorexpressionContext, predIndex:int):
    if predIndex == 28:
        return self.precpred(self._ctx, 1)

def inclusiveorexpression_sempred(self, localctx:InclusiveorexpressionContext, predIndex:int):
    if predIndex == 29:
        return self.precpred(self._ctx, 1)

def logicalandexpression_sempred(self, localctx:LogicalandexpressionContext, predIndex:int):
    if predIndex == 30:
        return self.precpred(self._ctx, 2)
    if predIndex == 31:
        return self.precpred(self._ctx, 1)

def logicalorexpression_sempred(self, localctx:LogicalorexpressionContext, predIndex:int):
    if predIndex == 32:
        return self.precpred(self._ctx, 2)
    if predIndex == 33:
        return self.precpred(self._ctx, 1)

def expression_sempred(self, localctx:ExpressionContext, predIndex:int):
    if predIndex == 34:
        return self.precpred(self._ctx, 1)

def statementseq_sempred(self, localctx:StatementseqContext, predIndex:int):
    if predIndex == 35:
        return self.precpred(self._ctx, 1)

def declarationseq_sempred(self, localctx:DeclarationseqContext, predIndex:int):
    if predIndex == 36:
        return self.precpred(self._ctx, 1)

def enumeratorlist_sempred(self, localctx:EnumeratorlistContext, predIndex:int):
    if predIndex == 37:
        return self.precpred(self._ctx, 1)

def attributespecifierseq_sempred(self, localctx:AttributespecifierseqContext, predIndex:int):
    if predIndex == 38:
        return self.precpred(self._ctx, 1)

def attributelist_sempred(self, localctx:AttributelistContext, predIndex:int):
    if predIndex == 39:
        return self.precpred(self._ctx, 3)
    if predIndex == 40:
        return self.precpred(self._ctx, 1)

def balancedtokenseq_sempred(self, localctx:BalancedtokenseqContext, predIndex:int):
    if predIndex == 41:
        return self.precpred(self._ctx, 1)

def initdeclaratorlist_sempred(self, localctx:InitdeclaratorlistContext, predIndex:int):
    if predIndex == 42:
        return self.precpred(self._ctx, 1)

def noptrdeclarator_sempred(self, localctx:NoptrdeclaratorContext, predIndex:int):
    if predIndex == 43:
        return self.precpred(self._ctx, 3)
    if predIndex == 44:
        return self.precpred(self._ctx, 2)

def noptrabstractdeclarator_sempred(self, localctx:NoptrabstractdeclaratorContext, predIndex:int):
    if predIndex == 45:
        return self.precpred(self._ctx, 5)
    if predIndex == 46:
        return self.precpred(self._ctx, 3)

def noptrabstractpackdeclarator_sempred(self, localctx:NoptrabstractpackdeclaratorContext, predIndex:int):
    if predIndex == 47:
        return self.precpred(self._ctx, 3)
    if predIndex == 48:
        return self.precpred(self._ctx, 2)

def parameterdeclarationlist_sempred(self, localctx:ParameterdeclarationlistContext, predIndex:int):
    if predIndex == 49:
        return self.precpred(self._ctx, 1)

def initializerlist_sempred(self, localctx:InitializerlistContext, predIndex:int):
    if predIndex == 50:
        return self.precpred(self._ctx, 1)

def memberdeclaratorlist_sempred(self, localctx:MemberdeclaratorlistContext, predIndex:int):
    if predIndex == 51:
        return self.precpred(self._ctx, 1)

def virtspecifierseq_sempred(self, localctx:VirtspecifierseqContext, predIndex:int):
    if predIndex == 52:
        return self.precpred(self._ctx, 1)

def basespecifierlist_sempred(self, localctx:BasespecifierlistContext, predIndex:int):
    if predIndex == 53:
        return self.precpred(self._ctx, 1)

def templateparameterlist_sempred(self, localctx:TemplateparameterlistContext, predIndex:int):
    if predIndex == 54:
        return self.precpred(self._ctx, 1)

def templateargumentlist_sempred(self, localctx:TemplateargumentlistContext, predIndex:int):
    if predIndex == 55:
        return self.precpred(self._ctx, 1)

def typeidlist_sempred(self, localctx:TypeidlistContext, predIndex:int):
    if predIndex == 56:
        return self.precpred(self._ctx, 1)
| 40.700448
| 3,212
| 0.594921
|
d5dcd199f6c59c5a2c6d928d378af6822f48b174
| 387
|
py
|
Python
|
setup.py
|
MULCIA/PLNFinalWork
|
b73cdef018240d2fbda94fbfaaa03a931f3f9e6e
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
MULCIA/PLNFinalWork
|
b73cdef018240d2fbda94fbfaaa03a931f3f9e6e
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
MULCIA/PLNFinalWork
|
b73cdef018240d2fbda94fbfaaa03a931f3f9e6e
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup, find_packages

# Packaging metadata for the PLNFinalWork project.
setup(
    name='PLNFinalWork',
    version='0.0.1',
    url='https://github.com/MULCIA/PLNFinalWork',
    license='MIT License',
    author='Sergio Rodriguez',
    author_email='sergiorodriguezcalvo@gmail.com',
    # NOTE(review): this text reads like a `description`, not search
    # keywords — confirm whether it belongs in the `description` field.
    keywords='End work of PLN for MULCIA master',
    packages=find_packages(),
    install_requires=[],  # no runtime dependencies declared
    test_suite='tests',
)
| 27.642857
| 50
| 0.69509
|
20216bcd23f61f6c64a8eb9becc833683e994234
| 19,986
|
py
|
Python
|
tensorflow_probability/python/sts/components/semilocal_linear_trend.py
|
axch/probability
|
b112faafc593d18e6adf4c85fa8e0ce37b29f400
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/sts/components/semilocal_linear_trend.py
|
axch/probability
|
b112faafc593d18e6adf4c85fa8e0ce37b29f400
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/sts/components/semilocal_linear_trend.py
|
axch/probability
|
b112faafc593d18e6adf4c85fa8e0ce37b29f400
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Semi-Local Linear Trend model."""
# Dependency imports
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python.internal import distribution_util as dist_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.sts.internal import util as sts_util
from tensorflow_probability.python.sts.structural_time_series import Parameter
from tensorflow_probability.python.sts.structural_time_series import StructuralTimeSeries
class SemiLocalLinearTrendStateSpaceModel(tfd.LinearGaussianStateSpaceModel):
  """State space model for a semi-local linear trend.

  A state space model (SSM) posits a set of latent (unobserved) variables that
  evolve over time with dynamics specified by a probabilistic transition model
  `p(z[t+1] | z[t])`. At each timestep, we observe a value sampled from an
  observation model conditioned on the current state, `p(x[t] | z[t])`. The
  special case where both the transition and observation models are Gaussians
  with mean specified as a linear function of the inputs, is known as a linear
  Gaussian state space model and supports tractable exact probabilistic
  calculations; see `tfp.distributions.LinearGaussianStateSpaceModel` for
  details.

  The semi-local linear trend model is a special case of a linear Gaussian
  SSM, in which the latent state posits a `level` and `slope`. The `level`
  evolves via a Gaussian random walk centered at the current `slope`, while
  the `slope` follows a first-order autoregressive (AR1) process with
  mean `slope_mean`:

  ```python
  level[t] = level[t-1] + slope[t-1] + Normal(0., level_scale)
  slope[t] = (slope_mean +
              autoregressive_coef * (slope[t-1] - slope_mean) +
              Normal(0., slope_scale))
  ```

  The latent state is the two-dimensional tuple `[level, slope]`. The
  `level` is observed at each timestep.

  The parameters `level_scale`, `slope_mean`, `slope_scale`,
  `autoregressive_coef`, and `observation_noise_scale` are each (a batch of)
  scalars. The batch shape of this `Distribution` is the broadcast batch shape
  of these parameters and of the `initial_state_prior`.

  #### Mathematical Details

  The semi-local linear trend model implements a
  `tfp.distributions.LinearGaussianStateSpaceModel` with `latent_size = 2`
  and `observation_size = 1`, following the transition model:

  ```
  transition_matrix = [[1., 1.]
                       [0., autoregressive_coef]]
  transition_noise ~ N(loc=slope_mean - autoregressive_coef * slope_mean,
                       scale=diag([level_scale, slope_scale]))
  ```

  which implements the evolution of `[level, slope]` described above, and
  the observation model:

  ```
  observation_matrix = [[1., 0.]]
  observation_noise ~ N(loc=0, scale=observation_noise_scale)
  ```

  which picks out the first latent component, i.e., the `level`, as the
  observation at each timestep.

  #### Examples

  A simple model definition:

  ```python
  semilocal_trend_model = SemiLocalLinearTrendStateSpaceModel(
      num_timesteps=50,
      level_scale=0.5,
      slope_mean=0.2,
      slope_scale=0.5,
      autoregressive_coef=0.9,
      initial_state_prior=tfd.MultivariateNormalDiag(scale_diag=[1., 1.]))

  y = semilocal_trend_model.sample() # y has shape [50, 1]
  lp = semilocal_trend_model.log_prob(y) # log_prob is scalar
  ```

  Passing additional parameter dimensions constructs a batch of models. The
  overall batch shape is the broadcast batch shape of the parameters:

  ```python
  semilocal_trend_model = SemiLocalLinearTrendStateSpaceModel(
      num_timesteps=50,
      level_scale=tf.ones([10]),
      slope_mean=0.2,
      slope_scale=0.5,
      autoregressive_coef=0.9,
      initial_state_prior=tfd.MultivariateNormalDiag(
          scale_diag=tf.ones([10, 10, 2])))

  y = semilocal_trend_model.sample(5) # y has shape [5, 10, 10, 50, 1]
  lp = semilocal_trend_model.log_prob(y) # lp has shape [5, 10, 10]
  ```
  """

  def __init__(self,
               num_timesteps,
               level_scale,
               slope_mean,
               slope_scale,
               autoregressive_coef,
               initial_state_prior,
               observation_noise_scale=0.,
               name=None,
               **linear_gaussian_ssm_kwargs):
    """Build a state space model implementing a semi-local linear trend.

    Args:
      num_timesteps: Scalar `int` `Tensor` number of timesteps to model
        with this distribution.
      level_scale: Scalar (any additional dimensions are treated as batch
        dimensions) `float` `Tensor` indicating the standard deviation of the
        level transitions.
      slope_mean: Scalar (any additional dimensions are treated as batch
        dimensions) `float` `Tensor` indicating the expected long-term mean of
        the latent slope.
      slope_scale: Scalar (any additional dimensions are treated as batch
        dimensions) `float` `Tensor` indicating the standard deviation of the
        slope transitions.
      autoregressive_coef: Scalar (any additional dimensions are treated as
        batch dimensions) `float` `Tensor` defining the AR1 process on the
        latent slope.
      initial_state_prior: instance of `tfd.MultivariateNormal`
        representing the prior distribution on latent states; must
        have event shape `[2]`.
      observation_noise_scale: Scalar (any additional dimensions are
        treated as batch dimensions) `float` `Tensor` indicating the standard
        deviation of the observation noise.
      name: Python `str` name prefixed to ops created by this class.
        Default value: "SemiLocalLinearTrendStateSpaceModel".
      **linear_gaussian_ssm_kwargs: Optional additional keyword arguments to
        to the base `tfd.LinearGaussianStateSpaceModel` constructor.
    """
    # Record constructor args for `parameters`, flattening the kwargs dict
    # into the same namespace.
    parameters = dict(locals())
    parameters.update(linear_gaussian_ssm_kwargs)
    del parameters['linear_gaussian_ssm_kwargs']
    with tf.name_scope(name or 'SemiLocalLinearTrendStateSpaceModel') as name:
      # All parameters are coerced to the dtype of the initial-state prior.
      dtype = initial_state_prior.dtype
      level_scale = tf.convert_to_tensor(
          value=level_scale, dtype=dtype, name='level_scale')
      slope_mean = tf.convert_to_tensor(
          value=slope_mean, dtype=dtype, name='slope_mean')
      slope_scale = tf.convert_to_tensor(
          value=slope_scale, dtype=dtype, name='slope_scale')
      autoregressive_coef = tf.convert_to_tensor(
          value=autoregressive_coef, dtype=dtype, name='autoregressive_coef')
      observation_noise_scale = tf.convert_to_tensor(
          value=observation_noise_scale,
          dtype=dtype,
          name='observation_noise_scale')

      self._level_scale = level_scale
      self._slope_mean = slope_mean
      self._slope_scale = slope_scale
      self._autoregressive_coef = autoregressive_coef
      self._observation_noise_scale = observation_noise_scale

      # Delegate the actual filtering/sampling machinery to the linear
      # Gaussian SSM base class, with transition and observation models
      # built by the module-level helpers below.
      super(SemiLocalLinearTrendStateSpaceModel, self).__init__(
          num_timesteps=num_timesteps,
          transition_matrix=semilocal_linear_trend_transition_matrix(
              autoregressive_coef),
          transition_noise=semilocal_linear_trend_transition_noise(
              level_scale, slope_mean, slope_scale, autoregressive_coef),
          observation_matrix=tf.constant(
              [[1., 0.]], dtype=dtype),
          observation_noise=tfd.MultivariateNormalDiag(
              scale_diag=observation_noise_scale[..., tf.newaxis]),
          initial_state_prior=initial_state_prior,
          name=name,
          **linear_gaussian_ssm_kwargs)
      self._parameters = parameters

  @property
  def level_scale(self):
    """Stddev of the level transition noise."""
    return self._level_scale

  @property
  def slope_mean(self):
    """Long-term mean of the latent slope."""
    return self._slope_mean

  @property
  def slope_scale(self):
    """Stddev of the slope transition noise."""
    return self._slope_scale

  @property
  def autoregressive_coef(self):
    """AR1 coefficient governing the latent slope."""
    return self._autoregressive_coef

  @property
  def observation_noise_scale(self):
    """Stddev of the observation noise."""
    return self._observation_noise_scale
def semilocal_linear_trend_transition_matrix(autoregressive_coef):
  """Build the 2x2 `[level, slope]` transition operator for the trend model.

  The dense matrix is
    [[1., 1.     ],   # level(t+1) = level(t) + slope(t)
     [0., ar_coef]]   # slope(t+1) = ar_coef * slope(t)
  broadcast against the batch shape of `autoregressive_coef`: e.g., a
  coefficient of shape [4, 6] yields an operator of shape [4, 6, 2, 2].
  """
  dtype = autoregressive_coef.dtype
  # Constant part of the matrix, with a zero placeholder where the (possibly
  # batched) autoregressive coefficient belongs.
  constant_part = tf.constant(
      [[1., 1.],
       [0., 0.]], dtype=dtype)
  # One-hot mask selecting the bottom-right entry; multiplying it by the
  # coefficient (expanded with two trailing singleton axes) broadcasts the
  # batch shape into the matrix shape.
  coef_mask = tf.constant(
      [[0., 0.],
       [0., 1.]], dtype=dtype)
  batched_coef_entry = (
      coef_mask * autoregressive_coef[..., tf.newaxis, tf.newaxis])
  return tf.linalg.LinearOperatorFullMatrix(constant_part + batched_coef_entry)
def semilocal_linear_trend_transition_noise(level_scale,
                                            slope_mean,
                                            slope_scale,
                                            autoregressive_coef):
  """Build the `[level, slope]` transition-noise distribution for the model."""
  # Broadcast all (scalar-per-batch-member) parameters to a common batch
  # shape so the stacked loc/scale_diag tensors line up.
  batch_ones = tf.ones(
      dist_util.get_broadcast_shape(
          level_scale, slope_mean, slope_scale, autoregressive_coef),
      dtype=level_scale.dtype)
  # The slope update (see `SemiLocalLinearTrend` docstring)
  #   slope[t] = slope_mean + ar_coef * (slope[t-1] - slope_mean) + noise
  # is folded into linear-Gaussian form as a noise bias of
  #   slope_mean - ar_coef * slope_mean = slope_mean * (1 - ar_coef);
  # the level transition carries no bias.
  loc = tf.stack([tf.zeros_like(batch_ones),
                  slope_mean * (1 - autoregressive_coef) * batch_ones],
                 axis=-1)
  # Per-timestep stochasticity of `level` and `slope` respectively.
  scale = tf.stack([level_scale * batch_ones,
                    slope_scale * batch_ones],
                   axis=-1)
  return tfd.MultivariateNormalDiag(loc=loc, scale_diag=scale)
class SemiLocalLinearTrend(StructuralTimeSeries):
"""Formal representation of a semi-local linear trend model.
Like the `LocalLinearTrend` model, a semi-local linear trend posits a
latent `level` and `slope`, with the level component updated according to
the current slope plus a random walk:
```
level[t] = level[t-1] + slope[t-1] + Normal(0., level_scale)
```
The slope component in a `SemiLocalLinearTrend` model evolves according to
a first-order autoregressive (AR1) process with potentially nonzero mean:
```
slope[t] = (slope_mean +
autoregressive_coef * (slope[t-1] - slope_mean) +
Normal(0., slope_scale))
```
Unlike the random walk used in `LocalLinearTrend`, a stationary
AR1 process (coefficient in `(-1, 1)`) maintains bounded variance over time,
so a `SemiLocalLinearTrend` model will often produce more reasonable
uncertainties when forecasting over long timescales.
"""
def __init__(self,
level_scale_prior=None,
slope_mean_prior=None,
slope_scale_prior=None,
autoregressive_coef_prior=None,
initial_level_prior=None,
initial_slope_prior=None,
observed_time_series=None,
constrain_ar_coef_stationary=True,
constrain_ar_coef_positive=False,
name=None):
"""Specify a semi-local linear trend model.
Args:
level_scale_prior: optional `tfd.Distribution` instance specifying a prior
on the `level_scale` parameter. If `None`, a heuristic default prior is
constructed based on the provided `observed_time_series`.
Default value: `None`.
slope_mean_prior: optional `tfd.Distribution` instance specifying a prior
on the `slope_mean` parameter. If `None`, a heuristic default prior is
constructed based on the provided `observed_time_series`.
Default value: `None`.
slope_scale_prior: optional `tfd.Distribution` instance specifying a prior
on the `slope_scale` parameter. If `None`, a heuristic default prior is
constructed based on the provided `observed_time_series`.
Default value: `None`.
autoregressive_coef_prior: optional `tfd.Distribution` instance specifying
a prior on the `autoregressive_coef` parameter. If `None`, the default
prior is a standard `Normal(0., 1.)`. Note that the prior may be
implicitly truncated by `constrain_ar_coef_stationary` and/or
`constrain_ar_coef_positive`.
Default value: `None`.
initial_level_prior: optional `tfd.Distribution` instance specifying a
prior on the initial level. If `None`, a heuristic default prior is
constructed based on the provided `observed_time_series`.
Default value: `None`.
initial_slope_prior: optional `tfd.Distribution` instance specifying a
prior on the initial slope. If `None`, a heuristic default prior is
constructed based on the provided `observed_time_series`.
Default value: `None`.
observed_time_series: optional `float` `Tensor` of shape
`batch_shape + [T, 1]` (omitting the trailing unit dimension is also
supported when `T > 1`), specifying an observed time series. Any `NaN`s
are interpreted as missing observations; missingness may be also be
explicitly specified by passing a `tfp.sts.MaskedTimeSeries` instance.
Any priors not explicitly set will be given default values according to
the scale of the observed time series (or batch of time series).
Default value: `None`.
constrain_ar_coef_stationary: if `True`, perform inference using a
parameterization that restricts `autoregressive_coef` to the interval
`(-1, 1)`, or `(0, 1)` if `force_positive_ar_coef` is also `True`,
corresponding to stationary processes. This will implicitly truncates
the support of `autoregressive_coef_prior`.
Default value: `True`.
constrain_ar_coef_positive: if `True`, perform inference using a
parameterization that restricts `autoregressive_coef` to be positive,
or in `(0, 1)` if `constrain_ar_coef_stationary` is also `True`. This
will implicitly truncate the support of `autoregressive_coef_prior`.
Default value: `False`.
name: the name of this model component.
Default value: 'SemiLocalLinearTrend'.
"""
init_parameters = dict(locals())
with tf.name_scope(name or 'SemiLocalLinearTrend') as name:
if observed_time_series is not None:
_, observed_stddev, observed_initial = sts_util.empirical_statistics(
observed_time_series)
else:
observed_stddev, observed_initial = 1., 0.
# Heuristic default priors. Overriding these may dramatically
# change inference performance and results.
if level_scale_prior is None:
level_scale_prior = tfd.LogNormal(
loc=tf.math.log(.01 * observed_stddev), scale=2.)
if slope_mean_prior is None:
slope_mean_prior = tfd.Normal(loc=0.,
scale=observed_stddev)
if slope_scale_prior is None:
slope_scale_prior = tfd.LogNormal(
loc=tf.math.log(.01 * observed_stddev), scale=2.)
if autoregressive_coef_prior is None:
autoregressive_coef_prior = tfd.Normal(
loc=0., scale=tf.ones_like(observed_initial))
if initial_level_prior is None:
initial_level_prior = tfd.Normal(
loc=observed_initial,
scale=tf.abs(observed_initial) + observed_stddev)
if initial_slope_prior is None:
initial_slope_prior = tfd.Normal(loc=0., scale=observed_stddev)
dtype = dtype_util.common_dtype([level_scale_prior,
slope_scale_prior,
autoregressive_coef_prior,
initial_level_prior,
initial_slope_prior])
self._initial_state_prior = tfd.MultivariateNormalDiag(
loc=tf.stack(
[initial_level_prior.mean(),
initial_slope_prior.mean()
], axis=-1),
scale_diag=tf.stack([
initial_level_prior.stddev(),
initial_slope_prior.stddev()
], axis=-1))
# Constrain the support of the autoregressive coefficient.
if constrain_ar_coef_stationary and constrain_ar_coef_positive:
autoregressive_coef_bijector = tfb.Sigmoid() # support in (0, 1)
elif constrain_ar_coef_positive:
autoregressive_coef_bijector = tfb.Softplus() # support in (0, infty)
elif constrain_ar_coef_stationary:
autoregressive_coef_bijector = tfb.Tanh() # support in (-1, 1)
else:
autoregressive_coef_bijector = tfb.Identity() # unconstrained
stddev_preconditioner = tfb.Scale(scale=observed_stddev)
scaled_softplus = tfb.Chain([stddev_preconditioner,
tfb.Softplus(low=dtype_util.eps(dtype))])
super(SemiLocalLinearTrend, self).__init__(
parameters=[
Parameter('level_scale', level_scale_prior, scaled_softplus),
Parameter('slope_mean', slope_mean_prior, stddev_preconditioner),
Parameter('slope_scale', slope_scale_prior, scaled_softplus),
Parameter('autoregressive_coef',
autoregressive_coef_prior,
autoregressive_coef_bijector),
],
latent_size=2,
init_parameters=init_parameters,
name=name)
  @property
  def initial_state_prior(self):
    """Prior distribution on the initial latent state (level and scale)."""
    # Built in `__init__` as a `tfd.MultivariateNormalDiag` stacking the
    # means/stddevs of the initial level and slope priors on the last axis.
    return self._initial_state_prior
def _make_state_space_model(self,
num_timesteps,
param_map,
initial_state_prior=None,
**linear_gaussian_ssm_kwargs):
if initial_state_prior is None:
initial_state_prior = self.initial_state_prior
linear_gaussian_ssm_kwargs.update(param_map)
return SemiLocalLinearTrendStateSpaceModel(
num_timesteps=num_timesteps,
initial_state_prior=initial_state_prior,
**linear_gaussian_ssm_kwargs)
| 43.733042
| 89
| 0.677124
|
4d85e9e5cbd22e286bfa4228f1fcc76f79f7246e
| 10,007
|
py
|
Python
|
luafun/utils/parse_valve.py
|
Delaunay/LuaFun
|
bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39
|
[
"BSD-3-Clause"
] | 1
|
2021-02-06T06:42:29.000Z
|
2021-02-06T06:42:29.000Z
|
luafun/utils/parse_valve.py
|
Delaunay/LuaFun
|
bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39
|
[
"BSD-3-Clause"
] | 6
|
2021-04-08T21:46:06.000Z
|
2021-05-09T01:40:04.000Z
|
luafun/utils/parse_valve.py
|
Delaunay/LuaFun
|
bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39
|
[
"BSD-3-Clause"
] | null | null | null |
from collections import defaultdict
import logging
log = logging.getLogger(__name__)
class StartObject:
    """Lexer token emitted when an opening brace '{' is read."""

    def __repr__(self):
        return 'StartObj'
class EndObject:
    """Lexer token emitted when a closing brace '}' is read."""

    def __repr__(self):
        return 'EndObj'
class EndLine:
    """Lexer token emitted at a newline (or end of a // comment)."""

    def __repr__(self):
        return 'EndLine'
class NewValue:
    """Lexer token carrying a completed quoted-string value."""

    def __init__(self, s):
        # Keep the raw string; Parser reads `.value` directly.
        self.value = s

    def __repr__(self):
        return self.value
class Lexer:
    """Tokenizes a Valve KeyValues file into StartObject, EndObject,
    EndLine and NewValue tokens, skipping whitespace and // comments."""

    def __init__(self, filename):
        with open(filename, "r") as f:
            self.content = f.read()
        self.pos = 0                  # cursor into `content`
        self.buffer = ''              # accumulates the current quoted string
        self.parsing_string = False
        self.parsing_comment = False
        self.previous = None          # remembers a possible leading '/' of '//'

    def __iter__(self):
        return self

    def process_character(self, c):
        """Feeds one character through the state machine.

        Returns a token object, or None when `c` did not complete one.
        """
        if self.parsing_string:
            if c != '"':
                self.buffer += c
                return None
            # Closing quote: emit the accumulated value.
            self.parsing_string = False
            value, self.buffer = self.buffer, ''
            return NewValue(value)
        if self.previous == '/' and c == '/':
            self.parsing_comment = True
        if self.parsing_comment:
            if c != '\n':
                return None
            self.parsing_comment = False
            return EndLine()
        if c in (' ', '\t'):
            return None
        if c == '{':
            return StartObject()
        if c == '}':
            return EndObject()
        if c == '\n':
            return EndLine()
        if c == '"':
            self.parsing_string = True
            return None
        if c == '/':
            # NOTE: `previous` is never reset elsewhere; quirk kept as-is.
            self.previous = '/'
        return None

    def __next__(self):
        while self.pos < len(self.content):
            tok = self.process_character(self.content[self.pos])
            self.pos += 1
            if tok is not None:
                return tok
        raise StopIteration
# Dota 2 engine enum values. Valve KeyValues files reference these names in
# raw strings; Parser substitutes the numeric values when `replace_enums`
# is enabled. Note the *_BEHAVIOR_* values are bit flags.
constants = dict(
    # "ability_type"
    DOTA_ABILITY_TYPE_BASIC = 0,
    DOTA_ABILITY_TYPE_ULTIMATE = 1,
    DOTA_ABILITY_TYPE_ATTRIBUTES = 2,
    # "ability_behavior"
    DOTA_ABILITY_BEHAVIOR_HIDDEN=1,
    DOTA_ABILITY_BEHAVIOR_PASSIVE = 2,
    DOTA_ABILITY_BEHAVIOR_NO_TARGET = 4,
    DOTA_ABILITY_BEHAVIOR_UNIT_TARGET = 8,
    DOTA_ABILITY_BEHAVIOR_POINT = 16,
    DOTA_ABILITY_BEHAVIOR_AOE = 32,
    DOTA_ABILITY_BEHAVIOR_NOT_LEARNABLE = 64,
    DOTA_ABILITY_BEHAVIOR_CHANNELLED = 128,
    DOTA_ABILITY_BEHAVIOR_ITEM = 256,
    DOTA_ABILITY_BEHAVIOR_TOGGLE = 512,
    # "ability_unit_target_type":
    DOTA_UNIT_TARGET_NONE = 0,
    DOTA_UNIT_TARGET_FRIENDLY_HERO = 5,
    DOTA_UNIT_TARGET_FRIENDLY_BASIC = 9,
    DOTA_UNIT_TARGET_FRIENDLY = 13,
    DOTA_UNIT_TARGET_ENEMY_HERO = 6,
    DOTA_UNIT_TARGET_ENEMY_BASIC = 10,
    DOTA_UNIT_TARGET_ENEMY = 14,
    DOTA_UNIT_TARGET_ALL = 15,
)
class Parser:
    """Parses a Valve KeyValues token stream (from `Lexer`) into nested
    dicts rooted at `self.root`, and tallies ability field usage in
    `self.ability_spec`.
    """

    def __init__(self, filename):
        self.lexer = Lexer(filename)
        self.root = dict()
        # Stack of currently-open objects; key/value pairs land in the top.
        self.objects = [self.root]
        self.current_key = None
        self.current_value = None
        # When True, engine enum names inside values are replaced with
        # their numeric equivalents (see `constants`).
        self.replace_enums = True
        names = [
            'AbilityUnitDamageType',
            'SpellImmunityType',
            'AbilityBehavior',
            'HasScepterUpgrade',
            'AbilityCastPoint',
            'AbilityCooldown',
            'AbilityManaCost',
            'AbilityCastRange',
            'SpellDispellableType',
            'AbilityUnitDamageType',
            'AbilityDuration',
            'AbilityChannelTime',
            'AbilityUnitTargetFlags',
            'AbilityCastAnimation',
            'AbilityType',
            'AbilityUnitTargetTeam',
            'AbilityUnitTargetType',
            'AbilityDamage',
            'HasShardUpgrade'
        ]
        self.ability_spec = defaultdict(int)
        # Pre-seed the known fields so they appear even with zero counts.
        for n in names:
            self.ability_spec[n] = 0

    def parse(self):
        """Consumes the whole token stream and returns the root dict."""
        for tok in self.lexer:
            if isinstance(tok, StartObject):
                self.on_start_obj()
            if isinstance(tok, EndObject):
                self.on_end_obj()
            if isinstance(tok, NewValue):
                self.on_value(tok.value)
            if isinstance(tok, EndLine):
                self.on_end_line()
        return self.root

    def on_start_obj(self):
        """Opens a new nested object under the pending key."""
        assert self.current_key is not None, "Object need a Name"
        obj = dict()
        self.objects[-1][self.current_key] = obj
        self.objects.append(obj)
        self.current_key = None

    @staticmethod
    def trypopitem(d, default):
        """`dict.popitem` that returns `default` instead of raising."""
        try:
            return d.popitem()
        except KeyError:
            return default

    def on_end_obj(self):
        """Closes the current object, flattening its `AbilitySpecial`
        sub-dicts into a name -> list-of-values mapping."""
        obj = self.objects.pop()
        specials = obj.get('AbilitySpecial', dict())
        new_spec = dict()
        # Object post processing to make it more workable
        for k, v in specials.items():
            if not isinstance(v, dict):
                # Fix: the original called log.debug(k, v, obj), which
                # treats v/obj as %-format args for message k and raises a
                # formatting error inside logging; use lazy %-args instead.
                log.debug('%s %s %s', k, v, obj)
                continue
            v.pop('var_type')  # drop metadata; only the payload pair remains
            name, values = Parser.trypopitem(v, [None, ''])
            if name:
                new_spec[name] = values.split(' ')
                if 'special' not in name and 'seasonal' not in name:
                    self.ability_spec[name] += 1
        if new_spec:
            obj['AbilitySpecial'] = new_spec

    def on_value(self, value):
        """Records a quoted string as key (first seen) or value (second)."""
        if self.current_key is None:
            self.current_key = value
        else:
            self.current_value = value
            if self.replace_enums:
                for k, v in constants.items():
                    if k in self.current_value:
                        self.current_value = self.current_value.replace(k, str(v))

    def on_end_line(self):
        """Commits a completed key/value pair to the open object."""
        if self.current_value is not None:
            self.objects[-1][self.current_key] = self.current_value
        self.current_key = None
        self.current_value = None
def generate_ability_array():
    """Parses npc_abilities.txt and writes a name/id list to abilities.json."""
    import os
    import json
    resources = os.path.join(os.path.dirname(__file__), '..', 'game', 'resources')
    parser = Parser(os.path.join(resources, 'npc_abilities.txt'))
    parser.parse()
    abilities = []
    for k, v in parser.root['DOTAAbilities'].items():
        if k in ('Version', 'dota_base_ability'):
            continue
        if 'ID' not in v or isinstance(v, str):
            # NOTE(review): `break` (not `continue`) stops at the first
            # malformed entry; presumably nothing useful follows — confirm
            # before changing.
            log.debug(f'Ignoring ability {k} {v}')
            break
        abilities.append(dict(name=k, id=int(v['ID'])))
    out_path = os.path.join(resources, 'abilities.json')
    with open(out_path, 'w') as out:
        json.dump(abilities, out, indent=2)
def generate_item_array():
    """Parses npc_items.txt and writes purchasable items (sorted by cost)
    to items.json."""
    import os
    import json
    resources = os.path.join(os.path.dirname(__file__), '..', 'game', 'resources')
    parser = Parser(os.path.join(resources, 'npc_items.txt'))
    parser.parse()
    items = []
    for k, v in parser.root['DOTAAbilities'].items():
        if k in ('Version', 'dota_base_ability'):
            continue
        if 'ID' not in v or isinstance(v, str):
            # NOTE(review): `break` stops at the first malformed entry.
            log.debug(f'Ignoring ability {k} {v}')
            break
        # Skip neutral drops, unpurchasable and obsolete items.
        if v.get('ItemIsNeutralDrop', "") == '1':
            continue
        if v.get('ItemPurchasable', "") == '0':
            continue
        if v.get('IsObsolete', "") == '1':
            continue
        cost = v.get('ItemCost')
        # this only removes `item_recipe_hood_of_defiance`
        if cost is None or cost == '':
            continue
        # Unbuyable zero-cost recipes: item_recipe_echo_sabre and
        # item_recipe_oblivion_staff.
        if cost == '0' and v.get('ItemRecipe', '') == '1':
            continue
        items.append(dict(name=k, id=int(v['ID']), cost=int(cost),
                          quality=v.get('ItemQuality', '')))
    items.sort(key=lambda entry: entry['cost'])
    out_path = os.path.join(resources, 'items.json')
    with open(out_path, 'w') as out:
        json.dump(items, out, indent=2)
def generate_hero_array():
    """Parses npc_heroes.txt and writes heroes.json plus roles.json."""
    import os
    import json
    resources = os.path.join(os.path.dirname(__file__), '..', 'game', 'resources')
    parser = Parser(os.path.join(resources, 'npc_heroes.txt'))
    parser.parse()
    # Invoker has 24 slot
    ability_count = 24
    roles = set()
    heroes = []
    for k, v in parser.root['DOTAHeroes'].items():
        if k in ('Version', 'npc_dota_hero_base'):
            continue
        if 'HeroID' not in v or isinstance(v, str):
            # NOTE(review): `break` stops at the first malformed entry.
            log.debug(f'Ignoring hero {k} {v}')
            break
        # Pair each role with its level; also collect the global role set.
        role_dict = dict()
        for role_name, role_level in zip(v.get('Role').split(','),
                                         v.get('Rolelevels').split(',')):
            roles.add(role_name)
            role_dict[role_name] = role_level
        heroes.append(dict(
            name=k,
            id=int(v['HeroID']),
            abilities=[v.get(f'Ability{i + 1}') for i in range(ability_count)],
            alias=v.get('NameAliases'),
            pretty_name=v.get('workshop_guide_name'),
            roles=role_dict,
        ))
    with open(os.path.join(resources, 'heroes.json'), 'w') as out:
        json.dump(heroes, out, indent=2)
    with open(os.path.join(resources, 'roles.json'), 'w') as out:
        json.dump(list(roles), out, indent=2)
# Regenerate all derived JSON resources from the Valve text files.
if __name__ == '__main__':
    # p = Parser('C:/Users/Newton/work/luafun/resources/npc_abilities.txt')
    generate_ability_array()
    generate_hero_array()
    generate_item_array()
    # import json
    # print(json.dumps(p.root['DOTAAbilities']['antimage_mana_break'], indent=2))
    # print(p.ability_spec)
    # print(len(p.ability_spec))
    # reused_comp = 0
    # for k, c in p.ability_spec.items():
    #     if c > 1:
    #         print(k, c)
    #         reused_comp += 1
    # print(reused_comp)
    # print(len(p.ability_spec))
    # print(len(p.root['DOTAAbilities']))
| 26.196335
| 88
| 0.551014
|
cabd9714bfa69165f634d046005748e9f5199a98
| 415
|
py
|
Python
|
problem_92.py
|
renxiaoyi/project_euler
|
2566482209cd225fce39887fe869f839cf88330e
|
[
"Unlicense"
] | null | null | null |
problem_92.py
|
renxiaoyi/project_euler
|
2566482209cd225fce39887fe869f839cf88330e
|
[
"Unlicense"
] | null | null | null |
problem_92.py
|
renxiaoyi/project_euler
|
2566482209cd225fce39887fe869f839cf88330e
|
[
"Unlicense"
] | null | null | null |
# Project Euler 92: count starting numbers below ten million whose
# square-digit chain arrives at 89.
ans = {}  # memo: number -> terminal value (1 or 89) of its chain
cnt = 0   # how many starting numbers end at 89
def Next(n):
    """Returns the sum of the squares of the decimal digits of n."""
    return sum(int(d) ** 2 for d in str(n))
# Walk the square-digit chain of every starting number. Every chain is
# known to terminate at 1 or 89, so each computed value is memoized in
# `ans` to short-circuit later chains.
for i in range(1, 10000000):
    n = i
    chain = [n]
    while True:
        if n == 1 or n == 89:
            # Reached a terminal: record it for every chain member.
            for j in chain:
                ans[j] = n
            break
        if n in ans:
            # Already-solved value: propagate its terminal to the chain.
            for j in chain:
                ans[j] = ans[n]
            break
        n = Next(n)
        chain.append(n)
    if ans[i] == 89:
        cnt += 1
# NOTE: Python 2 print statement; this script targets Python 2.
print cnt
| 14.821429
| 33
| 0.493976
|
5ed6ea0bf57404c3e62268af87cae5e19dd13d0d
| 2,340
|
py
|
Python
|
tests/views.py
|
motius/django-gitlab-boilerplate
|
b10e0096b8760d4d001a3ba9c19dd909267a4334
|
[
"MIT"
] | 13
|
2017-05-08T13:00:37.000Z
|
2019-10-07T16:35:38.000Z
|
tests/views.py
|
motius/django-gitlab-boilerplate
|
b10e0096b8760d4d001a3ba9c19dd909267a4334
|
[
"MIT"
] | null | null | null |
tests/views.py
|
motius/django-gitlab-boilerplate
|
b10e0096b8760d4d001a3ba9c19dd909267a4334
|
[
"MIT"
] | 1
|
2020-10-01T21:06:57.000Z
|
2020-10-01T21:06:57.000Z
|
import json
import logging
from django.contrib.auth import get_user_model
from hamcrest import assert_that, has_key
from django.core.urlresolvers import reverse
from apps.core.urls import router
from libs.tests import BaseAPITestCase
from tests.recipes import UserRecipe
from .recipes import EmailAddressRecipe
log = logging.getLogger(__name__)
class TestAPIResponses(BaseAPITestCase):
    """
    Smoke test to see if (almost) all api views work.
    """
    # Overwrite args for detail routes that do not use an id
    detail_args = {}
    ignore_urls = ['api-root']

    def setUp(self):
        super().setUp()
        self.default_viewset_urls = []
        self.extra_urls = []
        # Partition every named router URL into the default viewset routes
        # (-list / -detail) and everything else.
        for route_name in {u.name for u in router.get_urls()}:
            if route_name in self.ignore_urls:
                continue
            namespaced = 'api:{}'.format(route_name)
            if namespaced.endswith(('-detail', '-list')):
                self.default_viewset_urls.append(namespaced)
            else:
                self.extra_urls.append(namespaced)

    def test_default_viewset_reponses(self):
        for name in self.default_viewset_urls:
            if name.endswith('-detail'):
                # Detail routes default to pk=1 unless overridden.
                url = reverse(name, args=self.detail_args.get(name, [1]))
            else:
                url = reverse(name)
            if name.endswith('-list'):
                self.assert_url_list_not_empty(url)
            else:
                self.get_200_response(url)

    def test_extra_viewset_reponses(self):
        # Routes named here get a plain 200 check instead of the
        # non-empty-list check (currently none).
        no_list_urls = []
        for name in self.extra_urls:
            url = reverse(name)
            if name in no_list_urls:
                self.get_200_response(url)
            else:
                self.assert_url_list_not_empty(url)

    def test_standalone_responses(self):
        # Placeholder: add non-router URLs here to smoke-test them.
        for url in []:
            self.get_200_response(url)

    def test_token_login(self):
        credentials = dict(username="bar", password="bar")
        user = get_user_model().objects.create_user(**credentials)
        EmailAddressRecipe.make(user=user, verified=True)
        response = self.client.post(reverse('rest_login'), credentials)
        assert_that(json.loads(response.content.decode("utf8")),
                    has_key("token"))
| 30.38961
| 82
| 0.603846
|
7c163f85d65271c772c90ebb9abe1df1e657299e
| 2,914
|
py
|
Python
|
bann/b_test_train_prepare/pytorch/p_test/functions/pr_fun.py
|
arturOnRails/BANN
|
027af04349304941fb73c2ede502aca4b76f1ad1
|
[
"MIT"
] | null | null | null |
bann/b_test_train_prepare/pytorch/p_test/functions/pr_fun.py
|
arturOnRails/BANN
|
027af04349304941fb73c2ede502aca4b76f1ad1
|
[
"MIT"
] | null | null | null |
bann/b_test_train_prepare/pytorch/p_test/functions/pr_fun.py
|
arturOnRails/BANN
|
027af04349304941fb73c2ede502aca4b76f1ad1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""".. moduleauthor:: Artur Lissin"""
from typing import List, Tuple, Dict
from bann.b_test_train_prepare.pytorch.p_test.functions.auc_calc import calc_auc
from bann.b_test_train_prepare.container.test.rttff_c import merge_ttff_fun, RtTfF, \
check_ttff_merged
from bann.b_test_train_prepare.pytorch.p_test.functions.steps import C_STEP_SIZE, calc_step_f
def _create_fill_output_line(to_fill: Dict[str, List[float]],
th_v: int, tf_v: RtTfF,
class_id: int, /) -> Tuple[str, str, str, str]:
prec_div = (tf_v.r_tp + tf_v.r_fp)
if prec_div <= 0:
prec = 0.0
else:
prec = (1.0 * tf_v.r_tp) / prec_div
recall_div = (tf_v.r_tp + tf_v.r_fn)
if recall_div <= 0:
recall = 0.0
else:
recall = (1.0 * tf_v.r_tp) / recall_div
to_fill.setdefault(f"{recall}", []).append(prec)
return str(class_id), str(prec), str(recall), str(th_v / 100.)
def _one_class_pr_str_erg(data: Dict[int, RtTfF], cl_id: int, /) \
        -> Tuple[List[Tuple[str, str, str, str]], Tuple[str, str, str]]:
    """Renders the PR rows and the AUC summary strings for one class.

    Returns a tuple of (per-threshold report rows, (class id, trapezoid
    AUC, step AUC)) — all stringified.
    """
    recall_to_precisions: Dict[str, List[float]] = {}
    rows = []
    for threshold, tallies in data.items():
        rows.append(_create_fill_output_line(recall_to_precisions,
                                             threshold, tallies, cl_id))
    trapz_auc, step_auc = calc_auc(recall_to_precisions, True)
    return rows, (str(cl_id), str(trapz_auc), str(step_auc))
def merge_one_class_pr(data: List[Tuple[Dict[int, RtTfF], ...]], class_num: int,
                       step_cnt: int, /) -> Tuple[str, ...]:
    """Merges per-worker one-vs-rest tallies and renders a JSON-fragment
    precision/recall and AUC report.

    Args:
        data: per-worker tuples holding, for each class, a mapping from
            threshold step to its RtTfF tally.
        class_num: number of classes.
        step_cnt: requested number of threshold steps.

    Returns:
        Tuple of strings forming the body of a JSON-like report.
    """
    step_f = calc_step_f(step_cnt)
    # One threshold->tally mapping per class, spanning the full step range.
    classes_list = tuple(
        {num * step_f: RtTfF() for num in range(int(C_STEP_SIZE / step_f) + 1)}
        for _ in range(class_num)
    )
    # Running per-class merge checksums, validated after the merge.
    check_sum: List[Dict[int, int]] = [{} for _ in range(class_num)]
    for data_el in data:
        for index in range(class_num):
            for key, value in data_el[index].items():
                check_sum[index][key] = check_sum[index].get(key, 0) + \
                    merge_ttff_fun(classes_list[index][key], value)
    check_ttff_merged(check_sum)
    res = tuple(_one_class_pr_str_erg(cl_el, cl_id) for cl_id, cl_el in enumerate(classes_list))
    erg_list = [
        "\"OneClass_PR\": {",
        "\"ClassID\": [" + ','.join(cid[0] for re_t in res for cid in re_t[0]) + "],",
        "\"Precision\": [" + ','.join(cid[1] for re_t in res for cid in re_t[0]) + "],",
        "\"Recall\": [" + ','.join(cid[2] for re_t in res for cid in re_t[0]) + "],",
        "\"Threshold_in_%\": [" + ','.join(cid[3] for re_t in res for cid in re_t[0]) + "]",
        # NOTE(review): the next two string literals are adjacent with no
        # separating comma, so they merge into the single list element
        # '},"OneClass_PR_AUC": {'. Possibly a missing comma — confirm
        # against the report consumer before changing.
        "},"
        "\"OneClass_PR_AUC\": {",
        "\"ClassID\": [" + ','.join(res_t[1][0] for res_t in res) + "],",
        "\"Trapz_AUC\": [" + ','.join(res_t[1][1] for res_t in res) + "],",
        "\"AP_AUC\": [" + ','.join(res_t[1][2] for res_t in res) + "]",
        "}"
    ]
    return tuple(erg_list)
| 42.231884
| 96
| 0.576184
|
3580d127a7685cb591059f03fc9f99612f02ea60
| 3,607
|
py
|
Python
|
PWGJE/EMCALJetTasks/Tracks/analysis/old/TriggerCorrelation.py
|
maroozm/AliPhysics
|
22ec256928cfdf8f800e05bfc1a6e124d90b6eaf
|
[
"BSD-3-Clause"
] | 114
|
2017-03-03T09:12:23.000Z
|
2022-03-03T20:29:42.000Z
|
PWGJE/EMCALJetTasks/Tracks/analysis/old/TriggerCorrelation.py
|
maroozm/AliPhysics
|
22ec256928cfdf8f800e05bfc1a6e124d90b6eaf
|
[
"BSD-3-Clause"
] | 19,637
|
2017-01-16T12:34:41.000Z
|
2022-03-31T22:02:40.000Z
|
PWGJE/EMCALJetTasks/Tracks/analysis/old/TriggerCorrelation.py
|
maroozm/AliPhysics
|
22ec256928cfdf8f800e05bfc1a6e124d90b6eaf
|
[
"BSD-3-Clause"
] | 1,021
|
2016-07-14T22:41:16.000Z
|
2022-03-31T05:15:51.000Z
|
#! /usr/bin/env python
from ROOT import TFile, TH1F, TCanvas, gPad, gDirectory, kBlack, kRed, kBlue, kGreen, kMagenta
from Helper import FileReaderException, HistNotFoundException, ReadHistList
from Graphics import Style
from SpectrumContainer import SpectrumContainer
import sys
gObjects = []
class TriggerCorrelation:
    """Collects, for one main trigger class, how many events each other
    trigger class fired in coincidence, and renders the result as a TH1F.
    """

    def __init__(self, mainclass):
        # Name of the reference trigger class (first histogram bin).
        self.__mainclass = mainclass
        self.__nevents = 0
        # trigger name -> events fired together with the main class
        self.__otherclasses = {}

    def SetMainNumberOfEvents(self, nevents):
        """Sets the event count of the main trigger class itself."""
        self.__nevents = nevents

    def AddTrigger(self, triggername, nevents):
        """Registers the coincident event count for another trigger class."""
        self.__otherclasses[triggername] = nevents

    def CreateHistogram(self, style):
        """Builds a labelled TH1F: bin 1 holds the main class count, the
        remaining bins the other classes in alphabetical order."""
        result = TH1F("hcorrTrigger%s" %(self.__mainclass), "Number of events with trigger where %s is fired" %(self.__mainclass), 5, -0.5, 4.5)
        result.GetXaxis().SetBinLabel(1, self.__mainclass)
        result.SetBinContent(1, self.__nevents)
        # NOTE: Python 2 print statement — this module targets py2/PyROOT.
        print "Number of events in trigger class %s: %d" %(self.__mainclass, self.__nevents)
        counter = 1
        for icls in sorted(self.__otherclasses.keys()):
            result.GetXaxis().SetBinLabel(counter + 1, icls)
            result.SetBinContent(counter + 1, self.__otherclasses[icls])
            counter = counter + 1
        result.GetXaxis().SetTitle("")
        result.GetXaxis().SetLabelSize(0.065)
        result.GetYaxis().SetTitle("Number of events")
        result.GetYaxis().SetTitleSize(0.05)
        result.GetYaxis().SetLabelSize(0.05)
        result.GetYaxis().SetTitleOffset(1.6)
        result.SetLineColor(style.GetColor())
        result.SetStats(False)
        return result
def ReadHistogram(filename):
    """Reads the event-trigger histogram from a PtEMCalTriggerTask output
    file and wraps it in a SpectrumContainer.

    Raises HistNotFoundException if `hEventTriggers` is missing.
    """
    hist = ReadHistList(filename, "PtEMCalTriggerTask").FindObject("hEventTriggers")
    if not hist:
        raise HistNotFoundException("hEventTriggers")
    return SpectrumContainer(hist)
def CorrelateToTrigger(data, mainclass):
    """Counts, for each other trigger class, the events it fired together
    with `mainclass`, returning a populated TriggerCorrelation."""
    result = TriggerCorrelation(mainclass)
    axisMain = data.GetDimension(mainclass)
    # get the number of events
    proj = data.ProjectToDimension(axisMain, "h%s" %(mainclass))
    result.SetMainNumberOfEvents(proj.GetBinContent(2))
    # Restrict to events where the main class fired, then project every
    # other trigger axis to count coincidences.
    data.ApplyCut(axisMain, 1, 1)
    for cls in ["MinBias", "EMCJHigh", "EMCJLow", "EMCGHigh", "EMCGLow"]:
        if cls == mainclass:
            continue
        proj = data.ProjectToDimension(data.GetDimension(cls), "h%s" %(cls))
        result.AddTrigger(cls, proj.GetBinContent(2))
    data.Reset()
    return result
def MakeCorrelationPlot(filename, savePlot = False):
    """Draws one trigger-correlation histogram per trigger class on a
    3x2 divided canvas; optionally saves it to triggerCorrelation.png."""
    colors = [kBlack, kRed, kBlue, kGreen+2, kMagenta]
    trgclasses = ["MinBias", "EMCJHigh", "EMCJLow", "EMCGHigh", "EMCGLow"]
    data = ReadHistogram(filename)
    result = TCanvas("trgcorr", "Correlation between trigger classes", 1000, 700)
    result.Divide(3,2)
    for pad_id, trigger in enumerate(trgclasses, start=1):
        corr = CorrelateToTrigger(data, trigger)
        hist = corr.CreateHistogram(Style(colors[pad_id - 1], 20))
        result.cd(pad_id)
        gPad.SetGrid(False,False)
        gPad.SetLeftMargin(0.17)
        hist.Draw("box")
        # Side effect kept from the original (result was unused there too).
        title = gPad.FindObject("title")
        # print title.IsA().GetName()
        # title.SetTextSize(0.055)
        gObjects.append(hist)
    result.cd()
    gObjects.append(result)
    if savePlot:
        result.SaveAs("triggerCorrelation.png")
def main():
    """Entry point: expects the input ROOT file as the first CLI argument."""
    MakeCorrelationPlot(sys.argv[1], True)
# Run directly: python TriggerCorrelation.py <inputfile>
if __name__ == "__main__":
    main()
| 37.572917
| 144
| 0.665096
|
2c453a7df801c9d79eb4b374ce59afe1ec32a49e
| 1,441
|
py
|
Python
|
src/durus_importer.py
|
Oire/TheQube
|
fcfd8a68b15948e0740642d635db24adef8cc314
|
[
"MIT"
] | 21
|
2015-08-02T21:26:14.000Z
|
2019-12-27T09:57:44.000Z
|
src/durus_importer.py
|
Oire/TheQube
|
fcfd8a68b15948e0740642d635db24adef8cc314
|
[
"MIT"
] | 34
|
2015-01-12T00:38:14.000Z
|
2020-08-31T11:19:37.000Z
|
src/durus_importer.py
|
Oire/TheQube
|
fcfd8a68b15948e0740642d635db24adef8cc314
|
[
"MIT"
] | 15
|
2015-03-24T15:42:30.000Z
|
2020-09-24T20:26:42.000Z
|
import re
import paths
import sessions
import misc
import output
class SessionImporter(object):
    """Imports buffers from an old Durus session database file into the
    current (Twitter) session's storage."""

    def __init__(self, filename=None):
        self.session = sessions.current_session
        # Only Twitter sessions are supported.
        if self.session.type != 'Twitter':
            raise TypeError
        if filename:
            self.load_file(filename)

    def load_file(self, filename):
        """Opens the Durus file storage and keeps its root mapping."""
        output.speak(_("Loading file..."))
        FileStorage, Connection = misc.import_durus()
        self.file = FileStorage(paths.data_path(filename))
        self.connection = Connection(self.file)
        self.root = self.connection.root

    def do_import(self):
        """Imports every list-valued root entry as a buffer, then closes
        the storage."""
        output.speak("Initiating import...")
        for k in self.root:
            if type(self.root[k]) == list: #An old buffer that deserves buffership!
                self.import_buffer(k)
        self.file.close()
        del(self.connection)

    def import_buffer(self, buffername):
        """Copies the entries of one legacy buffer into session storage.

        NOTE(review): several apparent defects, flagged but left as-is:
        `zc` and `transaction` are never imported in this module;
        `transaction.commit` is referenced without being called (missing
        parentheses); and `self.session.storage[buffername]` is written
        before the surrounding dots are stripped from the name, so the
        later read may use a different key. Confirm intent before fixing.
        """
        output.speak(_("Attempting import of buffer %s") % buffername)
        buffer = self.session.get_buffer_by_name(buffername)
        if buffer is None:
            self.session.storage[buffername] = zc.blist.BList()
        old_buf = self.root[buffername]
        buffername = re.sub(r'^\.', r'', buffername)
        buffername = re.sub(r'\.$', r'', buffername)
        new_buf = self.session.storage[buffername]
        if buffer:
            old_buf = new_buf.find_new_data(old_buf.storage)
        output.speak("In progress...")
        for i in old_buf:
            new_buf.insert(0, i)
        transaction.commit
        output.speak("Done!")
class SessionImportError(Exception): pass
| 28.82
| 75
| 0.69882
|
5159ee4c27a4cdb6a52d676d1e851ef207216548
| 13,819
|
py
|
Python
|
dulwich/contrib/diffstat.py
|
dish59742/dulwich
|
d7f1331cba0ca959e8b701265b0c1547191a4726
|
[
"Apache-2.0"
] | 624
|
2018-01-25T02:40:53.000Z
|
2022-02-02T12:38:55.000Z
|
dulwich/contrib/diffstat.py
|
dish59742/dulwich
|
d7f1331cba0ca959e8b701265b0c1547191a4726
|
[
"Apache-2.0"
] | 312
|
2018-01-25T02:56:49.000Z
|
2022-02-02T22:06:29.000Z
|
dulwich/contrib/diffstat.py
|
dish59742/dulwich
|
d7f1331cba0ca959e8b701265b0c1547191a4726
|
[
"Apache-2.0"
] | 132
|
2018-01-25T22:54:22.000Z
|
2022-02-02T18:42:56.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Copyright (c) 2020 Kevin B. Hendricks, Stratford Ontario Canada
# All rights reserved.
#
# This diffstat code was extracted and heavily modified from:
#
# https://github.com/techtonik/python-patch
# Under the following license:
#
# Patch utility to apply unified diffs
# Brute-force line-by-line non-recursive parsing
#
# Copyright (c) 2008-2016 anatoly techtonik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import re
# only needs to detect git style diffs as this is for
# use with dulwich
_git_header_name = re.compile(br"diff --git a/(.*) b/(.*)")
_GIT_HEADER_START = b"diff --git a/"
_GIT_BINARY_START = b"Binary file"
_GIT_RENAMEFROM_START = b"rename from"
_GIT_RENAMETO_START = b"rename to"
_GIT_CHUNK_START = b"@@"
_GIT_ADDED_START = b"+"
_GIT_DELETED_START = b"-"
_GIT_UNCHANGED_START = b" "
# emulate original full Patch class by just extracting
# filename and minimal chunk added/deleted information to
# properly interface with diffstat routine
def _parse_patch(lines):
"""An internal routine to parse a git style diff or patch to generate
diff stats
Args:
lines: list of byte strings "lines" from the diff to be parsed
Returns: A tuple (names, nametypes, counts) of three lists:
names = list of repo relative file paths
nametypes - list of booolean values indicating if file
is binary (True means binary file)
counts = list of tuples of (added, deleted) counts for that file
"""
names = []
nametypes = []
counts = []
in_patch_chunk = in_git_header = binaryfile = False
currentfile = None
added = deleted = 0
for line in lines:
if line.startswith(_GIT_HEADER_START):
if currentfile is not None:
names.append(currentfile)
nametypes.append(binaryfile)
counts.append((added, deleted))
currentfile = _git_header_name.search(line).group(2)
binaryfile = False
added = deleted = 0
in_git_header = True
in_patch_chunk = False
elif line.startswith(_GIT_BINARY_START) and in_git_header:
binaryfile = True
in_git_header = False
elif line.startswith(_GIT_RENAMEFROM_START) and in_git_header:
currentfile = line[12:]
elif line.startswith(_GIT_RENAMETO_START) and in_git_header:
currentfile += b" => %s" % line[10:]
elif line.startswith(_GIT_CHUNK_START) and (in_patch_chunk or in_git_header):
in_patch_chunk = True
in_git_header = False
elif line.startswith(_GIT_ADDED_START) and in_patch_chunk:
added += 1
elif line.startswith(_GIT_DELETED_START) and in_patch_chunk:
deleted += 1
elif not line.startswith(_GIT_UNCHANGED_START) and in_patch_chunk:
in_patch_chunk = False
# handle end of input
if currentfile is not None:
names.append(currentfile)
nametypes.append(binaryfile)
counts.append((added, deleted))
return names, nametypes, counts
# note must all done using bytes not string because on linux filenames
# may not be encodable even to utf-8
def diffstat(lines, max_width=80):
"""Generate summary statistics from a git style diff ala
(git diff tag1 tag2 --stat)
Args:
lines: list of byte string "lines" from the diff to be parsed
max_width: maximum line length for generating the summary
statistics (default 80)
Returns: A byte string that lists the changed files with change
counts and histogram
"""
names, nametypes, counts = _parse_patch(lines)
insert = []
delete = []
namelen = 0
maxdiff = 0 # max changes for any file used for histogram width calc
for i, filename in enumerate(names):
i, d = counts[i]
insert.append(i)
delete.append(d)
namelen = max(namelen, len(filename))
maxdiff = max(maxdiff, i + d)
output = b""
statlen = len(str(maxdiff)) # stats column width
for i, n in enumerate(names):
binaryfile = nametypes[i]
# %-19s | %-4d %s
# note b'%d' % namelen is not supported until Python 3.5
# To convert an int to a format width specifier for byte
# strings use str(namelen).encode('ascii')
format = (
b" %-"
+ str(namelen).encode("ascii")
+ b"s | %"
+ str(statlen).encode("ascii")
+ b"s %s\n"
)
binformat = b" %-" + str(namelen).encode("ascii") + b"s | %s\n"
if not binaryfile:
hist = b""
# -- calculating histogram --
width = len(format % (b"", b"", b""))
histwidth = max(2, max_width - width)
if maxdiff < histwidth:
hist = b"+" * insert[i] + b"-" * delete[i]
else:
iratio = (float(insert[i]) / maxdiff) * histwidth
dratio = (float(delete[i]) / maxdiff) * histwidth
iwidth = dwidth = 0
# make sure every entry that had actual insertions gets
# at least one +
if insert[i] > 0:
iwidth = int(iratio)
if iwidth == 0 and 0 < iratio < 1:
iwidth = 1
# make sure every entry that had actual deletions gets
# at least one -
if delete[i] > 0:
dwidth = int(dratio)
if dwidth == 0 and 0 < dratio < 1:
dwidth = 1
hist = b"+" * int(iwidth) + b"-" * int(dwidth)
output += format % (
bytes(names[i]),
str(insert[i] + delete[i]).encode("ascii"),
hist,
)
else:
output += binformat % (bytes(names[i]), b"Bin")
output += b" %d files changed, %d insertions(+), %d deletions(-)" % (
len(names),
sum(insert),
sum(delete),
)
return output
def main():
    """Command-line entry point.

    With a path argument: reads that diff file, prints its diffstat and
    returns 0. With no arguments: runs the built-in self test and returns
    0 on success, -1 on failure.
    """
    argv = sys.argv
    # allow diffstat.py to also be used from the comand line
    if len(sys.argv) > 1:
        diffpath = argv[1]
        data = b""
        with open(diffpath, "rb") as f:
            data = f.read()
        lines = data.split(b"\n")
        result = diffstat(lines)
        print(result.decode("utf-8"))
        return 0
    # if no path argument to a diff file is passed in, run
    # a self test. The test case includes tricky things like
    # a diff of diff, binary files, renames with futher changes
    # added files and removed files.
    # All extracted from Sigil-Ebook/Sigil's github repo with
    # full permission to use under this license.
    selftest = b"""
diff --git a/docs/qt512.7_remove_bad_workaround.patch b/docs/qt512.7_remove_bad_workaround.patch
new file mode 100644
index 00000000..64e34192
--- /dev/null
+++ b/docs/qt512.7_remove_bad_workaround.patch
@@ -0,0 +1,15 @@
+--- qtbase/src/gui/kernel/qwindow.cpp.orig	2019-12-12 09:15:59.000000000 -0500
++++ qtbase/src/gui/kernel/qwindow.cpp	2020-01-10 10:36:53.000000000 -0500
+@@ -218,12 +218,6 @@
+     QGuiApplicationPrivate::window_list.removeAll(this);
+     if (!QGuiApplicationPrivate::is_app_closing)
+         QGuiApplicationPrivate::instance()->modalWindowList.removeOne(this);
+-
+-    // focus_window is normally cleared in destroy(), but the window may in
+-    // some cases end up becoming the focus window again. Clear it again
+-    // here as a workaround. See QTBUG-75326.
+-    if (QGuiApplicationPrivate::focus_window == this)
+-        QGuiApplicationPrivate::focus_window = 0;
+ }
+ 
+ void QWindowPrivate::init(QScreen *targetScreen)
diff --git a/docs/testplugin_v017.zip b/docs/testplugin_v017.zip
new file mode 100644
index 00000000..a4cf4c4c
Binary files /dev/null and b/docs/testplugin_v017.zip differ
diff --git a/ci_scripts/macgddeploy.py b/ci_scripts/gddeploy.py
similarity index 73%
rename from ci_scripts/macgddeploy.py
rename to ci_scripts/gddeploy.py
index a512d075..f9dacd33 100644
--- a/ci_scripts/macgddeploy.py
+++ b/ci_scripts/gddeploy.py
@@ -1,19 +1,32 @@
 #!/usr/bin/env python3
 
 import os
+import sys
 import subprocess
 import datetime
 import shutil
+import glob
 
 gparent = os.path.expandvars('$GDRIVE_DIR')
 grefresh_token = os.path.expandvars('$GDRIVE_REFRESH_TOKEN')
 
-travis_branch = os.path.expandvars('$TRAVIS_BRANCH')
-travis_commit = os.path.expandvars('$TRAVIS_COMMIT')
-travis_build_number = os.path.expandvars('$TRAVIS_BUILD_NUMBER')
+if sys.platform.lower().startswith('darwin'):
+    travis_branch = os.path.expandvars('$TRAVIS_BRANCH')
+    travis_commit = os.path.expandvars('$TRAVIS_COMMIT')
+    travis_build_number = os.path.expandvars('$TRAVIS_BUILD_NUMBER')
+
+    origfilename = './bin/Sigil.tar.xz'
+    newfilename = './bin/Sigil-{}-{}-build_num-{}.tar.xz'.format(travis_branch, travis_commit[:7],travis_build_numbe\
r)
+else:
+    appveyor_branch = os.path.expandvars('$APPVEYOR_REPO_BRANCH')
+    appveyor_commit = os.path.expandvars('$APPVEYOR_REPO_COMMIT')
+    appveyor_build_number = os.path.expandvars('$APPVEYOR_BUILD_NUMBER')
+    names = glob.glob('.\\installer\\Sigil-*-Setup.exe')
+    if not names:
+        exit(1)
+    origfilename = names[0]
+    newfilename = '.\\installer\\Sigil-{}-{}-build_num-{}-Setup.exe'.format(appveyor_branch, appveyor_commit[:7], ap\
pveyor_build_number)
 
-origfilename = './bin/Sigil.tar.xz'
-newfilename = './bin/Sigil-{}-{}-build_num-{}.tar.xz'.format(travis_branch, travis_commit[:7],travis_build_number)
 shutil.copy2(origfilename, newfilename)
 
 folder_name = datetime.date.today()
diff --git a/docs/qt512.6_backport_009abcd_fix.patch b/docs/qt512.6_backport_009abcd_fix.patch
deleted file mode 100644
index f4724347..00000000
--- a/docs/qt512.6_backport_009abcd_fix.patch
+++ /dev/null
@@ -1,26 +0,0 @@
---- qtbase/src/widgets/kernel/qwidget.cpp.orig	2019-11-08 10:57:07.000000000 -0500
-+++ qtbase/src/widgets/kernel/qwidget.cpp	2019-12-11 12:32:24.000000000 -0500
-@@ -8934,6 +8934,23 @@
-         }
-     }
-     switch (event->type()) {
-+    case QEvent::PlatformSurface: {
-+        // Sync up QWidget's view of whether or not the widget has been created
-+        switch (static_cast<QPlatformSurfaceEvent*>(event)->surfaceEventType()) {
-+        case QPlatformSurfaceEvent::SurfaceCreated:
-+            if (!testAttribute(Qt::WA_WState_Created))
-+                create();
-+            break;
-+        case QPlatformSurfaceEvent::SurfaceAboutToBeDestroyed:
-+            if (testAttribute(Qt::WA_WState_Created)) {
-+                // Child windows have already been destroyed by QWindow,
-+                // so we skip them here.
-+                destroy(false, false);
-+            }
-+            break;
-+        }
-+        break;
-+    }
-     case QEvent::MouseMove:
-         mouseMoveEvent((QMouseEvent*)event);
-         break;
diff --git a/docs/Building_Sigil_On_MacOSX.txt b/docs/Building_Sigil_On_MacOSX.txt
index 3b41fd80..64914c78 100644
--- a/docs/Building_Sigil_On_MacOSX.txt
+++ b/docs/Building_Sigil_On_MacOSX.txt
@@ -113,7 +113,7 @@ install_name_tool -add_rpath @loader_path/../../Frameworks ./bin/Sigil.app/Content
 
 # To test if the newly bundled python 3 version of Sigil is working properly ypou can do the following:
 
-1. download testplugin_v014.zip from https://github.com/Sigil-Ebook/Sigil/tree/master/docs
+1. download testplugin_v017.zip from https://github.com/Sigil-Ebook/Sigil/tree/master/docs
 2. open Sigil.app to the normal nearly blank template epub it generates when opened
 3. use Plugins->Manage Plugins menu and make sure the "Use Bundled Python" checkbox is checked
 4. use the "Add Plugin" button to navigate to and add testplugin.zip and then hit "Okay" to exit the Manage Plugins Dialog
""" # noqa: E501 W293
    testoutput = b""" docs/qt512.7_remove_bad_workaround.patch            | 15 ++++++++++++
 docs/testplugin_v017.zip                            | Bin
 ci_scripts/macgddeploy.py => ci_scripts/gddeploy.py |  0
 docs/qt512.6_backport_009abcd_fix.patch             | 26 ---------------------
 docs/Building_Sigil_On_MacOSX.txt                   |  2 +-
 5 files changed, 16 insertions(+), 27 deletions(-)""" # noqa: W291
    # return 0 on success otherwise return -1
    result = diffstat(selftest.split(b"\n"))
    if result == testoutput:
        print("self test passed")
        return 0
    print("self test failed")
    print("Received:")
    print(result.decode("utf-8"))
    print("Expected:")
    print(testoutput.decode("utf-8"))
    return -1
if __name__ == "__main__":
sys.exit(main())
| 39.482857
| 123
| 0.64585
|
35812ed6972c3be13a09cbe7488c2d0cf986f213
| 700
|
py
|
Python
|
bgx/families/smart_bgt_python/smart_bgt/__init__.py
|
DGT-Network/DGT-Kawartha-1.0
|
dfe177f1a10d260949075f96422df34620d0bd54
|
[
"Apache-2.0"
] | 2
|
2018-09-27T04:43:33.000Z
|
2019-10-23T14:32:31.000Z
|
bgx/families/smart_bgt_python/smart_bgt/__init__.py
|
DGT-Network/DGT-Kawartha-1.0
|
dfe177f1a10d260949075f96422df34620d0bd54
|
[
"Apache-2.0"
] | 10
|
2020-05-12T06:58:15.000Z
|
2022-02-26T23:59:35.000Z
|
bgx/families/smart_bgt_python/smart_bgt/__init__.py
|
DGT-Network/DGT-Mississauga
|
52b5f1f4015db2aa7196e727a25b399de5fbf3c3
|
[
"Apache-2.0"
] | 1
|
2021-03-09T03:36:36.000Z
|
2021-03-09T03:36:36.000Z
|
# Copyright 2018 NTRlab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
# Public submodules exported by this package (used by `from ... import *`).
__all__ = [
    'client_cli',
    'processor'
]
| 35
| 80
| 0.658571
|
3a2351db69cdf81ecfdda939928a1ee02f370d2c
| 6,625
|
py
|
Python
|
python/seldon_core/storage.py
|
SandhyaaGopchandani/seldon-core
|
00360bd7c90e85da980730c34e55318997907d44
|
[
"Apache-2.0"
] | null | null | null |
python/seldon_core/storage.py
|
SandhyaaGopchandani/seldon-core
|
00360bd7c90e85da980730c34e55318997907d44
|
[
"Apache-2.0"
] | null | null | null |
python/seldon_core/storage.py
|
SandhyaaGopchandani/seldon-core
|
00360bd7c90e85da980730c34e55318997907d44
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copied from kfserving project as starter.
#
import glob
import logging
import tempfile
import os
import re
from urllib.parse import urlparse
from azure.storage.blob import BlockBlobService
from google.auth import exceptions
from google.cloud import storage
from minio import Minio
# URI prefixes / URL pattern used by Storage.download() to pick a backend.
_GCS_PREFIX = "gs://"
_S3_PREFIX = "s3://"
_BLOB_RE = "https://(.+?).blob.core.windows.net/(.+)"
_LOCAL_PREFIX = "file://"


class Storage(object):  # pylint: disable=too-few-public-methods
    """Downloads model artifacts from GCS, S3, Azure Blob or the local FS."""

    @staticmethod
    def download(uri: str, out_dir: str = None) -> str:
        """Copy the contents of ``uri`` into a local directory.

        :param uri: source location; supports ``gs://``, ``s3://``, Azure
            blob URLs, ``file://`` paths and plain local paths.
        :param out_dir: destination directory; when None a temp dir is
            created, except for local sources which are returned in place.
        :return: local directory (or path, for local sources) with the data.
        :raises Exception: when the URI scheme is not recognized.
        """
        logging.info("Copying contents of %s to local", uri)
        is_local = False
        if uri.startswith(_LOCAL_PREFIX) or os.path.exists(uri):
            is_local = True
        if out_dir is None:
            if is_local:
                # noop if out_dir is not set and the path is local
                return Storage._download_local(uri)
            out_dir = tempfile.mkdtemp()
        if uri.startswith(_GCS_PREFIX):
            Storage._download_gcs(uri, out_dir)
        elif uri.startswith(_S3_PREFIX):
            Storage._download_s3(uri, out_dir)
        elif re.search(_BLOB_RE, uri):
            Storage._download_blob(uri, out_dir)
        elif is_local:
            return Storage._download_local(uri, out_dir)
        else:
            raise Exception("Cannot recognize storage type for " + uri +
                            "\n'%s', '%s', and '%s' are the current available storage type." %
                            (_GCS_PREFIX, _S3_PREFIX, _LOCAL_PREFIX))
        logging.info("Successfully copied %s to %s", uri, out_dir)
        return out_dir

    @staticmethod
    def _download_s3(uri, temp_dir: str):
        """Mirror an ``s3://bucket/prefix`` tree into ``temp_dir`` via MinIO."""
        client = Storage._create_minio_client()
        bucket_args = uri.replace(_S3_PREFIX, "", 1).split("/", 1)
        bucket_name = bucket_args[0]
        bucket_path = bucket_args[1] if len(bucket_args) > 1 else ""
        objects = client.list_objects(bucket_name, prefix=bucket_path, recursive=True)
        for obj in objects:
            # Replace any prefix from the object key with temp_dir
            subdir_object_key = obj.object_name.replace(bucket_path, "", 1).strip("/")
            # fget_object handles directory creation if does not exist
            if not obj.is_dir:
                if subdir_object_key == "":
                    subdir_object_key = obj.object_name
                client.fget_object(bucket_name, obj.object_name,
                                   os.path.join(temp_dir, subdir_object_key))

    @staticmethod
    def _download_gcs(uri, temp_dir: str):
        """Mirror a ``gs://bucket/prefix`` tree into ``temp_dir``.

        Falls back to an anonymous client when no default credentials exist,
        so public buckets still work.
        """
        try:
            storage_client = storage.Client()
        except exceptions.DefaultCredentialsError:
            storage_client = storage.Client.create_anonymous_client()
        bucket_args = uri.replace(_GCS_PREFIX, "", 1).split("/", 1)
        bucket_name = bucket_args[0]
        bucket_path = bucket_args[1] if len(bucket_args) > 1 else ""
        bucket = storage_client.bucket(bucket_name)
        prefix = bucket_path
        if not prefix.endswith("/"):
            prefix = prefix + "/"
        blobs = bucket.list_blobs(prefix=prefix)
        for blob in blobs:
            # Replace any prefix from the object key with temp_dir
            subdir_object_key = blob.name.replace(bucket_path, "", 1).strip("/")
            # Create necessary subdirectory to store the object locally
            if "/" in subdir_object_key:
                local_object_dir = os.path.join(temp_dir, subdir_object_key.rsplit("/", 1)[0])
                if not os.path.isdir(local_object_dir):
                    os.makedirs(local_object_dir, exist_ok=True)
            if subdir_object_key.strip() != "":
                dest_path = os.path.join(temp_dir, subdir_object_key)
                logging.info("Downloading: %s", dest_path)
                blob.download_to_filename(dest_path)

    @staticmethod
    def _download_blob(uri, out_dir: str):
        """Mirror an Azure Blob container prefix into ``out_dir``.

        ``uri`` must match ``_BLOB_RE`` (https://<account>.blob.core.windows.net/...).
        """
        match = re.search(_BLOB_RE, uri)
        account_name = match.group(1)
        storage_url = match.group(2)
        container_name, prefix = storage_url.split("/", 1)
        # Typo fix: "contianer" -> "container" in the log message.
        logging.info("Connecting to BLOB account: %s, container: %s", account_name, container_name)
        block_blob_service = BlockBlobService(account_name=account_name)
        blobs = block_blob_service.list_blobs(container_name, prefix=prefix)
        for blob in blobs:
            if "/" in blob.name:
                head, _ = os.path.split(blob.name)
                dir_path = os.path.join(out_dir, head)
                if not os.path.isdir(dir_path):
                    # exist_ok guards against races / repeated downloads,
                    # matching _download_gcs behavior.
                    os.makedirs(dir_path, exist_ok=True)
            dest_path = os.path.join(out_dir, blob.name)
            logging.info("Downloading: %s", dest_path)
            block_blob_service.get_blob_to_path(container_name, blob.name, dest_path)

    @staticmethod
    def _download_local(uri, out_dir=None):
        """Resolve a local / ``file://`` source.

        Returns the source path itself when ``out_dir`` is None; otherwise
        symlinks the file (or the directory's entries) into ``out_dir`` and
        returns ``out_dir``.
        """
        local_path = uri.replace(_LOCAL_PREFIX, "", 1)
        if not os.path.exists(local_path):
            raise Exception("Local path %s does not exist." % (uri))
        if out_dir is None:
            return local_path
        elif not os.path.isdir(out_dir):
            os.makedirs(out_dir, exist_ok=True)
        if os.path.isdir(local_path):
            local_path = os.path.join(local_path, "*")
        for src in glob.glob(local_path):
            _, tail = os.path.split(src)
            dest_path = os.path.join(out_dir, tail)
            logging.info("Linking: %s to %s", src, dest_path)
            os.symlink(src, dest_path)
        return out_dir

    @staticmethod
    def _create_minio_client():
        """Build a Minio client from S3_ENDPOINT / AWS_* environment vars."""
        # Remove possible http scheme for Minio
        url = urlparse(os.getenv("S3_ENDPOINT", ""))
        if url.scheme:
            use_ssl = url.scheme == 'https'
        else:
            # BUG FIX: the original used bool(os.getenv("USE_SSL", True)),
            # but bool("false") is True, so ANY non-empty USE_SSL value forced
            # SSL on.  Parse the env string explicitly; default remains SSL on.
            use_ssl = os.getenv("USE_SSL", "true").strip().lower() in ("1", "true", "yes")
        return Minio(url.netloc,
                     access_key=os.getenv("AWS_ACCESS_KEY_ID", ""),
                     secret_key=os.getenv("AWS_SECRET_ACCESS_KEY", ""),
                     secure=use_ssl)
| 40.151515
| 99
| 0.622943
|
a00a1981999d928605ba042a4ce1d539137e62a0
| 37
|
py
|
Python
|
Part_2_intermediate/mod_1/lesson_7/ex_5_relative_import_main/estudent_book/grades/promotion_status.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
Part_2_intermediate/mod_1/lesson_7/ex_5_relative_import_main/estudent_book/grades/promotion_status.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
Part_2_intermediate/mod_1/lesson_7/ex_5_relative_import_main/estudent_book/grades/promotion_status.py
|
Mikma03/InfoShareacademy_Python_Courses
|
3df1008c8c92831bebf1625f960f25b39d6987e6
|
[
"MIT"
] | null | null | null |
# Promotion outcome labels used by the grade-book package.
FAILED = "FAILED"
PASSED = "PASSED"
| 9.25
| 17
| 0.648649
|
a1f1c76cef88930f2b45de8f247485be9d98cb83
| 5,600
|
py
|
Python
|
docker/sane-doc-reports/src/sane_doc_reports/elements/table.py
|
glicht/dockerfiles
|
7dff92792a7ec6ac1e04950a41927867af5f147c
|
[
"MIT"
] | 1
|
2020-06-16T16:32:42.000Z
|
2020-06-16T16:32:42.000Z
|
docker/sane-doc-reports/src/sane_doc_reports/elements/table.py
|
glicht/dockerfiles
|
7dff92792a7ec6ac1e04950a41927867af5f147c
|
[
"MIT"
] | 25
|
2018-12-24T22:40:45.000Z
|
2021-06-25T15:26:39.000Z
|
docker/sane-doc-reports/src/sane_doc_reports/elements/table.py
|
glicht/dockerfiles
|
7dff92792a7ec6ac1e04950a41927867af5f147c
|
[
"MIT"
] | 1
|
2019-02-06T06:39:57.000Z
|
2019-02-06T06:39:57.000Z
|
from sane_doc_reports.domain.CellObject import CellObject
from sane_doc_reports.domain.Element import Element
from sane_doc_reports.conf import DEBUG, PYDOCX_FONT_SIZE, \
DEFAULT_TABLE_FONT_SIZE, DEFAULT_TABLE_STYLE, PYDOCX_FONT_NAME, \
PYDOCX_FONT_COLOR, DEFAULT_FONT_COLOR, DEFAULT_TITLE_FONT_SIZE, \
PYDOCX_FONT_BOLD, DEFAULT_TITLE_COLOR
from sane_doc_reports.domain.Section import Section
from sane_doc_reports.elements import error, image
from sane_doc_reports.populate.utils import insert_text
from sane_doc_reports.utils import get_chart_font
def fix_order(ordered, readable_headers) -> list:
    """Return the display header names arranged in the requested order.

    ``ordered`` is either a list of raw keys (old JSON format) or a list of
    ``{'key': ...}`` dicts (new format); ``readable_headers`` maps raw keys
    to their human-readable display names.
    """
    display_names = readable_headers.values()
    # Map every spelling a raw key might use back to its display name:
    # first-letter-lowercased, fully lowercased (fully-lowercased wins on
    # collision), all with spaces stripped.
    lookup = {name[0].lower() + name[1:]: name for name in display_names}
    lookup.update({name.lower(): name for name in display_names})
    lookup = {key.replace(" ", ""): value for key, value in lookup.items()}
    # Old json format table columns are not lowercase
    lookup.update({name: name for name in display_names})

    # New format: entries are dicts carrying the raw key; dedupe while
    # preserving order.
    if any(isinstance(entry, dict) for entry in ordered):
        result = []
        for entry in ordered:
            raw_key = entry.get('key')
            name = readable_headers.get(raw_key, raw_key)
            if name not in result:
                result.append(name)
        return result

    result = []
    for entry in ordered:
        if isinstance(entry, str):
            result.append(lookup[entry])
    return result
class TableElement(Element):
    """Renders a section of type 'table' into a docx cell.

    Supports three content shapes:
      * a dict with a ``data`` key (unwrapped first),
      * a dict of {wrapper header -> nested table contents} (new list
        format), rendered as one sub-table per wrapper column,
      * a list of row dicts keyed by the layout's ``tableColumns``.
    """

    # Font styles for body text and the optional title row; resolved once
    # at class-creation time via get_chart_font().
    style = {
        'text': {
            PYDOCX_FONT_SIZE: DEFAULT_TABLE_FONT_SIZE,
            PYDOCX_FONT_NAME: get_chart_font(),
            PYDOCX_FONT_COLOR: DEFAULT_FONT_COLOR,
            PYDOCX_FONT_BOLD: False,
        },
        'title': {
            PYDOCX_FONT_NAME: get_chart_font(),
            PYDOCX_FONT_COLOR: DEFAULT_TITLE_COLOR,
            PYDOCX_FONT_SIZE: DEFAULT_TITLE_FONT_SIZE,
            PYDOCX_FONT_BOLD: False,
        }
    }

    def insert(self):
        """Build the docx table inside this element's cell."""
        if DEBUG:
            print("Adding table...")
        table_data = self.section.contents
        # Without a column layout there is nothing to render.
        if 'tableColumns' not in self.section.layout:
            return
        if isinstance(table_data, dict):
            table_data = table_data.get('data', table_data)
        # New list format: still a dict after unwrapping -> one nested
        # table per wrapper header.
        if isinstance(table_data, dict):
            wrapper_table = self.cell_object.cell.add_table(rows=2, cols=len(
                table_data.keys()))
            # Add the wrapping headers, then recurse into each column body.
            for i, (wrapper_header, table_contents) in \
                    enumerate(table_data.items()):
                hdr = wrapper_table.cell(0, i)
                insert_text(hdr, wrapper_header,
                            self.style['title'])
                body = wrapper_table.cell(1, i)
                c = CellObject(body)
                # Hacky but will do the job
                invoke(c, Section('table', table_contents, {}, {}))
            return
        if 'readableHeaders' in self.section.layout:
            ordered = self.section.layout['tableColumns']
            readable_headers = self.section.layout['readableHeaders']
            table_columns = fix_order(ordered, readable_headers)
        else:
            table_columns = self.section.layout['tableColumns']
        # Quick fix, word crashes on more than 64 columns.
        table_columns = table_columns[0:63]
        # BUG FIX: the original removed items from table_columns while
        # iterating it, which skips the element following each removal
        # (consecutive non-string headers survived).  Filter instead.
        table_columns = [h for h in table_columns if isinstance(h, str)]
        if 'title' in self.section.extra:
            table = self.cell_object.cell.add_table(rows=2,
                                                    cols=len(table_columns))
            # Title row spans the full width; headers go on the second row.
            title = table.cell(0, 0)
            title.merge(table.cell(0, len(table_columns) - 1))
            insert_text(title, self.section.extra['title'], self.style['title'])
            hdr_cells = table.rows[1].cells
        else:
            table = self.cell_object.cell.add_table(rows=2,
                                                    cols=len(table_columns))
            hdr_cells = table.rows[0].cells
        table.style = DEFAULT_TABLE_STYLE
        if 'list_style' in self.section.extra and self.section.extra[
                'list_style']:
            table.style = None
        for i, header_text in enumerate(table_columns):
            insert_text(hdr_cells[i], header_text, self.style['text'])
        if len(table_columns) > 63:
            # NOTE: unreachable after the [0:63] slice above; kept so the
            # original TODO isn't lost.
            # TODO: add error.
            pass
        for r in table_data:
            row_cells = table.add_row().cells
            for i, header_text in enumerate(table_columns):
                if header_text not in r:
                    continue
                # Old json format can have 'Avatars', which are images
                if isinstance(r[header_text], dict) and \
                        r[header_text]['type'] == 'image':
                    row_temp = r[header_text]
                    s = Section(row_temp['type'], row_temp['data'], {}, {})
                    co = CellObject(row_cells[i], add_run=False)
                    image.invoke(co, s)
                else:
                    insert_text(row_cells[i], r[header_text],
                                self.style['text'])
def invoke(cell_object, section):
    """Entry point: render a 'table' section into the given cell.

    Sections of any other type are turned into an error element.
    """
    if section.type == 'table':
        TableElement(cell_object, section).insert()
        return
    section.contents = f'Called table but not table - [{section}]'
    return error.invoke(cell_object, section)
| 36.363636
| 80
| 0.586607
|
2dc1f58b1c0ee4fb9d501c0e68e13f9e6f558f62
| 1,081
|
py
|
Python
|
ovl/target_filters/shape_filters/rotated_rectangle_filter.py
|
ofekashery/Ovl-Python
|
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
|
[
"Apache-2.0"
] | 1
|
2020-10-11T16:14:46.000Z
|
2020-10-11T16:14:46.000Z
|
ovl/target_filters/shape_filters/rotated_rectangle_filter.py
|
ofekashery/Ovl-Python
|
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
|
[
"Apache-2.0"
] | 1
|
2020-10-18T05:00:06.000Z
|
2020-12-24T20:03:44.000Z
|
ovl/target_filters/shape_filters/rotated_rectangle_filter.py
|
ofekashery/Ovl-Python
|
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
|
[
"Apache-2.0"
] | null | null | null |
import cv2
from ..contour_filter import contour_filter
from ...math.shape_fill_ratios import rotating_rectangle_fill_ratio
from ...helpers.types import RangedNumber
@contour_filter
def rotated_rectangle_filter(contour_list, min_area_ratio: RangedNumber(0, 1) = 0.8):
    """
    Keep only contours that are approximately rectangular, at any rotation.

    A contour passes when its polygonal approximation has exactly four
    vertices and it fills at least ``min_area_ratio`` of its minimum-area
    (rotated) bounding rectangle.

    :param contour_list: List of Contours to filter
    :param min_area_ratio: the minimum ratio between the contour area and
        the bounding shape
    :return: the filtered contours together with their fill ratios.
    """
    kept_contours = []
    kept_ratios = []
    for contour in contour_list:
        fill_ratio, _, _ = rotating_rectangle_fill_ratio(contour)
        perimeter = cv2.arcLength(contour, True)
        approximation = cv2.approxPolyDP(contour, 0.02 * perimeter, True)
        is_quad = len(approximation) == 4
        if is_quad and fill_ratio > min_area_ratio:
            kept_ratios.append(fill_ratio)
            kept_contours.append(contour)
    return kept_contours, kept_ratios
| 38.607143
| 100
| 0.736355
|
8900014d6fcb2a6062353605c7143cbc9a9cdae1
| 34,131
|
py
|
Python
|
samples/client/petstore/python-experimental/petstore_api/api/user_api.py
|
0x4a616e/openapi-generator
|
99f522cdd367471e5057895c339f5f1804290e8a
|
[
"Apache-2.0"
] | 1
|
2018-07-12T10:56:05.000Z
|
2018-07-12T10:56:05.000Z
|
samples/client/petstore/python-experimental/petstore_api/api/user_api.py
|
0x4a616e/openapi-generator
|
99f522cdd367471e5057895c339f5f1804290e8a
|
[
"Apache-2.0"
] | null | null | null |
samples/client/petstore/python-experimental/petstore_api/api/user_api.py
|
0x4a616e/openapi-generator
|
99f522cdd367471e5057895c339f5f1804290e8a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
# python 2 and python 3 compatibility library
import six
from petstore_api.api_client import ApiClient, Endpoint
from petstore_api.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
int,
none_type,
str,
validate_and_convert_types
)
from petstore_api.model import user
class UserApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_user(
self,
body,
**kwargs
):
"""Create user # noqa: E501
This can only be done by the logged in user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_user(body, async_req=True)
>>> result = thread.get()
Args:
body (user.User): Created user object
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.create_user = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user',
'operation_id': 'create_user',
'http_method': 'POST',
'servers': [],
},
params_map={
'all': [
'body',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
(user.User,),
},
'attribute_map': {
},
'location_map': {
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__create_user
)
def __create_users_with_array_input(
self,
body,
**kwargs
):
"""Creates list of users with given input array # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_users_with_array_input(body, async_req=True)
>>> result = thread.get()
Args:
body ([user.User]): List of user object
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.create_users_with_array_input = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user/createWithArray',
'operation_id': 'create_users_with_array_input',
'http_method': 'POST',
'servers': [],
},
params_map={
'all': [
'body',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
([user.User],),
},
'attribute_map': {
},
'location_map': {
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__create_users_with_array_input
)
def __create_users_with_list_input(
self,
body,
**kwargs
):
"""Creates list of users with given input array # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_users_with_list_input(body, async_req=True)
>>> result = thread.get()
Args:
body ([user.User]): List of user object
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.create_users_with_list_input = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user/createWithList',
'operation_id': 'create_users_with_list_input',
'http_method': 'POST',
'servers': [],
},
params_map={
'all': [
'body',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
([user.User],),
},
'attribute_map': {
},
'location_map': {
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__create_users_with_list_input
)
def __delete_user(
self,
username,
**kwargs
):
"""Delete user # noqa: E501
This can only be done by the logged in user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_user(username, async_req=True)
>>> result = thread.get()
Args:
username (str): The name that needs to be deleted
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['username'] = \
username
return self.call_with_http_info(**kwargs)
self.delete_user = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user/{username}',
'operation_id': 'delete_user',
'http_method': 'DELETE',
'servers': [],
},
params_map={
'all': [
'username',
],
'required': [
'username',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'username':
(str,),
},
'attribute_map': {
'username': 'username',
},
'location_map': {
'username': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__delete_user
)
def __get_user_by_name(
self,
username,
**kwargs
):
"""Get user by user name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_by_name(username, async_req=True)
>>> result = thread.get()
Args:
username (str): The name that needs to be fetched. Use user1 for testing.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
user.User
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['username'] = \
username
return self.call_with_http_info(**kwargs)
self.get_user_by_name = Endpoint(
settings={
'response_type': (user.User,),
'auth': [],
'endpoint_path': '/user/{username}',
'operation_id': 'get_user_by_name',
'http_method': 'GET',
'servers': [],
},
params_map={
'all': [
'username',
],
'required': [
'username',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'username':
(str,),
},
'attribute_map': {
'username': 'username',
},
'location_map': {
'username': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/xml',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_user_by_name
)
def __login_user(
self,
username,
password,
**kwargs
):
"""Logs user into the system # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.login_user(username, password, async_req=True)
>>> result = thread.get()
Args:
username (str): The user name for login
password (str): The password for login in clear text
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int): specifies the index of the server
that we want to use.
Default is 0.
async_req (bool): execute request asynchronously
Returns:
str
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index', 0)
kwargs['username'] = \
username
kwargs['password'] = \
password
return self.call_with_http_info(**kwargs)
self.login_user = Endpoint(
settings={
'response_type': (str,),
'auth': [],
'endpoint_path': '/user/login',
'operation_id': 'login_user',
'http_method': 'GET',
'servers': [],
},
params_map={
'all': [
'username',
'password',
],
'required': [
'username',
'password',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'username':
(str,),
'password':
(str,),
},
'attribute_map': {
'username': 'username',
'password': 'password',
},
'location_map': {
'username': 'query',
'password': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/xml',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__login_user
)
def __logout_user(
    self,
    **kwargs
):
    """Logs out current logged in user session  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.logout_user(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): return only the response data. Default True.
        _preload_content (bool): if False, return the raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout, or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int): index of the server to use. Default 0.
        async_req (bool): execute request asynchronously

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Fill in the standard per-request options without clobbering any
    # value the caller already supplied.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', 0),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
self.logout_user = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user/logout',
'operation_id': 'logout_user',
'http_method': 'GET',
'servers': [],
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__logout_user
)
def __update_user(
    self,
    username,
    body,
    **kwargs
):
    """Updated user  # noqa: E501

    This can only be done by the logged in user.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_user(username, body, async_req=True)
    >>> result = thread.get()

    Args:
        username (str): name that need to be deleted
        body (user.User): Updated user object

    Keyword Args:
        _return_http_data_only (bool): return only the response data. Default True.
        _preload_content (bool): if False, return the raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout, or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int): index of the server to use. Default 0.
        async_req (bool): execute request asynchronously

    Returns:
        None, or the request thread when called asynchronously.
    """
    # Standard per-request options, caller-supplied values take precedence.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', 0),
    ):
        kwargs[option] = kwargs.get(option, default)
    # Required parameters travel through kwargs to the shared dispatcher.
    kwargs['username'] = username
    kwargs['body'] = body
    return self.call_with_http_info(**kwargs)
self.update_user = Endpoint(
settings={
'response_type': None,
'auth': [],
'endpoint_path': '/user/{username}',
'operation_id': 'update_user',
'http_method': 'PUT',
'servers': [],
},
params_map={
'all': [
'username',
'body',
],
'required': [
'username',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'username':
(str,),
'body':
(user.User,),
},
'attribute_map': {
'username': 'username',
},
'location_map': {
'username': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__update_user
)
| 35.114198
| 174
| 0.450646
|
c6cde9be589afb2c5b91ac3d37ca38e2ff9adb29
| 13,906
|
py
|
Python
|
python/cudf/cudf/tests/test_replace.py
|
williamBlazing/cudf
|
072785e24fd59b6f4eeaad3b54592a8c803ee96b
|
[
"Apache-2.0"
] | 2
|
2019-12-25T14:20:17.000Z
|
2019-12-25T14:33:02.000Z
|
python/cudf/cudf/tests/test_replace.py
|
CZZLEGEND/cudf
|
5d2465d6738d00628673fffdc1fac51fad7ef9a7
|
[
"Apache-2.0"
] | null | null | null |
python/cudf/cudf/tests/test_replace.py
|
CZZLEGEND/cudf
|
5d2465d6738d00628673fffdc1fac51fad7ef9a7
|
[
"Apache-2.0"
] | 1
|
2021-10-03T20:29:10.000Z
|
2021-10-03T20:29:10.000Z
|
import numpy as np
import pandas as pd
import pytest
from cudf.core import DataFrame, Series
from cudf.tests.utils import assert_eq
def test_series_replace():
    """Series.replace with scalar, categorical, list, Series, and null-heavy inputs."""
    a1 = np.array([0, 1, 2, 3, 4])
    # Numerical
    a2 = np.array([5, 1, 2, 3, 4])
    sr1 = Series(a1)
    sr2 = sr1.replace(0, 5)
    np.testing.assert_equal(sr2.to_array(), a2)
    # Categorical
    psr3 = pd.Series(["one", "two", "three"], dtype="category")
    psr4 = psr3.replace("one", "two")
    sr3 = Series.from_pandas(psr3)
    sr4 = sr3.replace("one", "two")
    pd.testing.assert_series_equal(sr4.to_pandas(), psr4)
    # List input
    a6 = np.array([5, 6, 2, 3, 4])
    sr6 = sr1.replace([0, 1], [5, 6])
    np.testing.assert_equal(sr6.to_array(), a6)
    # Float replacements into an int column are not safely castable -> TypeError.
    with pytest.raises(TypeError):
        sr1.replace([0, 1], [5.5, 6.5])
    # Series input
    a8 = np.array([5, 5, 5, 3, 4])
    sr8 = sr1.replace(sr1[:3], 5)
    np.testing.assert_equal(sr8.to_array(), a8)
    # large input containing null
    sr9 = Series(list(range(400)) + [None])
    sr10 = sr9.replace([22, 323, 27, 0], None)
    assert sr10.null_count == 5
    assert len(sr10.to_array()) == (401 - 5)
    sr11 = sr9.replace([22, 323, 27, 0], -1)
    assert sr11.null_count == 1
    assert len(sr11.to_array()) == (401 - 1)
    # large input not containing nulls (reuse sr9 with nulls filled)
    sr9 = sr9.fillna(-11)
    sr12 = sr9.replace([22, 323, 27, 0], None)
    assert sr12.null_count == 4
    assert len(sr12.to_array()) == (401 - 4)
    sr13 = sr9.replace([22, 323, 27, 0], -1)
    assert sr13.null_count == 0
    assert len(sr13.to_array()) == 401
def test_series_replace_with_nulls():
    """Series.replace with None as the replacement value, combined with fillna."""
    a1 = np.array([0, 1, 2, 3, 4])
    # Numerical
    a2 = np.array([-10, 1, 2, 3, 4])
    sr1 = Series(a1)
    sr2 = sr1.replace(0, None).fillna(-10)
    np.testing.assert_equal(sr2.to_array(), a2)
    # List input
    a6 = np.array([-10, 6, 2, 3, 4])
    sr6 = sr1.replace([0, 1], [None, 6]).fillna(-10)
    np.testing.assert_equal(sr6.to_array(), a6)
    sr1 = Series([0, 1, 2, 3, 4, None])
    # Float replacements into an int column must raise even with a null present.
    with pytest.raises(TypeError):
        sr1.replace([0, 1], [5.5, 6.5]).fillna(-10)
    # Series input
    a8 = np.array([-10, -10, -10, 3, 4, -10])
    sr8 = sr1.replace(sr1[:3], None).fillna(-10)
    np.testing.assert_equal(sr8.to_array(), a8)
    a9 = np.array([-10, 6, 2, 3, 4, -10])
    sr9 = sr1.replace([0, 1], [None, 6]).fillna(-10)
    np.testing.assert_equal(sr9.to_array(), a9)
def test_dataframe_replace():
    """DataFrame.replace with scalar, categorical, list, and dict arguments."""
    # numerical
    pdf1 = pd.DataFrame({"a": [0, 1, 2, 3], "b": [0, 1, 2, 3]})
    gdf1 = DataFrame.from_pandas(pdf1)
    pdf2 = pdf1.replace(0, 4)
    gdf2 = gdf1.replace(0, 4)
    pd.testing.assert_frame_equal(gdf2.to_pandas(), pdf2)
    # categorical
    pdf4 = pd.DataFrame(
        {"a": ["one", "two", "three"], "b": ["one", "two", "three"]},
        dtype="category",
    )
    gdf4 = DataFrame.from_pandas(pdf4)
    pdf5 = pdf4.replace("two", "three")
    gdf5 = gdf4.replace("two", "three")
    pd.testing.assert_frame_equal(gdf5.to_pandas(), pdf5)
    # list input
    pdf6 = pdf1.replace([0, 1], [4, 5])
    gdf6 = gdf1.replace([0, 1], [4, 5])
    pd.testing.assert_frame_equal(gdf6.to_pandas(), pdf6)
    pdf7 = pdf1.replace([0, 1], 4)
    gdf7 = gdf1.replace([0, 1], 4)
    pd.testing.assert_frame_equal(gdf7.to_pandas(), pdf7)
    # dict input:
    pdf8 = pdf1.replace({"a": 0, "b": 0}, {"a": 4, "b": 5})
    gdf8 = gdf1.replace({"a": 0, "b": 0}, {"a": 4, "b": 5})
    pd.testing.assert_frame_equal(gdf8.to_pandas(), pdf8)
    pdf9 = pdf1.replace({"a": 0}, {"a": 4})
    gdf9 = gdf1.replace({"a": 0}, {"a": 4})
    pd.testing.assert_frame_equal(gdf9.to_pandas(), pdf9)
def test_dataframe_replace_with_nulls():
    """DataFrame.replace with None replacements, verified against pandas via fillna."""
    # numerical
    pdf1 = pd.DataFrame({"a": [0, 1, 2, 3], "b": [0, 1, 2, 3]})
    gdf1 = DataFrame.from_pandas(pdf1)
    pdf2 = pdf1.replace(0, 4)
    gdf2 = gdf1.replace(0, None).fillna(4)
    pd.testing.assert_frame_equal(gdf2.to_pandas(), pdf2)
    # list input
    pdf6 = pdf1.replace([0, 1], [4, 5])
    gdf6 = gdf1.replace([0, 1], [4, None]).fillna(5)
    pd.testing.assert_frame_equal(gdf6.to_pandas(), pdf6)
    pdf7 = pdf1.replace([0, 1], 4)
    gdf7 = gdf1.replace([0, 1], None).fillna(4)
    pd.testing.assert_frame_equal(gdf7.to_pandas(), pdf7)
    # dict input:
    pdf8 = pdf1.replace({"a": 0, "b": 0}, {"a": 4, "b": 5})
    gdf8 = gdf1.replace({"a": 0, "b": 0}, {"a": None, "b": 5}).fillna(4)
    pd.testing.assert_frame_equal(gdf8.to_pandas(), pdf8)
    # A frame built with a null directly; after replace + fillna it must match
    # the pure-list expectation pdf6 from above.
    gdf1 = DataFrame({"a": [0, 1, 2, 3], "b": [0, 1, 2, None]})
    gdf9 = gdf1.replace([0, 1], [4, 5]).fillna(3)
    pd.testing.assert_frame_equal(gdf9.to_pandas(), pdf6)
def test_replace_strings():
    """String Series.replace must behave identically to pandas."""
    expected = pd.Series(["a", "b", "c", "d"])
    actual = Series(["a", "b", "c", "d"])
    assert_eq(expected.replace("a", "e"), actual.replace("a", "e"))
@pytest.mark.parametrize(
    "data_dtype", ["int8", "int16", "int32", "int64", "float32", "float64"]
)
@pytest.mark.parametrize(
    "fill_dtype", ["int8", "int16", "int32", "int64", "float32", "float64"]
)
@pytest.mark.parametrize("fill_type", ["scalar", "series"])
@pytest.mark.parametrize("null_value", [None, np.nan])
@pytest.mark.parametrize("inplace", [True, False])
def test_series_fillna_numerical(
    data_dtype, fill_dtype, fill_type, null_value, inplace
):
    """fillna on numeric Series with scalar or Series fill values, in/out of place."""
    # TODO: These tests should use Pandas' nullable int type
    # when we support a recent enough version of Pandas
    # https://pandas.pydata.org/pandas-docs/stable/user_guide/integer_na.html
    if fill_type == "scalar":
        fill_value = np.random.randint(0, 5)
        expect = np.array([0, 1, fill_value, 2, fill_value], dtype=data_dtype)
    elif fill_type == "series":
        data = np.random.randint(0, 5, (5,))
        fill_value = pd.Series(data, dtype=data_dtype)
        # Nulls sit at positions 2 and 4, so those slots take the fill series' values.
        expect = np.array(
            [0, 1, fill_value[2], 2, fill_value[4]], dtype=data_dtype
        )
    sr = Series([0, 1, null_value, 2, null_value], dtype=data_dtype)
    result = sr.fillna(fill_value, inplace=inplace)
    if inplace:
        # In-place fillna returns None; the mutated series is the result.
        result = sr
    got = result.to_array()
    np.testing.assert_equal(expect, got)
@pytest.mark.parametrize("fill_type", ["scalar", "series"])
@pytest.mark.parametrize("null_value", [None, np.nan])
@pytest.mark.parametrize("inplace", [True, False])
def test_fillna_categorical(fill_type, null_value, inplace):
    """fillna on a categorical Series with scalar and Series fill values."""
    data = pd.Series(
        ["a", "b", "a", null_value, "c", null_value], dtype="category"
    )
    sr = Series.from_pandas(data)
    if fill_type == "scalar":
        fill_value = "c"
        expect = pd.Series(["a", "b", "a", "c", "c", "c"], dtype="category")
    elif fill_type == "series":
        fill_value = pd.Series(
            ["c", "c", "c", "c", "c", "a"], dtype="category"
        )
        expect = pd.Series(["a", "b", "a", "c", "c", "a"], dtype="category")
    got = sr.fillna(fill_value, inplace=inplace)
    if inplace:
        # In-place fillna returns None; read the mutated series instead.
        got = sr
    assert_eq(expect, got)
@pytest.mark.parametrize("fill_type", ["scalar", "series"])
@pytest.mark.parametrize("inplace", [True, False])
def test_fillna_datetime(fill_type, inplace):
    """fillna on a datetime Series, compared against pandas."""
    psr = pd.Series(pd.date_range("2010-01-01", "2020-01-10", freq="1y"))
    if fill_type == "scalar":
        fill_value = pd.Timestamp("2010-01-02")
    elif fill_type == "series":
        # Build the fill series BEFORE nulling entries so it has no gaps.
        fill_value = psr + pd.Timedelta("1d")
    psr[[5, 9]] = None
    sr = Series.from_pandas(psr)
    expect = psr.fillna(fill_value)
    got = sr.fillna(fill_value, inplace=inplace)
    if inplace:
        got = sr
    assert_eq(expect, got)
@pytest.mark.parametrize("fill_type", ["scalar", "series", "dict"])
@pytest.mark.parametrize("inplace", [True, False])
def test_fillna_dataframe(fill_type, inplace):
    """DataFrame.fillna with scalar, Series, and per-column dict fill values."""
    pdf = pd.DataFrame({"a": [1, 2, None], "b": [None, None, 5]})
    gdf = DataFrame.from_pandas(pdf)
    if fill_type == "scalar":
        fill_value_pd = 5
        fill_value_cudf = fill_value_pd
    elif fill_type == "series":
        fill_value_pd = pd.Series([3, 4, 5])
        fill_value_cudf = Series.from_pandas(fill_value_pd)
    else:
        fill_value_pd = {"a": 5, "b": pd.Series([3, 4, 5])}
        fill_value_cudf = {
            "a": fill_value_pd["a"],
            "b": Series.from_pandas(fill_value_pd["b"]),
        }
    # https://github.com/pandas-dev/pandas/issues/27197
    # pandas df.fill_value with series is not working
    if isinstance(fill_value_pd, pd.Series):
        # Work around by filling column-by-column on the pandas side.
        expect = pd.DataFrame()
        for col in pdf.columns:
            expect[col] = pdf[col].fillna(fill_value_pd)
    else:
        expect = pdf.fillna(fill_value_pd)
    got = gdf.fillna(fill_value_cudf, inplace=inplace)
    if inplace:
        got = gdf
    assert_eq(expect, got)
@pytest.mark.parametrize("fill_type", ["scalar", "series"])
@pytest.mark.parametrize("inplace", [True, False])
def test_fillna_string(fill_type, inplace):
    """fillna on a string Series, compared against pandas."""
    psr = pd.Series(["z", None, "z", None])
    if fill_type == "scalar":
        fill_value_pd = "a"
        fill_value_cudf = fill_value_pd
    elif fill_type == "series":
        fill_value_pd = pd.Series(["a", "b", "c", "d"])
        fill_value_cudf = Series.from_pandas(fill_value_pd)
    sr = Series.from_pandas(psr)
    expect = psr.fillna(fill_value_pd)
    got = sr.fillna(fill_value_cudf, inplace=inplace)
    if inplace:
        got = sr
    assert_eq(expect, got)
@pytest.mark.parametrize("data_dtype", ["int8", "int16", "int32", "int64"])
def test_series_fillna_invalid_dtype(data_dtype):
    """Filling an int Series with an unrepresentable float must raise TypeError."""
    gdf = Series([1, 2, None, 3], dtype=data_dtype)
    fill_value = 2.5
    with pytest.raises(TypeError) as raises:
        gdf.fillna(fill_value)
    raises.match(
        "Cannot safely cast non-equivalent {} to {}".format(
            type(fill_value).__name__, gdf.dtype.type.__name__
        )
    )
@pytest.mark.parametrize(
    "data_dtype", ["int8", "int16", "int32", "int64", "float32", "float64"]
)
@pytest.mark.parametrize("fill_value", [100, 100.0, 128.5])
def test_series_where(data_dtype, fill_value):
    """Series.where against pandas for >, <, and == masks.

    The original body repeated the identical check three times, once per
    comparison; the loop below runs the same three checks with the same
    assertions and raise expectations.
    """
    psr = pd.Series(list(range(10)), dtype=data_dtype)
    sr = Series.from_pandas(psr)
    for make_cond in (
        lambda s: s > 0,
        lambda s: s < 0,
        lambda s: s == 0,
    ):
        if sr.dtype.type(fill_value) != fill_value:
            # fill_value is not representable in the series dtype -> TypeError.
            with pytest.raises(TypeError):
                sr.where(make_cond(sr), fill_value)
        else:
            # Cast back to original dtype as pandas automatically upcasts
            expect = psr.where(make_cond(psr), fill_value).astype(psr.dtype)
            got = sr.where(make_cond(sr), fill_value)
            assert_eq(expect, got)
@pytest.mark.parametrize("fill_value", [100, 100.0, 100.5])
def test_series_with_nulls_where(fill_value):
    """Series.where on a null-containing series for >, <, and == masks.

    Deduplicates the original's three copy-pasted checks into one loop with
    identical assertions.
    """
    psr = pd.Series([None] * 3 + list(range(5)))
    sr = Series.from_pandas(psr)
    for make_cond in (
        lambda s: s > 0,
        lambda s: s < 0,
        lambda s: s == 0,
    ):
        expect = psr.where(make_cond(psr), fill_value)
        got = sr.where(make_cond(sr), fill_value)
        assert_eq(expect, got)
def test_series_multiple_times_with_nulls():
    """Repeated replace-to-null calls must keep producing an all-null result."""
    sr = Series([1, 2, 3, None])
    expected = Series([None, None, None, None], dtype=np.int64)
    for i in range(3):
        got = sr.replace([1, 2, 3], None)
        assert_eq(expected, got)
        # BUG: #2695
        # The following series will acquire a chunk of memory and update with
        # values, but these values may still linger even after the memory
        # gets released. This memory space might get used for replace in
        # subsequent calls and the memory used for mask may have junk values.
        # So, if it is not updated properly, the result would be wrong.
        # So, this will help verify that scenario.
        Series([1, 1, 1, None])
@pytest.mark.parametrize(
    "series_dtype", ["int8", "int16", "int32", "int64", "float32", "float64"]
)
@pytest.mark.parametrize(
    "replacement", [128, 128.0, 128.5, -32769, -32769.0, -32769.5]
)
def test_numeric_series_replace_dtype(series_dtype, replacement):
    """replace() dtype-safety: unrepresentable replacements must raise TypeError."""
    psr = pd.Series([-2, -1, 0, 1, 2], dtype=series_dtype)
    sr = Series.from_pandas(psr)
    # Both Scalar
    if sr.dtype.type(replacement) != replacement:
        with pytest.raises(TypeError):
            sr.replace(1, replacement)
    else:
        expect = psr.replace(1, replacement).astype(psr.dtype)
        got = sr.replace(1, replacement)
        assert_eq(expect, got)
    # to_replace is a list, replacement is a scalar
    if sr.dtype.type(replacement) != replacement:
        with pytest.raises(TypeError):
            sr.replace([-1, 1], replacement)
    else:
        expect = psr.replace([-1, 1], replacement).astype(psr.dtype)
        got = sr.replace([-1, 1], replacement)
        assert_eq(expect, got)
    # If to_replace is a scalar and replacement is a list
    with pytest.raises(TypeError):
        sr.replace(0, [replacement, 2])
    # Both lists of unequal length
    with pytest.raises(ValueError):
        sr.replace([0, 1], [replacement])
    # Both lists of equal length
    if (np.dtype(type(replacement)).kind == "f" and sr.dtype.kind == "i") or (
        sr.dtype.type(replacement) != replacement
    ):
        # Float replacements into an int column, or out-of-range values.
        with pytest.raises(TypeError):
            sr.replace([-1, 1], [replacement, replacement])
    else:
        expect = psr.replace([-1, 1], [replacement, replacement]).astype(
            psr.dtype
        )
        got = sr.replace([-1, 1], [replacement, replacement])
        assert_eq(expect, got)
| 32.189815
| 78
| 0.608442
|
4c7cd2884399cf4366ac1d8a92c6c0946aae4595
| 6,633
|
py
|
Python
|
tools/git_batch_proc.py
|
Joejiong/Paddle_AST_Infrastructure
|
7c56e41da55c354610317821ec970f8a376001a4
|
[
"Apache-2.0"
] | 1
|
2020-09-08T02:35:57.000Z
|
2020-09-08T02:35:57.000Z
|
tools/git_batch_proc.py
|
Joejiong/Paddle_AST_Infrastructure
|
7c56e41da55c354610317821ec970f8a376001a4
|
[
"Apache-2.0"
] | null | null | null |
tools/git_batch_proc.py
|
Joejiong/Paddle_AST_Infrastructure
|
7c56e41da55c354610317821ec970f8a376001a4
|
[
"Apache-2.0"
] | 2
|
2020-09-07T13:21:16.000Z
|
2020-09-28T04:01:34.000Z
|
from git import Repo, InvalidGitRepositoryError, GitCommandError, Git
from os import listdir
from os.path import join
import argparse
# List of repos whose uncommitted changes were stashed during a pull
stash_repos = []
def path_to_repo(path):
    """Convert *path* to a Repo object; return None when it is not a Git directory."""
    try:
        return Repo(path)
    except InvalidGitRepositoryError:
        # Directory exists but is not a Git repository.
        return None
def not_none(obj):
    """Return True when *obj* is anything other than None."""
    return not (obj is None)
def get_all_git_repos(path):
    """Return Repo objects for every Git directory directly under *path*.

    Rewritten from the original map/zip pipeline: a single listdir() call
    (the old code listed the directory twice, a minor race), joined with the
    base path, opened as a repo, keeping only the successful conversions.
    """
    return [
        repo
        for repo in (path_to_repo(join(path, entry)) for entry in listdir(path))
        if repo is not None
    ]
def pull_repos(repos):
    """Pull the latest code in every repository."""
    for current in repos:
        git_pull_single_repo(current)
def git_pull_single_repo(repo):
    """Pull the latest code for one repo, stashing and restoring dirty work."""
    stashed = False
    if repo.is_dirty():
        # Uncommitted changes present: stash them before pulling.
        print(repo.git_dir + " 包含未提交文件,已暂存。")
        repo.git.stash('save')
        # Track stashed repos in the module-level list for later reference.
        stash_repos.append(repo)
        stashed = True
    repo.remote().pull()
    print(repo.working_dir.split('/')[-1] + ' pull finished.')
    if stashed:
        try:
            repo.git.stash('pop')
            print(repo.git_dir + " stash pop finished.")
        except GitCommandError:
            # Pop failed (most likely a merge conflict); the stash is kept
            # so the user can resolve it manually.
            print(repo.git_dir + " merge conflict, please merge by yourself.")
def get_branch_name(branch):
    """Return the plain name of a branch reference."""
    return getattr(branch, 'name')
def get_remote_branch_name(branch_name):
    """Prefix a local branch name with the 'origin/' remote namespace."""
    return 'origin/{}'.format(branch_name)
def checkout_repos(repos, branch):
    """Switch every repository to *branch*."""
    for current in repos:
        checkout(current, branch)
def get_all_local_branches(repo):
    """Return the names of all local branches of *repo*."""
    return [head.name for head in repo.branches]
def get_all_remote_branches(repo):
    """Return the names of all branches on the origin remote of *repo*."""
    return [ref.name for ref in repo.remotes.origin.refs]
def checkout(repo, branch, log=True):
    """Check out *branch* in *repo*, creating a tracking branch if only the
    remote one exists.

    Parameters
    ----------
    repo : git.Repo
    branch : str
        Local branch name.
    log : bool
        Print progress messages when True.
    """
    # Name of the corresponding remote branch (origin/<branch>).
    remote_branch = get_remote_branch_name(branch)
    try:
        if branch in get_all_local_branches(repo):
            # Local branch exists: check it out directly.
            repo.git.checkout(branch)
            if log:
                print(get_repo_dir_name(repo) + ' checkout finished.')
        elif remote_branch in get_all_remote_branches(repo):
            # Only the remote branch exists: create a local tracking branch.
            repo.git.checkout(remote_branch, b=branch)
            if log:
                print(get_repo_dir_name(repo) + ' checkout finished.')
        else:
            if log:
                print(get_repo_dir_name(repo) + ' does not have this branch.')
    except GitCommandError as e:
        # FIX: was a bare print("TODO") placeholder; report which repo and
        # branch failed and why instead of swallowing the information.
        print(get_repo_dir_name(repo) + ' checkout ' + branch + ' failed: ' + str(e))
def create_branches(repos, branch, filter_file):
    """Create *branch* in every repo; with *filter_file*, only in listed repos.

    Parameters
    ----------
    repos : list[git.Repo]
    branch : str
        New branch name.
    filter_file : str or None
        Optional path to a file with one repo directory name per line.
    """
    if filter_file:
        # A filter file restricts branch creation to the listed directories.
        handle_dirs = []
        with open(filter_file, 'r') as f:
            for handle_dir in f:
                handle_dirs.append(handle_dir.replace('\n', ''))
        for repo in repos:
            if get_repo_dir_name(repo) not in handle_dirs:
                # BUG FIX: was `return`, which aborted the whole loop on the
                # first unlisted repo and silently skipped every remaining
                # one; `continue` only skips the unlisted repo.
                continue
            create_branch(repo, branch)
    else:
        for repo in repos:
            create_branch(repo, branch)
def create_branch(repo, branch):
    """Create *branch* off dev, switch to it, and push it to origin."""
    # Switch to the dev branch first so the new branch is based on it.
    checkout(repo, 'dev', log=False)
    # Create the local branch.
    repo.create_head(branch)
    # Switch to the newly created branch.
    checkout(repo, branch, log=False)
    # Publish it to the remote.
    repo.git.push('origin', branch)
    print(get_repo_dir_name(repo) + ' create new branch and push to origin.')
def get_repo_dir_name(repo):
    """Return the repository's directory name (last path component)."""
    from pathlib import Path  # local import keeps the module import block untouched
    # Path.name is correct on every OS; the old split('/')[-1] broke on
    # Windows paths that use backslashes.
    return Path(repo.working_dir).name
def delete_branches(repos, branch, remote=False):
    """Delete *branch* (remotely when remote=True, else locally) in every repo."""
    for current in repos:
        delete_branch(current, branch, remote)
def delete_branch(repo, branch, remote=False):
    """Delete *branch* in one repo, on the origin remote or locally."""
    handler = delete_remote_branch if remote else delete_local_branch
    handler(branch, repo)
def delete_local_branch(branch, repo):
    """Delete local *branch* from *repo*, refusing the currently checked-out branch."""
    repo_name = get_repo_dir_name(repo)
    if repo.active_branch.name == branch:
        # Git cannot delete the branch that is checked out.
        print(repo_name)
        print('Cannot delete the branch which you are currently on.')
        print()
    elif branch not in get_all_local_branches(repo):
        print(repo_name + ' branch not found.')
        print()
    else:
        repo.delete_head(branch)
        print(repo_name + ' delete ' + branch + ' finished.')
        print()
def delete_remote_branch(branch, repo):
    """Delete *branch* on the origin remote of *repo*, if it exists there."""
    if get_remote_branch_name(branch) not in get_all_remote_branches(repo):
        print(get_repo_dir_name(repo) + ' branch not found.')
        print()
        return
    origin = repo.remote(name='origin')
    # Pushing an empty local ref to the remote ref deletes the remote branch.
    origin.push(refspec=(':' + branch))
    print(get_repo_dir_name(repo) + ' delete ' + branch + ' finished.')
    print()
def clone_repos(path, clone_file):
    """Clone every repository listed in *clone_file* (one 'owner/name' per line) into *path*."""
    with open(clone_file, 'r') as listing:
        print(path)
        for line in listing:
            clone_repo(path, line.rstrip('\n'))
def clone_repo(path, repo_url):
    """Clone the GitHub repository *repo_url* (e.g. 'owner/name') into *path*."""
    from pathlib import Path  # hoisted: was re-imported inside the try block
    # FIX: build the URL before the try block so the except branch can always
    # reference repo_url_str (previously a failure before the assignment
    # would have raised NameError in the handler).
    repo_url_str = 'https://www.github.com/' + repo_url + '.git'
    try:
        print(path)
        print(repo_url)
        # Make sure the destination directory exists.
        Path(path).mkdir(parents=True, exist_ok=True)
        Git(path).clone(repo_url_str)
        print('Clone ' + repo_url_str + ' finished.')
    except GitCommandError:
        print('Clone ' + repo_url_str + ' failed.')
def handle_args():
    """Dispatch the parsed command-line arguments (module-level `args`) to the
    matching batch action."""
    method = args.method
    if method == 'clone':
        if args.filter:
            clone_repos(args.path, args.filter)
            return
        else:
            print("克隆工程需要filter文件,指定克隆项目列表")
            return
    # Every other action operates on all repos found under the path.
    repos = get_all_git_repos(args.path)
    if method == 'pull':
        # Pull the latest code.
        # (The original used bare string literals as pseudo-comments here;
        # they were no-op expression statements and are now real comments.)
        pull_repos(repos)
    elif (method == 'checkout' or method == 'co') and args.branch != '':
        # Switch to the given branch.
        checkout_repos(repos, args.branch)
    elif method == 'new' and args.branch != '':
        # Create a new branch.
        create_branches(repos, args.branch, args.filter)
    elif method == 'delete' and args.branch != '':
        # Delete a branch (locally or remotely, per args.remote).
        delete_branches(repos, args.branch, args.remote)
    else:
        print("Not support method")
# Build the CLI and run only when executed as a script, so importing this
# module for its helper functions has no side effects.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Git 批处理工具')
    parser.add_argument('-p', '--path', type=str, default='.', help='批处理目录,默认为当前目录', required=False)
    # NOTE(review): argparse `type=bool` treats any non-empty string as True
    # ("-r False" still enables remote). Kept for CLI compatibility; consider
    # action='store_true' in a breaking release.
    parser.add_argument('-r', '--remote', type=bool, default=False, help='是否操作远端分支,默认为False', required=False)
    parser.add_argument('-f', '--filter', type=str, help='克隆项目目标文件', required=False)
    parser.add_argument('method', action='store', type=str, choices=['clone', 'pull', 'checkout', 'co', 'new', 'delete'],
                        help='批量执行任务, clone, pull, checkout[co], new, delete')
    parser.add_argument('branch', nargs='?', action='store', type=str, default='', help='指定target分支')
    # `args` stays module-global: handle_args() reads it directly.
    args = parser.parse_args()
    handle_args()
| 28.467811
| 117
| 0.619629
|
293b6f80e00145c13251a978277291ff3459ec09
| 2,572
|
py
|
Python
|
networkit/linkprediction.py
|
clintg6/networkit
|
b4cba9a82436cd7ebc139c1a612f593fca9892c6
|
[
"MIT"
] | 1
|
2019-02-26T14:59:50.000Z
|
2019-02-26T14:59:50.000Z
|
networkit/linkprediction.py
|
cndolo/networkit
|
eb52ec5e62a13f4a259fc2e7cf116b2fd2acfbee
|
[
"MIT"
] | null | null | null |
networkit/linkprediction.py
|
cndolo/networkit
|
eb52ec5e62a13f4a259fc2e7cf116b2fd2acfbee
|
[
"MIT"
] | null | null | null |
from _NetworKit import KatzIndex, CommonNeighborsIndex, JaccardIndex, PreferentialAttachmentIndex, AdamicAdarIndex, UDegreeIndex, VDegreeIndex, AlgebraicDistanceIndex, NeighborhoodDistanceIndex, TotalNeighborsIndex, NeighborsMeasureIndex, SameCommunityIndex, AdjustedRandIndex, ResourceAllocationIndex, RandomLinkSampler, ROCMetric, PrecisionRecallMetric, MissingLinksFinder, LinkThresholder, PredictionsSorter, Graph
import numpy as np
try:
import sklearn
except ImportError:
print(""" WARNING: module 'sklearn' not found, supervised link prediction won't be available """)
def trainClassifier(trainingSet, trainingGraph, classifier, *linkPredictors):
    """ Trains the given classifier with feature-vectors from the linkPredictors.

    Parameters
    ----------
    trainingSet : vector[pair[node, node]]
        Node-pairs to generate features for.
    trainingGraph : networkit.Graph
        Training graph containing all edges from the training set.
    classifier:
        Scikit-learn classifier to train.
    linkPredictors:
        Predictors used for the generation of feature-vectors.
    """
    # The generated samples come back ordered by node-pair (ascending) while
    # the labels follow the set's order; sorting the set first keeps labels
    # and samples aligned.
    trainingSet.sort()
    labels = getLabels(trainingSet, trainingGraph)
    samples = getFeatures(trainingSet, *linkPredictors)
    classifier.fit(samples, labels)
def getFeatures(nodePairs, *linkPredictors):
    """ Returns a numpy-array of predictor scores for the given node-pairs.

    Parameters
    ----------
    nodePairs : vector[pair[node, node]]
        Node-pairs to get the samples for.
    *linkPredictors
        Link predictors; each contributes one score column.

    Returns
    -------
    A numpy-array of shape (#nodePairs, #linkPredictors) with the generated
    scores from the predictors for the given node-pairs.
    """
    columns = []
    for predictor in linkPredictors:
        pair_score_list = predictor.runOn(nodePairs)
        # Transpose [(pair, score), ...] and keep only the score row.
        columns.append(list(zip(*pair_score_list))[1])
    return np.column_stack(columns)
def getLabels(nodePairs, G):
    """ Returns a numpy-array containing the labels of the given node-pairs.

    Labels: 1 = link present in G, 0 = absent link.

    Parameters
    ----------
    nodePairs : vector[pair[node, node]]
        Node-pairs to get the labels for.
    G : networkit.Graph
        Graph which provides ground truth for the labels.

    Returns
    -------
    A numpy-array containing the labels of the given node-pairs.
    """
    return np.array([1 if G.hasEdge(u, v) else 0 for u, v in nodePairs])
| 38.969697
| 417
| 0.770218
|
41d880c1bd1db60e2f16e1234f002885e4cd14f5
| 641
|
py
|
Python
|
third_party/webrtc/src/chromium/src/tools/perf/benchmarks/image_decoding.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 27
|
2016-04-27T01:02:03.000Z
|
2021-12-13T08:53:19.000Z
|
third_party/webrtc/src/chromium/src/tools/perf/benchmarks/image_decoding.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 2
|
2017-03-09T09:00:50.000Z
|
2017-09-21T15:48:20.000Z
|
third_party/webrtc/src/chromium/src/tools/perf/benchmarks/image_decoding.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 17
|
2016-04-27T02:06:39.000Z
|
2019-12-18T08:07:00.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import image_decoding
import page_sets
from telemetry import benchmark
@benchmark.Disabled('xp') # http://crbug.com/532181
class ImageDecodingToughImageCases(perf_benchmark.PerfBenchmark):
    """Telemetry benchmark measuring image decoding on a tough-image page set."""
    test = image_decoding.ImageDecoding
    # TODO: Rename this page set to tough_image_cases.py
    page_set = page_sets.ImageDecodingMeasurementPageSet

    @classmethod
    def Name(cls):
        # Benchmark identifier used by the telemetry harness.
        return 'image_decoding.image_decoding_measurement'
| 30.52381
| 72
| 0.806552
|
ced519b869232eeb9a7f2a171aefc654787d60ce
| 878
|
py
|
Python
|
setup.py
|
rbiswas4/calibrate_spectra
|
78264f81c8d38876226253630e3af56e458bc86d
|
[
"MIT"
] | null | null | null |
setup.py
|
rbiswas4/calibrate_spectra
|
78264f81c8d38876226253630e3af56e458bc86d
|
[
"MIT"
] | 8
|
2015-08-12T17:37:52.000Z
|
2015-08-29T07:27:45.000Z
|
setup.py
|
rbiswas4/calibrate_spectra
|
78264f81c8d38876226253630e3af56e458bc86d
|
[
"MIT"
] | null | null | null |
#from ez_setup import use_setuptools
#use_setuptools()
from distutils.core import setup

setup(# package information
      name="transientSources",
      version="0.0.1dev",
      description='',
      long_description=''' ''',
      # What code to include as packages
      packages=['transientSources'],
      # BUG FIX: the keyword is `package_dir`; the old `packagedir` was an
      # unknown distribution option and was silently ignored.
      package_dir={'transientSources': 'transientSources'},
      # What data to include as packages
      # NOTE(review): include_package_data is a setuptools option; it is
      # presumably ignored under plain distutils — confirm the intended
      # build backend.
      include_package_data=True,
      package_data={'transientSources':['example_data/2007uy/cfa_2007uy/*',
                                        'example_data/2007uy/B/*',
                                        'example_data/2007uy/V/*',
                                        'example_data/2007uy/i/*',
                                        'example_data/2007uy/r/*',
                                        'example_data/filters/*']
                    }
      )
| 38.173913
| 76
| 0.514806
|
934b282cdafd98b009b3b8df4f63a5b684527153
| 2,303
|
py
|
Python
|
ros_ws/src/crazyswarm/scripts/perceived-safety-study/cmdPositionTest.py
|
rasmus-rudling/degree-thesis
|
d74581491ec9618149c582059e290dca9957951d
|
[
"MIT"
] | null | null | null |
ros_ws/src/crazyswarm/scripts/perceived-safety-study/cmdPositionTest.py
|
rasmus-rudling/degree-thesis
|
d74581491ec9618149c582059e290dca9957951d
|
[
"MIT"
] | null | null | null |
ros_ws/src/crazyswarm/scripts/perceived-safety-study/cmdPositionTest.py
|
rasmus-rudling/degree-thesis
|
d74581491ec9618149c582059e290dca9957951d
|
[
"MIT"
] | null | null | null |
import os
import sys
from matplotlib import pyplot as plt
import numpy as np
import rospy
sys.path.append("..")
from pycrazyswarm import Crazyswarm
from tf import TransformListener
def tf_callback(self, data):
    # NOTE(review): the extracted values are never used and the function is
    # never registered as a subscriber in this file; the stray `self`
    # parameter suggests it was lifted from a class. Presumably a leftover
    # /tf callback — confirm before deleting.
    transform = data.transforms[0]
    pose = transform.transform
def main():
    """Replay a recorded trajectory on a Crazyflie via cmdPosition and plot z tracking."""
    crazyflies_yaml = str({
        'crazyflies': [{
            'channel': 100,
            'id': 7,
            'initialPosition': [0, 0, 0],
            'type': 'default'
        }]
    })
    swarm = Crazyswarm(crazyflies_yaml=crazyflies_yaml)
    timeHelper = swarm.timeHelper
    drone = swarm.allcfs.crazyflies[0]
    # trajectoryData = np.loadtxt("hoverAndLandData.csv", delimiter=",", skiprows=1)
    trajectoryData = np.loadtxt("hoverForwardAndBackLand.csv",
                                delimiter=",",
                                skiprows=1)
    # CSV columns appear to be: time, x, y, z, yaw — confirm against the recorder.
    timestamps = np.array([row[0] for row in trajectoryData])
    x = np.array([row[1] for row in trajectoryData])
    y = np.array([row[2] for row in trajectoryData])
    z = np.array([row[3] for row in trajectoryData])
    yaw = np.array([row[4] for row in trajectoryData])
    # Shift time so the first sample is t = 0.
    timestamps -= timestamps[0]
    delta = 0.05  # In seconds, 0.05s = 20Hz
    # Resample the trajectory onto a uniform grid of period `delta`.
    t_d = np.arange(0, timestamps[-1], delta)
    num_states = t_d.shape[0]
    x_d = np.interp(t_d, timestamps, x)
    y_d = np.interp(t_d, timestamps, y)
    z_d = np.interp(t_d, timestamps, z)
    yaw_d = np.interp(t_d, timestamps, yaw)
    # Get init drone position
    tf = TransformListener()
    tf.waitForTransform("/world", "/cf7", rospy.Time(), rospy.Duration(4.0))
    t = tf.getLatestCommonTime("/world", "/cf7")
    init_position, quaternion = tf.lookupTransform('/world', '/cf7', t)
    init_position = np.array(init_position)
    # Offset the whole trajectory so it starts at the drone's actual position.
    error = init_position - [x_d[0], y_d[0], z_d[0]]
    record = list()
    for i in range(num_states):
        current_position = np.array([x_d[i], y_d[i], z_d[i]])
        position_command = current_position + error
        drone.cmdPosition(position_command, yaw_d[i])
        timeHelper.sleep(delta)
        # Record the measured z for comparison with the commanded z.
        t = tf.getLatestCommonTime("/world", "/cf7")
        record.append(tf.lookupTransform('/world', '/cf7', t)[0][2])
    # Red: commanded z; green: measured z.
    plt.plot(t_d, z_d, "-r")
    plt.plot(t_d, record, "-g")
    plt.show()
if __name__ == "__main__":
main()
| 28.432099
| 84
| 0.615284
|
5162b7543bd120e6532b15ff47b9d6e2bb66282b
| 7,275
|
py
|
Python
|
utilities.py
|
aws-samples/gain_customer_insights
|
8287fa463f5fc673da57ddc3b81b0121764986a5
|
[
"MIT-0"
] | 2
|
2021-06-08T22:38:42.000Z
|
2021-11-03T01:00:55.000Z
|
utilities.py
|
aws-samples/gain_customer_insights
|
8287fa463f5fc673da57ddc3b81b0121764986a5
|
[
"MIT-0"
] | null | null | null |
utilities.py
|
aws-samples/gain_customer_insights
|
8287fa463f5fc673da57ddc3b81b0121764986a5
|
[
"MIT-0"
] | 4
|
2020-06-26T10:07:37.000Z
|
2021-11-03T01:00:48.000Z
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This code snippet is lightly modified from that provided by AWS Secrets Manager during secrets creation.
import boto3
import base64
from botocore.exceptions import ClientError
import json
import matplotlib.pyplot as plt
def get_secret(secret_name, region_name):
    """Retrieve a secret from AWS Secrets Manager.

    Parameters:
        secret_name: name or ARN of the secret.
        region_name: AWS region hosting the secret, e.g. 'us-east-1'.

    Returns:
        dict: the raw GetSecretValue API response.  The payload is in
        response['SecretString'] for text/JSON secrets, or in
        response['SecretBinary'] for binary secrets (decode with
        base64.b64decode).

    Raises:
        botocore.exceptions.ClientError: re-raised unchanged after being
        printed.  Typical codes: DecryptionFailureException,
        InternalServiceErrorException, InvalidParameterException,
        InvalidRequestException, ResourceNotFoundException.
    """
    # Create a Secrets Manager client
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name
    )
    # See https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
    try:
        return client.get_secret_value(SecretId=secret_name)
    except ClientError as e:
        # The original handler re-raised each error code individually and
        # also carried an unreachable `else:` branch (the `try` body returns
        # on success, so `else` never ran).  All paths reduce to
        # "print and re-raise", so do exactly that.
        print(e)
        raise
# Extract training and validation AUC values from the results returned by
# method describe_training_job()
def get_auc_from_metrics(response, metric_type):
    """Return the index of the first entry in response['FinalMetricDataList']
    whose values contain `metric_type`, or None if no entry matches.

    `response` is the dict returned by SageMaker's describe_training_job().
    """
    metric_entries = response['FinalMetricDataList']
    for idx, entry in enumerate(metric_entries):
        if metric_type in entry.values():
            return idx
# Functions for model feature exploration
def plot_feature_importance(booster, f, maxfeats = 15):
    """Plot the top-`maxfeats` features of an xgboost `booster`, ranked by
    importance type `f` (e.g. 'gain', 'weight'), scores rounded to 2 dp."""
    from xgboost import plot_importance
    raw_scores = booster.get_score(importance_type=f)
    rounded_scores = {feat: round(score, 2) for feat, score in raw_scores.items()}
    plot_importance(rounded_scores,
                    max_num_features=maxfeats,
                    importance_type=f,
                    title='Feature Importance: ' + f,
                    color="#4daf4a")
    plt.show()
# Calculate tree depth. Adapted the code from here
# https://stackoverflow.com/questions/29005959/depth-of-a-json-tree to Python 3.
def calculate_tree_depth(tree_dict):
    """Return the depth of an xgboost tree given as a nested dict.

    A leaf (no 'children' key) counts as depth 1; an internal node is one
    deeper than its deepest child.
    """
    if 'children' not in tree_dict:
        return 1
    child_depths = [calculate_tree_depth(child) for child in tree_dict['children']]
    return 1 + max([0] + child_depths)
def get_depths_as_list(all_trees):
    """Return the depth of every tree in `all_trees`.

    `all_trees` is a list of json strings as produced by xgboost's
    get_dump(dump_format='json'); the result is a parallel list where
    element i is the depth of tree i.
    """
    return [calculate_tree_depth(json.loads(tree_json)) for tree_json in all_trees]
def calculate_list_unique_elements(input_list):
    """Count the occurrences of each unique element in a list.

    Parameters:
        input_list: any iterable of hashable elements (an empty list is fine).

    Returns:
        dict mapping each unique element to its count.
    """
    # dict.get with a default replaces the original membership-test branch
    # (one lookup instead of two on the hit path).
    counts = {}
    for item in input_list:
        counts[item] = counts.get(item, 0) + 1
    return counts
def find_feature(tree_dict, feature):
    """Return 1 if `feature` is used as a split anywhere in the tree, else 0.

    Parameters:
        tree_dict: single xgboost tree as a nested dict
        feature: feature name, str
    """
    if "split" not in tree_dict:
        # leaf node: no split here
        return 0
    if tree_dict["split"] == feature:
        return 1
    # recurse into the subtrees; stop at the first match
    for child in tree_dict["children"]:
        if find_feature(child, feature) != 0:
            return 1
    return 0
# find all trees that have a feature
def find_all_trees_with_feature(all_trees, feature):
    """Return the indices of all trees in which `feature` appears as a split.

    Parameters:
        all_trees: list of trees in json form (xgboost get_dump output)
        feature: feature name, str
    """
    return [idx for idx, tree_json in enumerate(all_trees)
            if find_feature(json.loads(tree_json), feature) == 1]
# given a list of features find how many trees have it
def count_trees_with_features(all_trees, feature_list):
    """Count, for each feature, how many trees use it as a split.

    Parameters:
        all_trees: list of trees in json form (xgboost get_dump output)
        feature_list: list of feature names

    Returns:
        dict mapping feature name -> number of trees containing it.
    """
    # Parse every tree exactly once.  The original called json.loads inside
    # the nested loop, re-parsing each tree once per feature
    # (len(feature_list) * len(all_trees) parses).
    parsed_trees = [json.loads(tree_json) for tree_json in all_trees]
    tree_count = {feature: 0 for feature in feature_list}
    for feature in feature_list:
        for tree in parsed_trees:
            if find_feature(tree, feature) == 1:
                tree_count[feature] += 1
    return tree_count
| 40.416667
| 106
| 0.662955
|
7e7eec1c4895645d679edbec6ae59842ae7ec540
| 559
|
py
|
Python
|
src/olympia/activity/admin.py
|
atiqueahmedziad/addons-server
|
6e1cc00bf15d245fbcdddf618286bba943731e45
|
[
"BSD-3-Clause"
] | 10
|
2018-08-16T04:55:06.000Z
|
2022-01-08T16:09:39.000Z
|
src/olympia/activity/admin.py
|
atiqueahmedziad/addons-server
|
6e1cc00bf15d245fbcdddf618286bba943731e45
|
[
"BSD-3-Clause"
] | 171
|
2018-05-20T00:27:59.000Z
|
2022-03-21T13:34:27.000Z
|
src/olympia/activity/admin.py
|
atiqueahmedziad/addons-server
|
6e1cc00bf15d245fbcdddf618286bba943731e45
|
[
"BSD-3-Clause"
] | 12
|
2018-08-01T16:46:09.000Z
|
2022-01-08T16:09:46.000Z
|
from django.contrib import admin
from .models import ActivityLog
class ActivityLogAdmin(admin.ModelAdmin):
    """Read-only admin view over ActivityLog records.

    Entries are generated by the application, so adding and deleting are
    disabled and every displayed field is read-only.
    """
    list_display = ('created', 'user', '__unicode__',)
    fields = ('user', 'created', '__unicode__',)
    readonly_fields = ('created', 'user', '__unicode__',)
    # was declared twice in the original class body; a single declaration
    # has the identical effect (the later one simply shadowed the first)
    raw_id_fields = ('user',)
    date_hierarchy = 'created'

    def has_add_permission(self, request):
        # Logs are written by the app, never created by hand.
        return False

    def has_delete_permission(self, request, obj=None):
        # Keep the audit trail immutable.
        return False


admin.site.register(ActivityLog, ActivityLogAdmin)
| 25.409091
| 57
| 0.68873
|
da2851ab30d7ee5849c7cd1678284a5fd0c7c293
| 34,136
|
py
|
Python
|
src/QMCTools/PyscfToQmcpack_Spline.py
|
kayahans/qmcpack
|
c25d77702e36363ff7368ded783bf31c1b1c5f17
|
[
"NCSA"
] | null | null | null |
src/QMCTools/PyscfToQmcpack_Spline.py
|
kayahans/qmcpack
|
c25d77702e36363ff7368ded783bf31c1b1c5f17
|
[
"NCSA"
] | null | null | null |
src/QMCTools/PyscfToQmcpack_Spline.py
|
kayahans/qmcpack
|
c25d77702e36363ff7368ded783bf31c1b1c5f17
|
[
"NCSA"
] | null | null | null |
######################################################################################
## This file is distributed under the University of Illinois/NCSA Open Source License.
## See LICENSE file in top directory for details.
##
## Copyright (c) 2016 Jeongnim Kim and QMCPACK developers.
##
## File developed by: Yubo "Paul" Yang, yubo.paul.yang@gmail.com, University of Illinois Urbana-Champaign
## Thomas Applencourt, applencourt@anl.gov, Argonne National Laboratory
## Hyeondeok Shin, hshin@anl.gov, Argonne National Laboratory
## Anouar Benali, benali@anl.gov, Argonne National Laboratory
##
## File created by: Thomas Applencourt, applencourt@anl.gov, Argonne National Laboratory
#######################################################################################
import numpy as np
import h5py
import os
try:
from lxml import etree
except:
import sys
sys.exit("Error: lxml module is needed for the generation of the XML file. Please install it using your favorite package manager")
try:
import pandas as pd
except:
import sys
sys.exit("Error: Panda module is needed for the save_eigensystem and eigensystem functions. Please install it using your package manager")
def pyscf2qmcpackspline(cell,mf,title="Default", kpts=[], kmesh=[], sp_twist=[]):
    """Convert a gamma-point pyscf PBC mean-field result into a QMCPACK
    spline wave-function file (<title>.h5) and a matching QMCPACK input
    file (<title>.xml).

    Only restricted, periodic, gamma-point calculations are accepted;
    anything else aborts via sys.exit.  `kmesh` is currently unused and
    `kpts` must stay empty.
    NOTE(review): the mutable default arguments ([]) are never mutated
    here, but None defaults would be the safer convention.
    """
    import sys, re
    Restricted=True
    PBC=False
    Gamma=False
    # Python version check
    if sys.version_info < (3, 2):
        sys.exit("Python < 3.2 not supported")
    # FFT mesh check: the conversion below assumes an odd FFT grid
    if np.any(cell.mesh % 2 == 0):
        sys.exit("Even number of FFT mesh not supported")
    #Twists generation not yet implemented
    if len(sp_twist)== 0:
        sp_twist=[0.0,0.0,0.0]
    # Detect the scf flavor from the repr of the mean-field object
    # (e.g. "<pyscf.pbc.scf.rhf.RHF object ...>"), split on '.' and ' '.
    val=str(mf)
    ComputeMode= re.split('[. ]',val)
    SizeMode=len(ComputeMode)
    for i in range(SizeMode):
        if ComputeMode[i] in ("UHF","KUHF","UKS"):
            Restricted=False
            sys.exit("Unrestricted calculations not supported")
        if ComputeMode[i]=="pbc":
            PBC = True
    if not PBC:
        sys.exit("Open boundary condition without lattice vectors not supported")
    if PBC and len(kpts) == 0:
        Gamma=True
    if len(kpts)!= 0:
        sys.exit("K-point scf not supported")
    else:
        loc_cell=cell
    h5_fname = "{}.h5".format(title)
    xml_fname = "{}.xml".format(title)
    # Identity tile matrix (no supercell tiling): the diagonal of a
    # flattened 3x3 matrix sits at indices 0, 4, 8.
    tile = [1, 1, 1]
    tilematrix = [0]*9
    for dim, val in enumerate(tile):
        tilematrix[4*dim] = val
    tilematrix_str = " ".join(map(str,tilematrix))
    gvecs, eig_df = save_eigensystem(mf, save=False)
    # generate wave function file
    # ================================================
    generate_pwscf_h5(loc_cell,gvecs,eig_df,h5_fname)
    # generate QMCPACK input file
    # ================================================
    h5_handle = h5py.File(h5_fname,'r')
    inp = InputXml()
    # build <simulationcell>
    sc_node = inp.simulationcell_from_cell(loc_cell)
    # build <particleset>
    pset_node = inp.particleset_from_hdf5(h5_handle)
    psedinit_node = inp.particleset_initialposition_from_hdf5(h5_handle)
    wavefunction_node = inp.wavefunction(h5_handle,h5_fname, tilematrix_str)
    hamiltonien_node = inp.hamiltonian(h5_handle, loc_cell)
    # assemble <qmcsystem>
    sys_node = etree.Element('qmcsystem')
    sys_children = [sc_node]
    for child in sys_children:
        sys_node.append(child)
    # write input
    root = etree.Element('simulation')
    doc = etree.ElementTree(root)
    #Change name!!!
    root.append(etree.fromstring('<project id="{}" series="0"/>'.format(title)))
    root.append(sys_node)
    for node in [pset_node,psedinit_node,wavefunction_node,hamiltonien_node]:
        root.append(node)
    a = inp.vmc_dmc(h5_handle)
    root.extend(a)
    doc.write(xml_fname,pretty_print=True)
def get_supercell(cell, kmesh=[], kpts=None):
    """Build a supercell from `cell` for the given (or inferred) k-mesh.

    Parameters:
        cell: pyscf.pbc.gto.Cell
        kmesh: (na, nb, nc) mesh; if empty it is inferred from `kpts`
        kpts: absolute k-points used to guess the mesh when kmesh is empty
              (new, backward-compatible parameter; the original read an
              undefined global `kpts` and raised NameError)

    Returns:
        (scell, kmesh): the tiled supercell and the mesh actually used.
    """
    # Local imports keep pyscf off the module import path, matching the
    # style of ao_on_grid below.  The original referenced `lib` and `tools`
    # without importing them anywhere (NameError at runtime).
    from pyscf import lib
    from pyscf.pbc import tools
    latt_vec = cell.lattice_vectors()
    if len(kmesh) == 0:
        if kpts is None:
            raise ValueError("either kmesh or kpts must be provided")
        # Guess kmesh from the number of unique scaled k-point components.
        scaled_k = cell.get_scaled_kpts(kpts).round(8)
        kmesh = (len(np.unique(scaled_k[:, 0])),
                 len(np.unique(scaled_k[:, 1])),
                 len(np.unique(scaled_k[:, 2])))
    R_rel_a = np.arange(kmesh[0])
    R_rel_b = np.arange(kmesh[1])
    R_rel_c = np.arange(kmesh[2])
    # R_rel_mesh has to be constructed exactly as the Ts in super_cell
    R_vec_rel = lib.cartesian_prod((R_rel_a, R_rel_b, R_rel_c))
    R_vec_abs = np.einsum('nu, uv -> nv', R_vec_rel, latt_vec)
    scell = tools.super_cell(cell, kmesh)
    return scell, kmesh
# end def get_supercell
def save_eigensystem(mf,gvec_fname = 'gvectors.dat'
    ,eigsys_fname = 'eigensystem.json',save=True):
    """Build (or reload from cache) the plane-wave eigensystem of a
    gamma-point, restricted pyscf calculation.

    If both cache files exist they are loaded; otherwise the MO
    coefficients are transformed onto the plane-wave basis.  When `save`
    is True the results are (re)written to gvec_fname/eigsys_fname.

    Returns (gvecs, eig_df): integer g-vectors and a DataFrame indexed by
    (ikpt, ispin, istate) with 'evalue'/'evector'/'reduced_k' columns.
    NOTE(review): uses mf.cell.gs, which newer pyscf releases renamed to
    `mesh` -- confirm against the pinned pyscf version.
    """
    import os
    if os.path.isfile(eigsys_fname) and os.path.isfile(gvec_fname):
        gvecs = np.loadtxt(gvec_fname)
        eig_df = pd.read_json(eigsys_fname).set_index(
            ['ikpt','ispin','istate'],drop=True).sort_index()
    else:
        data = []
        ikpt = 0 # gamma-point calculation
        ispin = 0 # restricted (same orbitals for up and down electrons)
        # get MOs in plane-wave basis
        aoR = ao_on_grid(mf.cell)
        gvecs,psig = mo_coeff_to_psig(mf.mo_coeff,aoR,mf.cell.gs,mf.cell.vol)
        nstate,npw,ncomp = psig.shape
        for istate in range(nstate):
            entry = {'ikpt':ikpt,'ispin':ispin,'istate':istate,
                'reduced_k':mf.kpt,'evalue':mf.mo_energy[istate],'evector':psig[istate,:,:]}
            data.append(entry)
        # end for istate
        eig_df = pd.DataFrame(data).set_index(
            ['ikpt','ispin','istate'],drop=True).sort_index()
    # end if
    if save:
        eig_df.reset_index().to_json(eigsys_fname)
        np.savetxt(gvec_fname,gvecs)
    # end if
    return gvecs,eig_df
# end def save_eigensystem
def ao_on_grid(cell):
    """Evaluate the AO basis of `cell` on its uniform real-space grid.

    Returns the array produced by pyscf's numint.eval_ao (one column per
    atomic orbital, one row per grid point).
    """
    from pyscf.pbc.dft import gen_grid,numint
    grid_coords = gen_grid.gen_uniform_grids(cell)
    return numint.eval_ao(cell, grid_coords)
# end def ao_on_grid
def mo_coeff_to_psig(mo_coeff,aoR,cell_gs,cell_vol,int_gvecs=None):
    """
    Inputs:
      mo_coeff: molecular orbital in AO basis, each column is an MO, shape (nao,nmo)
      aoR: atomic orbitals on a real-space grid, each column is an AO, shape (ngrid,nao)
      cell_gs: 2*cell_gs+1 should be the shape of real-space grid (e.g. (5,5,5))
      cell_vol: cell volume, used for FFT normalization
      int_gvecs: specify the order of plane-waves using reciprocal lattice points
    Outputs:
      (int_gvecs, psig): the g-vector order used, and the plane-wave
      coefficients representing the MOs, shape (nmo,npw,2) (real,imag)
    """
    # provide the order of reciprocal lattice vectors to skip
    if int_gvecs is None: # use internal order
        nx,ny,nz = cell_gs
        from itertools import product
        int_gvecs = np.array([gvec for gvec in product(
            range(-nx,nx+1),range(-ny,ny+1),range(-nz,nz+1))],dtype=int)
    else:
        # bug fix: the original asserted `int_gvecs.dtype is int`, which
        # compares a numpy dtype object to the Python builtin and is always
        # False -- any caller-supplied order tripped the assert.
        assert np.issubdtype(int_gvecs.dtype, np.integer)
    # end if
    npw = len(int_gvecs) # number of plane waves
    # put molecular orbitals on real-space grid
    moR = np.dot(aoR,mo_coeff)
    nao,nmo = moR.shape
    rgrid_shape = 2*np.array(cell_gs)+1
    assert nao == np.prod(rgrid_shape)
    # for each MO, FFT to get psig
    psig = np.zeros([nmo,npw,2]) # store real & complex
    for istate in range(nmo):
        # fill real-space FFT grid
        rgrid = moR[:,istate].reshape(rgrid_shape)
        # get plane-wave coefficients (on reciprocal-space FFT grid)
        moG = np.fft.fftn(rgrid)/np.prod(rgrid_shape)*cell_vol
        # transfer plane-wave coefficients to psig in specified order
        for igvec in range(npw):
            comp_val = moG[tuple(int_gvecs[igvec])]
            psig[istate,igvec,:] = comp_val.real,comp_val.imag
        # end for igvec
    # end for istate
    return int_gvecs,psig
# end def mo_coeff_to_psig
def generate_pwscf_h5(cell,gvecs,eig_df,h5_fname):
    """Write a QMCPACK ES-HDF wave-function file to `h5_fname` (overwritten
    if it exists) from a pyscf Cell, integer g-vectors, and the eigensystem
    DataFrame produced by save_eigensystem."""
    # if eigensystem was saved to disk, use the following to read
    #import numpy as np
    #import pandas as pd
    #gvecs = np.loadtxt('../1_eigsys/gvectors.dat')
    #eig_df= pd.read_json('../1_eigsys/eigensystem.json').set_index(
    # ['ikpt','ispin','istate'],drop=True).sort_index()
    new = h5py.File(h5_fname,'w')
    ref = PwscfH5()
    nelecs = ref.system_from_cell(new,cell)
    ref.create_electrons_group(new,gvecs,eig_df,nelecs)
    # transfer version info. !!!! hard code for now
    new.create_dataset('application/code',data=[np.string_('PySCF')])
    new.create_dataset('application/version',data=[np.string_('1.6.2')])
    new.create_dataset('format',data=[np.string_('ES-HDF')])
    new.create_dataset('version',data=[2,1,0])
    new.close()
# end def generate_pwscf_h5
# =======================================================================
# Class for bspline h5 generator
# =======================================================================
class PwscfH5:
    """Reader/writer for the QMCPACK ES-HDF ("pwscf.h5") wave-function file.

    The read side wraps an h5py.File handle; the write side fills the
    /electrons, /supercell and /atoms groups from a pyscf calculation.
    NOTE(review): read accessors use the h5py 2.x `.value` API; under
    h5py >= 3.0 they would need `[()]` -- confirm the pinned h5py version.
    """
    def __init__(self):
        # well-known dataset paths inside the ES-HDF layout
        self.locations = {
            'gvectors':'electrons/kpoint_0/gvectors',
            'nkpt':'electrons/number_of_kpoints',
            'nspin':'electrons/number_of_spins',
            'nstate':'electrons/kpoint_0/spin_0/number_of_states', # !!!! same number of states per kpt
            'axes':'supercell/primitive_vectors'
        }
        # expected python types of the scalar entries above
        self.dtypes = {
            'nkpt':int,
            'nspin':int,
            'nstate':int
        }
        self.fp = None # h5py.File object (like a file pointer)

    def __del__(self):
        # release the tracked file handle, if any
        if self.fp is not None:
            self.fp.close()

    # =======================================================================
    # Basic Read Methods i.e. basic read/write and path access
    # =======================================================================
    def read(self,fname,force=False):
        """ open 'fname' for reading and save handle in this class """
        if not os.path.isfile(fname):
            raise RuntimeError('%s not found' % fname)
        if (self.fp is None) or force:
            # fix: open explicitly read-only; h5py >= 3 requires a mode and
            # this method is read-only by contract anyway
            self.fp = h5py.File(fname,'r')
        else:
            raise RuntimeError('already tracking a file %s'%str(self.fp))

    def val(self,loc):
        """ get value array of an arbitrary entry at location 'loc' """
        return self.fp[loc].value

    def get(self,name):
        """ get value array of a known entry (see self.locations) """
        loc = self.locations[name]
        return self.fp[loc].value

    # =======================================================================
    # Advance Read Methods i.e. more specific to QMCPACK 3.0.0
    # =======================================================================
    # construct typical paths
    #  e.g. electrons/kpoint_0/spin_0/state_0
    @staticmethod
    def kpoint_path(ikpt):
        path = 'electrons/kpoint_%d' % (ikpt)
        return path

    @staticmethod
    def spin_path(ikpt,ispin):
        path = 'electrons/kpoint_%d/spin_%d' % (ikpt,ispin)
        return path

    @staticmethod
    def state_path(ikpt,ispin,istate):
        path = 'electrons/kpoint_%d/spin_%d/state_%d/' % (ikpt,ispin,istate)
        return path

    # access specific eigenvalue or eigenvector
    def psig(self,ikpt=0,ispin=0,istate=0):
        """ plane-wave coefficients of one orbital, shape (npw,2) """
        psig_loc = self.state_path(ikpt,ispin,istate)+'psi_g'
        return self.fp[psig_loc].value

    def psir(self,ikpt=0,ispin=0,istate=0):
        """ real-space values of one orbital (if stored in the file) """
        psir_loc = self.state_path(ikpt,ispin,istate)+'psi_r'
        return self.fp[psir_loc].value

    def eigenvalues(self):
        """ return all eigenvalues, shape=(nkpt,nspin,nstate) """
        nkpt = self.get('nkpt')[0]
        nspin = self.get('nspin')[0]
        nstate = self.get('nstate')[0] # !!!! same number of states per kpt
        evals = np.zeros([nkpt,nspin,nstate])
        for ikpt in range(nkpt):
            for ispin in range(nspin):
                path = self.spin_path(ikpt,ispin)
                evals[ikpt,ispin,:] = self.val(
                    os.path.join(path,'eigenvalues')
                )
        return evals

    @classmethod
    def psig_to_psir(cls,gvecs,psig,rgrid_shape,vol):
        """ contruct orbital given in planewave basis
        Inputs:
          gvecs: gvectors in reciprocal lattice units i.e. integers
          psig: planewave coefficients, should have the same length as gvecs
          rgrid_shape: shape of the real-space FFT grid
          vol: simulation cell volume, used to normalize fft
        Output:
          rgrid: orbital on a real-space grid """
        assert len(gvecs) == len(psig)
        kgrid = np.zeros(rgrid_shape,dtype=complex)
        for igvec in range(len(gvecs)):
            kgrid[tuple(gvecs[igvec])] = psig[igvec]
        # end for
        rgrid = np.fft.ifftn(kgrid) * np.prod(rgrid_shape)/vol
        return rgrid
    # end def psig_to_psir

    def get_psir_from_psig(self,ikpt,ispin,istate,rgrid_shape=None,mesh_factor=1.0):
        """ FFT psig to psir at the given (kpoint,spin,state) """
        # get lattice which defines the FFT grid
        axes = self.get('axes')
        vol = np.dot(np.cross(axes[0],axes[1]),axes[2])
        # get MO in plane-wave basis
        gvecs = self.get('gvectors').astype(int)
        psig_arr = self.psig(ikpt=ikpt,ispin=ispin,istate=istate)
        psig = psig_arr[:,0] + 1j*psig_arr[:,1]
        # determine real-space grid size (QMCPACK 3.0.0 convention)
        # ref: QMCWaveFunctions/Experimental/EinsplineSetBuilder.cpp::ReadGvectors_ESHDF()
        if rgrid_shape is None:
            # fix: the original built `map(int, ...)`, a lazy iterator in
            # python 3 that np.zeros/np.prod downstream cannot consume;
            # materialize the sizes into a list
            rgrid_shape = [int(nmax) for nmax in
                           np.ceil(gvecs.max(axis=0)*4*mesh_factor)]
        # end if
        psir = self.psig_to_psir(gvecs,psig,rgrid_shape,vol)
        return psir
    # end def get_psir_from_psig

    # build entire eigensystem as a dataframe
    def eigensystem(self):
        """ construct dataframe containing eigenvalues and eigenvectors
        labeled by (kpoint,spin,state) indices """
        import pandas as pd
        data = []
        # fix: unwrap the 1-element arrays (as eigenvalues() above does);
        # range() needs a plain integer
        nkpt = self.get('nkpt')[0]
        nspin= self.get('nspin')[0]
        for ikpt in range(nkpt):
            k_grp = self.fp[self.kpoint_path(ikpt)]
            rkvec = k_grp['reduced_k'].value
            for ispin in range(nspin):
                spin_loc = self.spin_path(ikpt,ispin)
                sp_grp = self.fp[spin_loc]
                nstate = sp_grp['number_of_states'].value[0]
                evals = sp_grp['eigenvalues'].value
                for istate in range(nstate):
                    st_loc = self.state_path(ikpt,ispin,istate)
                    st_grp = self.fp[st_loc]
                    evector= st_grp['psi_g'].value # shape (ngvec,2) (real,complex)
                    entry = {'ikpt':ikpt,'ispin':ispin,'istate':istate,
                             'reduced_k':rkvec,'evalue':evals[istate],'evector':evector}
                    data.append(entry)
                # end for istate
            # end for ispin
        # end for ikpt
        df = pd.DataFrame(data).set_index(['ikpt','ispin','istate'],drop=True)
        return df
    # end def eigensystem

    # =======================================================================
    # Advance Write Methods, some specialized for pyscf
    # =======================================================================
    @staticmethod
    def create_electrons_group(h5_handle,gvec,df,nelec):
        """ create and fill the /electrons group in hdf5 handle
        Inputs:
          h5_handle: hdf5 handle generated by h5py.File
          gvec: 2D numpy array of reciprocal space vectors (npw,ndim)
          df: dataframe containing the eigensystem,
            indexed by (kpt,spin,state), contains (evalue,evector,reduced_k)
          nelec: a list of the number of electrons per atom (if no pseudopotential, then 'species_id' returned by system_from_cell should do)
        Output:
          None
        Effect:
          fill /electrons group in 'h5_handle' """
        flat_df = df.reset_index()
        kpoints = flat_df['ikpt'].unique()
        spins = flat_df['ispin'].unique()
        nkpt,nspin = len(kpoints),len(spins)
        # transfer orbitals (electrons group)
        for ikpt in range(nkpt):
            # !!!! assume no symmetry was used to generate the kpoints
            kpt_path = 'electrons/kpoint_%d'%ikpt
            kgrp = h5_handle.create_group(kpt_path)
            kgrp.create_dataset('num_sym',data=[1])
            kgrp.create_dataset('symgroup',data=[1])
            kgrp.create_dataset('weight',data=[1])
            rkvec = df.loc[ikpt,'reduced_k'].values[0]
            kgrp.create_dataset('reduced_k',data=rkvec)
            if ikpt == 0: # store gvectors in kpoint_0
                kgrp.create_dataset('gvectors',data=gvec)
                kgrp.create_dataset('number_of_gvectors',data=[len(gvec)])
            # end if
            for ispin in range(nspin): # assume ispin==0
                nstate = len(df.loc[(ikpt,ispin)])
                spin_path = os.path.join(kpt_path,'spin_%d'%ispin)
                spgrp = h5_handle.create_group(spin_path)
                spgrp.create_dataset('number_of_states',data=[nstate])
                evals = np.zeros(nstate) # fill eigenvalues during eigenvector read
                for istate in range(nstate):
                    state_path = os.path.join(spin_path,'state_%d'%istate)
                    psig = df.loc[(ikpt,ispin,istate),'evector']
                    psig_path = os.path.join(state_path,'psi_g')
                    h5_handle.create_dataset(psig_path,data=psig)
                    evals[istate] = df.loc[(ikpt,ispin,istate),'evalue']
                # end for istate
                spgrp.create_dataset('eigenvalues',data=evals)
            # end for ispin
        # end for ikpt
        # transfer orbital info
        h5_handle.create_dataset('electrons/number_of_electrons',data=nelec)
        h5_handle.create_dataset('electrons/number_of_kpoints',data=[nkpt])
        # !!!! hard-code restricted orbitals
        h5_handle.create_dataset('electrons/number_of_spins',data=[1])
    # end def create_electrons_group

    @staticmethod
    def system_from_cell(h5_handle,cell):
        """ create and fill the /supercell and /atoms groups
        Inputs:
          h5_handle: hdf5 handle generated by h5py.File
          cell: pyscf.pbc.gto.Cell class
        Outputs:
          nelecs: the (nalpha, nbeta) electron counts of the cell
        Effect:
          fill /supercell and /atoms group in 'h5_handle'
        """
        # write lattice
        axes = cell.lattice_vectors() # always in bohr
        h5_handle.create_dataset('supercell/primitive_vectors',data=axes)
        # write atoms
        pos = cell.atom_coords() # always in bohr
        elem = [cell.atom_symbol(i) for i in range(cell.natm)]
        assert len(pos) == len(elem)
        h5_handle.create_dataset('atoms/number_of_atoms',data=[len(elem)])
        h5_handle.create_dataset('atoms/positions',data=pos)
        # write species info
        species, indices = np.unique(elem, return_index=True)
        h5_handle.create_dataset('atoms/number_of_species',data=[len(species)])
        atomic_number = {}
        number_of_electrons = {}
        species_map = {}
        nelecs = cell.nelec
        for ispec,name in enumerate(species):
            species_map[name] = ispec
            # full Z = core electrons handled by the pseudopotential + valence
            atomic_number[name] = cell.atom_nelec_core(indices[ispec]) + cell.atom_charge(indices[ispec])
            number_of_electrons[name] = cell.atom_charge(indices[ispec])
            spec_grp = h5_handle.create_group('atoms/species_%d'%ispec)
            # (the original re-checked `name not in species_map` here, but the
            # key was just inserted above, so that branch was unreachable)
            spec_grp.create_dataset('name',data=[np.string_(name)])
            # write atomic number and valence
            Zn = atomic_number[name]
            spec_grp.create_dataset('atomic_number',data=[Zn])
            Zps = number_of_electrons[name]
            spec_grp.create_dataset('valence_charge',data=[Zps])
        # end for ispec
        species_ids = [species_map[name] for name in elem]
        h5_handle.create_dataset('atoms/species_ids',data=species_ids)
        return nelecs
    # end def system_from_cell
# end class PwscfH5
# =======================================================================
# Class for xml generator
# =======================================================================
class InputXml:
def __init__(self):
pass
# end def
# =======================================================================
# Basic Methods (applicable to all xml files)
# =======================================================================
def read(self,fname):
self.fname = fname
parser = etree.XMLParser(remove_blank_text=True)
self.root = etree.parse(fname,parser)
# end def
def write(self,fname=None,pretty_print=True):
if fname is None:
self.root.write(self.fname,pretty_print=pretty_print)
else:
self.root.write(fname,pretty_print=pretty_print)
# end if
# end def
def show(self,node):
""" print text representation of an xml node """
print (etree.tostring(node,pretty_print=True))
# pass along xpath expression e.g. './/particleset'
def find(self,xpath):
return self.root.find(xpath)
# end def
def find_all(self,xpath):
return self.root.findall(xpath)
# end def
@classmethod
def arr2text(self,arr):
""" format convert a numpy array into a text string """
text = ''
if len(arr.shape) == 1: # vector
text = " ".join(arr.astype(str))
elif len(arr.shape) == 2: # matrix
mat = [self.arr2text(line) for line in arr]
text = "\n " + "\n ".join(mat) + "\n"
else:
raise RuntimeError('arr2text can only convert vector or matrix.')
# end if
return text
# end def
@classmethod
def text2arr(self,text,dtype=float,flatten=False):
tlist = text.strip(' ').strip('\n').split('\n')
if len(tlist) == 1:
return np.array(tlist,dtype=dtype)
else:
if flatten:
mytext = '\n'.join(['\n'.join(line.split()) for line in tlist])
myarr = self.text2arr(mytext)
return myarr.flatten()
else:
return np.array([line.split() for line in tlist],dtype=dtype)
# end if
# end if
# end def
@classmethod
def node2dict(self,node):
entry = dict(node.attrib)
if node.text:
entry.update({'text':node.text})
# end if
return entry
# end def node2dict
# =======================================================================
# Simple Methods Specific to QMCPACK
# =======================================================================
def find_pset(self,name='e'):
""" return xml node specifying the particle set with given name
by default return the quantum particle set 'e' """
return self.find('.//particleset[@name="%s"]'%name)
# end find_pset
# =======================================================================
# Advance Methods i.e. specific to pyscf or QMCPACK 3.0
# =======================================================================
# ----------------
# simulationcell
def simulationcell_from_cell(self,cell,bconds='p p p',lr_cut=15.0):
""" construct the <simulationcell> xml element from pyscf.pbc.gto.Cell class
Inputs:
cell: pyscf.pbc.gto.Cell class, should have lattice_vectors() and unit
bconds: boundary conditions in each of the x,y,z directions, p for periodic, n for non-periodic, default to 'p p p '
lr_cut: long-range cutoff paramter rc*kc, default to 15
Output:
etree.Element representing <simulationcell>
Effect:
none
"""
# write primitive lattice vectors
axes = cell.lattice_vectors() # rely on pyscf to return a.u.
lat_node = etree.Element('parameter'
,attrib={'name':'lattice','units':'bohr'})
lat_node.text = self.arr2text(axes) + " "
# write boundary conditions
bconds_node = etree.Element('parameter',{'name':'bconds'})
bconds_node.text = bconds
# write long-range cutoff parameter
lr_node = etree.Element('parameter',{'name':'LR_dim_cutoff'})
lr_node.text = str(lr_cut)
# build <simulationcell>
sc_node = etree.Element('simulationcell')
sc_node.append(lat_node)
sc_node.append(bconds_node)
sc_node.append(lr_node)
return sc_node
# end def simulationcell_from_cell
# ----------------
def particleset_from_hdf5(self,h5_handle):
atom_grp = h5_handle.get('atoms')
nspec = atom_grp.get('number_of_species').value[0]
species_ids = atom_grp.get('species_ids').value
positions = atom_grp.get('positions').value
natom_total = atom_grp.get('number_of_atoms').value[0]
# Get the name of the atoms
groups = []
for ispec in range(nspec):
# turn h5 group into dictionary (i.e. h5ls -d)
sp_grp = atom_grp.get('species_%d'%ispec)
name = sp_grp.get('name').value[0]
valence_charge = sp_grp.get('valence_charge').value[0]
atomic_number = sp_grp.get('atomic_number').value[0]
# locate particles of this species
atom_idx = np.where(species_ids==ispec)
pos_arr = positions[atom_idx]
natom = len(pos_arr)
# build xml node
charge_node = etree.Element('parameter',{'name':'charge'})
charge_node.text = str(valence_charge)
valence_node = etree.Element('parameter',{'name':'valence'})
valence_node.text = str(valence_charge)
atomic_number_node = etree.Element('parameter',{'name':'atomicnumber'})
atomic_number_node.text = str(atomic_number)
grp_children = [charge_node,valence_node,atomic_number_node]
grp_node = etree.Element('group',{'name':name})
for child in grp_children:
grp_node.append(child)
groups.append(grp_node)
pos_node = etree.Element('attrib',{'name':'position','datatype':'posArray'})
pos_node.text = self.arr2text(positions) + " "
groups.append(pos_node)
ionid_node = etree.Element('attrib',{'name':'ionid','datatype':'stringArray'})
ionid_node.text = self.arr2text(np.array([atom_grp.get('species_{}/name'.format(id_)).value[0] for id_ in species_ids]))
groups.append(ionid_node)
# build <particleset>
pset_node = etree.Element('particleset',{'name':'ion0','size':str(natom_total)})
for group in groups:
pset_node.append(group)
return pset_node
def particleset_initialposition_from_hdf5(self,h5_handle):
pset_node = etree.Element('particleset',{'name':'e','random':"yes", 'randomsrc':'ion0'})
# size = number of electron up and down
elec_alpha_beta = h5_handle.get('electrons/number_of_electrons').value
l_name = ("u","d")
for name, electron in zip(l_name,elec_alpha_beta):
groupe_node = etree.Element('group',{'name':"{}".format(name),'size':"{}".format(electron)})
param_node = etree.Element('parameter',{'name':'charge'})
param_node.text=str(-1)
groupe_node.append(param_node)
pset_node.append(groupe_node)
return pset_node
def wavefunction(self,h5_handle,h5_path,tilematrix):
wf_node = etree.Element('wavefunction', {'name':"psi0", 'target':"e"})
determinantset_node = etree.Element('determinantset', {"type":"einspline",
"href":"{}".format(h5_path),
"source":"ion0",
"tilematrix":"{}".format(tilematrix),
"twistnum":"0",
"meshfactor":"1.0",
"check_orb_norm":"no"})
basisset_node = etree.Element('basisset')
determinantset_node.append(basisset_node)
atom_grp = h5_handle.get('electrons')
alpha, beta = atom_grp.get('number_of_electrons').value
# Slaterdet,imamt
slaterdet_node = etree.fromstring('''
<slaterdeterminant>
<determinant id="updet" size="{}" ref="updet">
<occupation mode="ground" spindataset="0">
</occupation>
</determinant>
<determinant id="downdet" size="{}" ref="downdet">
<occupation mode="ground" spindataset="0">
</occupation>
</determinant>
</slaterdeterminant>
'''.format(alpha, beta))
determinantset_node.append(slaterdet_node)
wf_node.append(determinantset_node)
# Jastrow
jastrow_node = etree.fromstring('''
<jastrow name="J2" type="Two-Body" function="Bspline" print="yes">
<correlation speciesA="u" speciesB="u" size="10">
<coefficients id="uu" type="Array"> 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0</coefficients>
</correlation>
<correlation speciesA="u" speciesB="d" size="10">
<coefficients id="ud" type="Array"> 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0</coefficients>
</correlation>
</jastrow>''')
wf_node.append(jastrow_node)
# Species_id
atom_grp = h5_handle.get('atoms')
species_ids = atom_grp.get('species_ids').value
list_atom = sorted(set(atom_grp.get('species_{}/name'.format(id_)).value[0].decode() for id_ in species_ids))
jastrow_node = etree.Element('jastrow', {'name':"J1", 'type':"One-Body", "function":"Bspline", "print":"yes", "source":"ion0"})
for atom in list_atom:
jastrow_node.append(etree.fromstring('''
<correlation elementType="{}" cusp="0.0" size="10">
<coefficients id="{}" type="Array"> 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0</coefficients>
</correlation>'''.format(atom, atom)))
wf_node.append(jastrow_node)
return wf_node
    def hamiltonian(self,h5_handle, cell):
        """Build the QMCPACK <hamiltonian> XML node.

        Always includes electron-electron and ion-ion Coulomb pair
        potentials.  When the cell carries effective core potentials
        (cell.has_ecp()), a pseudopotential block referencing one
        <element>.qmcpp.xml file per species is emitted; otherwise the
        bare electron-ion Coulomb term is used.
        """
        atom_grp = h5_handle.get('atoms')
        species_ids = atom_grp.get('species_ids').value
        # Unique, sorted element names present in the structure.
        list_atom = sorted(set(atom_grp.get('species_{}/name'.format(id_)).value[0].decode() for id_ in species_ids))
        if cell.has_ecp():
            hamiltonian_node = etree.fromstring('''
  <hamiltonian name="h0" type="generic" target="e">
     <pairpot name="ElecElec" type="coulomb" source="e" target="e" physical="true"/>
     <pairpot name="IonIon" type="coulomb" source="ion0" target="ion0"/>
     <pairpot name="PseudoPot" type="pseudo" source="ion0" wavefunction="psi0" format="xml">
     </pairpot>
  </hamiltonian>''')
            # Add the list of ecp file for each atom.
            for element in list_atom:
                pset_node = etree.Element('pseudo',{'elementType':element,'href':"{}.qmcpp.xml".format(element)})
                # The PseudoPot pairpot is the last child of the hamiltonian node.
                hamiltonian_node[-1].append(pset_node)
        else:
            hamiltonian_node = etree.fromstring('''
  <hamiltonian name="h0" type="generic" target="e">
     <pairpot name="ElecElec" type="coulomb" source="e" target="e" physical="true"/>
     <pairpot name="IonIon" type="coulomb" source="ion0" target="ion0"/>
     <pairpot type="coulomb" name="ElecIon" source="ion0" target="e"/>
  </hamiltonian>''')
        return hamiltonian_node
    def vmc_dmc(self, h5_handle):
        """Build the QMC run sections of the input file.

        Produces, in order: an initial VMC block (energy/variance check),
        two wavefunction-optimization loops, and production VMC and DMC
        blocks.  Returns a flat list alternating XML comment nodes with
        the parsed <qmc>/<loop> elements, ready to append to the root.
        """
        vmc_init_comment = '''
  Example initial VMC to measure initial energy and variance
  '''
        vmc_init = '''
  <qmc method="vmc" move="pbyp" checkpoint="-1">
    <estimator name="LocalEnergy" hdf5="no"/>
    <parameter name="warmupSteps">100</parameter>
    <parameter name="blocks">20</parameter>
    <parameter name="steps">50</parameter>
    <parameter name="substeps">8</parameter>
    <parameter name="timestep">0.5</parameter>
    <parameter name="usedrift">no</parameter>
  </qmc>
  '''
        loop_comment = '''
  Example initial VMC optimization
  Number of steps required will be computed from total requested sample
  count and total number of walkers
  '''
        loop= '''
  <loop max="4">
    <qmc method="linear" move="pbyp" checkpoint="-1">
      <estimator name="LocalEnergy" hdf5="no"/>
      <parameter name="warmupSteps">100</parameter>
      <parameter name="blocks">20</parameter>
      <parameter name="timestep">0.5</parameter>
      <parameter name="walkers">1</parameter>
      <parameter name="samples">16000</parameter>
      <parameter name="substeps">4</parameter>
      <parameter name="usedrift">no</parameter>
      <parameter name="MinMethod">OneShiftOnly</parameter>
      <parameter name="minwalkers">0.003</parameter>
    </qmc>
  </loop>
  '''
        loop_followup_comment='''
  Example follow-up VMC optimization using more samples for greater accuracy
  '''
        loop_followup = '''
  <loop max="10">
    <qmc method="linear" move="pbyp" checkpoint="-1">
      <estimator name="LocalEnergy" hdf5="no"/>
      <parameter name="warmupSteps">100</parameter>
      <parameter name="blocks">20</parameter>
      <parameter name="timestep">0.5</parameter>
      <parameter name="walkers">1</parameter>
      <parameter name="samples">64000</parameter>
      <parameter name="substeps">4</parameter>
      <parameter name="usedrift">no</parameter>
      <parameter name="MinMethod">OneShiftOnly</parameter>
      <parameter name="minwalkers">0.3</parameter>
    </qmc>
  </loop>
  '''
        # Generate production VMC and DMC
        vmc_comment = '''Production VMC and DMC
  Examine the results of the optimization before running these blocks.
  e.g. Choose the best optimized jastrow from all obtained, put in
  wavefunction file, do not reoptimize.'''
        vmc = '''
  <qmc method="vmc" move="pbyp" checkpoint="-1">
    <estimator name="LocalEnergy" hdf5="no"/>
    <parameter name="warmupSteps">100</parameter>
    <parameter name="blocks">200</parameter>
    <parameter name="steps">50</parameter>
    <parameter name="substeps">8</parameter>
    <parameter name="timestep">0.5</parameter>
    <parameter name="usedrift">no</parameter>
    <!--Sample count should match targetwalker count for DMC. Will be obtained from all nodes.-->
    <parameter name="samples">16000</parameter>
  </qmc>'''
        dmc_comment = ""
        dmc ='''
  <qmc method="dmc" move="pbyp" checkpoint="20">
    <estimator name="LocalEnergy" hdf5="no"/>
    <parameter name="targetwalkers">16000</parameter>
    <parameter name="reconfiguration">no</parameter>
    <parameter name="warmupSteps">100</parameter>
    <parameter name="timestep">0.005</parameter>
    <parameter name="steps">100</parameter>
    <parameter name="blocks">100</parameter>
    <parameter name="nonlocalmoves">yes</parameter>
  </qmc>
  '''
        # Interleave each section with its explanatory XML comment.
        a = []
        l_xml = (vmc_init, loop, loop_followup, vmc, dmc)
        l_xml_comment = (vmc_init_comment, loop_comment, loop_followup_comment, vmc_comment, dmc_comment)
        for comment, qmc in zip(l_xml_comment,l_xml):
            a.append(etree.Comment(comment))
            a.append(etree.fromstring(qmc))
        return a
# ----------------
# numerics
# grid
    def radial_function(self,node):
        """Parse a <radfunc> XML node: grid definition plus tabulated values.

        Returns a dict holding the grid metadata (type, units, ri, rf,
        npts) together with the data read into a 1D array under 'rval'.
        Asserts that the data length matches the declared grid size.
        """
        assert node.tag=='radfunc'
        # read grid definitions ( e.g. np.linspace(ri,rf,npts) for linear grid )
        gnode = node.find('.//grid') # expected attributes:
        grid_defs = self.node2dict(gnode) # type,ri,rf,npts,units
        ri = float(grid_defs['ri'])
        rf = float(grid_defs['rf'])
        npts = int(grid_defs['npts'])
        gtype = grid_defs['type']
        units = grid_defs['units']
        # read
        dnode = node.find('.//data')
        rval = self.text2arr(dnode.text,flatten=True) # read as 1D vector
        assert len(rval) == npts
        entry = {'type':gtype,'units':units,'ri':ri,'rf':rf,'npts':npts,'rval':rval}
        return entry
    # end def radial_function
# ----------------
# end class
| 36.431163
| 140
| 0.625088
|
4d3b42b6905589a1499902fe1f7f53116bfe58be
| 1,437
|
py
|
Python
|
scripts/hotwords.py
|
mazika90/roboy_snapchat
|
010a595cac9be51599ec68a4dffc032aabec21c8
|
[
"BSD-3-Clause"
] | 1
|
2020-12-20T07:50:38.000Z
|
2020-12-20T07:50:38.000Z
|
scripts/hotwords.py
|
mazika90/roboy_snapchat
|
010a595cac9be51599ec68a4dffc032aabec21c8
|
[
"BSD-3-Clause"
] | 1
|
2018-07-07T12:08:00.000Z
|
2018-07-10T15:36:18.000Z
|
scripts/hotwords.py
|
mazika90/roboy_snapchat
|
010a595cac9be51599ec68a4dffc032aabec21c8
|
[
"BSD-3-Clause"
] | 1
|
2020-10-18T03:56:52.000Z
|
2020-10-18T03:56:52.000Z
|
#!/usr/bin/env python
import snowboydecoder
import sys
import signal
import rospy
from std_msgs.msg import String
# Demo code for listening two hotwords at the same time
import rospkg
import atexit
interrupted = False
def signal_handler(signal, frame):
    """SIGINT (Ctrl+C) handler: raise the shared stop flag polled by snowboy."""
    global interrupted
    interrupted = True
def interrupt_callback():
    """Poll function handed to the detector; True once shutdown was requested."""
    # Reading a module global needs no `global` declaration.
    return interrupted
def finish(detector):
    """atexit hook: release the hotword detector's audio resources."""
    detector.terminate()
    print("Finished cleanly.")
def shutdown():
    """rospy shutdown hook: request loop termination via the shared flag."""
    global interrupted
    interrupted = True
# --- node setup (this file is a ROS entry-point script; runs at import) ---
rospack = rospkg.RosPack()
path = rospack.get_path('roboy_snapchat_filter') + '/resources/'
# Two hotword models: "roboy" and "cheese".
models = [path+"roboy.pmdl", path+"cheese.pmdl"]
rospy.init_node("snowboy")
publisher = rospy.Publisher("/roboy/cognition/apply_filter", String, queue_size=1)
def publish(string):
    # Forward a detected hotword to the snapchat-filter topic.
    print('detected %s'%string)
    publisher.publish(string)
# capture SIGINT signal, e.g., Ctrl+C
signal.signal(signal.SIGINT, signal_handler)
# One sensitivity value per model.
sensitivity = [0.8]*len(models)
detector = snowboydecoder.HotwordDetector(models, sensitivity=sensitivity)
atexit.register(finish, detector)
rospy.on_shutdown(shutdown)
# NOTE(review): the "roboy" model's callback is a no-op lambda — presumably
# only "cheese" is meant to trigger the filter; confirm with the authors.
callbacks = [lambda: True,
             lambda: publish("cheese")]
print('Listening... Press Ctrl+C to exit')
# main loop
# make sure you have the same numbers of callbacks and models
detector.start(detected_callback=callbacks,
               interrupt_check=interrupt_callback,
               sleep_time=0.03)
detector.terminate()
| 22.107692
| 82
| 0.742519
|
9e3b21aa433cc69e7941a689db40884b50ae08ec
| 8,451
|
py
|
Python
|
tfim_spin_bath_MF.py
|
weinbe58/tfim_noise
|
36e9e569eb92a9a48277eb53ffb24cc3dae8a9b1
|
[
"BSD-3-Clause"
] | null | null | null |
tfim_spin_bath_MF.py
|
weinbe58/tfim_noise
|
36e9e569eb92a9a48277eb53ffb24cc3dae8a9b1
|
[
"BSD-3-Clause"
] | null | null | null |
tfim_spin_bath_MF.py
|
weinbe58/tfim_noise
|
36e9e569eb92a9a48277eb53ffb24cc3dae8a9b1
|
[
"BSD-3-Clause"
] | null | null | null |
from quspin.basis import spin_basis_1d,tensor_basis,boson_basis_1d
from quspin.operators import hamiltonian
from quspin.tools.evolution import evolve
import numpy as np
import cProfile,os,sys,time
import matplotlib.pyplot as plt
def anneal_bath_1(L,Nb,T,gamma=0.2,omega=1.0,path="."):
    """Anneal a TFIM ring of L spins coupled to one large central spin.

    Model 1: static XY system-bath coupling and static bath splitting;
    the transverse field ramps down as B(t)=(1-t/T)**2 while the Ising
    term ramps up as A(t)=(t/T)**2.  The final state is written to an
    .npz file named from the parameters; an existing file aborts the run.

    Fixed for Python 3: `print` statements converted to the function
    form (identical output on Python 2), and the "dome" typo corrected.
    """
    ti = time.time()
    filename = os.path.join(path,"spin_bath_exact_L_{}_Nb_{}_T_{}_gamma_{}_omega_{}.npz".format(L,Nb,T,gamma,omega))
    if os.path.isfile(filename):
        print("file_exists...exiting run.")
        exit()
    # A single spin of size Nb/2 models a bath of Nb spin-1/2 particles.
    if Nb%2 == 1:
        S = "{}/2".format(Nb)
    else:
        S = "{}".format(Nb//2)
    print("creating basis")
    spin_basis = spin_basis_1d(L,pauli=True,kblock=0,pblock=1)
    bath_basis = spin_basis_1d(1,S=S)
    basis = tensor_basis(spin_basis,bath_basis)
    print("L={}, H-space size: {}".format(L,basis.Ns))
    bath_energy=[[omega/Nb,0]]
    SB_list = [[gamma/np.sqrt(Nb),i,0] for i in range(L)]
    h_list = [[-1,i] for i in range(L)]
    J_list = [[-1,i,(i+1)%L] for i in range(L)]
    # Quadratic annealing schedules.
    A = lambda t:(t/T)**2
    B = lambda t:(1-t/T)**2
    static = [
        ["|z",bath_energy],
        ["+|-",SB_list],
        ["-|+",SB_list]
    ]
    dynamic = [["x|",h_list,B,()],
               ["zz|",J_list,A,()],
              ]
    print("creating hamiltonian")
    kwargs=dict(basis=basis,dtype=np.float64,
                check_symm=False,check_pcon=False,check_herm=False)
    H = hamiltonian(static,dynamic,**kwargs)
    print("solving initial state")
    E0,psi_0 = H.eigsh(k=1,which="SA",time=0)
    psi_0 = psi_0.ravel()
    print("evolving")
    out = np.zeros(psi_0.shape,dtype=np.complex128)
    psi_f = evolve(psi_0,0,T,H._hamiltonian__omp_SO,f_params = (out,),
                   solver_name="dop853",atol=1.1e-10,rtol=1.1e-10)
    print("saving")
    np.savez_compressed(filename,psi=psi_f)
    print("done......{} sec".format(time.time()-ti))
def anneal_bath_2(L,Nb,T,gamma=0.2,omega=1.0,path="."):
    """Same ring+central-spin model as model 1, but the XY system-bath
    coupling is annealed with the transverse-field schedule B(t) instead
    of being static, and the final state is normalized before saving.

    Fixed for Python 3: `print` statements converted to the function
    form (identical output on Python 2), and the "dome" typo corrected.
    """
    ti = time.time()
    filename = os.path.join(path,"spin_bath_exact_L_{}_Nb_{}_T_{}_gamma_{}_omega_{}.npz".format(L,Nb,T,gamma,omega))
    if os.path.isfile(filename):
        print("file_exists...exiting run.")
        exit()
    # A single spin of size Nb/2 models a bath of Nb spin-1/2 particles.
    if Nb%2 == 1:
        S = "{}/2".format(Nb)
    else:
        S = "{}".format(Nb//2)
    print("creating basis")
    spin_basis = spin_basis_1d(L,pauli=True,kblock=0,pblock=1)
    bath_basis = spin_basis_1d(1,S=S)
    basis = tensor_basis(spin_basis,bath_basis)
    print("L={}, H-space size: {}".format(L,basis.Ns))
    bath_energy=[[omega/Nb,0]]
    SB_list = [[gamma/np.sqrt(Nb),i,0] for i in range(L)]
    h_list = [[-1,i] for i in range(L)]
    J_list = [[-1,i,(i+1)%L] for i in range(L)]
    # Quadratic annealing schedules.
    A = lambda t:(t/T)**2
    B = lambda t:(1-t/T)**2
    static = [
        ["|z",bath_energy],
    ]
    dynamic = [["zz|",J_list,A,()],
               ["x|",h_list,B,()],
               ["+|-",SB_list,B,()],
               ["-|+",SB_list,B,()]
              ]
    print("creating hamiltonian")
    kwargs=dict(basis=basis,dtype=np.float64,
                check_symm=False,check_pcon=False,check_herm=False)
    H = hamiltonian(static,dynamic,**kwargs)
    print("solving initial state")
    E0,psi_0 = H.eigsh(k=1,which="SA",time=0)
    psi_0 = psi_0.ravel()
    print("evolving")
    out = np.zeros(psi_0.shape,dtype=np.complex128)
    psi_f = evolve(psi_0,0,T,H._hamiltonian__omp_SO,f_params = (out,),
                   solver_name="dop853",atol=1.1e-10,rtol=1.1e-10)
    # Re-normalize to compensate for integrator drift.
    psi_f /= np.linalg.norm(psi_f)
    print("saving")
    np.savez_compressed(filename,psi=psi_f)
    print("done......{} sec".format(time.time()-ti))
def anneal_bath_3(L,Nb,T,gamma=0.2,omega=1.0,path="."):
    """Model 3: ZZ system-bath coupling and a ZZ bath self-energy ramped
    up with A(t), plus transverse fields on both the system spins and the
    central bath spin ramped down with B(t).

    Fixed for Python 3: `print` statements converted to the function
    form (identical output on Python 2), and the "dome" typo corrected.
    """
    ti = time.time()
    filename = os.path.join(path,"spin_bath_exact_L_{}_Nb_{}_T_{}_gamma_{}_omega_{}.npz".format(L,Nb,T,gamma,omega))
    if os.path.isfile(filename):
        print("file_exists...exiting run.")
        exit()
    # A single spin of size Nb/2 models a bath of Nb spin-1/2 particles.
    if Nb%2 == 1:
        S = "{}/2".format(Nb)
    else:
        S = "{}".format(Nb//2)
    print("creating basis")
    spin_basis = spin_basis_1d(L,pauli=True,kblock=0,pblock=1)
    bath_basis = spin_basis_1d(1,S=S)
    basis = tensor_basis(spin_basis,bath_basis)
    print("L={}, H-space size: {}".format(L,basis.Ns))
    bath_energy=[[-omega/Nb**2,0,0]]
    SB_list = [[-gamma/Nb,i,0] for i in range(L)]
    B_h_list = [[-1,0]]
    h_list = [[-1,i] for i in range(L)]
    J_list = [[-1,i,(i+1)%L] for i in range(L)]
    # Quadratic annealing schedules.
    A = lambda t:(t/T)**2
    B = lambda t:(1-t/T)**2
    static = []
    dynamic = [
               ["x|",h_list,B,()],
               ["|+",B_h_list,B,()],
               ["|-",B_h_list,B,()],
               ["zz|",J_list,A,()],
               ["z|z",SB_list,A,()],
               ["|zz",bath_energy,A,()],
              ]
    print("creating hamiltonian")
    kwargs=dict(basis=basis,dtype=np.float64,
                check_symm=False,check_pcon=False,check_herm=False)
    H = hamiltonian(static,dynamic,**kwargs)
    print("solving initial state")
    E0,psi_0 = H.eigsh(k=1,which="SA",time=0)
    psi_0 = psi_0.ravel()
    print("evolving")
    out = np.zeros(psi_0.shape,dtype=np.complex128)
    psi_f = evolve(psi_0,0,T,H._hamiltonian__omp_SO,f_params = (out,),solver_name="dop853",atol=1.1e-10,rtol=1.1e-10)
    print("saving")
    np.savez_compressed(filename,psi=psi_f)
    print("done......{} sec".format(time.time()-ti))
def anneal_bath_4(L,Nb,T,gamma=0.2,omega=1.0,path="."):
    """Model 4: like model 3 (ramped ZZ coupling and bath self-energy,
    ramped transverse fields) but with an additional *static* XY
    system-bath coupling.

    Fixed for Python 3: `print` statements converted to the function
    form (identical output on Python 2), and the "dome" typo corrected.
    """
    ti = time.time()
    filename = os.path.join(path,"spin_bath_exact_L_{}_Nb_{}_T_{}_gamma_{}_omega_{}.npz".format(L,Nb,T,gamma,omega))
    if os.path.isfile(filename):
        print("file_exists...exiting run.")
        exit()
    # A single spin of size Nb/2 models a bath of Nb spin-1/2 particles.
    if Nb%2 == 1:
        S = "{}/2".format(Nb)
    else:
        S = "{}".format(Nb//2)
    print("creating basis")
    spin_basis = spin_basis_1d(L,pauli=True,kblock=0,pblock=1)
    bath_basis = spin_basis_1d(1,S=S)
    basis = tensor_basis(spin_basis,bath_basis)
    print("L={}, H-space size: {}".format(L,basis.Ns))
    bath_energy=[[-omega/Nb,0,0]]
    SB_1_list = [[-gamma/Nb,i,0] for i in range(L)]
    SB_2_list = [[-gamma/np.sqrt(Nb),i,0] for i in range(L)]
    B_h_list = [[-1,0]]
    h_list = [[-1,i] for i in range(L)]
    J_list = [[-1,i,(i+1)%L] for i in range(L)]
    # Quadratic annealing schedules.
    A = lambda t:(t/T)**2
    B = lambda t:(1-t/T)**2
    static = [
              ["+|-",SB_2_list],
              ["-|+",SB_2_list],
             ]
    dynamic = [
               ["x|",h_list,B,()],
               ["|+",B_h_list,B,()],
               ["|-",B_h_list,B,()],
               ["zz|",J_list,A,()],
               ["z|z",SB_1_list,A,()],
               ["|zz",bath_energy,A,()],
              ]
    print("creating hamiltonian")
    kwargs=dict(basis=basis,dtype=np.float64,
                check_symm=False,check_pcon=False,check_herm=False)
    H = hamiltonian(static,dynamic,**kwargs)
    print("solving initial state")
    E0,psi_0 = H.eigsh(k=1,which="SA",time=0)
    psi_0 = psi_0.ravel()
    print("evolving")
    out = np.zeros(psi_0.shape,dtype=np.complex128)
    psi_f = evolve(psi_0,0,T,H._hamiltonian__omp_SO,f_params = (out,),solver_name="dop853",atol=1.1e-10,rtol=1.1e-10)
    print("saving")
    np.savez_compressed(filename,psi=psi_f)
    print("done......{} sec".format(time.time()-ti))
def anneal_bath_5(L,Nb,T,gamma=0.2,omega=1.0,path="."):
    """Model 5: static bath splitting with both XY and ZZ system-bath
    couplings annealed on the transverse-field schedule B(t).

    Fixed for Python 3: `print` statements converted to the function
    form (identical output on Python 2), and the "dome" typo corrected.
    """
    ti = time.time()
    filename = os.path.join(path,"spin_bath_exact_L_{}_Nb_{}_T_{}_gamma_{}_omega_{}.npz".format(L,Nb,T,gamma,omega))
    if os.path.isfile(filename):
        print("file_exists...exiting run.")
        exit()
    # A single spin of size Nb/2 models a bath of Nb spin-1/2 particles.
    if Nb%2 == 1:
        S = "{}/2".format(Nb)
    else:
        S = "{}".format(Nb//2)
    print("creating basis")
    spin_basis = spin_basis_1d(L,pauli=True,kblock=0,pblock=1)
    bath_basis = spin_basis_1d(1,S=S)
    basis = tensor_basis(spin_basis,bath_basis)
    print("L={}, H-space size: {}".format(L,basis.Ns))
    bath_energy=[[omega/Nb,0]]
    SB_xy_list = [[gamma/(4.0*Nb),i,0] for i in range(L)]
    SB_zz_list = [[gamma/(2.0*Nb),i,0] for i in range(L)]
    h_list = [[-1,i] for i in range(L)]
    J_list = [[-1,i,(i+1)%L] for i in range(L)]
    # Quadratic annealing schedules.
    A = lambda t:(t/T)**2
    B = lambda t:(1-t/T)**2
    static = [
        ["|z",bath_energy],
    ]
    dynamic = [["zz|",J_list,A,()],
               ["x|",h_list,B,()],
               ["+|-",SB_xy_list,B,()],
               ["-|+",SB_xy_list,B,()],
               ["z|z",SB_zz_list,B,()],
              ]
    print("creating hamiltonian")
    kwargs=dict(basis=basis,dtype=np.float64,
                check_symm=False,check_pcon=False,check_herm=False)
    H = hamiltonian(static,dynamic,**kwargs)
    print("solving initial state")
    E0,psi_0 = H.eigsh(k=1,which="SA",time=0)
    psi_0 = psi_0.ravel()
    print("evolving")
    out = np.zeros(psi_0.shape,dtype=np.complex128)
    psi_f = evolve(psi_0,0,T,H._hamiltonian__omp_SO,f_params = (out,),solver_name="dop853",atol=1.1e-10,rtol=1.1e-10)
    print("saving")
    np.savez_compressed(filename,psi=psi_f)
    print("done......{} sec".format(time.time()-ti))
# Command line: L Nb T gamma omega model path
if __name__ == "__main__":
    L = int(sys.argv[1])
    Nb = int(sys.argv[2])
    T = float(sys.argv[3])
    gamma = float(sys.argv[4])
    omega = float(sys.argv[5])
    model = int(sys.argv[6])
    path = sys.argv[7]
    # Dispatch table instead of an if/elif ladder; an unknown model id now
    # fails loudly instead of silently doing nothing.
    runners = {1: anneal_bath_1, 2: anneal_bath_2, 3: anneal_bath_3,
               4: anneal_bath_4, 5: anneal_bath_5}
    try:
        run = runners[model]
    except KeyError:
        raise SystemExit("unknown model id: {}".format(model))
    run(L, Nb, T, gamma, omega, path)
| 27.708197
| 114
| 0.645604
|
444a4ecd85289cf57059c6b530d09c65be7b146e
| 169
|
py
|
Python
|
PalndromeCheck/IsPalindrome.py
|
mmmallick/LeetCode_Python
|
bf81f2ab5034cd611c5017681ad68533dcc60761
|
[
"MIT"
] | null | null | null |
PalndromeCheck/IsPalindrome.py
|
mmmallick/LeetCode_Python
|
bf81f2ab5034cd611c5017681ad68533dcc60761
|
[
"MIT"
] | null | null | null |
PalndromeCheck/IsPalindrome.py
|
mmmallick/LeetCode_Python
|
bf81f2ab5034cd611c5017681ad68533dcc60761
|
[
"MIT"
] | null | null | null |
class Solution(object):
    """LeetCode 125: Valid Palindrome."""

    def isPalindrome(self, s):
        """Return True if ``s`` reads the same forwards and backwards when
        only alphanumeric characters are considered and case is ignored.

        An empty string is a palindrome.  Fixed for Python 3: the original
        used the Python-2-only ``unicode`` builtin; a leftover debug
        ``print`` was also removed.
        """
        cleaned = ''.join(c for c in s if c.isalnum()).upper()
        return cleaned == cleaned[::-1]
| 24.142857
| 47
| 0.514793
|
396b74032cd6b9f34b32c35d6af2e54c56057df9
| 3,368
|
py
|
Python
|
filething/server.py
|
lnmds/filething
|
70f438371c8346e3b47f2507bfcfaa2ee22c9b5f
|
[
"MIT"
] | null | null | null |
filething/server.py
|
lnmds/filething
|
70f438371c8346e3b47f2507bfcfaa2ee22c9b5f
|
[
"MIT"
] | null | null | null |
filething/server.py
|
lnmds/filething
|
70f438371c8346e3b47f2507bfcfaa2ee22c9b5f
|
[
"MIT"
] | null | null | null |
import logging
import random
import string
from aiohttp import web
log = logging.getLogger(__name__)
class Server:
    """Main Filething server class.

    Holds all the state the HTTP server needs to keep operating.

    Attributes
    ----------
    ready: bool
        If the server is in a ready state for startup.
    """
    def __init__(self, loop, app, config):
        # loop: asyncio event loop; app: aiohttp web.Application;
        # config: parsed config mapping — only the 'filething' section is kept.
        self.loop = loop
        self.app = app
        self.config = config['filething']
        self.ready = False
    async def initialize(self):
        """Gather necessary server state."""
        log.info('Initializing')
        self.ready = True
        log.info('Ready!')
    async def request_file(self, request):
        """`GET /i/{filename}`.

        Request a file from the server.
        This handler is `insecure`, meaning it doesn't do any checking
        on the input provided by the user.
        """
        # NOTE(review): {filename:.+} may contain '..' segments, so this can
        # serve files outside ./filething-images — sanitize before production.
        imagepath = request.match_info['filename']
        try:
            # Fixed: the path used a corrupted placeholder instead of the
            # matched filename.
            return web.FileResponse(f'./filething-images/{imagepath}')
        except FileNotFoundError:
            # NOTE(review): FileResponse may only stat the file when the
            # response is prepared, in which case this except never fires —
            # verify against the aiohttp version in use.
            return web.Response(status=404, text='File Not Found')
    def generate_fileid(self):
        """Return a random 7-letter id used as the stored file's base name."""
        return ''.join(random.choice(string.ascii_letters) for _ in range(7))
    async def upload(self, request):
        """`POST /upload`.

        Upload a file using Multipart.  The file is stored under a random
        id (client extension preserved) and a JSON summary is returned.
        """
        reader = await request.multipart()
        sentfile = await reader.next()
        # Keep the client's extension, replace the base name with a random id.
        extension = sentfile.filename.split('.')[-1]
        filename = f'{self.generate_fileid()}.{extension}'
        # Stream the multipart payload to disk chunk by chunk.
        # Fixed: the destination path used a corrupted placeholder instead
        # of the generated filename.
        size = 0
        with open(f'./filething-images/{filename}', 'wb') as f:
            while True:
                chunk = await sentfile.read_chunk()
                if not chunk:
                    break
                size += len(chunk)
                f.write(chunk)
        log.info('[upload] Got file %r, with %d bytes, %.2f MB', filename, size, size/1024/1024)
        return web.json_response({
            # size in several units, for client convenience
            'bytes': size,
            'kb': size / 1024,
            'mb': size / 1024 / 1024,
            # actual file data
            'id': filename,
            # Fixed: the public URL used a corrupted placeholder instead of
            # the generated filename.
            'url': f'{self.config["url"]}/i/{filename}',
        })
    def run(self):
        """Start the HTTP server.

        Raises
        ------
        RuntimeError
            If the server is not ready for startup.
        """
        r = self.app.router
        r.add_static('/s', './static')
        r.add_post('/upload', self.upload)
        r.add_get('/i/{filename:.+}', self.request_file)
        handler = self.app.make_handler()
        f = self.loop.create_server(handler, \
                self.config['host'], self.config['port'])
        srv = None
        try:
            log.info('Running loop')
            self.loop.run_until_complete(self.initialize())
            srv = self.loop.run_until_complete(f)
            self.loop.run_forever()
        except BaseException:
            # BaseException (not bare except) so KeyboardInterrupt still
            # lands here and triggers a clean shutdown.
            log.info('Shutting down.')
            # Fixed: guard against create_server having failed before `srv`
            # was assigned (the original called .close() on None).
            if srv is not None:
                srv.close()
                self.loop.run_until_complete(srv.wait_closed())
            self.loop.run_until_complete(self.app.shutdown())
            self.loop.run_until_complete(handler.shutdown(60.0))
            self.loop.run_until_complete(self.app.cleanup())
            self.loop.close()
| 27.606557
| 96
| 0.549881
|
e78a7e11a0f43f104f83be672dee222ad848b94a
| 3,774
|
py
|
Python
|
tests_recording/test_api.py
|
sturivny/packit
|
ba06fb080eb8ab309ad92489f1f8985bdb61668e
|
[
"MIT"
] | null | null | null |
tests_recording/test_api.py
|
sturivny/packit
|
ba06fb080eb8ab309ad92489f1f8985bdb61668e
|
[
"MIT"
] | 1
|
2020-08-17T07:20:13.000Z
|
2020-08-17T07:20:13.000Z
|
tests_recording/test_api.py
|
sturivny/packit
|
ba06fb080eb8ab309ad92489f1f8985bdb61668e
|
[
"MIT"
] | null | null | null |
import os
import unittest
from subprocess import check_output
import rebasehelper
from rebasehelper.exceptions import RebaseHelperError
from flexmock import flexmock
from packit.api import PackitAPI
from requre.cassette import DataTypes
from tests_recording.testbase import PackitUnittestOgr
@unittest.skip("Not working yet")
class ProposeUpdate(PackitUnittestOgr):
    """requre-recorded tests for ``PackitAPI.sync_release`` (propose-update)."""
    def setUp(self):
        # Cassette data is keyed on the installed rebase-helper version,
        # since >=0.19 changed behaviour (see the version-gated tests below).
        if (
            hasattr(rebasehelper, "VERSION")
            and int(rebasehelper.VERSION.split(".")[1]) >= 19
        ):
            self.cassette.data_miner.key = "rebase-helper>=0.19"
        else:
            self.cassette.data_miner.key = "rebase-helper<0.19"
        self.cassette.data_miner.data_type = DataTypes.Dict
        super().setUp()
        self.api = PackitAPI(
            config=self.conf, package_config=self.pc, upstream_local_project=self.lp
        )
        # Inject the pre-built upstream/dist-git projects from the test base.
        self.api._up = self.upstream
        self.api._dg = self.dg
        self.set_git_user()
    def check_version_increase(self):
        """Bump the spec's minor version, commit+tag it, then run sync_release."""
        # change specfile little bit to have there some change
        specfile_location = os.path.join(self.lp.working_dir, "python-ogr.spec")
        with open(specfile_location, "r") as myfile:
            filedata = myfile.read()
        # Patch the specfile with new version
        version_increase = "0.0.0"
        for line in filedata.splitlines():
            if "Version:" in line:
                version = line.rsplit(" ", 1)[1]
                v1, v2, v3 = version.split(".")
                # Bump only the minor component.
                version_increase = ".".join([v1, str(int(v2) + 1), v3])
                filedata = filedata.replace(version, version_increase)
                break
        with open(specfile_location, "w") as myfile:
            myfile.write(filedata)
        # Commit and tag so sync_release sees a new upstream version.
        check_output(
            f"cd {self.lp.working_dir};"
            f"git commit -m 'test change' python-ogr.spec;"
            f"git tag -a {version_increase} -m 'my version {version_increase}'",
            shell=True,
        )
        self.api.sync_release("master")
    def test_comment_in_spec(self):
        """
        Change the specfile a little (append a comment) without increasing
        the version, then run sync_release.
        """
        specfile_location = os.path.join(self.lp.working_dir, "python-ogr.spec")
        version_increase = "10.0.0"
        with open(specfile_location, "a") as myfile:
            myfile.write("\n# comment\n")
        check_output(
            f"cd {self.lp.working_dir};"
            f"git commit -m 'test change' python-ogr.spec;"
            f"git tag -a {version_increase} -m 'my version {version_increase}'",
            shell=True,
        )
        self.api.sync_release("master")
    @unittest.skipIf(
        hasattr(rebasehelper, "VERSION")
        and int(rebasehelper.VERSION.split(".")[1]) >= 19,
        "Older version of rebasehelper raised exception",
    )
    def test_version_change_exception(self):
        """
        Check that an exception is raised because sources are not uploaded
        in dist-git.  Requires rebase-helper < 0.19.0.
        """
        self.assertRaises(RebaseHelperError, self.check_version_increase)
    @unittest.skipUnless(
        hasattr(rebasehelper, "VERSION")
        and int(rebasehelper.VERSION.split(".")[1]) >= 19,
        "New version of rebasehelper works without raised exception",
    )
    def test_version_change_new_rebaseheler(self):
        """
        Check that no exception is raised even though sources are not
        uploaded in dist-git (rebase-helper >= 0.19.0 behaviour).
        """
        self.check_version_increase()
    def test_version_change_mocked(self):
        """
        The new version's sources are not uploaded, so stub out the upload.
        """
        # _handle_sources would try to upload to the lookaside cache; mock it.
        flexmock(self.api).should_receive("_handle_sources").and_return(None)
        self.check_version_increase()
| 36.640777
| 85
| 0.622682
|
8f719e52178c41985d3aa06e3ba048073dd243a6
| 38
|
py
|
Python
|
snmp/__init__.py
|
stanislavb/snmp-lldp
|
66d1f418a05de3b3d74685a069b5a84035178b38
|
[
"Unlicense"
] | 25
|
2015-02-10T18:38:33.000Z
|
2021-07-31T11:22:37.000Z
|
snmp/__init__.py
|
tsingliu1007/snmp-lldp
|
66d1f418a05de3b3d74685a069b5a84035178b38
|
[
"Unlicense"
] | 1
|
2015-07-03T18:01:29.000Z
|
2015-07-03T18:01:29.000Z
|
snmp/__init__.py
|
tsingliu1007/snmp-lldp
|
66d1f418a05de3b3d74685a069b5a84035178b38
|
[
"Unlicense"
] | 7
|
2017-05-25T07:02:28.000Z
|
2020-10-05T07:36:55.000Z
|
# Package init: re-export everything from the sibling ``snmp`` module.
__all__ = ['snmp']
# NOTE(review): this is a Python-2-style implicit relative import; on
# Python 3 it would need to be ``from .snmp import *`` — confirm the
# supported interpreter before changing.
from snmp import *
| 12.666667
| 18
| 0.657895
|
c559da5c1a1bf0c44d7d790beb72caa26f25f934
| 3,408
|
py
|
Python
|
server/djangobackend/settings.py
|
roiPG08/agfzb-CloudAppDevelopment_Capstone
|
94f5e8b59f843153ddd3731546e0e820094096bf
|
[
"Apache-2.0"
] | null | null | null |
server/djangobackend/settings.py
|
roiPG08/agfzb-CloudAppDevelopment_Capstone
|
94f5e8b59f843153ddd3731546e0e820094096bf
|
[
"Apache-2.0"
] | null | null | null |
server/djangobackend/settings.py
|
roiPG08/agfzb-CloudAppDevelopment_Capstone
|
94f5e8b59f843153ddd3731546e0e820094096bf
|
[
"Apache-2.0"
] | null | null | null |
"""
Django settings for djangobackend project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = 'ao5z(o(z@cvzodm99d32jkxa5e8a1!q_4sqss5-a%n6tg$#h$+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
APPEND_SLASH = True
# Cloud Foundry (mybluemix) host plus local development.
ALLOWED_HOSTS = ["przemeksreal.eu-gb.mybluemix.net",
   "localhost"
]
# Application definition
INSTALLED_APPS = [
    'djangoapp.apps.DjangoappConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangobackend.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.media',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'djangobackend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Default development database: file-based SQLite in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# Uploaded media lives under the collected static root.
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
MEDIA_URL = '/media/'
| 26.215385
| 91
| 0.694249
|
00f88c0d7a62f04e2872123537ddddbf9b0dc249
| 5,396
|
py
|
Python
|
hc_128.py
|
gbanegas/cryp
|
0a46d8dcce1068a018f4b1e760040d9060a82be5
|
[
"Apache-2.0"
] | null | null | null |
hc_128.py
|
gbanegas/cryp
|
0a46d8dcce1068a018f4b1e760040d9060a82be5
|
[
"Apache-2.0"
] | null | null | null |
hc_128.py
|
gbanegas/cryp
|
0a46d8dcce1068a018f4b1e760040d9060a82be5
|
[
"Apache-2.0"
] | null | null | null |
class hc128:
    """HC-128-style stream cipher (pure-Python implementation).

    NOTE(review): several details differ from the published HC-128
    specification (see inline notes); verify against official test
    vectors before trusting the output.
    """
    # Key material length in bytes consumed by set_key (key + IV words).
    KEY_SIZE = 32
    def __init__(self):
        # P/Q: the two 512-word state tables; W: expansion scratch table.
        self.P = []
        self.Q = []
        self.W = []
        # count: total keystream steps generated so far.
        self.count = 0
        # current_stream: last 32-bit keystream word;
        # current_in_stream: unconsumed bytes remaining in it (0..4).
        self.current_stream = 0
        self.current_in_stream = 0
    def set_key(self, key):
        """
        Set the key for this stream cipher. ``key`` must be a sequence of
        byte values of length ``KEY_SIZE`` (key bytes followed by IV
        bytes). The keystream generator is initialized, such that
        successive calls to ``crypt()`` will encrypt or decrypt a series
        of bytes.
        """
        """Initiailization Step 1: Fill W table with key, IV and generated values"""
        # Pack groups of 4 bytes into little-endian 32-bit words.
        # NOTE(review): the insert() calls below interleave the words rather
        # than laying out key||key, iv||iv contiguously as the HC-128 spec
        # describes (insert past the current length behaves like append) —
        # confirm against reference test vectors.
        for i in range(0, 4):
            temp = key[4 * i]
            for shift in range(1, 4):
                temp ^= key[4 * i + shift] << (8 * shift)
                # temp = temp << 8 ^ key[4 * i + shift]
            self.W.insert(i, temp)
            self.W.insert(i + 4, temp)
        for i in range(4, 8):
            temp = key[4 * i]
            for shift in range(1, 4):
                temp ^= key[4 * i + shift] << (8 * shift)
                # temp = temp << 8 ^ key[4 * i + shift]
            self.W.insert(i + 4, temp)
            self.W.insert(i + 8, temp)
        # Expand W to 1280 words with the f1/f2 mixing functions (32-bit
        # rotations built from shift pairs; results reduced mod 2**32).
        for i in range(16, 1280):
            f2 = ((self.W[i - 2] >> 17) ^ (self.W[i - 2] << (32 - 17))) ^ (
                (self.W[i - 2] >> 19) ^ (self.W[i - 2] << (32 - 19))) ^ (self.W[i - 2] >> 10)
            f1 = ((self.W[i - 15] >> 7) ^ (self.W[i - 15] << (32 - 7))) ^ (
                (self.W[i - 15] >> 18) ^ (self.W[i - 15] << (32 - 18))) ^ (self.W[i - 15] >> 3)
            self.W.insert(i, (f2 + f1 + self.W[i - 7] + self.W[i - 16] + i) % (2 ** 32))
        """Initiailization Step 2: Starting at element 256, copy 512 elements to P and other 512 to Q"""
        for i in range(0, 512):
            self.P.insert(i, self.W[i + 256])
        for i in range(0, 512):
            self.Q.insert(i, self.W[i + 768])
        """Initiailization Step 3: Run cipher 1024 steps"""
        # NOTE(review): negative indices such as P[i-3] wrap to the end of
        # the table via Python's negative indexing, which coincides with the
        # intended mod-512 wrap-around — confirm this is deliberate.
        for i in range(0, 512):
            g1 = ((((self.P[(i - 3)] >> 10) ^ (self.P[(i - 3)] << (32 - 10))) ^ (
                (self.P[(i - 511)] >> 23) ^ (self.P[(i - 511)] << (32 - 23)))) + (
                (self.P[(i - 10)] >> 8) ^ (self.P[(i - 10)] << (32 - 8)))) % 2 ** 32
            pTemp = [self.P[(i - 12)] >> shift & 0xff for shift in (24, 16, 8, 0)]
            h1 = (self.Q[pTemp[3]] + self.Q[(pTemp[1] + 256)]) % 2 ** 32
            self.P[i] = ((self.P[i] + g1) % 2 ** 32) ^ h1
        for i in range(0, 512):
            g2 = ((((self.Q[(i - 3)] << 10) ^ (self.Q[(i - 3)] >> (32 - 10))) ^ (
                (self.Q[(i - 511)] << 23) ^ (self.Q[(i - 511)] >> (32 - 23)))) + (
                (self.Q[(i - 10)] << 8) ^ (self.Q[(i - 10)] >> (32 - 8)))) % 2 ** 32
            qTemp = [self.Q[(i - 12)] >> shift & 0xff for shift in (24, 16, 8, 0)]
            h2 = (self.P[qTemp[3]] + self.P[(qTemp[1] + 256)]) % 2 ** 32
            self.Q[i] = ((self.Q[i] + g2) % 2 ** 32) ^ h2
    def init_keystream(self):
        # Generate the next 32-bit keystream word; alternates between
        # updating P (first 512 steps of each 1024) and Q (last 512).
        i = self.count % 512
        if self.count % 1024 < 512:
            g1 = ((((self.P[(i - 3)] >> 10) ^ (self.P[(i - 3)] << (32 - 10))) ^ ((self.P[(i - 511)] >> 23) ^ (self.P[(i - 511)] << (32 - 23)))) + ((self.P[(i - 10)] >> 8) ^ (self.P[(i - 10)] << (32 - 8)))) % (2 ** 32)
            self.P[i] = ((self.P[i] + g1) % 2 ** 32)
            p_temp = [self.P[(i - 12)] >> shift & 0xff for shift in (24, 16, 8, 0)]
            h1 = (self.Q[p_temp[3]] + self.Q[(p_temp[1] + 256)]) % (2 ** 32)
            self.current_stream = h1 ^ self.P[i]
            # Four fresh bytes are now available for crypt().
            self.current_in_stream = 4
        else:
            g2 = ((((self.Q[(i - 3)] << 10) ^ (self.Q[(i - 3)] >> (32 - 10))) ^ (
                (self.Q[(i - 511)] << 23) ^ (self.Q[(i - 511)] >> (32 - 23)))) + (
                (self.Q[(i - 10)] << 8) ^ (self.Q[(i - 10)] >> (32 - 8)))) % (2 ** 32)
            self.Q[i] = ((self.Q[i] + g2) % 2 ** 32)
            qTemp = [self.Q[(i - 12)] >> shift & 0xff for shift in (24, 16, 8, 0)]
            h2 = (self.P[qTemp[3]] + self.P[(qTemp[1] + 256)]) % (2 ** 32)
            self.current_stream = h2 ^ self.Q[i]
            self.current_in_stream = 4
        self.count += 1
    def crypt(self, b):
        """
        Encrypt or decrypt the given byte. Only the least significant 8
        bits of ``b`` are used. If ``b`` is a plaintext byte, the
        ciphertext byte is returned as a value from 0 to 255, and vice
        versa (the cipher is its own inverse).  ``set_key()`` must have
        been called first.
        """
        """If no keystream currently generated, generate 32-bits"""
        if self.current_in_stream == 0:
            self.init_keystream()
        """Once there is keystream, encrypt the byte and remove used keystream from queue"""
        # Consume keystream bytes low-to-high within the 32-bit word.
        temp = (self.current_stream >> (8 * (4 - self.current_in_stream))) & 0x000000ff
        self.current_in_stream -= 1
        return temp ^ b
# Smoke-test demo.  Fixed: the original used a Python-2 `print` statement
# and called crypt() on an *unkeyed* instance with a bytes object, which
# raises on both Python 2 and 3 (empty P/Q tables, and crypt expects an
# integer byte).
if __name__ == "__main__":
    demo = hc128()
    demo.set_key(list(range(hc128.KEY_SIZE)))
    print([demo.crypt(byte) for byte in b"abc"])
| 46.517241
| 217
| 0.453855
|
7f1c4712cacbad0366f8059566d89e214b12ba25
| 4,839
|
py
|
Python
|
src/markdown.py
|
xylophone21/gist-sync
|
0d14a4e8e560401f308a60031d02bbd5d20f78a8
|
[
"Apache-2.0"
] | null | null | null |
src/markdown.py
|
xylophone21/gist-sync
|
0d14a4e8e560401f308a60031d02bbd5d20f78a8
|
[
"Apache-2.0"
] | null | null | null |
src/markdown.py
|
xylophone21/gist-sync
|
0d14a4e8e560401f308a60031d02bbd5d20f78a8
|
[
"Apache-2.0"
] | null | null | null |
import re
import os
import shutil
import urllib.parse
from zlib import compress
import base64
import string
# PlantUML servers use a base64 variant with a reordered, URL-safe alphabet
# (digits, uppercase, lowercase, '-', '_').  b64_to_plantuml is a bytes
# translation table that maps standard base64 output onto that alphabet; it is
# applied after deflate+b64encode when building PlantUML diagram URLs.
plantuml_alphabet = string.digits + string.ascii_uppercase + string.ascii_lowercase + '-_'
base64_alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits + '+/'
b64_to_plantuml = bytes.maketrans(base64_alphabet.encode('utf-8'), plantuml_alphabet.encode('utf-8'))
class MarkdownResult:
    """Outcome of a markdown sync: the document title plus every file produced."""

    def __init__(self):
        # Title taken from the first level-1 heading; empty until parsing fills it.
        self.title = ""
        # Names of all files that belong to the synced gist (markdown + images).
        self.files = []
class MarkdownParser:
    """Parses a local markdown file and syncs it (plus its images) into a gist
    working directory, rewriting image links to gist raw URLs and rendering
    PlantUML code fences via the PlantUML server encoding."""

    def __init__(self,parent,filename,token,user):
        # parent: directory containing the source markdown file.
        # filename: the markdown file name inside `parent`.
        # token/user: GitHub credentials/identity used by the sync.
        self.user = user
        self.token = token
        self.parent = parent
        self.filename = filename
        # Filled in by parse() from the `[gist-sync-url]:` marker line.
        self.url = None
        self.gistId = None
    # NOTE(review): defined without `self` or @staticmethod; it only works
    # because it is always invoked as MarkdownParser.deflate_and_encode(...).
    def deflate_and_encode(plantuml_text):
        """zlib compress the plantuml text and encode it for the plantuml server.
        """
        # Strip the 2-byte zlib header and 4-byte checksum to get a raw
        # DEFLATE stream, as the PlantUML server expects.
        zlibbed_str = compress(plantuml_text.encode('utf-8'))
        compressed_string = zlibbed_str[2:-4]
        return base64.b64encode(compressed_string).translate(b64_to_plantuml).decode('utf-8')
    def parse(self):
        """Scan the markdown file for a `[gist-sync-url]:` line and extract the
        gist id from its URL path.

        Returns:
            bool: True when both a gist id and a user are available.
        """
        reUrl = re.compile(r'\[gist-sync-url\]:(.*)',re.I)
        file_path = os.path.join(self.parent, self.filename)
        with open(file_path, 'r') as f:
            for line in f:
                urlMatch = reUrl.match(line)
                if urlMatch:
                    self.url = urlMatch.group(1)
                    urlret = urllib.parse.urlparse(self.url)
                    path = urlret.path
                    # Drop a trailing slash so the last path segment is the id.
                    if path[-1] == '/':
                        path = path[:-1]
                    self.gistId = path.split('/')[-1]
                    # self.user = path.split('/')[-2]
                    break
        return self.gistId != None and self.user != None
    def syncTo(self, path):
        """Write a transformed copy of the markdown into `path` as index.md,
        copy referenced local images alongside it, and delete stale files.

        Returns:
            MarkdownResult with the title and produced file names, or None
            when parse() has not yielded a gist id/user yet.
        """
        if not self.gistId or not self.user:
            return None
        reTitle = re.compile(r'\s?#\s+(.*)')
        reImg = re.compile(r'.*!\[.*\]\((.*)\)')
        reCode = re.compile(r'(\s*)```(\w*)')
        retObj = MarkdownResult()
        retObj.files.append("index.md")
        mdPath = os.path.join(path, "index.md")
        inCode = False
        preSpace = ""
        codeTxt = ""
        with open(mdPath, 'w') as mdf:
            file_path = os.path.join(self.parent, self.filename)
            with open(file_path, 'r') as f:
                for line in f:
                    codeMatch = reCode.match(line)
                    if codeMatch:
                        info = codeMatch.group(2)
                        # Opening fence of a PlantUML block: start buffering.
                        if not inCode and (info == "puml" or info == "plantuml"):
                            inCode = True
                            codeTxt = ""
                            preSpace = codeMatch.group(1)
                            line = ""
                        elif inCode:
                            # Closing fence: encode the buffered diagram text.
                            # NOTE(review): pumlCode is computed but never
                            # interpolated into the replacement line — the
                            # PlantUML image link appears to be missing here;
                            # confirm against the upstream source.
                            inCode = False
                            pumlCode = MarkdownParser.deflate_and_encode(codeTxt)
                            codeTxt = ""
                            line = f'\n{preSpace}\n'
                    if inCode:
                        codeTxt += line
                        continue
                    titleMatch = reTitle.match(line)
                    if titleMatch:
                        retObj.title = titleMatch.group(1)
                    imgMatch = reImg.match(line)
                    if imgMatch:
                        imgStr = imgMatch.group(1)
                        # Drop any title text after the image path.
                        imgPath = imgStr.split()[0]
                        newFilename = self._convertImgFileName(imgPath)
                        print("find img:", imgStr)
                        if newFilename is not None:
                            oldFile = os.path.join(self.parent, imgPath)
                            newFile = os.path.join(path, newFilename)
                            shutil.copyfile(oldFile, newFile)
                            retObj.files.append(newFilename)
                            # The path ref https://gist.github.com/cben/46d9536baacb7c5d196c/
                            newPath = os.path.join(self.gistId, "raw" , newFilename)
                            line = line.replace(imgPath, newPath)
                    mdf.write(line)
        # Remove anything in the target directory (outside .git) that this
        # sync did not produce.
        for parent,dirnames,filenames in os.walk(path):
            if ".git" in parent:
                continue
            for filename in filenames:
                if filename not in retObj.files:
                    # print("remove file:" + filename)
                    os.remove(os.path.join(parent, filename))
        return retObj
    def _convertImgFileName(self, path):
        """Flatten a relative image path into a single gist-safe file name;
        returns None for remote (http/https) images, which are left as-is."""
        if path.startswith("http"):
            return None
        newFilename = "z"+path.replace("/", "_").replace("..", "_")
        return newFilename
| 37.511628
| 113
| 0.49597
|
f1504436af41e18a6e4df3637aa1eee2ed2129ae
| 8,574
|
py
|
Python
|
qiskit/extensions/unitary.py
|
Elliot-Coupe/qiskit-terra
|
8a604e156ba4c2fa099b1c24cd941f59b9408398
|
[
"Apache-2.0"
] | 1
|
2021-08-04T21:53:02.000Z
|
2021-08-04T21:53:02.000Z
|
qiskit/extensions/unitary.py
|
Elliot-Coupe/qiskit-terra
|
8a604e156ba4c2fa099b1c24cd941f59b9408398
|
[
"Apache-2.0"
] | null | null | null |
qiskit/extensions/unitary.py
|
Elliot-Coupe/qiskit-terra
|
8a604e156ba4c2fa099b1c24cd941f59b9408398
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Arbitrary unitary circuit instruction.
"""
from collections import OrderedDict
import numpy
from qiskit.circuit import Gate, ControlledGate
from qiskit.circuit import QuantumCircuit
from qiskit.circuit import QuantumRegister, Qubit
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit._utils import _compute_control_matrix
from qiskit.circuit.library.standard_gates import U3Gate
from qiskit.extensions.quantum_initializer import isometry
from qiskit.quantum_info.operators.predicates import matrix_equal
from qiskit.quantum_info.operators.predicates import is_unitary_matrix
from qiskit.quantum_info.synthesis.one_qubit_decompose import OneQubitEulerDecomposer
from qiskit.quantum_info.synthesis.two_qubit_decompose import two_qubit_cnot_decompose
from qiskit.extensions.exceptions import ExtensionError
_DECOMPOSER1Q = OneQubitEulerDecomposer("U3")
class UnitaryGate(Gate):
    """Class for representing unitary gates"""
    def __init__(self, data, label=None):
        """Create a gate from a numeric unitary matrix.
        Args:
            data (matrix or Operator): unitary operator.
            label (str): unitary name for backend [Default: None].
        Raises:
            ExtensionError: if input data is not an N-qubit unitary operator.
        """
        if hasattr(data, "to_matrix"):
            # If input is Gate subclass or some other class object that has
            # a to_matrix method this will call that method.
            data = data.to_matrix()
        elif hasattr(data, "to_operator"):
            # If input is a BaseOperator subclass this attempts to convert
            # the object to an Operator so that we can extract the underlying
            # numpy matrix from `Operator.data`.
            data = data.to_operator().data
        # Convert to numpy array in case not already an array
        data = numpy.array(data, dtype=complex)
        # Check input is unitary
        if not is_unitary_matrix(data):
            raise ExtensionError("Input matrix is not unitary.")
        # Check input is N-qubit matrix
        input_dim, output_dim = data.shape
        num_qubits = int(numpy.log2(input_dim))
        if input_dim != output_dim or 2 ** num_qubits != input_dim:
            raise ExtensionError("Input matrix is not an N-qubit operator.")
        # Populated lazily by qasm() with a generated name and gate body.
        self._qasm_name = None
        self._qasm_definition = None
        # Store instruction params
        super().__init__("unitary", num_qubits, [data], label=label)
    def __eq__(self, other):
        """Two UnitaryGates are equal when labels match and matrices agree
        up to a global phase."""
        if not isinstance(other, UnitaryGate):
            return False
        if self.label != other.label:
            return False
        # Should we match unitaries as equal if they are equal
        # up to global phase?
        return matrix_equal(self.params[0], other.params[0], ignore_phase=True)
    def __array__(self, dtype=None):
        """Return matrix for the unitary."""
        # pylint: disable=unused-argument
        return self.params[0]
    def inverse(self):
        """Return the adjoint of the unitary."""
        return self.adjoint()
    def conjugate(self):
        """Return the conjugate of the unitary."""
        return UnitaryGate(numpy.conj(self.to_matrix()))
    def adjoint(self):
        """Return the adjoint of the unitary."""
        return self.transpose().conjugate()
    def transpose(self):
        """Return the transpose of the unitary."""
        return UnitaryGate(numpy.transpose(self.to_matrix()))
    def _define(self):
        """Calculate a subcircuit that implements this unitary."""
        # 1 and 2 qubit cases use analytic decompositions; larger unitaries
        # fall back to the generic isometry synthesis.
        if self.num_qubits == 1:
            q = QuantumRegister(1, "q")
            qc = QuantumCircuit(q, name=self.name)
            theta, phi, lam, global_phase = _DECOMPOSER1Q.angles_and_phase(self.to_matrix())
            qc._append(U3Gate(theta, phi, lam), [q[0]], [])
            qc.global_phase = global_phase
            self.definition = qc
        elif self.num_qubits == 2:
            self.definition = two_qubit_cnot_decompose(self.to_matrix())
        else:
            q = QuantumRegister(self.num_qubits, "q")
            qc = QuantumCircuit(q, name=self.name)
            qc.append(isometry.Isometry(self.to_matrix(), 0, 0), qargs=q[:])
            self.definition = qc
    def control(self, num_ctrl_qubits=1, label=None, ctrl_state=None):
        """Return controlled version of gate
        Args:
            num_ctrl_qubits (int): number of controls to add to gate (default=1)
            label (str): optional gate label
            ctrl_state (int or str or None): The control state in decimal or as a
                bit string (e.g. '1011'). If None, use 2**num_ctrl_qubits-1.
        Returns:
            UnitaryGate: controlled version of gate.
        Raises:
            QiskitError: Invalid ctrl_state.
            ExtensionError: Non-unitary controlled unitary.
        """
        mat = self.to_matrix()
        # NOTE: ctrl_state=None here is deliberate — the control matrix is
        # built for the all-ones control state, and the requested ctrl_state
        # is handed to ControlledGate below, which applies any open controls.
        cmat = _compute_control_matrix(mat, num_ctrl_qubits, ctrl_state=None)
        iso = isometry.Isometry(cmat, 0, 0)
        cunitary = ControlledGate(
            "c-unitary",
            num_qubits=self.num_qubits + num_ctrl_qubits,
            params=[mat],
            label=label,
            num_ctrl_qubits=num_ctrl_qubits,
            definition=iso.definition,
            ctrl_state=ctrl_state,
            base_gate=self.copy(),
        )
        from qiskit.quantum_info import Operator
        # hack to correct global phase; should fix to prevent need for correction here
        pmat = Operator(iso.inverse()).data @ cmat
        diag = numpy.diag(pmat)
        if not numpy.allclose(diag, diag[0]):
            raise ExtensionError("controlled unitary generation failed")
        phase = numpy.angle(diag[0])
        if phase:
            # need to apply to _definition since open controls creates temporary definition
            cunitary._definition.global_phase = phase
        return cunitary
    def qasm(self):
        """The qasm for a custom unitary gate
        This is achieved by adding a custom gate that corresponds to the definition
        of this gate. It gives the gate a random name if one hasn't been given to it.
        """
        # give this unitary a name
        self._qasm_name = self.label if self.label else "unitary" + str(id(self))
        # map from gates in the definition to params in the method
        reg_to_qasm = OrderedDict()
        current_reg = 0
        gates_def = ""
        # NOTE(review): each `gate` entry is indexed as gate[0]/gate[1]/gate[2];
        # presumably (instruction, qargs, cargs) — verify against the
        # QuantumCircuit.data format for this qiskit version.
        for gate in self.definition.data:
            # add regs from this gate to the overall set of params
            for reg in gate[1] + gate[2]:
                if reg not in reg_to_qasm:
                    reg_to_qasm[reg] = "p" + str(current_reg)
                    current_reg += 1
            curr_gate = "\t{} {};\n".format(
                gate[0].qasm(),
                ",".join([reg_to_qasm[j] for j in gate[1] + gate[2]]),
            )
            gates_def += curr_gate
        # name of gate + params + {definition}
        overall = (
            "gate "
            + self._qasm_name
            + " "
            + ",".join(reg_to_qasm.values())
            + " {\n"
            + gates_def
            + "}"
        )
        self._qasm_definition = overall
        return self._qasmif(self._qasm_name)
    def validate_parameter(self, parameter):
        """Unitary gate parameter has to be an ndarray."""
        if isinstance(parameter, numpy.ndarray):
            return parameter
        else:
            raise CircuitError(
                "invalid param type {} in gate " "{}".format(type(parameter), self.name)
            )
def unitary(self, obj, qubits, label=None):
    """Apply a unitary gate built from ``obj`` to the given qubits.

    Args:
        obj (matrix or Operator): the unitary to apply.
        qubits (QuantumRegister | list | int | Qubit): target qubit(s).
        label (str): optional gate label.

    Returns:
        The result of appending the gate to this circuit.
    """
    unitary_gate = UnitaryGate(obj, label=label)
    # Expand a whole register into its list of qubits.
    if isinstance(qubits, QuantumRegister):
        qubits = qubits[:]
    # For a single-qubit unitary, a bare int/Qubit (or an over-long sequence)
    # is wrapped in a list so append() can broadcast it.
    if unitary_gate.num_qubits == 1 and (
        isinstance(qubits, (int, Qubit)) or len(qubits) > 1
    ):
        qubits = [qubits]
    return self.append(unitary_gate, qubits, [])
QuantumCircuit.unitary = unitary
| 37.441048
| 92
| 0.629111
|
7f9ec2a349c4dfc509a49044b2117867f9d099c0
| 841
|
py
|
Python
|
opennem/db/migrations/versions/109f0ddd92ad_indexes_on_facility_scada.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | null | null | null |
opennem/db/migrations/versions/109f0ddd92ad_indexes_on_facility_scada.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | null | null | null |
opennem/db/migrations/versions/109f0ddd92ad_indexes_on_facility_scada.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | null | null | null |
# pylint: disable=no-member
"""
Indexes on facility_scada
Revision ID: 109f0ddd92ad
Revises: df1a9890f5c8
Create Date: 2021-11-24 09:35:08.922469
"""
from alembic import op
# revision identifiers, used by Alembic to order migrations:
# this revision follows df1a9890f5c8 on the default (unnamed) branch.
revision = "109f0ddd92ad"
down_revision = "df1a9890f5c8"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create day-truncated composite indexes on facility_scada.

    One index per market timezone (AEST and AWST) so daily aggregation
    queries grouped by network/facility can match an expression index on
    date_trunc('day', trading_interval AT TIME ZONE ...).
    Both use IF NOT EXISTS, so re-running the migration is safe.
    """
    op.execute(
        """
    create index if not exists idx_facility_scada_day_network_fueltech_aest
    on facility_scada (date_trunc('day', trading_interval at time zone 'AEST'), network_id, facility_code)
    """
    )
    op.execute(
        """
    create index if not exists idx_facility_scada_day_network_fueltech_awst
    on facility_scada (date_trunc('day', trading_interval at time zone 'AWST'), network_id, facility_code)
    """
    )
def downgrade() -> None:
    # Intentionally a no-op: the upgrade uses CREATE INDEX IF NOT EXISTS and
    # no corresponding DROP INDEX was implemented for rollback.
    pass
| 22.72973
| 110
| 0.701546
|
859ff0b9d3d4c38b74086c3a801a25642d1bf4d0
| 11,180
|
py
|
Python
|
python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
import json
import os
import tempfile
import time
from contextlib import contextmanager
from click.testing import CliRunner
from dagster import seven
from dagster.core.storage.pipeline_run import PipelineRunStatus
from dagster.core.test_utils import instance_for_test
from dagster.utils import file_relative_path
from dagster_graphql.cli import ui
@contextmanager
def dagster_cli_runner():
    """Yield a Click CliRunner wired to a throwaway DAGSTER_HOME.

    A temporary directory backs a test Dagster instance configured with the
    synchronous in-memory run launcher, so launched runs execute inline
    during the test instead of spawning processes.
    """
    with tempfile.TemporaryDirectory() as dagster_home_temp:
        with instance_for_test(
            temp_dir=dagster_home_temp,
            overrides={
                "run_launcher": {
                    "module": "dagster.core.launcher.sync_in_memory_run_launcher",
                    "class": "SyncInMemoryRunLauncher",
                }
            },
        ):
            yield CliRunner(env={"DAGSTER_HOME": dagster_home_temp})
def test_basic_introspection():
    """The GraphQL schema introspection query runs and returns a data payload."""
    introspection_query = "{ __schema { types { name } } }"
    workspace = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        invocation = runner.invoke(ui, ["-w", workspace, "-t", introspection_query])
        assert invocation.exit_code == 0
        payload = json.loads(invocation.output)
        assert payload["data"]
def test_basic_repositories():
    """Querying repositoriesOrError yields at least one repository node."""
    repos_query = "{ repositoriesOrError { ... on RepositoryConnection { nodes { name } } } }"
    workspace = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        invocation = runner.invoke(ui, ["-w", workspace, "-t", repos_query])
        assert invocation.exit_code == 0
        payload = json.loads(invocation.output)
        assert payload["data"]["repositoriesOrError"]["nodes"]
def test_basic_repository_locations():
    """A workspace with one good and one broken location reports both: the
    good one as a RepositoryLocation entry, the broken one as a PythonError
    whose message mentions the unimportable module."""
    query = "{ workspaceOrError { ... on Workspace { locationEntries { __typename, name, locationOrLoadError { __typename, ... on RepositoryLocation { __typename, name } ... on PythonError { message } } } } } }"
    workspace_path = file_relative_path(__file__, "./cli_test_error_workspace.yaml")
    with dagster_cli_runner() as runner:
        result = runner.invoke(ui, ["-w", workspace_path, "-t", query])
        assert result.exit_code == 0, str(result.exception)
        result_data = json.loads(result.output)
        nodes = result_data["data"]["workspaceOrError"]["locationEntries"]
        assert len(nodes) == 2, str(nodes)
        assert nodes[0]["locationOrLoadError"]["__typename"] == "RepositoryLocation"
        assert nodes[0]["name"] == "test_cli_location"
        assert nodes[1]["locationOrLoadError"]["__typename"] == "PythonError"
        assert nodes[1]["name"] == "test_cli_location_error"
        assert "No module named" in nodes[1]["locationOrLoadError"]["message"]
def test_basic_variables():
    """GraphQL variables passed via -v are bound into the query: looking up
    the 'math' pipeline by name/repo/location returns its name."""
    query = """
    query FooBar($pipelineName: String! $repositoryName: String! $repositoryLocationName: String!){
        pipelineOrError(params:{pipelineName: $pipelineName repositoryName: $repositoryName repositoryLocationName: $repositoryLocationName})
        { ... on Pipeline { name } }
    }
    """
    variables = '{"pipelineName": "math", "repositoryName": "test", "repositoryLocationName": "test_cli_location"}'
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        result = runner.invoke(ui, ["-w", workspace_path, "-v", variables, "-t", query])
        assert result.exit_code == 0
        result_data = json.loads(result.output)
        assert result_data["data"]["pipelineOrError"]["name"] == "math"
LAUNCH_PIPELINE_EXECUTION_QUERY = """
mutation ($executionParams: ExecutionParams!) {
launchPipelineExecution(executionParams: $executionParams) {
__typename
... on LaunchPipelineRunSuccess {
run {
runId
pipeline { ...on PipelineReference { name } }
}
}
... on PipelineConfigValidationInvalid {
pipelineName
errors { message }
}
... on PipelineNotFoundError {
pipelineName
}
... on PythonError {
message
stack
}
}
}
"""
def test_start_execution_text():
    """Launching the 'math' pipeline with -t (query passed as text) succeeds."""
    variables = seven.json.dumps(
        {
            "executionParams": {
                "selector": {
                    "repositoryLocationName": "test_cli_location",
                    "repositoryName": "test",
                    "pipelineName": "math",
                },
                "runConfigData": {"solids": {"add_one": {"inputs": {"num": {"value": 123}}}}},
                "mode": "default",
            }
        }
    )
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        result = runner.invoke(
            ui, ["-w", workspace_path, "-v", variables, "-t", LAUNCH_PIPELINE_EXECUTION_QUERY]
        )
        assert result.exit_code == 0
        try:
            # The CLI may emit log lines first; the JSON response is the last line.
            result_data = json.loads(result.output.strip("\n").split("\n")[-1])
            assert (
                result_data["data"]["launchPipelineExecution"]["__typename"]
                == "LaunchPipelineRunSuccess"
            )
        except Exception as e:
            raise Exception("Failed with {} Exception: {}".format(result.output, e))
def test_start_execution_file():
    """Launching the 'math' pipeline with --file (query read from a .graphql
    file) succeeds."""
    variables = seven.json.dumps(
        {
            "executionParams": {
                "selector": {
                    "pipelineName": "math",
                    "repositoryLocationName": "test_cli_location",
                    "repositoryName": "test",
                },
                "runConfigData": {"solids": {"add_one": {"inputs": {"num": {"value": 123}}}}},
                "mode": "default",
            }
        }
    )
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        result = runner.invoke(
            ui,
            [
                "-w",
                workspace_path,
                "-v",
                variables,
                "--file",
                file_relative_path(__file__, "./execute.graphql"),
            ],
        )
        assert result.exit_code == 0
        # The JSON response is the last line of the CLI output.
        result_data = json.loads(result.output.strip("\n").split("\n")[-1])
        assert (
            result_data["data"]["launchPipelineExecution"]["__typename"]
            == "LaunchPipelineRunSuccess"
        )
def test_start_execution_save_output():
    """
    Test that the --output flag saves the GraphQL response to the specified file
    """
    variables = seven.json.dumps(
        {
            "executionParams": {
                "selector": {
                    "repositoryLocationName": "test_cli_location",
                    "repositoryName": "test",
                    "pipelineName": "math",
                },
                "runConfigData": {"solids": {"add_one": {"inputs": {"num": {"value": 123}}}}},
                "mode": "default",
            }
        }
    )
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        with tempfile.TemporaryDirectory() as temp_dir:
            file_name = os.path.join(temp_dir, "output_file")
            result = runner.invoke(
                ui,
                [
                    "-w",
                    workspace_path,
                    "-v",
                    variables,
                    "--file",
                    file_relative_path(__file__, "./execute.graphql"),
                    "--output",
                    file_name,
                ],
            )
            assert result.exit_code == 0
            assert os.path.isfile(file_name)
            with open(file_name, "r") as f:
                lines = f.readlines()
                # The JSON response is on the last line of the saved output.
                result_data = json.loads(lines[-1])
                assert (
                    result_data["data"]["launchPipelineExecution"]["__typename"]
                    == "LaunchPipelineRunSuccess"
                )
def test_start_execution_predefined():
    """Launching via -p (a predefined query name) succeeds for the 'math'
    pipeline."""
    variables = seven.json.dumps(
        {
            "executionParams": {
                "selector": {
                    "repositoryLocationName": "test_cli_location",
                    "repositoryName": "test",
                    "pipelineName": "math",
                },
                "runConfigData": {"solids": {"add_one": {"inputs": {"num": {"value": 123}}}}},
                "mode": "default",
            }
        }
    )
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with dagster_cli_runner() as runner:
        result = runner.invoke(
            ui, ["-w", workspace_path, "-v", variables, "-p", "launchPipelineExecution"]
        )
        assert result.exit_code == 0
        # The JSON response is the last line of the CLI output.
        result_data = json.loads(result.output.strip("\n").split("\n")[-1])
        if not result_data.get("data"):
            raise Exception(result_data)
        assert (
            result_data["data"]["launchPipelineExecution"]["__typename"]
            == "LaunchPipelineRunSuccess"
        )
def test_logs_in_start_execution_predefined():
    """A run launched through the CLI is recorded in the instance's run
    storage and completes successfully (polls briefly for event flushing)."""
    variables = seven.json.dumps(
        {
            "executionParams": {
                "selector": {
                    "repositoryLocationName": "test_cli_location",
                    "repositoryName": "test",
                    "pipelineName": "math",
                },
                "runConfigData": {"solids": {"add_one": {"inputs": {"num": {"value": 123}}}}},
                "mode": "default",
            }
        }
    )
    workspace_path = file_relative_path(__file__, "./cli_test_workspace.yaml")
    with tempfile.TemporaryDirectory() as temp_dir:
        # Unlike the other tests, the instance is kept in scope so its run
        # storage can be inspected after the CLI invocation.
        with instance_for_test(
            temp_dir=temp_dir,
            overrides={
                "run_launcher": {
                    "module": "dagster.core.launcher.sync_in_memory_run_launcher",
                    "class": "SyncInMemoryRunLauncher",
                }
            },
        ) as instance:
            runner = CliRunner(env={"DAGSTER_HOME": temp_dir})
            result = runner.invoke(
                ui, ["-w", workspace_path, "-v", variables, "-p", "launchPipelineExecution"]
            )
            assert result.exit_code == 0
            result_data = json.loads(result.output.strip("\n").split("\n")[-1])
            assert (
                result_data["data"]["launchPipelineExecution"]["__typename"]
                == "LaunchPipelineRunSuccess"
            )
            run_id = result_data["data"]["launchPipelineExecution"]["run"]["runId"]
            # allow FS events to flush
            retries = 5
            while retries != 0 and not _is_done(instance, run_id):
                time.sleep(0.333)
                retries -= 1
            # assert that the watching run storage captured the run correctly from the other process
            run = instance.get_run_by_id(run_id)
            assert run.status == PipelineRunStatus.SUCCESS
def _is_done(instance, run_id):
return instance.has_run(run_id) and instance.get_run_by_id(run_id).is_finished
| 33.878788
| 211
| 0.556977
|
599d5b319d3b4b742fbd4bc67264033524d1cc88
| 727
|
py
|
Python
|
app/custom_widgets.py
|
Pavel-Maksimov/shift_logs_flask
|
69e8772e216cedad20ac6a8867252f55bf754baf
|
[
"MIT"
] | null | null | null |
app/custom_widgets.py
|
Pavel-Maksimov/shift_logs_flask
|
69e8772e216cedad20ac6a8867252f55bf754baf
|
[
"MIT"
] | null | null | null |
app/custom_widgets.py
|
Pavel-Maksimov/shift_logs_flask
|
69e8772e216cedad20ac6a8867252f55bf754baf
|
[
"MIT"
] | null | null | null |
from wtforms.widgets import html_params
def select_multi_checkbox(field, ul_class='dropdown-menu', **kwargs):
    """Render a multi-select field as a Bootstrap dropdown of checkboxes.

    Args:
        field: WTForms field exposing iter_choices() -> (value, label, checked).
        ul_class: CSS class applied to the enclosing <ul>.
        **kwargs: extra HTML attributes applied to every <input>.

    Returns:
        str: the rendered HTML markup.
    """
    kwargs.setdefault('type', 'checkbox')
    field_id = kwargs.pop('id', field.id)
    html = ['<ul %s aria-labelledby="dropdownMenu1">' % html_params(id=field_id, class_=ul_class)]
    for value, label, checked in field.iter_choices():
        choice_id = '%s-%s' % (field_id, value)
        options = dict(kwargs, name=field.name, value=value, id=choice_id)
        if checked:
            options['checked'] = 'checked'
        html.append('<li><input %s /> ' % html_params(**options))
        # Bug fix: the label's `for` must reference its own checkbox id
        # (choice_id), not the <ul>'s id, so clicking the label toggles
        # the matching checkbox.
        html.append('<label for="%s">%s</label></li>' % (choice_id, label))
    html.append('</ul>')
    return ''.join(html)
| 42.764706
| 98
| 0.631362
|
d086098aa964155b72ee9d2f0f0922a9492d195f
| 1,818
|
py
|
Python
|
sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.py
|
Strasser-Pablo/pipelines
|
a1d513eb412f3ffd44edf82af2fa7edb05c3b952
|
[
"Apache-2.0"
] | 1
|
2021-08-23T19:09:56.000Z
|
2021-08-23T19:09:56.000Z
|
sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.py
|
Strasser-Pablo/pipelines
|
a1d513eb412f3ffd44edf82af2fa7edb05c3b952
|
[
"Apache-2.0"
] | 2
|
2021-06-01T10:02:51.000Z
|
2021-06-07T07:19:14.000Z
|
sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.py
|
Strasser-Pablo/pipelines
|
a1d513eb412f3ffd44edf82af2fa7edb05c3b952
|
[
"Apache-2.0"
] | 3
|
2022-01-10T13:40:24.000Z
|
2022-03-21T08:46:14.000Z
|
# Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp import components
from kfp.v2 import dsl
import kfp.v2.compiler as compiler
import pathlib
# Component definitions are loaded from YAML specs stored alongside this test
# in the component_yaml/ directory.
test_data_dir = pathlib.Path(__file__).parent / 'component_yaml'
ingestion_op = components.load_component_from_file(
    str(test_data_dir / 'ingestion_component.yaml'))
training_op = components.load_component_from_file(
    str(test_data_dir / 'fancy_trainer_component.yaml'))
@dsl.pipeline(
    name='two-step-pipeline-with-resource-spec',
    pipeline_root='dummy_root',
    description='A linear two-step pipeline with resource specification.')
def my_pipeline(input_location: str = 'gs://test-bucket/pipeline_root',
                optimizer: str = 'sgd',
                n_epochs: int = 200):
    """Ingest examples then train, attaching CPU/memory limits and a
    TPU-v3 node-selector constraint with one accelerator to the trainer."""
    ingestor = ingestion_op(input_location=input_location)
    _ = (
        training_op(
            examples=ingestor.outputs['examples'],
            optimizer=optimizer,
            n_epochs=n_epochs).set_cpu_limit('4').set_memory_limit(
                '14Gi').add_node_selector_constraint(
                    'cloud.google.com/gke-accelerator',
                    'tpu-v3').set_gpu_limit(1))
if __name__ == '__main__':
    # Compile the pipeline spec next to this file as <this_file>.json.
    compiler.Compiler().compile(
        pipeline_func=my_pipeline,
        package_path=__file__.replace('.py', '.json'))
| 35.647059
| 74
| 0.714521
|
803bf550bfc8fbb9d1a0fe1eb797cabfe277219c
| 3,850
|
py
|
Python
|
scpp_base/scpp_base/src/utils/crypto_utils.py
|
scorelab/social-currency
|
f539893104bdfe098cfa58c8d9fabcbb00874c52
|
[
"Apache-2.0"
] | 4
|
2018-10-02T06:31:18.000Z
|
2019-11-16T15:21:34.000Z
|
scpp_base/scpp_base/src/utils/crypto_utils.py
|
horizon00/social-currency
|
f539893104bdfe098cfa58c8d9fabcbb00874c52
|
[
"Apache-2.0"
] | 2
|
2017-12-06T11:54:14.000Z
|
2019-11-11T11:34:06.000Z
|
scpp_base/scpp_base/src/utils/crypto_utils.py
|
horizon00/social-currency
|
f539893104bdfe098cfa58c8d9fabcbb00874c52
|
[
"Apache-2.0"
] | 5
|
2017-02-27T10:10:41.000Z
|
2019-11-11T11:45:37.000Z
|
import os.path
import logging
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from base64 import b64encode
from config.config import clientname
# Module logger writing INFO-level records to logs/stock_exchange.logs.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
filehandler = logging.FileHandler('logs/stock_exchange.logs')
filehandler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
filehandler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(filehandler)
def init_keys():
    """
    Initilize keys from here, device name exists in the 'name' file. Verify
    weather name file exists. If not exits we have to generate keys(public key
    and private key). We are storing keys in .keys/ directory in project root
    We are doing
        1. Create .keys directoy and name file
        2. Generate rsa keys
        3. Save ras keys in .keys directory
    """
    def init_dirs(senzy_name):
        """
        Create '.keys' directory and 'name' file if not exits. We have to write
        senzy name in name file. Also generates and saves a fresh 1024-bit RSA
        key pair on first run.
        Args:
            senzy_name - name of the senz client(username)
        """
        if not os.path.exists('.keys/name'):
            # first we have to create .keys/ directory if not exists
            try:
                os.makedirs('.keys')
            except OSError:
                logger.info('keys exists')
            # create name file from here
            senzy_name_file = open('.keys/name', 'w')
            senzy_name_file.write(senzy_name)
            senzy_name_file.close()
            # test
            # generate keys
            key_pair = RSA.generate(1024, e=65537)
            public_key = key_pair.publickey().exportKey("PEM")
            private_key = key_pair.exportKey("PEM")
            # save keys in pem file
            # NOTE(review): with pycryptodome on Python 3, exportKey returns
            # bytes, which cannot be written to a text-mode file — this code
            # appears to assume Python 2 / PyCrypto. Confirm runtime.
            save_key('publicKey.pem', public_key)
            save_key('privateKey.pem', private_key)
            # test over
        else:
            logger.info('keys exists')
    def save_key(file_name, key):
        """
        Save key in .pem file. We are saving both public key and private key
        from here. Folloing are the name and location of the keys
            1. public key - .keys/publicKey.pem
            2. private key - .keys/privateKey.pem
        """
        key_file = open('.keys/' + file_name, 'w')
        key_file.write(key)
        key_file.close()
    # client name comes from config.config.clientname (imported above)
    senzy_name = clientname
    init_dirs(senzy_name)
def get_pubkey():
    """Read the public key from .keys/publicKey.pem and return it base64-encoded.

    Returns:
        Base64-encoded public key (bytes on Python 3, str on Python 2).
    """
    # Fixes: the original leaked the file handle (open().read() without
    # close) and passed a text string to b64encode, which raises a
    # TypeError on Python 3. Reading in binary mode is correct on both.
    with open('.keys/publicKey.pem', 'rb') as key_file:
        pubkey = key_file.read()
    return b64encode(pubkey)
def sign_senz(senz):
    """
    Digitally sign the senz message. We append the digital signature of the
    message to the senz payload before sending. Incoming senz message:
        #SHARE
            #msg #time <time>
            @receiver
            ^sender
    The signature is computed over the message with all whitespace removed,
    so it is independent of layout, and appended after the sender line:
        #SHARE
            #msg #time <time>
            @receiver
            ^sender <digital signature>
    Args:
        senz: Senz message
    Returns:
        digitally signed senz message
    """
    # load private key — `with` closes the handle (the original leaked it)
    with open('.keys/privateKey.pem', "r") as key_file:
        key = key_file.read()
    rsakey = RSA.importKey(key)
    # sign senz: strip all whitespace so the signature is layout-independent
    signer = PKCS1_v1_5.new(rsakey)
    # NOTE(review): on Python 3, SHA256.new requires bytes — this str input
    # implies the module targets Python 2; confirm before porting.
    digest = SHA256.new("".join(senz.split()))
    signature = signer.sign(digest)
    signed_senz = "%s %s" % (senz, b64encode(signature))
    return signed_senz
| 29.389313
| 85
| 0.624935
|
94ab1530813abe09a4cf310d3b3fbba99a04d2e1
| 32,281
|
py
|
Python
|
da_unidirectional_training.py
|
mlnotebook/domain_adapation_istn
|
83ff3c5265d433cc5076392ddc9ae62b0193eaa3
|
[
"MIT"
] | 9
|
2020-07-01T21:56:52.000Z
|
2022-03-01T05:21:36.000Z
|
da_unidirectional_training.py
|
mlnotebook/domain_adapation_istn
|
83ff3c5265d433cc5076392ddc9ae62b0193eaa3
|
[
"MIT"
] | 1
|
2020-10-03T08:26:10.000Z
|
2020-10-03T08:59:35.000Z
|
da_unidirectional_training.py
|
mlnotebook/domain_adapation_istn
|
83ff3c5265d433cc5076392ddc9ae62b0193eaa3
|
[
"MIT"
] | 1
|
2020-12-28T21:21:25.000Z
|
2020-12-28T21:21:25.000Z
|
import os
import json
import argparse
import torch
import logging
import time
import numpy as np
import shutil
from tqdm import tqdm
from itertools import cycle
from models.task_models import LeNet3D_regressor, LeNet3D_classifier, Discriminator
from models.itn import ITN3D
from models.stn import BSplineSTN3D, AffineSTN3D
from datasets import NiftyDatasetFromTSV
from utils.image_utils import save_tensor_sample, norm_0_255, nii3d_to_pil2d
from utils.model_utils import initialize_weights, set_requires_grad, get_regressor_output, get_classifier_output
from utils.plotting_utils import plot_grad_flow, plot_metric
from utils.training_utils import printhead, EarlyStop
def train(args, config, remote=False):
    """Run unidirectional adversarial domain adaptation with an ISTN.

    Jointly trains:
      * an ITN (plus optional STN) mapping site-A images into the
        appearance of site B,
      * a discriminator separating real site-B images from transformed
        site-A images, and
      * a task model (regressor or classifier) updated only on the
        transformed images.

    Validation runs every ``args.val_interval`` epochs; the best model (by
    task error on transformed images) is checkpointed and early stopping
    applies after ``args.early_stopping_epochs`` fruitless validations.

    Args:
        args: Namespace populated in ``__main__`` (datasets, losses, model
            and ISTN options, output directories, ...).
        config: Parsed JSON configuration dict (kept for logging/compat).
        remote: If True, re-raise KeyboardInterrupt so a remote scheduler
            sees it instead of just breaking out of the epoch loop.
    """
    printhead('Starting Training.')

    # ----- Device setup -----
    if args.seed:
        torch.manual_seed(args.seed)
    use_cuda = torch.cuda.is_available()
    if not use_cuda:
        print('GPU is not available.')
        return
    device = torch.device("cuda:" + args.dev if use_cuda else "cpu")
    logging.info('Using device: {} {}'.format(torch.cuda.get_device_name(device), device))

    # ----- Datasets / loaders (normalization is applied per-batch later) -----
    printhead('Loading...')
    logging.info('Loading dataset: {}.'.format(args.train_sets[0]))
    dataset_train_1 = NiftyDatasetFromTSV(args.train_sets[0], normalizer=None, aug=args.augmentation)
    dataloader_train_1 = torch.utils.data.DataLoader(dataset_train_1,
                                                     shuffle=True,
                                                     drop_last=True,
                                                     batch_size=args.batch_size,
                                                     num_workers=args.num_dataset_workers)
    logging.info('Loaded {} images.'.format(len(dataset_train_1)))
    logging.info('Loading dataset: {}.'.format(args.train_sets[1]))
    dataset_train_2 = NiftyDatasetFromTSV(args.train_sets[1], normalizer=None, aug=args.augmentation)
    dataloader_train_2 = torch.utils.data.DataLoader(dataset_train_2,
                                                     shuffle=True,
                                                     drop_last=True,
                                                     batch_size=args.batch_size,
                                                     num_workers=args.num_dataset_workers)
    logging.info('Loaded {} images.'.format(len(dataset_train_2)))
    logging.info('Loading dataset: {}.'.format(args.test_sets[0]))
    dataset_val_1 = NiftyDatasetFromTSV(args.test_sets[0], normalizer=None)
    dataloader_val_1 = torch.utils.data.DataLoader(dataset_val_1,
                                                   shuffle=False,
                                                   drop_last=True,
                                                   batch_size=args.batch_size,
                                                   num_workers=0)
    logging.info('Loaded {} images.'.format(len(dataset_val_1)))
    logging.info('Loading dataset: {}.'.format(args.test_sets[1]))
    dataset_val_2 = NiftyDatasetFromTSV(args.test_sets[1], normalizer=None)
    dataloader_val_2 = torch.utils.data.DataLoader(dataset_val_2,
                                                   drop_last=True,
                                                   shuffle=False,
                                                   batch_size=args.batch_size,
                                                   num_workers=0)
    logging.info('Loaded {} images.'.format(len(dataset_val_2)))

    printhead('Initializing Models')
    ##### Load the TASK model #####
    if args.model_type == 'regressor':
        task = LeNet3D_regressor(input_size=args.input_shape[0]).to(device)
        get_task_output = get_regressor_output
    if args.model_type == 'classifier':
        task = LeNet3D_classifier(num_classes=args.num_classes).to(device)
        get_task_output = get_classifier_output
    if args.finetune:
        # NOTE(review): args.task_model is indexed with [0] here, implying
        # the config value is a list of paths, while __main__ checks it with
        # os.path.exists(args.task_model) as a single path. Confirm the
        # intended config format.
        logging.info('Loading {} model: {}'.format(args.model_type, args.task_model[0]))
        logging.info(task.load_state_dict(torch.load(args.task_model[0], map_location=device)))
        logging.info('Model loaded.')
    task.train()
    task_parameters = list(task.parameters())
    set_requires_grad(task, True)
    logging.info('Task Model')

    ##### Load the DISCRIMINATOR model #####
    discriminator = Discriminator().to(device)
    discriminator.apply(initialize_weights)
    discriminator.train()
    discriminator_parameters = list(discriminator.parameters())
    set_requires_grad(discriminator, True)
    logging.info('Discriminator')

    ##### Load the ITN #####
    istn_A2B_parameters = []
    itn_A2B = ITN3D(nf=args.nf).to(device)
    istn_A2B_parameters += list(itn_A2B.parameters())
    itn_A2B.train()
    itn_A2B.apply(initialize_weights)
    set_requires_grad(itn_A2B, True)
    logging.info('ITN')

    ##### Load the STN (optional) #####
    stn_A2B = None
    if args.stn:
        # (fix) Assertion message previously said "`bspline` or `spline`".
        assert (args.stn in ['affine', 'bspline']), "STN should be one of `bspline` or `affine`."
        if args.stn == 'bspline':
            stn_A2B = BSplineSTN3D(input_size=args.input_shape[0],
                                   device=device,
                                   control_point_spacing=(args.cp_spacing[0],
                                                          args.cp_spacing[1],
                                                          args.cp_spacing[2]),
                                   max_displacement=args.max_displacement,
                                   nf=args.nf).to(device)
        if args.stn == 'affine':
            stn_A2B = AffineSTN3D(input_size=args.input_shape[0],
                                  device=device,
                                  input_channels=1,
                                  nf=args.nf).to(device)
        istn_A2B_parameters += list(stn_A2B.parameters())
        stn_A2B.train()
        # Weights are not initialized here: the STN auto-initializes to the
        # identity transform.
        set_requires_grad(stn_A2B, True)
        logging.info('STN ({})'.format(args.stn))

    ##### Create OPTIMIZERS #####
    optimizer_discriminator = torch.optim.Adam(filter(lambda p: p.requires_grad, discriminator_parameters), lr=args.learning_rate, betas=(0.5, 0.999))
    optimizer_istn = torch.optim.Adam(filter(lambda p: p.requires_grad, istn_A2B_parameters), lr=args.learning_rate, betas=(0.5, 0.999))
    optimizer_task = torch.optim.Adam(filter(lambda p: p.requires_grad, task_parameters), lr=0.0001)

    ##### Initialize TRAINING logs #####
    loss_train_istn_log = []
    loss_train_discriminator_log = []
    loss_train_task_log = []
    err_train_task_A_log = []
    err_train_task_B_log = []
    err_train_task_A2B_log = []
    early_stopping_counter = 0
    epoch_times = []

    ##### Initialize VALIDATION logs #####
    loss_val_istn_log = []
    loss_val_discriminator_log = []
    loss_val_task_log = []
    err_val_task_A_log = []
    err_val_task_B_log = []
    err_val_task_A2B_log = []
    val_step = 0
    best_val_error = [0, -1]  # [epoch, error]; -1 marks "no best yet"
    error_val_A_base, error_val_B_base = 0.0, 0.0
    num_images_val_A_base, num_images_val_B_base = 0, 0
    val_epochs = []

    ##### Soft-label generators for the GAN targets (label smoothing)
    low = torch.distributions.Uniform(0.00, 0.03)
    high = torch.distributions.Uniform(0.97, 1.00)

    printhead('TRAINING LOOP')
    for epoch in range(0, args.epochs):
        epoch_start = time.time()
        error_train_A2B, error_train_A, error_train_B, num_images_A, num_images_B, num_images_A2B = 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
        # (fix) Loss accumulators are reset every epoch. Previously they were
        # initialized once before the loop, so from epoch 1 onward the
        # already-normalized value of the previous epoch kept being
        # accumulated into, corrupting the logged losses.
        total_istn_A2B_loss = 0.0
        total_discriminator_loss = 0.0
        total_task_loss = 0.0
        # Set the ISTN and task model to training mode
        itn_A2B.train()
        set_requires_grad(itn_A2B, True)
        if args.stn:
            stn_A2B.train()
            set_requires_grad(stn_A2B, True)
        task.train()
        set_requires_grad(task, True)
        try:  # This try/except catches a user KeyboardInterrupt and performs clean-up
            for batch_idx, batch_samples in enumerate(zip(tqdm(dataloader_train_1, desc='Epoch {:03d}'.format(epoch), leave=False), cycle(dataloader_train_2))):
                A, label_A = batch_samples[0]['image'].to(device), batch_samples[0][args.label_key].to(device)
                B, label_B = batch_samples[1]['image'].to(device), batch_samples[1][args.label_key].to(device)
                if len(B) != len(A):
                    # If the two datasets are not the same length, correct the size.
                    B, label_B = B[:len(A)], label_B[:len(A)]
                A = args.normalizer(A)
                B = args.normalizer(B)
                # Create clones of the original data for use later
                B_orig = B.clone()
                A_orig = A.clone()
                # GA2B(A) - domain shift
                A2B = itn_A2B(A)
                A2B = stn_A2B(A2B.to(device)) if args.stn else A2B
                # GA2B(B) - identity
                B2B = itn_A2B(B)
                B2B = stn_A2B(B2B.to(device)) if args.stn else B2B
                # Create clones of the original data for use later
                A2B_orig = A2B.clone()

                ##### CALCULATE GENERATOR LOSSES #####
                # GAN loss: the generator tries to make the discriminator
                # label transformed A as "real B" (high soft labels).
                optimizer_istn.zero_grad()
                set_requires_grad(discriminator, False)  # discriminator is not being updated here.
                output_Dis_A2B = discriminator(A2B, return_logits=0)
                output_Dis_A2B = output_Dis_A2B.view(A2B.size(0), -1)
                Dis_A2B_High = high.sample(output_Dis_A2B.shape).to(device)
                discriminator_A2B_gan_loss = args.gan_loss(output_Dis_A2B, Dis_A2B_High)
                # Identity loss: B mapped through the generator should stay B.
                idt_B2B_loss = 0.5 * args.cyc_weight * args.idt_loss(B2B, B) if args.idt_loss is not None else 0
                # Total loss + ISTN update
                istn_A2B_loss = discriminator_A2B_gan_loss + idt_B2B_loss
                istn_A2B_loss.backward()
                optimizer_istn.step()

                ##### CALCULATE DISCRIMINATOR LOSSES #####
                optimizer_discriminator.zero_grad()
                set_requires_grad(discriminator, True)
                output_Dis_B = discriminator(B.detach(), return_logits=0)
                output_Dis_B = output_Dis_B.view(B.size(0), -1)
                Dis_B_High = high.sample(output_Dis_B.shape).to(device)
                discriminator_B_loss = args.dis_loss(output_Dis_B, Dis_B_High)
                output_Dis_A2B = discriminator(A2B.detach(), return_logits=0)
                output_Dis_A2B = output_Dis_A2B.view(A2B.size(0), -1)
                Dis_A2B_Low = low.sample(output_Dis_A2B.shape).to(device)
                discriminator_A2B_loss = args.dis_loss(output_Dis_A2B, Dis_A2B_Low)
                # Perform DISCRIMINATOR UPDATE
                discriminator_loss = 0.5 * (discriminator_B_loss + discriminator_A2B_loss)
                discriminator_loss.backward()
                optimizer_discriminator.step()

                ##### Get TASK MODEL Outputs #####
                optimizer_task.zero_grad()
                output_task_A_batch, error_A_batch, _, _ = get_task_output(task, A_orig, label_A)
                output_task_B_batch, error_B_batch, _, _ = get_task_output(task, B_orig, label_B)
                output_task_A2B_batch, error_A2B_batch, _, _ = get_task_output(task, A2B_orig.detach(), label_A)
                # Only update the task model based on the transformed output
                task_loss = args.task_loss(output_task_A2B_batch, label_A.float())
                task_loss.backward()
                optimizer_task.step()

                # Populate logs
                error_train_A += error_A_batch
                error_train_B += error_B_batch
                error_train_A2B += error_A2B_batch
                total_istn_A2B_loss += istn_A2B_loss.item()
                total_discriminator_loss += discriminator_loss.item()
                total_task_loss += task_loss.item()
                num_images_A += A_orig.size(0)
                num_images_B += B_orig.size(0)
                num_images_A2B += A2B_orig.size(0)

            ### LOG METRICS (per-image averages over the epoch)
            total_istn_A2B_loss = total_istn_A2B_loss / num_images_A
            total_discriminator_loss = total_discriminator_loss / num_images_A
            total_task_loss = total_task_loss / num_images_A
            loss_train_istn_log.append(total_istn_A2B_loss)
            loss_train_discriminator_log.append(total_discriminator_loss)
            loss_train_task_log.append(total_task_loss)
            error_train_A = error_train_A / num_images_A
            error_train_B = error_train_B / num_images_B
            error_train_A2B = error_train_A2B / num_images_A2B
            err_train_task_A_log.append(error_train_A)
            err_train_task_B_log.append(error_train_B)
            err_train_task_A2B_log.append(error_train_A2B)

            ### Get COMPUTATION TIMES / ETA over the last 20 epochs
            epoch_end = time.time()
            epoch_duration = epoch_end - epoch_start
            epoch_times.append(epoch_duration)
            avg_epoch = torch.mean(torch.as_tensor(epoch_times[-20:])).numpy()
            # (fix) Use args.epochs (the value actually driving the loop)
            # instead of reading config['epochs'] again.
            remaining_time = (args.epochs - epoch) * avg_epoch
            remaining_time = time.strftime('%Hh%Mm%Ss', time.gmtime(remaining_time))
            logging.info('TRAIN Epo:{:03d} Loss[ISTN/Dis/Tsk]:[{:.3f}/{:.3f}/{:.3f}/] {}[A/A2B/B]:[{:.3f}/{:.3f}/{:.3f}] ETA: {}'
                         .format(epoch,
                                 loss_train_istn_log[-1],
                                 loss_train_discriminator_log[-1],
                                 loss_train_task_log[-1],
                                 'MAE' if args.model_type == 'regressor' else 'Acc',
                                 error_train_A,
                                 error_train_A2B,
                                 error_train_B,
                                 remaining_time))
        except KeyboardInterrupt:
            printhead('USER TERMINATED at Epoch: {}'.format(epoch))
            if remote:
                raise KeyboardInterrupt
            else:
                break

        ###### VALIDATION STEP ######
        try:
            if (epoch == 0 or epoch % args.val_interval == 0 or epoch == args.epochs - 1):
                with torch.no_grad():
                    error_val_A, error_val_B, error_val_A2B = 0.0, 0.0, 0.0
                    num_images_val_A, num_images_val_B, num_images_val_A2B = 0, 0, 0
                    # (fix) Validation loss accumulators are reset per pass;
                    # previously they carried over between validations.
                    total_istn_val_loss, total_discriminator_val_loss, total_task_val_loss = 0.0, 0.0, 0.0
                    ### Set ALL MODELS TO EVAL
                    discriminator.eval()
                    task.eval()
                    itn_A2B.eval()
                    if args.stn:
                        stn_A2B.eval()
                    for batch_idx, batch_samples in enumerate(zip(tqdm(dataloader_val_1, desc='Val', leave=False), cycle(dataloader_val_2))):
                        A, label_A = batch_samples[0]['image'].to(device), batch_samples[0][args.label_key].to(device)
                        B, label_B = batch_samples[1]['image'].to(device), batch_samples[1][args.label_key].to(device)
                        if len(B) != len(A):
                            # (fix) Previously also sliced undefined
                            # scanner_B/mask_B variables, raising NameError
                            # whenever the two loaders differed in length.
                            B, label_B = B[:len(A)], label_B[:len(A)]
                        A = args.normalizer(A)
                        B = args.normalizer(B)
                        # Create clones of the original data for use later
                        A_orig = A.clone()
                        B_orig = B.clone()
                        if epoch == 0:
                            # Get baselines on first epoch, and retain them for later
                            output_task_val_A_base_batch, error_val_A_base_batch, _, _ = get_task_output(task, A_orig, label_A)
                            output_task_val_B_base_batch, error_val_B_base_batch, _, _ = get_task_output(task, B_orig, label_B)
                            error_val_A_base += error_val_A_base_batch
                            error_val_B_base += error_val_B_base_batch
                            num_images_val_A_base += A.size(0)
                            num_images_val_B_base += B.size(0)
                        ### Save a sample of validation images on first step
                        if epoch == 0 and batch_idx == 0 and (args.nii or args.png):
                            save_tensor_sample(A, '{}_val_preITN_{}'.format(epoch, 'A'), args.samples_dir, nii=args.nii, png=args.png)
                        A2B = itn_A2B(A)
                        A2B_postITN = A2B.clone()
                        if batch_idx == 0 and (args.nii or args.png):
                            save_tensor_sample(A2B_postITN, '{}_val_postITN_{}'.format(epoch, 'A2B'), args.samples_dir, nii=args.nii, png=args.png)
                            save_tensor_sample(A2B_postITN - A, '{}_val_postITNdiff_{}'.format(epoch, 'A2B'), args.samples_dir, nii=args.nii, png=args.png)
                        A2B = stn_A2B(A2B) if args.stn else A2B
                        A2B_postSTN = A2B.clone()
                        if batch_idx == 0 and (args.nii or args.png):
                            save_tensor_sample(A2B_postSTN, '{}_val_postSTN_{}'.format(epoch, 'A2B'), args.samples_dir, nii=args.nii, png=args.png)
                            save_tensor_sample(A2B_postSTN - A2B_postITN, '{}_val_postSTNdiff_{}'.format(epoch, 'A2B'), args.samples_dir, nii=args.nii, png=args.png)
                        # (fix) Recompute the identity mapping on the
                        # *validation* batch. Previously the identity loss
                        # below reused a stale B2B tensor left over from the
                        # last training batch.
                        B2B = itn_A2B(B)
                        B2B = stn_A2B(B2B) if args.stn else B2B
                        output_task_val_A_batch, error_val_A_batch, _, _ = get_task_output(task, A_orig, label_A)
                        output_task_val_B_batch, error_val_B_batch, _, _ = get_task_output(task, B_orig, label_B)
                        output_task_val_A2B_batch, error_val_A2B_batch, _, _ = get_task_output(task, A2B, label_A)
                        error_val_A += error_val_A_batch
                        error_val_B += error_val_B_batch
                        error_val_A2B += error_val_A2B_batch
                        ##### Get Losses (mirroring the training objectives)
                        # ISTN
                        output_Dis_val_A2B = discriminator(A2B.detach())
                        output_Dis_val_A2B = output_Dis_val_A2B.view(A2B.size(0), -1)
                        Dis_A2B_High = high.sample(output_Dis_val_A2B.shape).to(device)
                        discriminator_A2B_gan_val_loss = args.gan_loss(output_Dis_val_A2B, Dis_A2B_High)
                        idt_val_B2B_val_loss = 0.5 * args.cyc_weight * args.idt_loss(B2B, B) if args.idt_loss is not None else 0
                        istn_val_loss = discriminator_A2B_gan_val_loss + idt_val_B2B_val_loss
                        # DISCRIMINATOR
                        output_Dis_val_B = discriminator(B.detach())
                        output_Dis_val_B = output_Dis_val_B.view(B.size(0), -1)
                        Dis_B_High = high.sample(output_Dis_val_B.shape).to(device)
                        Dis_A2B_Low = low.sample(output_Dis_val_A2B.shape).to(device)
                        discriminator_B_val_loss = args.dis_loss(output_Dis_val_B, Dis_B_High)
                        discriminator_A2B_val_loss = args.dis_loss(output_Dis_val_A2B, Dis_A2B_Low)
                        discriminator_val_loss = 0.5 * (discriminator_B_val_loss + discriminator_A2B_val_loss)
                        # TASK
                        task_val_loss = args.task_loss(output_task_val_A2B_batch, label_A.float())
                        total_istn_val_loss += istn_val_loss.item()
                        total_discriminator_val_loss += discriminator_val_loss.item()
                        total_task_val_loss += task_val_loss.item()
                        num_images_val_A += A_orig.size(0)
                        num_images_val_B += B_orig.size(0)
                        num_images_val_A2B += A2B.size(0)
                    ## End Batch
                    ##### Populate Logs
                    total_istn_val_loss = total_istn_val_loss / num_images_val_A
                    total_discriminator_val_loss = total_discriminator_val_loss / num_images_val_A
                    total_task_val_loss = total_task_val_loss / num_images_val_A
                    loss_val_istn_log.append(total_istn_val_loss)
                    loss_val_discriminator_log.append(total_discriminator_val_loss)
                    loss_val_task_log.append(total_task_val_loss)
                    error_val_A = error_val_A / num_images_val_A
                    error_val_B = error_val_B / num_images_val_B
                    error_val_A2B = error_val_A2B / num_images_val_A2B
                    err_val_task_A_log.append(error_val_A)
                    err_val_task_B_log.append(error_val_B)
                    err_val_task_A2B_log.append(error_val_A2B)
                    val_epochs.append(epoch)
                    if epoch == 0:
                        # Normalize the baselines computed on the first pass
                        # and retain them for later log lines.
                        error_val_A_base = error_val_A_base / num_images_val_A_base
                        error_val_B_base = error_val_B_base / num_images_val_B_base
                    logging.info('VAL Epo:{:3d} {}[A0/A/A2B]:[{:.4f}/{:.4f}/{:.4f}] {}[B0/B/Del]:[{:.4f}/{:.4f}/{:.4f}] Best[B]:[{:.4f}]'
                                 .format(epoch, 'MAE' if args.model_type == 'regressor' else 'Acc',
                                         error_val_A_base, error_val_A, error_val_A2B,
                                         'MAE' if args.model_type == 'regressor' else 'Acc',
                                         error_val_B_base, error_val_B, np.abs(error_val_B_base - error_val_B),
                                         best_val_error[1]))
                ##### Check for a new best model performance
                # Regressors improve when the error drops; classifiers when
                # the accuracy rises.
                if args.model_type == 'regressor':
                    better_than_before = 1 if error_val_A2B < best_val_error[1] or best_val_error[1] == -1 else 0
                else:
                    better_than_before = 1 if error_val_A2B > best_val_error[1] or best_val_error[1] == -1 else 0
                if better_than_before:
                    printhead('NEW BEST VAL A2B:{:3d} Prev [{:.5f}] New [{:.5f}] ...saving model'
                              .format(epoch, best_val_error[1], error_val_A2B))
                    torch.save(itn_A2B.state_dict(), os.path.join(args.model_dir, 'val_err_{:.5f}_epoch_{:03d}_itn_A2B.pt'.format(error_val_A2B, epoch)))
                    if args.stn:
                        torch.save(stn_A2B.state_dict(), os.path.join(args.model_dir, 'val_err_{:.5f}_epoch_{:03d}_stn_A2B.pt'.format(error_val_A2B, epoch)))
                    torch.save(task.state_dict(), os.path.join(args.model_dir, 'val_err_{:.5f}_epoch_{:03d}_{}_A2B.pt'.format(error_val_A2B, epoch, args.model_type)))
                    early_stopping_counter = 0
                    best_val_error = [epoch, error_val_A2B]
                else:
                    early_stopping_counter += 1
                ##### Check number of validation steps - must have trained for at least 5 validations.
                val_step += 1
                if val_step > 5 and early_stopping_counter == args.early_stopping_epochs:
                    printhead('EARLY STOPPING TRIGGER: No change in val_accuracy for {} epochs'.format(
                        early_stopping_counter))
                    raise EarlyStop
        except EarlyStop:
            break

    ##### After the training loop - save the final models
    ### NOTE: May not be the best models - this just saves the final epoch step just in case it's needed.
    torch.save(itn_A2B.state_dict(), os.path.join(args.model_dir, 'itn.pt'))
    if args.stn:
        torch.save(stn_A2B.state_dict(), os.path.join(args.model_dir, 'stn.pt'))
    torch.save(task.state_dict(), os.path.join(args.model_dir, '{}.pt'.format(args.model_type)))
    printhead('Finished TRAINING.')

    ##### Plot loss / metric curves
    plot_metric({'ISTN Train': [loss_train_istn_log, range(len(loss_train_istn_log))],
                 'ISTN Val': [loss_val_istn_log, val_epochs]},
                'ISTN Losses', 'Loss', args)
    plot_metric({'Dis Train': [loss_train_discriminator_log, range(len(loss_train_discriminator_log))],
                 'Dis Val': [loss_val_discriminator_log, val_epochs]},
                'Discriminator Losses', 'Loss', args)
    plot_metric({'Task Train': [loss_train_task_log, range(len(loss_train_task_log))],
                 'Task Val': [loss_val_task_log, val_epochs]},
                'Task Losses', 'Loss', args)
    if args.model_type == 'regressor':
        metric = 'MSE'
    else:
        metric = 'Acc'
    plot_metric({'Task(A) Train': [err_train_task_A_log, range(len(err_train_task_A_log))],
                 'Task(A) Val': [err_val_task_A_log, val_epochs]},
                'Task(A) {}'.format(metric), metric, args)
    plot_metric({'Task(B) Train': [err_train_task_B_log, range(len(err_train_task_B_log))],
                 'Task(B) Val': [err_val_task_B_log, val_epochs]},
                'Task(B) {}'.format(metric), metric, args)
    plot_metric({'Task(A2B) Train': [err_train_task_A2B_log, range(len(err_train_task_A2B_log))],
                 'Task(A2B) Val': [err_val_task_A2B_log, val_epochs]},
                'Task(A2B) {}'.format(metric), metric, args)
if __name__ == '__main__':
    # ----- CLI arguments -----
    parser = argparse.ArgumentParser(description='Domain Adaptation with Adversarial Training of ISTN')
    parser.add_argument('--dev', default='0', help='cuda device (default: 0)')
    parser.add_argument('--seed', type=int, default=42, help='random seed (default: 42)')
    parser.add_argument('--config', default="config/config_train_unidirectional.json", help='config file')
    parser.add_argument('--output_dir', default='./output', help='output root directory')
    parser.add_argument('--num_dataset_workers', type=int, default=4, help='number of worker to use for each dataset.')
    parser.add_argument('--B2A', action='store_true', help='swap siteA and siteB')
    parser.add_argument('--nii', action='store_true', help='save samples as .nii.gz')
    parser.add_argument('--png', action='store_true', help='save samples as .png')
    parser.add_argument('--model_type', required=True, choices={'classifier', 'regressor'}, type=str,
                        help='Type of model: `regressor` or `classifier`.')
    args = parser.parse_args()
    with open(args.config) as f:
        config = json.load(f)

    ######################################### OPTIONS ###################################################
    ### DATASET OPTIONS
    args.train_sets = [config['siteA_train'], config['siteB_train']]
    for i in range(len(args.train_sets)):
        # (fix) The assert message previously always printed the first path.
        assert (os.path.exists(args.train_sets[i])), 'Training data does not exist at {}'.format(args.train_sets[i])
    args.test_sets = [config['siteA_val'], config['siteB_val']]
    for i in range(len(args.test_sets)):
        assert (os.path.exists(args.test_sets[i])), 'Validation data does not exist at {}'.format(args.test_sets[i])
    args.finetune = config['finetune']
    if args.finetune:
        # NOTE(review): train() indexes args.task_model[0], which suggests
        # the config value is a list of paths, while this check treats it as
        # a single path string. Confirm the intended config format.
        args.task_model = config['task_model']
        assert (os.path.exists(args.task_model)), 'Finetuning is ON, but task model does not exist at {}'.format(args.task_model)
    else:
        args.task_model = None
    if args.B2A:
        if args.finetune:
            printhead('Note: Both `B2A` & `finetune` passed as args. A & B switched. Check task model is trained on site B.')
        # (fix) Was `args.training_sets`, an attribute that does not exist,
        # so any run with --B2A crashed with AttributeError.
        args.train_sets = args.train_sets[::-1]
        args.test_sets = args.test_sets[::-1]
    args.input_shape = config['input_shape']
    args.label_key = config['label_key']

    ### PREPROCESSING OPTIONS
    args.augmentation = config['augmentation']
    assert (config['normalizer'] in ['', 'tanh'])
    if config['normalizer'] == 'tanh':
        args.normalizer = torch.tanh
    else:
        # (fix) train() calls args.normalizer(A) unconditionally; a value of
        # None would raise TypeError, so fall back to an identity function.
        args.normalizer = lambda x: x

    ### TASK MODEL OPTIONS
    # (fix) Was `== 'regression'`, which can never match the argparse
    # choices {'classifier', 'regressor'}, so num_classes was required in
    # the config even for regressors.
    if args.model_type == 'regressor':
        args.num_classes = None
    else:
        args.num_classes = config['num_classes']

    ### ISTN OPTIONS
    args.nf = config['nf']
    args.stn = config['stn']
    assert (args.stn in ['none', 'bspline', 'affine']), "STN should be `bspline`, `affine` or `none`."
    if args.stn == 'none':
        # (fix) The string 'none' is truthy; train() checks `if args.stn:`
        # and would have tried (and failed) to build an STN. Map it to None,
        # matching the `'NONE'` fallback used in the output naming below.
        args.stn = None
    if args.stn == 'bspline':
        args.max_displacement = config['max_displacement']
        args.cp_spacing = config['cp_spacing']
    else:
        args.max_displacement = None
        args.cp_spacing = None

    ### LOSS FUNCTION OPTIONS
    loss_functions = {'bce': torch.nn.BCELoss(),
                      'mse': torch.nn.MSELoss(),
                      'l1': torch.nn.L1Loss()}
    assert (config['gan_loss'] in loss_functions.keys())
    assert (config['idt_loss'] in loss_functions.keys())
    # (fix) dis_loss was previously read without validation, unlike the
    # other two losses.
    assert (config['dis_loss'] in loss_functions.keys())
    args.gan_loss = loss_functions[config['gan_loss']]
    args.idt_loss = loss_functions[config['idt_loss']]
    args.dis_loss = loss_functions[config['dis_loss']]
    if args.model_type == 'regressor':
        args.task_loss = torch.nn.MSELoss()
    else:
        args.task_loss = torch.nn.BCELoss()

    ### TRAINING LOOP OPTIONS
    args.early_stopping_epochs = config['early_stopping_epochs']
    args.learning_rate = config['learning_rate']
    args.epochs = config['epochs']
    args.batch_size = config['batch_size']
    args.cyc_weight = config['cyc_weight']
    args.val_interval = 1 if config['val_interval'] > args.epochs else config['val_interval']

    ### OUTPUT OPTIONS
    args.class_names = '{}_{}'.format(config['siteA_name'], config['siteB_name'])
    args.out = '{}_{}_STN_{}'.format('UniDA_{}_{}'.format(args.model_type, args.label_key),
                                     args.class_names,
                                     str(args.stn) if args.stn else 'NONE')
    args.params = '_L_{}_E_{:d}_B_{:d}_{}_Cyc_{}_GL_{}_IL_{}_DL_{}_A_{}'.format(args.learning_rate,
                                                                                args.epochs,
                                                                                args.batch_size,
                                                                                '' if args.stn != 'bspline' else 'Sp_{}_MaxD_{}_'.format(args.cp_spacing[0],
                                                                                                                                         args.max_displacement),
                                                                                args.cyc_weight,
                                                                                config['gan_loss'],
                                                                                config['idt_loss'],
                                                                                config['dis_loss'],
                                                                                args.augmentation)
    args.out = os.path.join(args.output_dir, args.out + args.params)
    args.model_dir = os.path.join(args.out, 'model')
    args.samples_dir = os.path.join(args.out, 'samples')
    args.code_dir = os.path.join(args.out, 'code')
    os.makedirs(args.model_dir, exist_ok=True)
    os.makedirs(args.samples_dir, exist_ok=True)
    os.makedirs(args.code_dir, exist_ok=True)
    # Snapshot the scripts listed in the config into the run directory for
    # reproducibility.
    for file in tqdm(config['files'], desc="Copying script files..."):
        shutil.copyfile(file, os.path.join(args.code_dir, os.path.basename(file)))

    ### LOGGING OPTIONS
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-5s %(message)s',
                        datefmt='%d%m %H:%M:%S',
                        filename=os.path.join(args.out, 'log.txt'),
                        filemode='w')
    consoleHandler = logging.StreamHandler()
    consoleHandler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    consoleHandler.setFormatter(formatter)
    logging.getLogger('').addHandler(consoleHandler)
    #################################################################################################################
    train(args, config)
| 50.439063
| 170
| 0.573712
|
127de0a861338e1016fe8787a1ac276c8663bb30
| 3,258
|
py
|
Python
|
firmware/centralNodesDataRead.py
|
mi3nts/mqttSubscribers
|
c5891f85a14a2e12fa68be3d9c25f83c5934511d
|
[
"MIT"
] | null | null | null |
firmware/centralNodesDataRead.py
|
mi3nts/mqttSubscribers
|
c5891f85a14a2e12fa68be3d9c25f83c5934511d
|
[
"MIT"
] | null | null | null |
firmware/centralNodesDataRead.py
|
mi3nts/mqttSubscribers
|
c5891f85a14a2e12fa68be3d9c25f83c5934511d
|
[
"MIT"
] | null | null | null |
# MQTT Client demo
# Continuously monitor two different MQTT topics for data,
# check if the received data matches two predefined 'commands'
import paho.mqtt.client as mqtt
import ast
import datetime
import yaml
import collections
import json
import ssl
from mintsXU4 import mintsSensorReader as mSR
from mintsXU4 import mintsDefinitions as mD
from mintsXU4 import mintsLatest as mL
import sys
# MQTT connection settings sourced from the project definitions module.
mqttPort = mD.mqttPort
mqttBroker = mD.mqttBroker
mqttCredentialsFile = mD.mqttCredentialsFile
fileIn = mD.centralNodesFile
tlsCert = mD.tlsCert

# For mqtt: load broker credentials and node/sensor topology.
# (fix) yaml.safe_load replaces the deprecated (and unsafe on untrusted
# input) yaml.load(stream) call, and `with` closes the file handles that
# yaml.load(open(...)) previously leaked. Plain-scalar config files load
# identically with the safe loader.
with open(mqttCredentialsFile) as credentialsFile:
    credentials = yaml.safe_load(credentialsFile)
with open(fileIn) as detailFile:
    transmitDetail = yaml.safe_load(detailFile)

connected = False  # Stores the connection status
broker = mqttBroker
port = mqttPort  # Secure (TLS) port
mqttUN = credentials['mqtt']['username']
mqttPW = credentials['mqtt']['password']
transmitters = transmitDetail['nodes']   # node IDs to subscribe to
sensors = transmitDetail['sensors']      # sensor IDs published per node

# Preserve the key order of incoming JSON payloads.
decoder = json.JSONDecoder(object_pairs_hook=collections.OrderedDict)
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    """Callback fired when the client receives CONNACK from the broker.

    Subscribing here (rather than once at startup) means all topic
    subscriptions are renewed automatically if the connection drops and is
    re-established.
    """
    print("Connected with result code "+str(rc))
    # Subscribe to every <node>/<sensor> topic combination.
    for transmitter in transmitters:
        for sensor in sensors:
            topic = transmitter+"/"+ sensor
            client.subscribe(topic)
            # (fix) Corrected typo in log message ("Subscrbing").
            print("Subscribing to Topic: "+ topic)
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    """Callback for PUBLISH messages: parse the payload and persist it.

    The topic is expected to be "<nodeID>/<sensorID>" and the payload a
    JSON object (key order preserved by the module-level decoder). The
    record is appended to a dated CSV file and mirrored to the "latest"
    JSON snapshot.
    """
    print()
    print(" - - - MINTS DATA RECEIVED - - - ")
    print()
    try:
        [nodeID,sensorID ] = msg.topic.split('/')
        sensorDictionary = decoder.decode(msg.payload.decode("utf-8","ignore"))
        print("Node ID :" + nodeID)
        print("Sensor ID :" + sensorID)
        print("Data : " + str(sensorDictionary))
        # FRG001 publishes timestamps without fractional seconds.
        if sensorID== "FRG001":
            dateTime = datetime.datetime.strptime(sensorDictionary["dateTime"], '%Y-%m-%d %H:%M:%S')
        else:
            dateTime = datetime.datetime.strptime(sensorDictionary["dateTime"], '%Y-%m-%d %H:%M:%S.%f')
        writePath = mSR.getWritePathMQTT(nodeID,sensorID,dateTime)
        exists = mSR.directoryCheck(writePath)
        # (fix) Removed a redundant second decode of the same payload here.
        print("Writing MQTT Data")
        print(writePath)
        mSR.writeCSV2(writePath,sensorDictionary,exists)
        mL.writeJSONLatestMQTT(sensorDictionary,nodeID,sensorID)
    except Exception as e:
        # Broad catch is deliberate: a single malformed message must not
        # kill the subscriber loop.
        # (fix) Message corrected - this handler stores received data, it
        # does not publish.
        print("[ERROR] Could not process received data, error: {}".format(e))
# Create an MQTT client and attach our routines to it.
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.username_pw_set(mqttUN,mqttPW)
# TLS transport: require a broker certificate chain (CERT_REQUIRED) rooted
# at tlsCert, TLS 1.2 only, default cipher suite.
client.tls_set(ca_certs=tlsCert, certfile=None,
               keyfile=None, cert_reqs=ssl.CERT_REQUIRED,
               tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
# SECURITY NOTE(review): tls_insecure_set(True) disables hostname
# verification of the broker certificate, which permits man-in-the-middle
# attacks even with CERT_REQUIRED above. Confirm whether this is really
# needed for this broker (e.g. certificate CN mismatch) or can be removed.
client.tls_insecure_set(True)
# Connect with a 60 s keepalive and block forever dispatching callbacks.
client.connect(broker, port, 60)
client.loop_forever()
| 34.659574
| 104
| 0.677716
|
b3db455b5d7cba8d4985eefad687e425837790c5
| 12,227
|
py
|
Python
|
batchglm/unit_test/test_acc_glm_all_numpy.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/unit_test/test_acc_glm_all_numpy.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/unit_test/test_acc_glm_all_numpy.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import numpy as np
import scipy.sparse
import unittest
import batchglm.api as glm
glm.setup_logging(verbosity="WARNING", stream="STDOUT")
logger = logging.getLogger(__name__)
class _TestAccuracyGlmAllEstim:
def __init__(
self,
simulator,
quick_scale,
noise_model,
sparse,
init_mode
):
if noise_model is None:
raise ValueError("noise_model is None")
else:
if noise_model == "nb":
from batchglm.api.models.numpy.glm_nb import Estimator, InputDataGLM
else:
raise ValueError("noise_model not recognized")
if sparse:
input_data = InputDataGLM(
data=scipy.sparse.csr_matrix(simulator.input_data.x),
design_loc=simulator.input_data.design_loc,
design_scale=simulator.input_data.design_scale,
design_loc_names=simulator.input_data.design_loc_names,
design_scale_names=simulator.input_data.design_scale_names,
constraints_loc=simulator.input_data.constraints_loc,
constraints_scale=simulator.input_data.constraints_scale,
size_factors=simulator.input_data.size_factors,
chunk_size_cells=int(1e9),
chunk_size_genes=2
)
else:
input_data = InputDataGLM(
data=simulator.input_data.x,
design_loc=simulator.input_data.design_loc,
design_scale=simulator.input_data.design_scale,
design_loc_names=simulator.input_data.design_loc_names,
design_scale_names=simulator.input_data.design_scale_names,
constraints_loc=simulator.input_data.constraints_loc,
constraints_scale=simulator.input_data.constraints_scale,
size_factors=simulator.input_data.size_factors,
chunk_size_cells=int(1e9),
chunk_size_genes=2
)
self.estimator = Estimator(
input_data=input_data,
quick_scale=quick_scale,
init_a=init_mode,
init_b=init_mode
)
self.sim = simulator
def estimate(
self
):
self.estimator.initialize()
self.estimator.train_sequence(training_strategy="DEFAULT")
def eval_estimation(
self,
train_loc,
train_scale
):
threshold_dev_a = 0.2
threshold_dev_b = 0.2
threshold_std_a = 1
threshold_std_b = 1
success = True
if train_loc:
mean_rel_dev_a = np.mean((self.estimator.model.a_var - self.sim.a_var) / self.sim.a_var)
std_rel_dev_a = np.std((self.estimator.model.a_var - self.sim.a_var) / self.sim.a_var)
logging.getLogger("batchglm").info("mean_rel_dev_a %f" % mean_rel_dev_a)
logging.getLogger("batchglm").info("std_rel_dev_a %f" % std_rel_dev_a)
if np.abs(mean_rel_dev_a) > threshold_dev_a or std_rel_dev_a > threshold_std_a:
success = False
if train_scale:
mean_rel_dev_b = np.mean((self.estimator.model.b_var - self.sim.b_var) / self.sim.b_var)
std_rel_dev_b = np.std((self.estimator.model.b_var - self.sim.b_var) / self.sim.b_var)
logging.getLogger("batchglm").info("mean_rel_dev_b %f" % mean_rel_dev_b)
logging.getLogger("batchglm").info("std_rel_dev_b %f" % std_rel_dev_b)
if np.abs(mean_rel_dev_b) > threshold_dev_b or std_rel_dev_b > threshold_std_b:
success = False
return success
class _TestAccuracyGlmAll(
    unittest.TestCase
):
    """
    Test whether optimizers yield exact results.
    Accuracy is evaluated via deviation from the simulated ground truth.
    The unit tests test individual training graphs and multiple optimizers
    (incl. one tensorflow internal optimizer and newton-rhapson)
    for each training graph. The training graphs tested are as follows:
    - full data model
    - train a and b model: test_full_global_a_and_b()
    - train a model only: test_full_global_a_only()
    - train b model only: test_full_global_b_only()
    - batched data model
    - train a and b model: test_batched_global_a_and_b()
    - train a model only: test_batched_global_a_only()
    - train b model only: test_batched_global_b_only()
    The unit tests throw an assertion error if the required accuracy is
    not met. Accuracy thresholds are fairly lenient so that unit tests
    pass even with noise inherent in fast optimisation and random
    initialisation in simulation. Still, large biases (i.e. graph errors)
    should be discovered here.
    Note on settings by optimiser:
    IRLS_TR: Needs slow TR collapse to converge.
    """
    # Set by concrete subclasses (e.g. "nb", "norm", "beta") before simulate().
    noise_model: str
    # Maps noise model name -> list of optimizer names to test; filled in basic_test().
    optims_tested: dict
    def simulate(self):
        """Generate both simulated data sets (with and without batch effects)."""
        self.simulate1()
        self.simulate2()
    def get_simulator(self):
        """Return a fresh Simulator instance matching self.noise_model."""
        if self.noise_model is None:
            raise ValueError("noise_model is None")
        else:
            if self.noise_model == "nb":
                from batchglm.api.models.numpy.glm_nb import Simulator
            elif self.noise_model == "norm":
                from batchglm.api.models import Simulator
            elif self.noise_model == "beta":
                from batchglm.api.models.numpy.glm_beta import Simulator
            else:
                raise ValueError("noise_model not recognized")
            return Simulator(num_observations=1000, num_features=10)
    def simulate1(self):
        """Simulate data set 1: 2 batches x 2 conditions (used when training the location model)."""
        self.sim1 = self.get_simulator()
        self.sim1.generate_sample_description(num_batches=2, num_conditions=2, intercept_scale=True)
        # Noise-model-specific draw of the average parameter.
        def rand_fn_ave(shape):
            if self.noise_model in ["nb", "norm"]:
                theta = np.random.uniform(10, 1000, shape)
            elif self.noise_model in ["beta"]:
                theta = np.random.uniform(0.1, 0.7, shape)
            else:
                raise ValueError("noise model not recognized")
            return theta
        # Noise-model-specific draw of the location coefficients.
        def rand_fn_loc(shape):
            if self.noise_model in ["nb", "norm"]:
                theta = np.random.uniform(1, 3, shape)
            elif self.noise_model in ["beta"]:
                theta = np.random.uniform(0, 0.15, shape)
            else:
                raise ValueError("noise model not recognized")
            return theta
        # Noise-model-specific draw of the scale coefficients.
        def rand_fn_scale(shape):
            if self.noise_model in ["nb"]:
                theta = np.random.uniform(1, 3, shape)
            elif self.noise_model in ["norm"]:
                theta = np.random.uniform(1, 3, shape)
            elif self.noise_model in ["beta"]:
                theta = np.random.uniform(0, 0.15, shape)
            else:
                raise ValueError("noise model not recognized")
            return theta
        self.sim1.generate_params(
            rand_fn_ave=lambda shape: rand_fn_ave(shape),
            rand_fn_loc=lambda shape: rand_fn_loc(shape),
            rand_fn_scale=lambda shape: rand_fn_scale(shape)
        )
        self.sim1.generate_data()
    def simulate2(self):
        """Simulate data set 2: no batch effect, constant coefficients (used when the location model is fixed)."""
        self.sim2 = self.get_simulator()
        self.sim2.generate_sample_description(num_batches=0, num_conditions=2, intercept_scale=True)
        def rand_fn_ave(shape):
            if self.noise_model in ["nb", "norm"]:
                theta = np.random.uniform(10, 1000, shape)
            elif self.noise_model in ["beta"]:
                theta = np.random.uniform(0.1, 0.9, shape)
            else:
                raise ValueError("noise model not recognized")
            return theta
        def rand_fn_loc(shape):
            if self.noise_model in ["nb", "norm"]:
                theta = np.ones(shape)
            elif self.noise_model in ["beta"]:
                theta = np.zeros(shape)+0.05
            else:
                raise ValueError("noise model not recognized")
            return theta
        def rand_fn_scale(shape):
            if self.noise_model in ["nb"]:
                theta = np.ones(shape)
            elif self.noise_model in ["norm"]:
                theta = np.ones(shape)
            elif self.noise_model in ["beta"]:
                theta = np.ones(shape) - 0.8
            else:
                raise ValueError("noise model not recognized")
            return theta
        self.sim2.generate_params(
            rand_fn_ave=lambda shape: rand_fn_ave(shape),
            rand_fn_loc=lambda shape: rand_fn_loc(shape),
            rand_fn_scale=lambda shape: rand_fn_scale(shape)
        )
        self.sim2.generate_data()
    def simulator(self, train_loc):
        """Pick the simulated data set: sim1 when the location model is trained, else sim2."""
        if train_loc:
            return self.sim1
        else:
            return self.sim2
    def basic_test(
            self,
            batched,
            train_loc,
            train_scale,
            sparse
    ):
        """
        Run one estimation and assert that the fit matches the simulated ground truth.

        :param batched: whether to use the batched data model
            NOTE(review): currently unused in this body — verify intended wiring.
        :param train_loc: whether the location model is trained
        :param train_scale: whether the scale model is trained
        :param sparse: whether the input data is given as a sparse matrix
        """
        self.optims_tested = {
            "nb": ["IRLS"],
            "beta": ["IRLS"],
            "norm": ["IRLS"]
        }
        init_mode = "standard"
        for algo in self.optims_tested[self.noise_model]:
            logger.info("algorithm: %s" % algo)
            # NOTE(review): `acc` is assigned but never used below.
            acc = 1e-14
            # Trust-region / solver settings shared by all tested optimizers.
            glm.pkg_constants.TRUST_REGION_T1 = 0.5
            glm.pkg_constants.TRUST_REGION_T2 = 1.5
            glm.pkg_constants.CHOLESKY_LSTSQS = True
            glm.pkg_constants.CHOLESKY_LSTSQS_BATCHED = True
            glm.pkg_constants.JACOBIAN_MODE = "analytic"
            estimator = _TestAccuracyGlmAllEstim(
                simulator=self.simulator(train_loc=train_loc),
                quick_scale=False if train_scale else True,
                noise_model=self.noise_model,
                sparse=sparse,
                init_mode=init_mode
            )
            estimator.estimate()
            estimator.estimator.finalize()
            success = estimator.eval_estimation(
                train_loc=train_loc,
                train_scale=train_scale,
            )
            assert success, "%s did not yield exact results" % algo
        return True
    def _test_full_a_and_b(self, sparse):
        # Full data; train both location (a) and scale (b) models.
        return self.basic_test(
            batched=False,
            train_loc=True,
            train_scale=True,
            sparse=sparse
        )
    def _test_full_a_only(self, sparse):
        # Full data; train the location (a) model only.
        return self.basic_test(
            batched=False,
            train_loc=True,
            train_scale=False,
            sparse=sparse
        )
    def _test_full_b_only(self, sparse):
        # Full data; train the scale (b) model only.
        return self.basic_test(
            batched=False,
            train_loc=False,
            train_scale=True,
            sparse=sparse
        )
    def _test_batched_a_and_b(self, sparse):
        # Batched data; train both location (a) and scale (b) models.
        return self.basic_test(
            batched=True,
            train_loc=True,
            train_scale=True,
            sparse=sparse
        )
    def _test_batched_a_only(self, sparse):
        # Batched data; train the location (a) model only.
        return self.basic_test(
            batched=True,
            train_loc=True,
            train_scale=False,
            sparse=sparse
        )
    def _test_batched_b_only(self, sparse):
        # Batched data; train the scale (b) model only.
        return self.basic_test(
            batched=True,
            train_loc=False,
            train_scale=True,
            sparse=sparse
        )
    def _test_full(self, sparse):
        """Run all three full-data training-graph variants."""
        self._test_full_a_and_b(sparse=sparse)
        self._test_full_a_only(sparse=sparse)
        self._test_full_b_only(sparse=sparse)
    def _test_batched(self, sparse):
        """Run all three batched-data training-graph variants."""
        self._test_batched_a_and_b(sparse=sparse)
        self._test_batched_a_only(sparse=sparse)
        self._test_batched_b_only(sparse=sparse)
class TestAccuracyGlmNb(
    _TestAccuracyGlmAll,
    unittest.TestCase
):
    """
    Accuracy tests for negative binomial (NB) distributed data.
    """
    def test_full_nb(self):
        """Run the full-data accuracy tests for the NB noise model, dense and sparse."""
        logging.getLogger("batchglm").setLevel(logging.INFO)
        logger.error("TestAccuracyGlmNb.test_full_nb()")
        np.random.seed(1)
        self.noise_model = "nb"
        self.simulate()
        # Exercise both input representations with the same simulated data.
        for use_sparse in (False, True):
            self._test_full(sparse=use_sparse)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 33.683196
| 100
| 0.594831
|
2d4e58e65c13021b6aef301648f4edb2f65c41db
| 608
|
py
|
Python
|
doc/samples/custom_loader.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 1,390
|
2015-01-01T21:11:47.000Z
|
2022-03-31T11:35:44.000Z
|
doc/samples/custom_loader.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 393
|
2015-01-05T11:18:29.000Z
|
2022-03-20T11:46:46.000Z
|
doc/samples/custom_loader.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 176
|
2015-01-07T16:58:56.000Z
|
2022-03-28T12:12:11.000Z
|
#! /usr/bin/env python3
import sys
from doit.task import dict_to_task
from doit.cmd_base import TaskLoader2
from doit.doit_cmd import DoitMain
# Task definition provided directly in Python code instead of a dodo.py file;
# converted to a doit Task by MyLoader.load_tasks() below.
my_builtin_task = {
    'name': 'sample_task',
    'actions': ['echo hello from built in'],
    'doc': 'sample doc',
}
class MyLoader(TaskLoader2):
    """Custom doit task loader serving tasks defined in code (no dodo.py needed)."""
    def setup(self, opt_values):
        # This loader defines no command-line options, so nothing to process.
        pass
    def load_doit_config(self):
        # Global doit configuration applied to all loaded tasks.
        return {'verbosity': 2}
    def load_tasks(self, cmd, pos_args):
        # Convert the plain-dict task description into a doit Task object.
        task_list = [dict_to_task(my_builtin_task)]
        return task_list
# Run doit with the custom loader, forwarding command-line arguments.
if __name__ == "__main__":
    sys.exit(DoitMain(MyLoader()).run(sys.argv[1:]))
| 20.266667
| 52
| 0.669408
|
32d9cda0338d1c232b876f78fc4c71fa21a9997f
| 2,461
|
py
|
Python
|
adbForTest/Utils/PackUtils.py
|
LiuTianen/PackManage
|
4b067954cc223baa14569a6f1517954b9cdb968f
|
[
"MIT"
] | null | null | null |
adbForTest/Utils/PackUtils.py
|
LiuTianen/PackManage
|
4b067954cc223baa14569a6f1517954b9cdb968f
|
[
"MIT"
] | null | null | null |
adbForTest/Utils/PackUtils.py
|
LiuTianen/PackManage
|
4b067954cc223baa14569a6f1517954b9cdb968f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#coding=utf-8
"""
使用Python获取待测APP的相关参数信息
"""
import os
import subprocess
from Utils.Common import Common
class APP:
    """Query metadata of the APK under test (size, version, name, package, activity).

    All badging information is obtained by running the ``aapt`` command line
    supplied by :class:`Common` in a shell and parsing its stdout.
    """

    def _run_cmd(self, cmd):
        """Run *cmd* in a shell and return its stdout decoded to text.

        :param cmd: shell command line to execute
        :return: decoded stdout, or "" when the command produced no output
        """
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             stdin=subprocess.PIPE, shell=True)
        (output, err) = p.communicate()
        # Decode up-front so callers compare str to str. The previous code
        # compared the raw bytes to "" which is never equal in Python 3, so
        # empty command output slipped past the guard and later crashed with
        # an IndexError when the parsed token list was indexed.
        return output.decode('utf-8', errors='replace') if output else ""

    def get_apk_size(self):
        """Return the APK file size formatted as a string, e.g. "12.34M"."""
        size = os.path.getsize(Common().apkPath()) / (1024 * 1000)
        return ('%.2f' % size) + "M"  # keep two decimal places

    def get_apk_version(self):
        """Return the versionName from `aapt dump badging` ("" if unavailable)."""
        output = self._run_cmd(Common().aaPath())
        result = ""
        if output != "":
            # Fourth whitespace token looks like versionName='x.y.z'.
            result = output.split()[3][12:]
            result = result.split("'")[1]
        return result

    def get_apk_name(self):
        """Return the application label ("" if unavailable).

        NOTE(review): relies on Windows-only `findstr`; will not work on POSIX.
        """
        cmd = Common().aaPath() + " | findstr application-label-zu: "
        result = ""
        output = self._run_cmd(cmd)
        if output != "":
            result = output.split("'")[1]
        return result

    def get_apk_package(self):
        """Return the package name ("" if unavailable)."""
        cmd = Common().aaPath() + " | findstr package:"
        result = ""
        output = self._run_cmd(cmd)
        if output != "":
            # Second token looks like name='com.example.app'; strip "name='" and trailing "'".
            result = output.split()[1][6:-1]
        return result

    def get_apk_activity(self):
        """Return the launchable activity ("" if unavailable)."""
        cmd = Common().aaPath() + " | findstr launchable-activity:"
        result = ""
        output = self._run_cmd(cmd)
        if output != "":
            result = output.split()[1][6:-1]
        return result
# Manual smoke checks; the individual queries are left commented out.
if __name__ == '__main__':
    APPInfo = APP()
    # print("app name:", APPInfo.get_apk_name())
    # print("apk file size:", APPInfo.get_apk_size())
    # print("apk version:", APPInfo.get_apk_version())
    # print("apk package:", APPInfo.get_apk_package())
    # print("apk launch activity:", APPInfo.get_apk_activity())
| 31.151899
| 69
| 0.536774
|
3283f4830199367a1dc3a0e987a97f9320ab55ba
| 1,548
|
py
|
Python
|
end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py
|
barjomet/openapi-python-client
|
3d0b96478a81a84468f9f34e70c715a486915108
|
[
"MIT"
] | null | null | null |
end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py
|
barjomet/openapi-python-client
|
3d0b96478a81a84468f9f34e70c715a486915108
|
[
"MIT"
] | null | null | null |
end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py
|
barjomet/openapi-python-client
|
3d0b96478a81a84468f9f34e70c715a486915108
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, Union
import httpx
from ...client import Client
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    common: Union[Unset, str] = UNSET,
) -> Dict[str, Any]:
    """Assemble the httpx request kwargs for POST /common_parameters."""
    query: Dict[str, Any] = {
        "common": common,
    }
    # Drop query parameters that were not supplied (UNSET) or are None.
    query = {key: val for key, val in query.items() if val is not UNSET and val is not None}
    return {
        "url": "{}/common_parameters".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query,
    }
def _build_response(*, response: httpx.Response) -> Response[Any]:
    """Wrap a raw httpx response in the generated Response container (no body parsing)."""
    wrapped: Response[Any] = Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=None,
    )
    return wrapped
def sync_detailed(
    *,
    client: Client,
    common: Union[Unset, str] = UNSET,
) -> Response[Any]:
    """Synchronously POST /common_parameters and return the wrapped response."""
    request_kwargs = _get_kwargs(client=client, common=common)
    raw_response = httpx.post(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio_detailed(
    *,
    client: Client,
    common: Union[Unset, str] = UNSET,
) -> Response[Any]:
    """Asynchronously POST /common_parameters and return the wrapped response."""
    request_kwargs = _get_kwargs(client=client, common=common)
    async with httpx.AsyncClient() as _client:
        raw_response = await _client.post(**request_kwargs)
    return _build_response(response=raw_response)
| 21.205479
| 82
| 0.609173
|
fe63e9208c4c60b5bd16357a5e115d0f987793cf
| 11,307
|
py
|
Python
|
Pyrado/pyrado/algorithms/step_based/a2c.py
|
swami1995/SimuRLacra
|
795e6ea45fbb722242ddb0c0ea5c62432826411e
|
[
"DOC",
"Zlib",
"BSD-3-Clause"
] | 52
|
2020-05-02T13:55:09.000Z
|
2022-03-09T14:49:36.000Z
|
Pyrado/pyrado/algorithms/step_based/a2c.py
|
swami1995/SimuRLacra
|
795e6ea45fbb722242ddb0c0ea5c62432826411e
|
[
"DOC",
"Zlib",
"BSD-3-Clause"
] | 40
|
2020-09-01T15:19:22.000Z
|
2021-11-02T14:51:41.000Z
|
Pyrado/pyrado/algorithms/step_based/a2c.py
|
swami1995/SimuRLacra
|
795e6ea45fbb722242ddb0c0ea5c62432826411e
|
[
"DOC",
"Zlib",
"BSD-3-Clause"
] | 13
|
2020-07-03T11:39:21.000Z
|
2022-02-20T01:12:42.000Z
|
# Copyright (c) 2020, Fabio Muratore, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Fabio Muratore, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL FABIO MURATORE, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import sys
from typing import Optional, Sequence
import numpy as np
import torch as to
from torch.distributions.kl import kl_divergence
from tqdm import tqdm
import pyrado
from pyrado.algorithms.base import Algorithm
from pyrado.algorithms.step_based.actor_critic import ActorCritic
from pyrado.algorithms.step_based.gae import GAE
from pyrado.algorithms.utils import compute_action_statistics, num_iter_from_rollouts
from pyrado.environments.base import Env
from pyrado.exploration.stochastic_action import NormalActNoiseExplStrat
from pyrado.logger.step import StepLogger
from pyrado.policies.base import Policy
from pyrado.policies.recurrent.base import RecurrentPolicy
from pyrado.sampling.parallel_rollout_sampler import ParallelRolloutSampler
from pyrado.sampling.step_sequence import StepSequence, discounted_values
from pyrado.utils.math import explained_var
class A2C(ActorCritic):
    """Advantage Actor Critic (A2C)"""
    # Unique algorithm identifier (used e.g. for snapshot directories).
    name: str = "a2c"
    def __init__(
        self,
        save_dir: pyrado.PathLike,
        env: Env,
        policy: Policy,
        critic: GAE,
        max_iter: int,
        min_rollouts: int = None,
        min_steps: int = None,
        vfcn_coeff: float = 0.5,
        entropy_coeff: float = 1e-3,
        batch_size: int = 32,
        std_init: float = 1.0,
        max_grad_norm: Optional[float] = None,
        num_workers: int = 4,
        lr: float = 5e-4,
        lr_scheduler=None,
        lr_scheduler_hparam: Optional[dict] = None,
        logger: StepLogger = None,
    ):
        r"""
        Constructor
        :param save_dir: directory to save the snapshots i.e. the results in
        :param env: the environment which the policy operates
        :param policy: policy to be updated
        :param critic: advantage estimation function $A(s,a) = Q(s,a) - V(s)$
        :param max_iter: maximum number of iterations (i.e. policy updates) that this algorithm runs
        :param min_rollouts: minimum number of rollouts sampled per policy update batch
        :param min_steps: minimum number of state transitions sampled per policy update batch
        :param vfcn_coeff: weighting factor of the value function term in the combined loss
        :param entropy_coeff: weighting factor of the entropy term in the combined loss
        :param batch_size: number of samples per policy update batch
        :param std_init: initial standard deviation on the actions for the exploration noise
        :param max_grad_norm: maximum L2 norm of the gradients for clipping, set to `None` to disable gradient clipping
        :param num_workers: number of environments for parallel sampling
        :param lr: (initial) learning rate for the optimizer which can be by modified by the scheduler.
                   By default, the learning rate is constant.
        :param lr_scheduler: learning rate scheduler that does one step per epoch (pass through the whole data set)
        :param lr_scheduler_hparam: hyper-parameters for the learning rate scheduler
        :param logger: logger for every step of the algorithm, if `None` the default logger will be created
        """
        # Call ActorCritic's constructor
        super().__init__(env, policy, critic, save_dir, max_iter, logger)
        # Store the inputs
        self.min_rollouts = min_rollouts
        self.min_steps = min_steps
        self.vfcn_coeff = vfcn_coeff
        self.entropy_coeff = entropy_coeff
        self.batch_size = batch_size
        self.max_grad_norm = max_grad_norm
        # Initialize
        self._expl_strat = NormalActNoiseExplStrat(self._policy, std_init=std_init)
        self._sampler = ParallelRolloutSampler(
            env, self.expl_strat, num_workers=num_workers, min_steps=min_steps, min_rollouts=min_rollouts
        )
        # One optimizer over policy, exploration noise, and value function parameters.
        self.optim = to.optim.RMSprop(
            [
                {"params": self._policy.parameters()},
                {"params": self.expl_strat.noise.parameters()},
                {"params": self._critic.vfcn.parameters()},
            ],
            lr=lr,
            eps=1e-5,
        )
        self._lr_scheduler = lr_scheduler
        self._lr_scheduler_hparam = lr_scheduler_hparam
        if lr_scheduler is not None:
            self._lr_scheduler = lr_scheduler(self.optim, **lr_scheduler_hparam)
    def loss_fcn(self, log_probs: to.Tensor, adv: to.Tensor, v_pred: to.Tensor, v_targ: to.Tensor):
        """
        A2C loss function
        :param log_probs: logarithm of the probabilities of the taken actions
        :param adv: advantage values
        :param v_pred: predicted value function values
        :param v_targ: target value function values
        :return: combined loss value
        """
        # Policy, value function, and entropy losses
        policy_loss = -to.mean(adv.to(self.policy.device) * log_probs)
        vfcn_loss = 0.5 * to.mean(
            to.pow(v_targ.to(self.policy.device) - v_pred.to(self.policy.device), 2)
        )  # former v_targ.cpu() - v_pred.cpu()
        entropy_mean = to.mean(self.expl_strat.noise.get_entropy())
        # Return the combined loss
        return policy_loss + self.vfcn_coeff * vfcn_loss - self.entropy_coeff * entropy_mean
    def update(self, rollouts: Sequence[StepSequence]):
        """
        Perform one A2C update of the policy and value function parameters.

        :param rollouts: batch of recorded rollouts to learn from
        """
        # Turn the batch of rollouts into a list of steps
        concat_ros = StepSequence.concat(rollouts)
        concat_ros.torch(data_type=to.get_default_dtype())
        # Compute the value targets (empirical discounted returns) for all samples before fitting the V-fcn parameters
        adv = self._critic.gae(concat_ros)  # done with to.no_grad()
        v_targ = (
            discounted_values(rollouts, self._critic.gamma).view(-1, 1).to(self.policy.device)
        )  # empirical discounted returns
        with to.no_grad():
            # Compute value predictions and the GAE using the old (before the updates) value function approximator
            v_pred = self._critic.values(concat_ros)
            # Compute the action probabilities using the old (before update) policy
            act_stats = compute_action_statistics(concat_ros, self._expl_strat)
            log_probs_old = act_stats.log_probs
            act_distr_old = act_stats.act_distr
            loss_before = self.loss_fcn(log_probs_old, adv, v_pred, v_targ)
            self.logger.add_value("loss before", loss_before, 4)
        concat_ros.add_data("adv", adv)
        concat_ros.add_data("v_targ", v_targ)
        # For logging the gradients' norms
        policy_grad_norm = []
        for batch in tqdm(
            concat_ros.split_shuffled_batches(
                self.batch_size,
                complete_rollouts=self._policy.is_recurrent or isinstance(self._critic.vfcn, RecurrentPolicy),
            ),
            total=num_iter_from_rollouts(None, concat_ros, self.batch_size),
            desc="Updating",
            unit="batches",
            file=sys.stdout,
            leave=False,
        ):
            # Reset the gradients
            self.optim.zero_grad()
            # Compute log of the action probabilities for the mini-batch
            log_probs = compute_action_statistics(batch, self._expl_strat).log_probs
            # Compute value predictions for the mini-batch
            v_pred = self._critic.values(batch)
            # Compute combined loss and backpropagate
            loss = self.loss_fcn(log_probs, batch.adv, v_pred, batch.v_targ)
            loss.backward()
            # Clip the gradients if desired
            policy_grad_norm.append(Algorithm.clip_grad(self.expl_strat.policy, self.max_grad_norm))
            # Call optimizer
            self.optim.step()
        # Update the learning rate if a scheduler has been specified
        if self._lr_scheduler is not None:
            self._lr_scheduler.step()
        # NOTE(review): the error message below reads self.expl_strat.std while the
        # check uses self.expl_strat.noise.std — confirm the strategy exposes .std.
        if to.isnan(self.expl_strat.noise.std).any():
            raise RuntimeError(
                f"At least one exploration parameter became NaN! The exploration parameters are"
                f"\n{self.expl_strat.std.item()}"
            )
        # Logging
        with to.no_grad():
            # Compute value predictions and the GAE using the new (after the updates) value function approximator
            v_pred = self._critic.values(concat_ros).to(self.policy.device)
            adv = self._critic.gae(concat_ros)  # done with to.no_grad()
            # Compute the action probabilities using the new (after the updates) policy
            act_stats = compute_action_statistics(concat_ros, self._expl_strat)
            log_probs_new = act_stats.log_probs
            act_distr_new = act_stats.act_distr
            loss_after = self.loss_fcn(log_probs_new, adv, v_pred, v_targ)
            kl_avg = to.mean(kl_divergence(act_distr_old, act_distr_new))  # mean seeking a.k.a. inclusive KL
            explvar = explained_var(v_pred, v_targ)  # values close to 1 are desired
            self.logger.add_value("loss after", loss_after, 4)
            self.logger.add_value("KL(old_new)", kl_avg, 4)
            self.logger.add_value("explained var", explvar, 4)
        ent = self.expl_strat.noise.get_entropy()
        self.logger.add_value("avg expl strat std", to.mean(self.expl_strat.noise.std), 4)
        self.logger.add_value("expl strat entropy", to.mean(ent), 4)
        self.logger.add_value("avg grad norm policy", np.mean(policy_grad_norm), 4)
        if self._lr_scheduler is not None:
            self.logger.add_value("avg lr", np.mean(self._lr_scheduler.get_last_lr()), 6)
| 47.1125
| 119
| 0.683824
|
e8705f68c8d3fe2062eff1bb0ad8737418dd2f6d
| 3,253
|
py
|
Python
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_05_10/aio/_configuration.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | 1
|
2022-03-09T08:59:13.000Z
|
2022-03-09T08:59:13.000Z
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_05_10/aio/_configuration.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | null | null | null |
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_05_10/aio/_configuration.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
# NOTE: this class is generated by AutoRest; manual edits will be lost on regeneration.
class ResourceManagementClientConfiguration(Configuration):
    """Configuration for ResourceManagementClient.
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :raises ValueError: if ``credential`` or ``subscription_id`` is None
    """
    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        super(ResourceManagementClientConfiguration, self).__init__(**kwargs)
        # Fail fast on missing required parameters.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        self.credential = credential
        self.subscription_id = subscription_id
        # API version and OAuth scope are fixed for this generated client.
        self.api_version = "2019-05-10"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
        self._configure(**kwargs)
    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Each pipeline policy may be overridden via kwargs; otherwise the default is used.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Default to AAD challenge authentication when a credential is present
        # and no explicit authentication policy was supplied.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
| 47.838235
| 130
| 0.701506
|
3abad2a6cb91b98718b33d5d90d6b544a6ff781b
| 3,494
|
py
|
Python
|
onnx/backend/test/case/node/cumsum.py
|
justinchuby/onnx
|
805ae1e634697e37b43701e585c9c253a29ce076
|
[
"Apache-2.0"
] | 1
|
2022-03-04T03:29:37.000Z
|
2022-03-04T03:29:37.000Z
|
onnx/backend/test/case/node/cumsum.py
|
justinchuby/onnx
|
805ae1e634697e37b43701e585c9c253a29ce076
|
[
"Apache-2.0"
] | null | null | null |
onnx/backend/test/case/node/cumsum.py
|
justinchuby/onnx
|
805ae1e634697e37b43701e585c9c253a29ce076
|
[
"Apache-2.0"
] | 1
|
2022-03-27T19:17:02.000Z
|
2022-03-27T19:17:02.000Z
|
# SPDX-License-Identifier: Apache-2.0
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class CumSum(Base):
    """Backend test cases for the ONNX CumSum operator."""

    @staticmethod
    def _expect_cumsum(test_name, x, axis, y, **node_attrs):
        # Build a CumSum node with the given attributes and register the
        # expected input/output pair under *test_name*.
        node = onnx.helper.make_node(
            'CumSum',
            inputs=['x', 'axis'],
            outputs=['y'],
            **node_attrs
        )
        expect(node, inputs=[x, axis], outputs=[y], name=test_name)

    @staticmethod
    def export_cumsum_1d() -> None:
        x = np.array([1., 2., 3., 4., 5.]).astype(np.float64)
        y = np.array([1., 3., 6., 10., 15.]).astype(np.float64)
        CumSum._expect_cumsum('test_cumsum_1d', x, np.int32(0), y)

    @staticmethod
    def export_cumsum_1d_exclusive() -> None:
        x = np.array([1., 2., 3., 4., 5.]).astype(np.float64)
        y = np.array([0., 1., 3., 6., 10.]).astype(np.float64)
        CumSum._expect_cumsum('test_cumsum_1d_exclusive', x, np.int32(0), y,
                              exclusive=1)

    @staticmethod
    def export_cumsum_1d_reverse() -> None:
        x = np.array([1., 2., 3., 4., 5.]).astype(np.float64)
        y = np.array([15., 14., 12., 9., 5.]).astype(np.float64)
        CumSum._expect_cumsum('test_cumsum_1d_reverse', x, np.int32(0), y,
                              reverse=1)

    @staticmethod
    def export_cumsum_1d_reverse_exclusive() -> None:
        x = np.array([1., 2., 3., 4., 5.]).astype(np.float64)
        y = np.array([14., 12., 9., 5., 0.]).astype(np.float64)
        CumSum._expect_cumsum('test_cumsum_1d_reverse_exclusive', x, np.int32(0), y,
                              reverse=1, exclusive=1)

    @staticmethod
    def export_cumsum_2d_axis_0() -> None:
        x = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float64).reshape((2, 3))
        y = np.array([1., 2., 3., 5., 7., 9.]).astype(np.float64).reshape((2, 3))
        CumSum._expect_cumsum('test_cumsum_2d_axis_0', x, np.int32(0), y)

    @staticmethod
    def export_cumsum_2d_axis_1() -> None:
        x = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float64).reshape((2, 3))
        y = np.array([1., 3., 6., 4., 9., 15.]).astype(np.float64).reshape((2, 3))
        CumSum._expect_cumsum('test_cumsum_2d_axis_1', x, np.int32(1), y)

    @staticmethod
    def export_cumsum_2d_negative_axis() -> None:
        x = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float64).reshape((2, 3))
        y = np.array([1., 3., 6., 4., 9., 15.]).astype(np.float64).reshape((2, 3))
        CumSum._expect_cumsum('test_cumsum_2d_negative_axis', x, np.int32(-1), y)
| 32.962264
| 82
| 0.495993
|
950980ee6f78b97a9e8ddadee24e2942a883396d
| 5,004
|
py
|
Python
|
julesDataHandler.py
|
tquaife/julesML
|
5b3c01a7c947c2798cc15e74356eb08918701b80
|
[
"Apache-2.0"
] | 1
|
2020-06-09T18:58:07.000Z
|
2020-06-09T18:58:07.000Z
|
julesDataHandler.py
|
tquaife/julesML
|
5b3c01a7c947c2798cc15e74356eb08918701b80
|
[
"Apache-2.0"
] | null | null | null |
julesDataHandler.py
|
tquaife/julesML
|
5b3c01a7c947c2798cc15e74356eb08918701b80
|
[
"Apache-2.0"
] | 2
|
2021-01-14T18:34:41.000Z
|
2021-01-19T23:18:45.000Z
|
from datetime import datetime
from copy import copy
import numpy as np
class julesData(object):
    """Container for a single JULES output variable loaded from a text file.

    Reads the data on construction; `lags` lists the time lags (in rows)
    at which this variable is used as a machine-learning feature.
    """
    def __init__(self, file_name):
        """Load *file_name* and default to an un-lagged feature."""
        self.file_name = file_name
        self.read_data()
        self.lags = [0, ]
    def read_data(self):
        """Populate self.data and self.dates from the named file columns."""
        self.read_header()
        value_col = self.header.index("value")
        self.data = np.genfromtxt(self.file_name, usecols=(value_col,), skip_header=1)
        date_col = self.header.index("date")
        self.dates = np.genfromtxt(self.file_name, usecols=(date_col,), skip_header=1, dtype='U')
    def read_header(self):
        """Read the header tokens and the variable name from the first data row."""
        with open(self.file_name) as fh:
            header_line = fh.readline()
            first_data_line = fh.readline()
        # First header token is a comment marker, so drop it.
        self.header = header_line.split()[1:]
        self.var_name = first_data_line.split()[0]
    def transform_data_to_day_of_year(self):
        """Overwrite the data with each date's day-of-year (useful ML input)."""
        self.var_name = "day_of_year"
        for i, date_str in enumerate(self.dates):
            day_of_year = datetime.strptime(date_str, "%Y-%m-%d").timetuple().tm_yday
            self.data[i] = day_of_year
    def transform_data_rc_running_mean(self, window_size=30):
        """Replace the data with a trailing (*right*-centred) running mean."""
        smoothed = copy(self.data)
        for i in range(len(self.data)):
            window_start = max(0, i - window_size + 1)
            smoothed[i] = np.mean(self.data[window_start:i + 1])
        self.data = smoothed
class sampleBuilder(object):
    """Assemble (X, Y) training/testing matrices for ML algorithms.

    samples      :: integer list/array of sample positions
    target       :: a julesData instance holding the target data
    feature_list :: julesData instances holding the feature data
    """
    def __init__(self, samples, target, feature_list):
        """Store the inputs and immediately build the X/Y arrays."""
        self.samples = np.array(samples)
        self.target = target
        self.feature_list = feature_list
        self.is_scaled = False
        self.build_data()
    def build_data(self):
        """Fill self.Y (target) and self.X (one column per feature lag)."""
        self.Y = self.target.data[self.samples]
        # Each feature contributes one column per configured lag.
        n_columns = sum(len(feature.lags) for feature in self.feature_list)
        self.X = np.zeros([len(self.Y), n_columns])
        column = 0
        for feature in self.feature_list:
            for lag in feature.lags:
                self.X[:, column] = feature.data[self.samples - lag]
                column += 1
    def scale(self):
        """Standardise Y and each column of X to mean 0, variance 1.

        A no-op when the data is already scaled; statistics are stored so
        unscale() can restore the originals.
        """
        if self.is_scaled:
            return
        self.Y_mean = self.Y.mean()
        self.Y_std = self.Y.std()
        self.Y = (self.Y - self.Y_mean) / self.Y_std
        self.X_mean = self.X.mean(axis=0)
        self.X_std = self.X.std(axis=0)
        for col in range(np.shape(self.X)[1]):
            self.X[:, col] = (self.X[:, col] - self.X_mean[col]) / self.X_std[col]
        self.is_scaled = True
    def unscale(self):
        """Undo scale(), restoring the original values (no-op if unscaled)."""
        if not self.is_scaled:
            return
        self.Y = self.Y * self.Y_std + self.Y_mean
        for col in range(np.shape(self.X)[1]):
            self.X[:, col] = self.X[:, col] * self.X_std[col] + self.X_mean[col]
        self.is_scaled = False
# Smoke tests for the classes above; requires the example data files to exist.
if __name__=="__main__":
    """A few tests of the above code.
    """
    target=julesData("data/gh_point_smc_avail_top.txt")
    feature1=julesData("data/gh_point_t1p5m_gb.txt")
    feature1.transform_data_to_day_of_year()
    feature2=julesData("data/gh_point_t1p5m_gb.txt")
    feature2.lags=[0,1,2]
    s=sampleBuilder([3,4,5,6,7,8,15,401],target,[feature1,feature2])
    print(s.X)
    print(s.X.mean(axis=0))
    print(s.X.std(axis=0))
    print("-----------------------")
    # After scale() the columns should have mean ~0 and std ~1.
    s.scale()
    print(s.target.data[:10])
    print(s.Y)
    print(s.X)
    print(s.X.mean(axis=0))
    print(s.X.std(axis=0))
    print("-----------------------")
    # After unscale() the original values should be recovered.
    s.unscale()
    print(s.target.data[:10])
    print(s.Y)
    print(s.X)
    print(s.X.mean(axis=0))
    print(s.X.std(axis=0))
| 29.964072
| 88
| 0.559752
|
32d81a0ecbadbbab64d8b6e915da51e5ea7039c8
| 7,725
|
py
|
Python
|
tests/python/pants_test/tasks/test_ivy_utils.py
|
WamBamBoozle/pants
|
98cadfa1a5d337146903eb66548cfe955f2627b3
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/tasks/test_ivy_utils.py
|
WamBamBoozle/pants
|
98cadfa1a5d337146903eb66548cfe955f2627b3
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/tasks/test_ivy_utils.py
|
WamBamBoozle/pants
|
98cadfa1a5d337146903eb66548cfe955f2627b3
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import xml.etree.ElementTree as ET
from textwrap import dedent
from mock import Mock
from pants.backend.core.register import build_file_aliases as register_core
from pants.backend.jvm.ivy_utils import IvyModuleRef, IvyUtils
from pants.backend.jvm.register import build_file_aliases as register_jvm
from pants.backend.jvm.targets.exclude import Exclude
from pants.util.contextutil import temporary_file_path
from pants_test.base_test import BaseTest
class IvyUtilsTestBase(BaseTest):
  """Base class for IvyUtils tests: exposes core + JVM build-file aliases."""

  @property
  def alias_groups(self):
    core_aliases = register_core()
    return core_aliases.merge(register_jvm())
class IvyUtilsGenerateIvyTest(IvyUtilsTestBase):
  """Tests for IvyUtils: classpath calculation, ivy.xml generation and
  dependency-report traversal."""

  # TODO(John Sirois): increase coverage.
  # Some examples:
  #   + multiple confs - via with_sources and with_docs for example
  #   + excludes
  #   + classifiers
  #   + with_artifact

  def setUp(self):
    super(IvyUtilsGenerateIvyTest, self).setUp()

    self.add_to_build_file('src/java/targets', dedent("""
        jar_library(
          name='a',
          jars=[
            jar('org1', 'name1', 'rev1'),
            jar('org2', 'name2', 'rev2', force=True),
          ],
        )
    """))

    self.b_org = 'com.example'
    self.b_name = 'b'
    self.add_to_build_file('src/java/targets', dedent("""
        java_library(
          name='b',
          dependencies=[':a'],
          provides=artifact('{org}', '{name}', repo=Repository()),
          sources=['z.java'],
        )
    """.format(org=self.b_org, name=self.b_name)))

    self.add_to_build_file('3rdparty', dedent("""
        jar_library(
          name='example-morx',
          jars = [
            jar(org='commons-lang', name='commons-lang', rev='2.5', classifier='morx'),
          ]
        )
        jar_library(
          name='example-fleem',
          jars = [
            jar(org='commons-lang', name='commons-lang', rev='2.5', classifier='fleem'),
          ]
        )
    """))

    # NOTE: unlike target 'b', this snippet has no format placeholders, so
    # the original's trailing .format(...) call was a no-op and is dropped.
    self.add_to_build_file('src/java/targets', dedent("""
        java_library(
          name='c',
          dependencies=[
            '3rdparty:example-morx',
            '3rdparty:example-fleem',
          ],
          sources=['w.java'],
        )
    """))

    self.a = self.target('src/java/targets:a')
    self.b = self.target('src/java/targets:b')
    self.c = self.target('src/java/targets:c')
    # NOTE(review): the original bound this to an unused local; the call is
    # kept in case BaseTest.context() has required side effects -- confirm.
    self.context()

  def test_exclude_exported(self):
    _, excludes = IvyUtils.calculate_classpath([self.b])
    self.assertEqual(excludes, set([Exclude(org=self.b_org, name=self.b_name)]))

  def test_classifiers(self):
    jars, _ = IvyUtils.calculate_classpath([self.c])

    self.assertEqual(2, len(jars))
    jars.sort(key=lambda jar: jar.classifier)
    self.assertEqual('fleem', jars[0].classifier)
    self.assertEqual('morx', jars[1].classifier)

  def test_force_override(self):
    jars = list(self.a.payload.jars)
    with temporary_file_path() as ivyxml:
      IvyUtils.generate_ivy([self.a], jars=jars, excludes=[], ivyxml=ivyxml, confs=['default'])

      doc = ET.parse(ivyxml).getroot()

      conf = self.find_single(doc, 'configurations/conf')
      self.assert_attributes(conf, name='default')

      dependencies = list(doc.findall('dependencies/dependency'))
      self.assertEqual(2, len(dependencies))

      dep1 = dependencies[0]
      self.assert_attributes(dep1, org='org1', name='name1', rev='rev1')
      conf = self.find_single(dep1, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      dep2 = dependencies[1]
      self.assert_attributes(dep2, org='org2', name='name2', rev='rev2', force='true')
      # BUG FIX: the original re-checked dep1 here, leaving dep2's conf
      # element untested.
      conf = self.find_single(dep2, 'conf')
      self.assert_attributes(conf, name='default', mapped='default')

      override = self.find_single(doc, 'dependencies/override')
      self.assert_attributes(override, org='org2', module='name2', rev='rev2')

  def test_resolve_conflict(self):
    # Renamed from the original's misspelled 'test_resove_conflict'; the
    # name only matters to the unittest discoverer, so no callers break.
    v1 = Mock()
    v1.force = False
    v1.rev = "1"

    v1_force = Mock()
    v1_force.force = True
    v1_force.rev = "1"

    v2 = Mock()
    v2.force = False
    v2.rev = "2"

    # If neither version is forced, use the latest version
    self.assertIs(v2, IvyUtils._resolve_conflict(v1, v2))
    self.assertIs(v2, IvyUtils._resolve_conflict(v2, v1))

    # If an earlier version is forced, use the forced version
    self.assertIs(v1_force, IvyUtils._resolve_conflict(v1_force, v2))
    self.assertIs(v1_force, IvyUtils._resolve_conflict(v2, v1_force))

    # If the same version is forced, use the forced version
    self.assertIs(v1_force, IvyUtils._resolve_conflict(v1, v1_force))
    self.assertIs(v1_force, IvyUtils._resolve_conflict(v1_force, v1))

  def test_does_not_visit_diamond_dep_twice(self):
    ivy_info = self.parse_ivy_report(
        'tests/python/pants_test/tasks/ivy_utils_resources/report_with_diamond.xml')
    ref = IvyModuleRef("toplevel", "toplevelmodule", "latest")
    seen = set()

    def collector(r):
      # Each node must be visited exactly once even though the graph
      # contains a diamond.
      self.assertNotIn(r, seen)
      seen.add(r)
      return set([r])

    result = ivy_info.traverse_dependency_graph(ref, collector)

    self.assertEqual(
        {
            IvyModuleRef("toplevel", "toplevelmodule", "latest"),
            IvyModuleRef(org='org1', name='name1', rev='0.0.1'),
            IvyModuleRef(org='org2', name='name2', rev='0.0.1'),
            IvyModuleRef(org='org3', name='name3', rev='0.0.1')
        },
        result)

  def test_does_not_follow_cycle(self):
    ivy_info = self.parse_ivy_report(
        'tests/python/pants_test/tasks/ivy_utils_resources/report_with_cycle.xml')
    ref = IvyModuleRef("toplevel", "toplevelmodule", "latest")
    seen = set()

    def collector(r):
      # A cycle in the report must not cause a node to be revisited.
      self.assertNotIn(r, seen)
      seen.add(r)
      return set([r])

    result = ivy_info.traverse_dependency_graph(ref, collector)

    self.assertEqual(
        {
            IvyModuleRef("toplevel", "toplevelmodule", "latest"),
            IvyModuleRef(org='org1', name='name1', rev='0.0.1'),
            IvyModuleRef(org='org2', name='name2', rev='0.0.1'),
            IvyModuleRef(org='org3', name='name3', rev='0.0.1')
        },
        result)

  def test_memo_reused_across_calls(self):
    ivy_info = self.parse_ivy_report(
        'tests/python/pants_test/tasks/ivy_utils_resources/report_with_diamond.xml')
    ref = IvyModuleRef(org='org1', name='name1', rev='0.0.1')

    def collector(r):
      return set([r])

    memo = dict()
    result1 = ivy_info.traverse_dependency_graph(ref, collector, memo=memo)
    result2 = ivy_info.traverse_dependency_graph(ref, collector, memo=memo)

    # The memoized result object itself must be reused, not recomputed.
    self.assertIs(result1, result2)
    self.assertEqual(
        {
            IvyModuleRef(org='org1', name='name1', rev='0.0.1'),
            IvyModuleRef(org='org2', name='name2', rev='0.0.1'),
            IvyModuleRef(org='org3', name='name3', rev='0.0.1')
        },
        result1)

  def parse_ivy_report(self, path):
    """Parse an ivy XML report fixture, asserting the parse succeeded."""
    ivy_info = IvyUtils._parse_xml_report(path)
    self.assertIsNotNone(ivy_info)
    return ivy_info

  def find_single(self, elem, xpath):
    """Return the single element matching xpath under elem (assert exactly one)."""
    results = list(elem.findall(xpath))
    self.assertEqual(1, len(results))
    return results[0]

  def assert_attributes(self, elem, **kwargs):
    """Assert elem's XML attributes are exactly the given keyword arguments."""
    self.assertEqual(dict(**kwargs), dict(elem.attrib))
| 33.297414
| 113
| 0.633916
|
25ff02bd25c5e1275b30ea1a58a0b007701b2b75
| 483
|
py
|
Python
|
autogram/commons/models/models.py
|
ohduran/autogram
|
e24c7ff40c44cd0eabf8018e61ad5fe0b422a6a1
|
[
"MIT"
] | null | null | null |
autogram/commons/models/models.py
|
ohduran/autogram
|
e24c7ff40c44cd0eabf8018e61ad5fe0b422a6a1
|
[
"MIT"
] | null | null | null |
autogram/commons/models/models.py
|
ohduran/autogram
|
e24c7ff40c44cd0eabf8018e61ad5fe0b422a6a1
|
[
"MIT"
] | null | null | null |
from django.db import models
class HasUser(models.Model):
    """Abstract mixin adding a required foreign key to the project's User model."""

    # Deleting the user cascades to rows of any concrete subclass.
    user = models.ForeignKey('users.User', on_delete=models.CASCADE)

    class Meta:
        abstract = True
class NaturalKeyable(models.Model):
    """Abstract mixin providing Django natural-key serialization.

    Subclasses are expected to set ``_natural_key`` to the name of the
    attribute that uniquely identifies an instance.
    """

    class Meta:
        abstract = True

    def natural_key(self):
        # Django expects natural keys to be tuples, hence the 1-tuple.
        natural_key = getattr(self, self._natural_key)
        return (natural_key,)
class Usable(models.Model):
    """Abstract mixin tracking how many times an object has been used."""

    # Starts at zero for new rows; incremented elsewhere by callers.
    times_used = models.IntegerField(default=0)

    class Meta:
        abstract = True
| 17.25
| 68
| 0.666667
|
40161cd396626fccc0b721d7dbe339b0fb6802db
| 367
|
py
|
Python
|
color_tol/__init__.py
|
lazarillo/color_tol
|
a8a63f4756bc45e0151e480eec36059f93f08ffc
|
[
"MIT"
] | 3
|
2020-08-06T22:19:22.000Z
|
2021-08-04T11:30:29.000Z
|
color_tol/__init__.py
|
lazarillo/color_tol
|
a8a63f4756bc45e0151e480eec36059f93f08ffc
|
[
"MIT"
] | null | null | null |
color_tol/__init__.py
|
lazarillo/color_tol
|
a8a63f4756bc45e0151e480eec36059f93f08ffc
|
[
"MIT"
] | null | null | null |
"""
color_tol is a pure Python package for accessing Paul Tol-style colormaps.
See personal.sron.nl/~pault/colourschemes.pdf for more information.
(colourschemes.pdf is also added under /docs in this package.)
"""
__version__ = '0.1'
__all__ = []
from color_tol.funcs import diverging
from color_tol.funcs import sequential
from color_tol.funcs import qualitative
| 26.214286
| 74
| 0.787466
|
026906c89e299b16b806c1526c20d3272ef1ee42
| 15,959
|
py
|
Python
|
cloudify_agent/shell/commands/daemons.py
|
cloudify-cosmo/cloudify-agent
|
9036842a31a31e0f5a25895dc9097a2f37c3eba9
|
[
"Apache-2.0"
] | 12
|
2016-01-23T00:54:47.000Z
|
2021-11-23T19:04:02.000Z
|
cloudify_agent/shell/commands/daemons.py
|
cloudify-cosmo/cloudify-agent
|
9036842a31a31e0f5a25895dc9097a2f37c3eba9
|
[
"Apache-2.0"
] | 122
|
2015-08-18T19:34:44.000Z
|
2022-02-03T13:16:19.000Z
|
cloudify_agent/shell/commands/daemons.py
|
cloudify-cosmo/cloudify-agent
|
9036842a31a31e0f5a25895dc9097a2f37c3eba9
|
[
"Apache-2.0"
] | 22
|
2015-07-15T14:28:18.000Z
|
2021-05-11T02:18:02.000Z
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
import json
import os
from cloudify_agent.api import defaults
from cloudify_agent.api import utils as api_utils
from cloudify_agent.api.factory import DaemonFactory
from cloudify_agent.shell import env
from cloudify_agent.shell.decorators import handle_failures
from cloudify_agent.shell.commands import cfy
from cloudify.utils import (ENV_CFY_EXEC_TEMPDIR,
ENV_AGENT_LOG_LEVEL,
ENV_AGENT_LOG_MAX_BYTES,
ENV_AGENT_LOG_MAX_HISTORY)
class _ExpandUserPath(click.Path):
    """Like click.Path but also calls os.path.expanduser"""

    def convert(self, value, param, ctx):
        # Expand a leading '~'/'~user' before click's normal path validation.
        value = os.path.expanduser(value)
        # py2-compatible super() form, matching the rest of the file.
        return super(_ExpandUserPath, self).convert(value, param, ctx)
# The decorator stack below defines the entire CLI surface of the `create`
# command; every option can also be supplied through the environment
# variable named in its help text.
@cfy.command(context_settings=dict(ignore_unknown_options=True))
@click.option('--process-management',
              help='The process management system to use '
                   'when creating the daemon. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_PROCESS_MANAGEMENT),
              type=click.Choice(['init.d', 'nssm', 'detach', 'systemd']),
              required=True,
              envvar=env.CLOUDIFY_DAEMON_PROCESS_MANAGEMENT)
# --- REST service connection options ---
@click.option('--rest-host',
              help='The IP or host name of the REST service [env {0}]'
              .format(env.CLOUDIFY_REST_HOST),
              required=True,
              envvar=env.CLOUDIFY_REST_HOST,
              callback=api_utils._parse_comma_separated)
@click.option('--rest-port',
              help='The manager REST port to connect to. [env {0}]'
              .format(env.CLOUDIFY_REST_PORT),
              envvar=env.CLOUDIFY_REST_PORT)
@click.option('--rest-username',
              help='The username to use when sending REST calls. [env {0}]'
              .format(env.CLOUDIFY_REST_USERNAME),
              envvar=env.CLOUDIFY_REST_USERNAME)
@click.option('--rest-password',
              help='The password to use when sending REST calls. [env {0}]'
              .format(env.CLOUDIFY_REST_PASSWORD),
              envvar=env.CLOUDIFY_REST_PASSWORD)
@click.option('--rest-token',
              help='The token to use when sending REST calls. Takes '
                   'precedence over username/password. [env {0}]'
              .format(env.CLOUDIFY_REST_TOKEN),
              envvar=env.CLOUDIFY_REST_TOKEN)
@click.option('--rest-tenant',
              help='The tenant to use when sending REST calls. [env {0}]'
              .format(env.CLOUDIFY_REST_TENANT),
              envvar=env.CLOUDIFY_REST_TENANT)
@click.option('--local-rest-cert-file',
              help='The path to a local copy of the REST public cert, used for'
                   ' cert verification, if required [env {0}]'
              .format(env.CLOUDIFY_LOCAL_REST_CERT_PATH),
              type=_ExpandUserPath(exists=True, readable=True, file_okay=True),
              envvar=env.CLOUDIFY_LOCAL_REST_CERT_PATH)
# --- daemon identity / placement options ---
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              envvar=env.AGENT_NAME)
@click.option('--queue',
              help='The name of the queue to register the daemon to. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_QUEUE),
              envvar=env.CLOUDIFY_DAEMON_QUEUE)
@click.option('--host',
              help='The ip address of the current host. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_HOST),
              envvar=env.CLOUDIFY_DAEMON_HOST)
@click.option('--deployment-id',
              help='The deployment id this daemon will belong to. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_DEPLOYMENT_ID),
              envvar=env.CLOUDIFY_DAEMON_DEPLOYMENT_ID)
@click.option('--user',
              help='The user to create this daemon under. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_USER),
              envvar=env.CLOUDIFY_DAEMON_USER)
@click.option('--workdir',
              help='Working directory for runtime files (pid, log). '
                   'Defaults to current working directory. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_WORKDIR),
              type=_ExpandUserPath(file_okay=False),
              envvar=env.CLOUDIFY_DAEMON_WORKDIR)
# --- AMQP broker options ---
@click.option('--broker-ip',
              help='The broker host name or ip to connect to. [env {0}]'
              .format(env.CLOUDIFY_BROKER_IP),
              envvar=env.CLOUDIFY_BROKER_IP,
              callback=api_utils._parse_comma_separated)
@click.option('--broker-vhost',
              help='The broker virtual host to connect to. [env {0}]'
              .format(env.CLOUDIFY_BROKER_VHOST),
              default='/',
              envvar=env.CLOUDIFY_BROKER_VHOST)
@click.option('--broker-user',
              help='The broker username to use. [env {0}]'
              .format(env.CLOUDIFY_BROKER_USER),
              default='guest',
              envvar=env.CLOUDIFY_BROKER_USER)
@click.option('--broker-pass',
              help='The broker password to use. [env {0}]'
              .format(env.CLOUDIFY_BROKER_PASS),
              default='guest',
              envvar=env.CLOUDIFY_BROKER_PASS)
@click.option('--broker-ssl-enabled',
              help='Should AMQP SSL be enabled. [env {0}]'
              .format(env.CLOUDIFY_BROKER_SSL_ENABLED),
              default=False,
              type=bool,
              envvar=env.CLOUDIFY_BROKER_SSL_ENABLED)
@click.option('--broker-ssl-cert',
              help='The path to the SSL cert for the broker to use.'
                   'Only used when broker-ssl-enable is "true" [env {0}]'
              .format(env.CLOUDIFY_BROKER_SSL_CERT),
              default=None,
              type=_ExpandUserPath(exists=True, readable=True, file_okay=True),
              envvar=env.CLOUDIFY_BROKER_SSL_CERT)
@click.option('--broker-ssl-cert-path',
              help='The path to a local copy of the Broker public cert, '
                   'used for cert verification, if required [env {0}]'
              .format(env.CLOUDIFY_BROKER_SSL_CERT_PATH),
              type=_ExpandUserPath(readable=False, file_okay=True),
              envvar=env.CLOUDIFY_BROKER_SSL_CERT_PATH
              )
@click.option('--heartbeat',
              help='The interval of the AMQP heartbeat in seconds [env {0}]'
              .format(env.CLOUDIFY_HEARTBEAT),
              type=int,
              default=30,
              envvar=env.CLOUDIFY_HEARTBEAT
              )
# --- worker autoscaling and logging options ---
@click.option('--min-workers',
              help='Minimum number of workers for '
                   'the autoscale configuration. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_MIN_WORKERS),
              envvar=env.CLOUDIFY_DAEMON_MIN_WORKERS)
@click.option('--max-workers',
              help='Maximum number of workers for '
                   'the autoscale configuration. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_MAX_WORKERS),
              envvar=env.CLOUDIFY_DAEMON_MAX_WORKERS)
@click.option('--log-level',
              help='Log level of the daemon. [env {0}]'
              .format(ENV_AGENT_LOG_LEVEL),
              envvar=ENV_AGENT_LOG_LEVEL)
@click.option('--pid-file',
              help='Path to a location where the daemon pid file will be '
                   'stored. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_PID_FILE),
              type=_ExpandUserPath(),
              envvar=env.CLOUDIFY_DAEMON_PID_FILE)
@click.option('--log-dir',
              help='Path to a location where the daemon log files will be '
                   'stored. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_LOG_DIR),
              type=_ExpandUserPath(file_okay=False),
              envvar=env.CLOUDIFY_DAEMON_LOG_DIR)
@click.option('--extra-env-path',
              help='Path to an environment file to be added to the daemon. ['
                   'env {0}]'
              .format(env.CLOUDIFY_DAEMON_EXTRA_ENV),
              envvar=env.CLOUDIFY_DAEMON_EXTRA_ENV)
@click.option('--bypass-maintenance-mode',
              help='bypass maintenance mode on rest requests. [env {0}]'
              .format(env.CLOUDIFY_BYPASS_MAINTENANCE_MODE),
              envvar=env.CLOUDIFY_BYPASS_MAINTENANCE_MODE)
@click.option('--network',
              help='The name of the Cloudify Manager network to use [env {0}]'
              .format(env.CLOUDIFY_NETWORK_NAME),
              envvar=env.CLOUDIFY_NETWORK_NAME,
              is_eager=True)
@click.option('--executable-temp-path',
              help='Alternative path for temporary executable files',
              type=_ExpandUserPath(file_okay=False),
              envvar=ENV_CFY_EXEC_TEMPDIR)
@click.option('--log-max-bytes',
              help='Maximum size (in bytes) of a log file before it rolls '
                   'over',
              envvar=ENV_AGENT_LOG_MAX_BYTES)
@click.option('--log-max-history',
              help='Maximum number of historical log files to keep',
              envvar=ENV_AGENT_LOG_MAX_HISTORY)
# this is defined in order to allow passing any kind of option to the
# command line. in order to support creating daemons of different kind via
# the same command line. this argument is parsed as keyword arguments which
# are later passed to the daemon constructor.
@click.argument('custom-options', nargs=-1, type=click.UNPROCESSED)
@handle_failures
def create(**params):
    """
    Creates and stores the daemon parameters.

    Unknown CLI options are collected via ``custom-options`` and passed
    through to the daemon constructor as extra keyword arguments.
    """
    attributes = dict(**params)
    custom_arg = attributes.pop('custom_options', ())
    attributes.update(_parse_custom_options(custom_arg))
    click.echo('Creating...')
    # Imported lazily (presumably to avoid an import cycle -- confirm).
    from cloudify_agent.shell.main import get_logger
    daemon = DaemonFactory().new(
        logger=get_logger(),
        **attributes
    )

    daemon.create()
    _save_daemon(daemon)
    click.echo('Successfully created daemon: {0}'
               .format(daemon.name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@click.option('--user',
              help='The user to load the configuration from. Defaults to '
                   'current user. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_USER),
              envvar=env.CLOUDIFY_DAEMON_USER)
@handle_failures
def configure(name, user=None):
    """
    Configures the daemon scripts and configuration files.

    :param name: name of a previously created daemon.
    :param user: user whose saved daemon configuration to load
        (defaults to the current user).
    """
    click.echo('Configuring...')
    daemon = _load_daemon(name, user=user)
    daemon.configure()
    # Persist any attributes the configure step may have filled in.
    _save_daemon(daemon)
    click.echo('Successfully configured daemon: {0}'
               .format(daemon.name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@click.option('--user',
              help='The user to load the configuration from. Defaults to '
                   'current user. [env {0}]'
              .format(env.CLOUDIFY_DAEMON_USER),
              envvar=env.CLOUDIFY_DAEMON_USER)
@click.option('--interval',
              help='The interval in seconds to sleep when waiting '
                   'for the daemon to be ready.',
              default=defaults.START_INTERVAL)
@click.option('--timeout',
              help='The timeout in seconds to wait '
                   'for the daemon to be ready.',
              default=defaults.START_TIMEOUT)
@click.option('--no-delete-amqp-queue',
              help='Option to prevent deletion of a pre-existing '
                   'queue that this daemon is listening to before the agent.',
              is_flag=True,
              default=not defaults.DELETE_AMQP_QUEUE_BEFORE_START)
@handle_failures
def start(name, interval, timeout, no_delete_amqp_queue, user=None):
    """
    Starts the daemon.

    :param name: name of the daemon to start.
    :param interval: seconds to sleep between readiness polls.
    :param timeout: seconds to wait for readiness before giving up.
    :param no_delete_amqp_queue: keep a pre-existing AMQP queue
        (the flag inverts the default delete-before-start behavior).
    :param user: user whose saved daemon configuration to load.
    """
    click.echo('Starting...')
    daemon = _load_daemon(name, user=user)
    daemon.start(
        interval=interval,
        timeout=timeout,
        delete_amqp_queue=not no_delete_amqp_queue
    )
    click.echo('Successfully started daemon: {0}'.format(name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@click.option('--interval',
              help='The interval in seconds to sleep when waiting '
                   'for the daemon to stop.',
              default=defaults.STOP_INTERVAL)
@click.option('--timeout',
              help='The timeout in seconds to wait '
                   'for the daemon to stop.',
              default=defaults.STOP_TIMEOUT)
@handle_failures
def stop(name, interval, timeout):
    """
    Stops the daemon.

    :param name: name of the daemon to stop.
    :param interval: seconds to sleep between liveness polls.
    :param timeout: seconds to wait for shutdown before giving up.
    """
    click.echo('Stopping...')
    daemon = _load_daemon(name)
    daemon.stop(interval=interval, timeout=timeout)
    # Consistency fix: every sibling command reports success via click.echo;
    # the original used click.secho here (without styling args the output is
    # identical, so this changes style only).
    click.echo('Successfully stopped daemon: {0}'.format(name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@handle_failures
def restart(name):
    """
    Restarts the daemon.

    :param name: name of the daemon to restart.
    """
    click.echo('Restarting...')
    daemon = _load_daemon(name)
    daemon.restart()
    click.echo('Successfully restarted daemon: {0}'.format(name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@handle_failures
def delete(name):
    """
    Deletes the daemon.

    :param name: name of the daemon to delete.
    """
    click.echo('Deleting...')
    daemon = _load_daemon(name)
    daemon.delete()
    # Also remove the stored daemon configuration from the factory.
    DaemonFactory().delete(name)
    click.echo('Successfully deleted daemon: {0}'.format(name))
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@handle_failures
def inspect(name):
    """
    Inspect daemon properties.

    Prints the full daemon configuration as pretty-printed JSON.
    """
    daemon = _load_daemon(name)
    click.echo(json.dumps(api_utils.internal.daemon_to_dict(daemon), indent=2))
@cfy.command('list')
@handle_failures
def ls():
    """
    List all existing daemons.
    """
    # Imported lazily (presumably to avoid an import cycle -- confirm).
    from cloudify_agent.shell.main import get_logger
    daemons = DaemonFactory().load_all(logger=get_logger())
    for daemon in daemons:
        click.echo(daemon.name)
@cfy.command()
@click.option('--name',
              help='The name of the daemon. [env {0}]'.format(env.AGENT_NAME),
              required=True,
              envvar=env.AGENT_NAME)
@handle_failures
def status(name):
    """
    Query and report the status of the daemon.
    """
    _load_daemon(name).status()
def _load_daemon(name, user=None):
    """Load a saved daemon by name, optionally from another user's store."""
    from cloudify_agent.shell.main import get_logger
    return DaemonFactory(username=user).load(name, logger=get_logger())
def _save_daemon(daemon):
    """Persist the daemon's configuration under its owning user."""
    DaemonFactory(username=daemon.user).save(daemon)
def _parse_custom_options(options):
parsed = {}
for option_string in options:
parts = option_string.split('=')
key = parts[0][2:].replace('-', '_') # options start with '--'
if len(parts) == 1:
# flag given
value = True
else:
value = parts[1]
parsed[key] = value
return parsed
| 37.200466
| 79
| 0.612131
|
051cb68d86a9353a0a28d7d80f94f12cde503ae3
| 3,623
|
py
|
Python
|
ProcessMaker_PMIO/models/result_success.py
|
ProcessMaker/pmio-sdk-python
|
49ddf9e6444c77a35ce51aa052059b254e0f5299
|
[
"Apache-2.0"
] | 2
|
2017-11-10T05:10:44.000Z
|
2020-05-14T14:20:01.000Z
|
ProcessMaker_PMIO/models/result_success.py
|
ProcessMaker/pmio-sdk-python
|
49ddf9e6444c77a35ce51aa052059b254e0f5299
|
[
"Apache-2.0"
] | null | null | null |
ProcessMaker_PMIO/models/result_success.py
|
ProcessMaker/pmio-sdk-python
|
49ddf9e6444c77a35ce51aa052059b254e0f5299
|
[
"Apache-2.0"
] | 4
|
2017-07-01T22:04:18.000Z
|
2020-05-14T14:33:41.000Z
|
# coding: utf-8
"""
ProcessMaker API
This ProcessMaker I/O API provides access to a BPMN 2.0 compliant workflow engine api that is designed to be used as a microservice to support enterprise cloud applications. The current Alpha 1.0 version supports most of the descriptive class of the BPMN 2.0 specification.
OpenAPI spec version: 1.0.0
Contact: support@processmaker.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class ResultSuccess(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    def __init__(self, meta=None):
        """
        ResultSuccess - a model defined in Swagger

        :param meta: the ResultSuccessMeta payload for this result.

        ``swagger_types`` maps attribute name -> attribute type and
        ``attribute_map`` maps attribute name -> JSON key in the definition.
        """
        self.swagger_types = {
            'meta': 'ResultSuccessMeta'
        }

        self.attribute_map = {
            'meta': 'meta'
        }

        self._meta = meta

    @property
    def meta(self):
        """
        Gets the meta of this ResultSuccess.

        :return: The meta of this ResultSuccess.
        :rtype: ResultSuccessMeta
        """
        return self._meta

    @meta.setter
    def meta(self, meta):
        """
        Sets the meta of this ResultSuccess.

        :param meta: The meta of this ResultSuccess.
        :type: ResultSuccessMeta
        """
        self._meta = meta

    def to_dict(self):
        """
        Returns the model properties as a dict, recursively converting any
        nested model objects (anything exposing to_dict) as well.
        """
        result = {}

        # .items() works on both py2 and py3, removing the six dependency
        # the generated code carried for iteritems.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Guard against unrelated types: the generated __dict__ comparison
        # raised AttributeError (or gave false positives) for such operands.
        if not isinstance(other, ResultSuccess):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 28.527559
| 278
| 0.577698
|
ae9041f31946a19ee5fb03b77eb88e5d7002b3e5
| 3,436
|
py
|
Python
|
scripts/test_one.py
|
sealionkat/sn-built-up-area
|
44ec9da0bcf314f22f7b0ee27bf8c8c39f97ab73
|
[
"MIT"
] | null | null | null |
scripts/test_one.py
|
sealionkat/sn-built-up-area
|
44ec9da0bcf314f22f7b0ee27bf8c8c39f97ab73
|
[
"MIT"
] | null | null | null |
scripts/test_one.py
|
sealionkat/sn-built-up-area
|
44ec9da0bcf314f22f7b0ee27bf8c8c39f97ab73
|
[
"MIT"
] | null | null | null |
from sys import argv, exit
from PIL import Image
import sys
sys.path.insert(0, '../caffe/python/')
import caffe
# Directory layout (all relative to PREFIX_DIR).
PREFIX_DIR = ''
IN_DIR = PREFIX_DIR + 'maps/'      # presumably the source map images -- unused here
OUT_DIR = PREFIX_DIR + 'data/'     # generated window samples read by main()
TESTS_DIR = PREFIX_DIR + 'tests/'  # where the three result images are written
TRAIN_DB_FILENAME = OUT_DIR + 'train.txt'
# Default model path; main() overrides this from argv[1].
MODEL_FILE = PREFIX_DIR + 'snapshots/_iter_100000.caffemodel'

INPUT_IMAGE_SIZE = 820  # side length of a full input map, in pixels
WINDOW_SIZE = 20        # side length of one classification window, in pixels
WINDOWS_COUNT = 2       # windows trimmed from each edge (see `size` in main)
def set_window_color(pixels, i, j, val, window_size=None):
    """Fill one window of a pixel-access object with solid black or white.

    :param pixels: 2D pixel access object, indexed as ``pixels[x, y]``.
    :param i: window row index.
    :param j: window column index.
    :param val: 0 or 1; the window is painted ``val * 255`` grey
        (i.e. black or white).
    :param window_size: side length of a window in pixels; defaults to the
        module-level WINDOW_SIZE (kept as a runtime default for backward
        compatibility with existing callers).
    """
    if window_size is None:
        window_size = WINDOW_SIZE
    # All pixels in the window get the same RGB value; build it once.
    color = (val * 255, val * 255, val * 255)
    for x in range(window_size):
        for y in range(window_size):
            pixels[j * window_size + x, i * window_size + y] = color
def set_window_from_img(image, i, j, val):
    """Paste one window cropped from ``val`` into window (i, j) of ``image``."""
    # The source window sits WINDOWS_COUNT windows in from the top-left corner.
    src_left = WINDOW_SIZE * WINDOWS_COUNT
    src_right = src_left + WINDOW_SIZE
    window = val.crop((src_left, src_left, src_right, src_right))
    dest_box = (j * WINDOW_SIZE, i * WINDOW_SIZE,
                (j + 1) * WINDOW_SIZE, (i + 1) * WINDOW_SIZE)
    image.paste(window, dest_box)
def main():
    """Run a trained model over every window of one map and report accuracy.

    Usage: test_one.sh <Model path> <MapID>.  Reads the window samples
    listed in TRAIN_DB_FILENAME for the given map id, classifies each
    window with the Caffe model, and writes three images (original labels,
    net output, photo mosaic) into TESTS_DIR.
    """
    if len(argv) != 3:
        print('Usage: test_one.sh <Model path> <MapID>')
        exit(1)

    MODEL_FILE = PREFIX_DIR + argv[1]
    id = argv[2]

    # Collect the (sample path, label) pairs belonging to this map id.
    with open(TRAIN_DB_FILENAME, 'r') as f:
        data = [
            (line.split()[0].strip(), line.split()[1].strip())
            for line in f.readlines()
            if len(line) > 3 and str.format('{0}{1:0>4}__', OUT_DIR, id) in line.strip()
        ]

    # Each entry becomes (map_id, row, col, label, path); row/col are parsed
    # from the sample file name ("<prefix>__<row>_<col>.<ext>").
    parts = []
    for part in data:
        nums = part[0].split('__')[1].split('.')[0].split('_')
        parts.append((id, int(nums[0]), int(nums[1]), part[1], part[0]))

    # Group the window samples by row index.
    rows = {}
    for part in parts:
        try:
            r = rows[part[1]]
        except KeyError:
            r = []
            rows[part[1]] = r
        r.append(part)

    caffe.set_mode_cpu()
    net = caffe.Net('model/test.prototxt', MODEL_FILE, caffe.TEST)
    transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    transformer.set_transpose('data', (2,0,1))
    # Batch of 37 RGB 100x100 inputs; presumably matches the per-row window
    # count for this map size -- confirm before reusing with other sizes.
    net.blobs['data'].reshape(37, 3, 100, 100) # TODO

    n = 0
    ok = 0

    # Output images cover the map minus the trimmed border windows.
    size = INPUT_IMAGE_SIZE - 2 * WINDOW_SIZE * WINDOWS_COUNT
    image_photo = Image.new('RGB', (size, size), 'red')
    image_original = Image.new('RGB', (size, size), 'red')
    image_net = Image.new('RGB', (size, size), 'red')
    pixels_photo = image_photo.load()
    pixels_original = image_original.load()
    pixels_net = image_net.load()

    for i in rows:
        row = rows[i]
        print("Processing row " + str(i))
        # Load the whole row as one batch, then run a single forward pass.
        for j in range(len(row)):
            img = caffe.io.load_image(row[j][4], color=True)
            net.blobs['data'].data[j] = transformer.preprocess('data', img)
        out = net.forward()
        for j in range(len(row)):
            # Count a hit when the argmax class matches the ground-truth label.
            if out['prob'][j].argmax() == int(row[j][3]):
                ok += 1
            set_window_color(pixels_original, i, j, int(row[j][3]))
            set_window_color(pixels_net, i, j, out['prob'][j].argmax())
            set_window_from_img(image_photo, i, j, Image.open(row[j][4]))
            n += 1

    print(str.format('Accuracy: {0}% ({1} / {2})', (ok * 100.0) / n , ok, n))
    image_original.save(str.format('{0}{1:0>4}_orig.png', TESTS_DIR, id))
    image_net.save(str.format('{0}{1:0>4}_net.png', TESTS_DIR, id))
    image_photo.save(str.format('{0}{1:0>4}_map.png', TESTS_DIR, id))
# Script entry point.
if __name__ == '__main__':
    main()
| 29.118644
| 103
| 0.548312
|
786a09900fcdedcbca480719366d7095c5f3a8ed
| 256
|
py
|
Python
|
Python/TDD/test_even.py
|
JosephAMumford/CodingDojo
|
505be74d18d7a8f41c4b3576ca050b97f840f0a3
|
[
"MIT"
] | 2
|
2018-08-18T15:14:45.000Z
|
2019-10-16T16:14:13.000Z
|
Python/TDD/test_even.py
|
JosephAMumford/CodingDojo
|
505be74d18d7a8f41c4b3576ca050b97f840f0a3
|
[
"MIT"
] | null | null | null |
Python/TDD/test_even.py
|
JosephAMumford/CodingDojo
|
505be74d18d7a8f41c4b3576ca050b97f840f0a3
|
[
"MIT"
] | 6
|
2018-05-05T18:13:05.000Z
|
2021-05-20T11:32:48.000Z
|
import unittest
def isEven(n):
    """Return True when ``n`` is divisible by two."""
    remainder = n % 2
    return remainder == 0
class IsEvenTests(unittest.TestCase):
    """Unit tests for isEven."""

    def testTwo(self):
        # assertTrue replaces failUnless, which was deprecated since 2.7
        # and removed in Python 3.12.
        self.assertTrue(isEven(2))

    def testThree(self):
        # assertFalse replaces the likewise-removed failIf.
        self.assertFalse(isEven(3))
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| 17.066667
| 37
| 0.636719
|
7c71df3cbca1eb38ba9bd581284b996435e8707c
| 197
|
py
|
Python
|
django_libs/utils_email.py
|
Reston/django-libs
|
8c44a0851e3be564a100df50d257c1ce5b30dc25
|
[
"MIT"
] | 30
|
2016-11-24T21:37:36.000Z
|
2021-09-18T11:29:44.000Z
|
django_libs/utils_email.py
|
Reston/django-libs
|
8c44a0851e3be564a100df50d257c1ce5b30dc25
|
[
"MIT"
] | 8
|
2018-12-08T16:58:21.000Z
|
2021-01-05T14:44:50.000Z
|
django_libs/utils_email.py
|
Reston/django-libs
|
8c44a0851e3be564a100df50d257c1ce5b30dc25
|
[
"MIT"
] | 15
|
2018-04-25T07:07:38.000Z
|
2021-08-02T13:17:32.000Z
|
"""Kept for backwards compatibility."""
import warnings
from .utils.email import * # NOQA
warnings.warn('Please import from django_libs.utils.email instead.',
DeprecationWarning)
| 21.888889
| 68
| 0.715736
|
c35f7fedaa9316de57d13fd24e27eb1b2a7b4cae
| 2,260
|
py
|
Python
|
ad_api/api/sb/snapshots.py
|
matt-behrens-1/python-amazon-ad-api
|
257a0ff2366fed04b12f3491e689e3a52994b1ca
|
[
"MIT"
] | null | null | null |
ad_api/api/sb/snapshots.py
|
matt-behrens-1/python-amazon-ad-api
|
257a0ff2366fed04b12f3491e689e3a52994b1ca
|
[
"MIT"
] | null | null | null |
ad_api/api/sb/snapshots.py
|
matt-behrens-1/python-amazon-ad-api
|
257a0ff2366fed04b12f3491e689e3a52994b1ca
|
[
"MIT"
] | 1
|
2022-02-03T18:46:27.000Z
|
2022-02-03T18:46:27.000Z
|
from ad_api.base import Client, sp_endpoint, fill_query_params, ApiResponse
class Snapshots(Client):
    """
    Snapshot report operations for the Amazon Advertising API.

    NOTE(review): this client lives under the ``sb`` package and calls the
    ``/v2/hsa/`` endpoints, but the original docstring described Sponsored
    Products -- confirm which ad product these snapshots belong to.
    """
    @sp_endpoint('/v2/hsa/{}/snapshot', method='POST')
    def post_snapshot(self, recordType, **kwargs) -> ApiResponse:
        """
        Request creation of a snapshot for the given record type.

        ``recordType`` is substituted into the endpoint path; the POST
        payload is taken from the ``body`` keyword argument.

        Returns:
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), recordType), data=kwargs.pop('body'), params=kwargs)

    @sp_endpoint('/v2/hsa/snapshots/{}', method='GET')
    def get_snapshot(self, snapshotId, **kwargs) -> ApiResponse:
        r"""
        Gets the status of a requested snapshot.

        ``snapshotId`` is the identifier returned by ``post_snapshot``.

        Returns:
            ApiResponse
        """
        return self._request(fill_query_params(kwargs.pop('path'), snapshotId), params=kwargs)

    def download_snapshot(self, **kwargs) -> ApiResponse:
        r"""
        Downloads the snapshot previously get report specified by location (this is not part of the official Amazon Advertising API, is a helper method to download the snapshot). Take in mind that a direct download of location returned in get_snapshot will return 401 - Unauthorized.

        kwarg parameter **file** if not provided will take the default amazon name from path download (add a path with slash / if you want a specific folder, do not add extension as the return will provide the right extension based on the chosen format if needed)
        kwarg parameter **format** if not provided a format will return a url to download the snapshot (this url has a expiration time)

        Keyword Args
            | **url** (string): The location obtained from get_snapshot [required]
            | **file** (string): The path to save the file if mode is download json, zip or gzip. [optional]
            | **format** (string): The mode to download the snapshot: data (list), raw, url, json, zip, gzip. Default (url) [optional]

        Returns:
            ApiResponse
        """
        # NOTE(review): ``self`` is passed explicitly as well as implicitly
        # here -- confirm Client._download's expected signature.
        return self._download(self, params=kwargs)
| 52.55814
| 293
| 0.684956
|
28629d69c5787873f5f59ec5162f2115ac08fbaf
| 135
|
py
|
Python
|
codegen.py
|
macrat/PyIMDB
|
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
|
[
"MIT"
] | 1
|
2021-09-10T01:24:31.000Z
|
2021-09-10T01:24:31.000Z
|
codegen.py
|
macrat/PyIMDB
|
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
|
[
"MIT"
] | null | null | null |
codegen.py
|
macrat/PyIMDB
|
28c8f6f4aa2b8bb875ce42205ecb0ed70970d4f5
|
[
"MIT"
] | 1
|
2021-09-10T01:24:32.000Z
|
2021-09-10T01:24:32.000Z
|
from grpc.tools import protoc

# Compile msg.proto into Python message classes and gRPC service stubs.
#
# BUG FIX: the arguments were previously passed as a set literal `{...}`.
# protoc's command line is positional and order-sensitive (argv[0]
# placeholder first, flags next, proto files last), and a set iterates in
# arbitrary order — so generation could fail or misbehave nondeterministically.
# A list preserves the intended order.
protoc.main([
    '',                      # argv[0] placeholder (program name)
    '-I.',                   # resolve imports relative to the current dir
    '--python_out=.',        # emit *_pb2.py message modules here
    '--grpc_python_out=.',   # emit *_pb2_grpc.py service stubs here
    './msg.proto',           # the schema to compile
])
| 13.5
| 29
| 0.511111
|
2f4d0d6e0679f1eb10e5d7bcaed2abde67ee9e39
| 1,237
|
py
|
Python
|
sea_agents.py
|
newc4/aiTest
|
a6ab4450cc992d0a68a7a3673b63b52ebb020ff1
|
[
"MIT"
] | null | null | null |
sea_agents.py
|
newc4/aiTest
|
a6ab4450cc992d0a68a7a3673b63b52ebb020ff1
|
[
"MIT"
] | null | null | null |
sea_agents.py
|
newc4/aiTest
|
a6ab4450cc992d0a68a7a3673b63b52ebb020ff1
|
[
"MIT"
] | null | null | null |
from env import SeaWarEnv
import numpy as np
import argparse
def main():
    """Run one episode of SeaWarEnv with a uniformly random policy.

    The camp to control (1 or 2) is selected via the ``--camp`` CLI flag.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--camp', type=int, default=1, help='None')
    args = parser.parse_args()

    # Only camps 1 and 2 exist; anything else is rejected.
    if args.camp == 1:
        env = SeaWarEnv(camp=1)
    elif args.camp == 2:
        env = SeaWarEnv(camp=2)
    else:
        raise NotImplementedError

    info = env.get_env_info()
    n_actions = info["n_actions"]  # kept for parity with the environment API (unused below)
    n_agents = info["n_agents"]

    episode_count = 1
    for _episode in range(episode_count):
        env.reset()
        done = False
        while not done:
            done = env.wait_attack_interval()
            if done is True:
                break
            # Before each planning tick, query the environment for every
            # agent's currently legal actions (stay, move in each direction,
            # attack, ...) and pick one uniformly at random.
            chosen_actions = []
            for agent_id in range(n_agents):
                mask = env.get_avail_agent_actions(agent_id)
                legal = np.nonzero(mask)[0]
                chosen_actions.append(np.random.choice(legal))
            env.step(chosen_actions)


if __name__ == "__main__":
    main()
| 27.488889
| 96
| 0.587712
|
633382653f1688ad4275653226bda6f1ab3eecfd
| 390
|
py
|
Python
|
app/api/service/transform.py
|
tanimutomo/app-on-gpu
|
7a4ca5b962f249be1f0932bd96b2980c2ec9537c
|
[
"MIT"
] | 1
|
2020-09-04T02:38:43.000Z
|
2020-09-04T02:38:43.000Z
|
app/api/service/transform.py
|
tanimutomo/app-on-gpu
|
7a4ca5b962f249be1f0932bd96b2980c2ec9537c
|
[
"MIT"
] | null | null | null |
app/api/service/transform.py
|
tanimutomo/app-on-gpu
|
7a4ca5b962f249be1f0932bd96b2980c2ec9537c
|
[
"MIT"
] | null | null | null |
import torch
from torchvision import transforms
# Per-channel RGB statistics conventionally used for ImageNet-trained models.
IMAGENET_MEAN = (0.485, 0.456, 0.406)
IMAGENET_STD = (0.229, 0.224, 0.225)


def get_transform():
    """Return the standard ImageNet evaluation preprocessing pipeline.

    Pipeline: Resize(256) -> CenterCrop(224) -> ToTensor -> Normalize with
    the ImageNet mean/std constants above.
    """
    steps = [
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(mean=IMAGENET_MEAN, std=IMAGENET_STD),
    ]
    return transforms.Compose(steps)
| 19.5
| 37
| 0.617949
|
ce0da36f21550039552712d2fceb5097bf828bb9
| 14,818
|
py
|
Python
|
endaq/ide/info.py
|
MideTechnology/endaq-python
|
a878efdd65f718c1324d92d467b19fd3b4142cd0
|
[
"MIT"
] | 5
|
2021-12-02T04:41:52.000Z
|
2022-02-01T19:44:41.000Z
|
endaq/ide/info.py
|
MideTechnology/endaq-python
|
a878efdd65f718c1324d92d467b19fd3b4142cd0
|
[
"MIT"
] | 136
|
2021-09-28T17:45:20.000Z
|
2022-03-30T11:35:15.000Z
|
endaq/ide/info.py
|
MideTechnology/endaq-python
|
a878efdd65f718c1324d92d467b19fd3b4142cd0
|
[
"MIT"
] | 2
|
2021-11-08T19:22:17.000Z
|
2021-12-15T20:25:04.000Z
|
"""
Functions for retrieving summary data from a dataset.
"""
from __future__ import annotations
import typing
from collections import defaultdict
import datetime
import warnings
import numpy as np
import pandas as pd
import pandas.io.formats.style
import idelib.dataset
from .measurement import MeasurementType, ANY, get_channels
from .files import get_doc
from .util import parse_time
__all__ = [
"get_channel_table",
"to_pandas",
"get_primary_sensor_data",
]
# ============================================================================
# Display formatting functions
# ============================================================================
def format_channel_id(ch: idelib.dataset.Channel) -> str:
    """ Render a `Channel`/`SubChannel` as a compact "channel.subchannel" ID.

        A `SubChannel` (has a parent) renders as ``"<parent id>.<id>"``; a
        parentless `Channel` renders as ``"<id>.*"``. On any failure a
        warning is emitted and ``str(ch)`` is returned instead.

        :param ch: The `idelib.dataset.Channel` or `idelib.dataset.SubChannel`
            to format.
        :return: A formatted "channel.subchannel" string.
    """
    try:
        parent = ch.parent
        if parent:
            return f"{parent.id}.{ch.id}"
        return f"{ch.id}.*"
    except (AttributeError, TypeError, ValueError) as err:
        warnings.warn(f"format_channel_id({ch!r}) raised {type(err).__name__}: {err}")
        return str(ch)
def format_timedelta(val: typing.Union[int, float, datetime.datetime, datetime.timedelta]) -> str:
    """ Render a duration compactly, e.g. ``"1d 02:03:04.0050"``.

        Accepts a `datetime.timedelta`/`pandas.Timedelta` or a raw
        microsecond count (`int`/`float`). Hour and day components are only
        shown when nonzero. On failure a warning is emitted and ``str(val)``
        is returned.

        :param val: The `pandas.Timedelta` or `datetime.timedelta` to format.
            Will also work with microseconds as `float` or `int`.
        :return: A formatted time 'duration' string.
    """
    try:
        if isinstance(val, datetime.timedelta):
            delta = pd.Timedelta(val)
        else:
            delta = pd.Timedelta(microseconds=val)

        # NOTE: the `components` attribute only exists on pandas `Timedelta`.
        comp = delta.components
        text = f"{comp.minutes:02d}:{comp.seconds:02d}.{comp.milliseconds:04d}"
        if comp.hours or comp.days:
            text = f"{comp.hours:02d}:{text}"
        if comp.days:
            text = f"{comp.days}d {text}"
        return text
    except (AttributeError, TypeError, ValueError) as err:
        warnings.warn(f"format_timedelta({val!r}) raised {type(err).__name__}: {err}")
        return str(val)
def format_timestamp(ts: typing.Union[int, float]) -> str:
    """ Render a raw timestamp as whole microseconds with units.

        `idelib` timestamps have whole-microsecond resolution, so the value
        is truncated to an integer. On failure a warning is emitted and
        ``str(ts)`` is returned.

        :param ts: The timestamp in microseconds.
        :return: A formatted timestamp string, with units.
    """
    try:
        whole = int(ts)
    except (TypeError, ValueError) as err:
        warnings.warn(f"format_timestamp({ts!r}) raised {type(err).__name__}: {err}")
        return str(ts)
    return f"{whole} µs"
# ============================================================================
#
# ============================================================================
""" The default table formatting. """
TABLE_FORMAT = {
'channel': format_channel_id,
'start': format_timedelta,
'end': format_timedelta,
'duration': format_timedelta,
'rate': "{:.2f} Hz",
}
def get_channel_table(dataset: typing.Union[idelib.dataset.Dataset, list],
                      measurement_type=ANY,
                      start: typing.Union[int, float, str, datetime.datetime, datetime.timedelta] = 0,
                      end: typing.Union[int, float, str, datetime.datetime, datetime.timedelta, None] = None,
                      formatting: typing.Optional[dict] = None,
                      index: bool = True,
                      precision: int = 4,
                      timestamps: bool = False,
                      **kwargs) -> typing.Union[pd.DataFrame, pd.io.formats.style.Styler]:
    """ Get summary data for all `SubChannel` objects in a `Dataset` that
        contain one or more type of sensor data. By using the optional
        `start` and `end` parameters, information can be retrieved for a
        specific interval of time.

        The `start` and `end` times, if used, may be specified in several
        ways:

        * `int`/`float` (Microseconds from the recording start)
        * `str` (formatted as a time from the recording start, e.g., `MM:SS`,
          `HH:MM:SS`, `DDd HH:MM:SS`). More examples:

          * ``":01"`` or ``":1"`` or ``"1s"`` (1 second)
          * ``"22:11"`` (22 minutes, 11 seconds)
          * ``"3:22:11"`` (3 hours, 22 minutes, 11 seconds)
          * ``"1d 3:22:11"`` (1 day, 3 hours, 22 minutes, 11 seconds)
        * `datetime.timedelta` or `pandas.Timedelta` (time from the
          recording start)
        * `datetime.datetime` (an explicit UTC time)

        :param dataset: A `idelib.dataset.Dataset` or a list of
            channels/subchannels from which to build the table.
        :param measurement_type: A :py:class:`~endaq.ide.MeasurementType`, a
            measurement type 'key' string, or a string of multiple keys
            generated by adding and/or subtracting
            :py:class:`~endaq.ide.MeasurementType` objects to filter the
            results. Any 'subtracted' types will be excluded.
        :param start: The starting time. Defaults to the start of the
            recording.
        :param end: The ending time. Defaults to the end of the recording.
        :param formatting: A dictionary of additional style/formatting items
            (see `pandas.DataFrame.style.format()`). If `False`, no additional
            formatting is applied.
        :param index: If `True`, show the index column on the left.
        :param precision: The default decimal precision to display. Can be
            changed later.
        :param timestamps: If `True`, show the start and end as raw
            microsecond timestamps.
        :returns: A table (`pandas.io.formats.style.Styler`) of summary data.
        :rtype: pandas.DataFrame
    """
    # FIX: `end` was previously annotated `typing.Optional[int, float, ...]`,
    # which is invalid typing syntax — `Optional[]` accepts exactly one
    # parameter and raises `TypeError` if the annotation is ever evaluated
    # (e.g., via `typing.get_type_hints()`). It is now an explicit `Union`
    # with `None`.

    # We don't support multiple sessions on current Slam Stick/enDAQ recorders,
    # but in the event we ever do, this allows one to be specified like so:
    #    :param session: A `Session` or session ID to retrieve from a
    #        multi-session recording.
    # Leave out of docstring until we ever support it.
    session = kwargs.get('session', None)
    if session:
        session = getattr(session, 'sessionId', session)

    if hasattr(dataset, 'getPlots'):
        sources = get_channels(dataset, measurement_type)
    else:
        sources = dataset

    result = defaultdict(list)
    for source in sources:
        range_start = range_end = duration = rate = session_start = None
        samples = 0
        data = source.getSession(session)
        if data.session.utcStartTime:
            session_start = datetime.datetime.utcfromtimestamp(data.session.utcStartTime)
        # NOTE(review): `start`/`end` are re-parsed (and rebound) on every
        # iteration; `parse_time` appears to be idempotent on already-parsed
        # values — confirm if sources can have differing session start times.
        start = parse_time(start, session_start)
        end = parse_time(end, session_start)
        if len(data):
            if not start and not end:
                start_idx, end_idx = 0, -1
                samples = len(data)
            else:
                start_idx, end_idx = data.getRangeIndices(start, end)
                end_idx = min(len(data) - 1, end_idx)
                if end_idx < 0:
                    samples = len(data) - start_idx - 1
                else:
                    samples = end_idx - start_idx
            range_start = data[int(start_idx)][0]
            range_end = data[int(end_idx)][0]
            duration = range_end - range_start
            # Convert µs duration to seconds for a Hz rate.
            rate = samples / (duration / 10 ** 6)

        result['channel'].append(source)
        result['name'].append(source.name)
        result['type'].append(source.units[0])
        result['units'].append(source.units[1])
        result['start'].append(range_start)
        result['end'].append(range_end)
        result['duration'].append(duration)
        result['samples'].append(samples)
        result['rate'].append(rate)

        # # TODO: RESTORE AFTER FIX IN idelib
        # dmin, dmean, dmax = data.getRangeMinMeanMax(start, end)
        # result['min'].append(dmin)
        # result['mean'].append(dmean)
        # result['max'].append(dmax)

    if formatting is False:
        return pd.DataFrame(result).style

    style = TABLE_FORMAT.copy()
    if timestamps:
        style.update({
            'start': format_timestamp,
            'end': format_timestamp
        })
    if isinstance(formatting, dict):
        style.update(formatting)

    styled = pd.DataFrame(result).style.format(style, precision=precision)
    if not index:
        # NOTE: `Styler.hide_index()` is deprecated in newer pandas in favor
        # of `Styler.hide(axis="index")`; kept for compatibility with the
        # pandas versions this package targets.
        return styled.hide_index()
    else:
        return styled
# ============================================================================
#
# ============================================================================
def to_pandas(
    channel: typing.Union[idelib.dataset.Channel, idelib.dataset.SubChannel],
    time_mode: typing.Literal["seconds", "timedelta", "datetime"] = "datetime",
) -> pd.DataFrame:
    """ Read IDE data into a pandas DataFrame.

        :param channel: a `Channel` object, as produced from `Dataset.channels`
            or :py:func:`endaq.ide.get_channels`
        :param time_mode: how to temporally index samples; each mode uses either
            relative times (with respect to the start of the recording) or
            absolute times (i.e., date-times):

            * `"seconds"` - a `pandas.Float64Index` of relative timestamps, in seconds
            * `"timedelta"` - a `pandas.TimeDeltaIndex` of relative timestamps
            * `"datetime"` - a `pandas.DateTimeIndex` of absolute timestamps
        :return: a `pandas.DataFrame` containing the channel's data
    """
    raw = channel.getSession().arraySlice()
    times, samples = raw[0], raw[1:].T

    # Raw timestamps are microseconds; scale to nanoseconds for timedelta64.
    times = (1e3 * times).astype("timedelta64[ns]")
    if time_mode == "datetime":
        times = times + np.datetime64(channel.dataset.lastUtcTime, "s")
    elif time_mode == "seconds":
        times = times / np.timedelta64(1, "s")
    elif time_mode != "timedelta":
        raise ValueError(f'invalid time mode "{time_mode}"')

    if hasattr(channel, "subchannels"):
        column_names = [sub.name for sub in channel.subchannels]
    else:
        column_names = [channel.name]

    return pd.DataFrame(samples, index=pd.Series(times, name="timestamp"), columns=column_names)
# ============================================================================
#
# ============================================================================
def get_primary_sensor_data(
        name: str = "",
        doc: idelib.dataset.Dataset = None,
        measurement_type: typing.Union[str, MeasurementType] = ANY,
        criteria: typing.Literal["samples", "rate", "duration"] = "samples",
        time_mode: typing.Literal["seconds", "timedelta", "datetime"] = "datetime",
        least: bool = False,
        force_data_return: bool = False
) -> pd.DataFrame:
    """ Get the data from the primary sensor in a given .ide file using
        :py:func:`~endaq.ide.to_pandas()`.

        :param name: The file location to pull the data from, see
            :py:func:`~endaq.ide.get_doc()` for more. This can be a local file
            location or a URL.
        :param doc: An open `Dataset` object, see :py:func:`~endaq.ide.get_doc()`
            for more. If one is provided it will not attempt to use `name` to
            load a new one.
        :param measurement_type: The sensor type to return data from, see
            :py:mod:`~endaq.ide.measurement` for more. The default is `"any"`,
            but to get the primary accelerometer for example, set this to `"accel"`.
        :param criteria: How to determine the "primary" sensor using the summary
            information provided by :py:func:`~endaq.ide.get_channel_table()`:

            * `"sample"` - the number of samples, default behavior
            * `"rate"` - the sampling rate in Hz
            * `"duration"` - the duration from start to the end of data from that sensor
        :param time_mode: how to temporally index samples; see
            :py:func:`~endaq.ide.to_pandas()` (`"seconds"`, `"timedelta"`, or
            `"datetime"`).
        :param least: If set to `True` it will return the channels ranked lowest
            by the given criteria.
        :param force_data_return: If set to `True` and the specified
            `measurement_type` is not included in the file, it will return the
            data from any sensor instead of raising an error which is the
            default behavior.
        :return: a `pandas.DataFrame` containing the sensor's data
    """
    # Load the dataset from `name` only when no open document was supplied.
    if doc is None:
        doc = get_doc(name)

    # Summarize the channels matching the requested measurement type.
    table = get_channel_table(doc, measurement_type).data

    # The requested type may be absent: either fall back to any sensor
    # (with a warning) or raise.
    if len(table) == 0:
        error_str = f'measurement type "{measurement_type!r}" is not included in this file'
        if not force_data_return:
            raise ValueError(error_str)
        warnings.warn(error_str)
        table = get_channel_table(doc, "any").data

    # Keep only the row(s) that minimize/maximize the ranking criteria.
    criteria = str(criteria).lower()
    if criteria not in ("samples", "rate", "duration"):
        raise ValueError(f'invalid criteria "{criteria!r}"')
    if least:
        target = table[criteria].min()
    else:
        target = table[criteria].max()
    table = table[table[criteria] == target]

    # Pull the full parent channel, then keep only the winning subchannels.
    parent_channel = table.iloc[0].channel.parent
    frame = to_pandas(parent_channel, time_mode=time_mode)
    return frame[table.name]
| 40.376022
| 123
| 0.597112
|
c296255648a97e7a9fbf38aa4342cdeb1bd66de1
| 2,498
|
py
|
Python
|
contrail-opserver/setup.py
|
toha10/contrail-analytics
|
f5c84afd13ecb10062f960d02b2c4f10a5c3f11b
|
[
"Apache-2.0"
] | null | null | null |
contrail-opserver/setup.py
|
toha10/contrail-analytics
|
f5c84afd13ecb10062f960d02b2c4f10a5c3f11b
|
[
"Apache-2.0"
] | 2
|
2018-12-04T02:20:52.000Z
|
2018-12-22T06:16:30.000Z
|
contrail-opserver/setup.py
|
toha10/contrail-analytics
|
f5c84afd13ecb10062f960d02b2c4f10a5c3f11b
|
[
"Apache-2.0"
] | 1
|
2018-12-04T02:07:47.000Z
|
2018-12-04T02:07:47.000Z
|
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import os
import re
from setuptools import setup, find_packages, Command
import distutils
class RunTestsCommand(Command):
    """setuptools command that runs the test suite via ``./run_tests.sh``
    and fails the build when the log does not contain an "OK" line.
    """
    description = "Test command to run testr in virtualenv"
    user_options = [
        ('coverage', 'c', "Generate code coverage report"),
        ('testrun=', None, "Run a specific test"),
    ]
    boolean_options = ['coverage']
    def initialize_options(self):
        # Required Command hook: establish defaults before option parsing.
        self.cwd = None
        self.coverage = False
        self.testrun = None
    def finalize_options(self):
        # Required Command hook: runs after option parsing.
        self.cwd = os.getcwd()
        if self.testrun:
            self.announce('Running test: %s' % str(self.testrun),
                level=distutils.log.INFO)
    def run(self):
        logfname = 'test.log'
        args = '-V'
        if self.coverage:
            logfname = 'coveragetest.log'
            args += ' -c'
        if self.testrun:
            # A specific test name replaces the default/coverage arguments
            # and its log goes to "<testname>.log".
            logfname = self.testrun + '.log'
            args = self.testrun
        os.system('./run_tests.sh %s' % args)
        with open(logfname) as f:
            # The runner prints "OK" on success; use os._exit so setup.py
            # returns non-zero immediately, without unwinding setuptools.
            if not re.search('\nOK', ''.join(f.readlines())):
                os._exit(1)
def requirements(filename):
    """Parse a pip-style requirements file into a list of requirement strings.

    Strips ``#`` comments (inline or whole-line) and surrounding whitespace,
    and drops blank lines.

    :param filename: path to the requirements file.
    :return: list of non-empty requirement strings.
    """
    comment = re.compile(r'\s*#.*')
    with open(filename) as f:
        lines = f.read().splitlines()
    # Materialize a list: the previous implementation returned a lazy
    # `filter` object, which can only be consumed once. A comprehension is
    # also clearer than the map/filter/lambda chain it replaces.
    return [req for req in (comment.sub('', line).strip() for line in lines) if req]
# Package metadata / entry points for the contrail-analytics opserver.
setup(
    name='opserver',
    version='0.1dev',
    # Ship only the opserver package; exclude test/build/plugin helpers.
    packages=find_packages(exclude=["node_mgr", "node_mgr.*", "test.*",
        "build.*", "plugins.*"]),
    package_data={'': ['*.html', '*.css', '*.xml']},
    zip_safe=False,
    include_package_data=True,
    long_description="VNC Analytics API Implementation",
    # Runtime dependencies are read from requirements.txt at build time.
    install_requires=requirements('requirements.txt'),
    entry_points = {
        # Please update sandesh/common/vns.sandesh on process name change
        'console_scripts' : [
            'contrail-analytics-api = opserver.opserver:main',
            'contrail-alarm-gen = opserver.alarmgen:main',
            'contrail-logs = opserver.log:main',
            'contrail-stats = opserver.stats:main',
            'contrail-flows = opserver.flow:main',
            'contrail-sessions = opserver.sessions:main',
            'contrail-logs-api-audit = opserver.api_log:main',
            'contrail-db = opserver.db:main',
            'contrail-alarm-notify = opserver.alarm_notify:main'
        ],
    },
    cmdclass={
        # Exposes "python setup.py run_tests" (see RunTestsCommand).
        'run_tests': RunTestsCommand,
    },
)
| 32.441558
| 73
| 0.577262
|
0aadc299d3f0e19ba5bf1f043b65975adf7929c8
| 2,809
|
py
|
Python
|
src/model/common.py
|
sheepc/DRBAESR-PyTorch-master
|
fab85e73a368b487691887c393a261208fc55d4b
|
[
"MIT"
] | 40
|
2019-12-04T09:28:31.000Z
|
2022-01-03T09:08:15.000Z
|
src/model/common.py
|
sheepc/DRBAESR-PyTorch-master
|
fab85e73a368b487691887c393a261208fc55d4b
|
[
"MIT"
] | 3
|
2020-01-06T13:26:51.000Z
|
2021-11-11T05:36:59.000Z
|
src/model/common.py
|
sheepc/DRBAESR-PyTorch-master
|
fab85e73a368b487691887c393a261208fc55d4b
|
[
"MIT"
] | 8
|
2019-12-13T12:17:28.000Z
|
2021-08-06T19:18:37.000Z
|
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
def default_conv(in_channels, out_channels, kernel_size, bias=True):
    """Build a 2-D convolution whose padding keeps spatial size for odd kernels."""
    same_padding = kernel_size // 2
    return nn.Conv2d(in_channels, out_channels, kernel_size,
                     padding=same_padding, bias=bias)
class MeanShift(nn.Conv2d):
    """Fixed 1x1 convolution that shifts an RGB tensor by the dataset mean.

    With ``sign=-1`` it subtracts the (range-scaled) mean and divides by the
    std; with ``sign=+1`` it adds the mean back. Its parameters are frozen.
    """

    def __init__(
        self, rgb_range,
        rgb_mean=(0.4488, 0.4371, 0.4040), rgb_std=(1.0, 1.0, 1.0), sign=-1):
        super(MeanShift, self).__init__(3, 3, kernel_size=1)
        std = torch.Tensor(rgb_std)
        mean = torch.Tensor(rgb_mean)
        # Diagonal kernel: each output channel is its input channel / std.
        self.weight.data = torch.eye(3).view(3, 3, 1, 1) / std.view(3, 1, 1, 1)
        # Bias carries the (scaled) mean shift.
        self.bias.data = sign * rgb_range * mean / std
        # This is a fixed preprocessing step, not a learnable layer.
        for param in self.parameters():
            param.requires_grad = False
class BasicBlock(nn.Sequential):
    """conv -> optional BatchNorm -> optional activation, as one Sequential.

    NOTE(review): ``stride`` is accepted but never forwarded to ``conv`` —
    kept for interface compatibility; confirm whether that is intentional.
    """

    def __init__(
        self, conv, in_channels, out_channels, kernel_size, stride=1, bias=False,
        bn=True, act=nn.ReLU(True)):
        layers = [conv(in_channels, out_channels, kernel_size, bias=bias)]
        if bn:
            layers.append(nn.BatchNorm2d(out_channels))
        if act is not None:
            layers.append(act)
        super(BasicBlock, self).__init__(*layers)
class ResBlock(nn.Module):
    """Residual block: ``x + res_scale * (conv -> [bn] -> act -> conv -> [bn])(x)``."""

    def __init__(
        self, conv, n_feats, kernel_size,
        bias=True, bn=False, act=nn.ReLU(True), res_scale=1):
        super(ResBlock, self).__init__()
        layers = []
        for stage in range(2):
            layers.append(conv(n_feats, n_feats, kernel_size, bias=bias))
            if bn:
                layers.append(nn.BatchNorm2d(n_feats))
            # Activation only between the two convolutions.
            if stage == 0:
                layers.append(act)
        self.body = nn.Sequential(*layers)
        self.res_scale = res_scale

    def forward(self, x):
        scaled = self.body(x).mul(self.res_scale)
        return scaled + x
class Upsampler(nn.Sequential):
    """Upscale feature maps via conv + PixelShuffle stages.

    Power-of-two scales use log2(scale) x2 stages; scale 3 uses a single x3
    stage. Any other scale raises ``NotImplementedError``.
    """

    def __init__(self, conv, scale, n_feats, bn=False, act=False, bias=True):
        layers = []

        def add_stage(shuffle, expand):
            # One upscaling stage: channel-expanding conv, pixel shuffle,
            # then the optional norm/activation shared by both scale paths.
            layers.append(conv(n_feats, expand * n_feats, 3, bias))
            layers.append(nn.PixelShuffle(shuffle))
            if bn:
                layers.append(nn.BatchNorm2d(n_feats))
            if act == 'relu':
                layers.append(nn.ReLU(True))
            elif act == 'prelu':
                layers.append(nn.PReLU(n_feats))

        if (scale & (scale - 1)) == 0:  # power of two
            for _ in range(int(math.log(scale, 2))):
                add_stage(2, 4)
        elif scale == 3:
            add_stage(3, 9)
        else:
            raise NotImplementedError

        super(Upsampler, self).__init__(*layers)
| 30.532609
| 85
| 0.543254
|
77b3553aafc4553d34e0064cbcecac4a5bf75387
| 16,523
|
py
|
Python
|
vunit/vhdl/check/tools/generate_check_match.py
|
eataesierp/vunit
|
96a24b38ba1553f520beb81f26753ca85dafb707
|
[
"Artistic-2.0",
"Apache-2.0"
] | 507
|
2016-02-10T16:45:16.000Z
|
2022-03-26T13:22:51.000Z
|
vunit/vhdl/check/tools/generate_check_match.py
|
eataesierp/vunit
|
96a24b38ba1553f520beb81f26753ca85dafb707
|
[
"Artistic-2.0",
"Apache-2.0"
] | 572
|
2016-01-27T11:31:10.000Z
|
2022-03-23T11:46:56.000Z
|
vunit/vhdl/check/tools/generate_check_match.py
|
eataesierp/vunit
|
96a24b38ba1553f520beb81f26753ca85dafb707
|
[
"Artistic-2.0",
"Apache-2.0"
] | 224
|
2016-01-29T09:09:53.000Z
|
2022-03-28T12:10:56.000Z
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2021, Lars Asplund lars.anders.asplund@gmail.com
from pathlib import Path
from string import Template
from vunit.vhdl.check.tools.generate_check_equal import replace_region
api_template = """ procedure check_match(
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "");
procedure check_match(
variable pass : out boolean;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "");
procedure check_match(
constant checker : in checker_t;
variable pass : out boolean;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "");
procedure check_match(
constant checker : in checker_t;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "");
impure function check_match(
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "")
return boolean;
impure function check_match(
constant checker : in checker_t;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "")
return boolean;
"""
impl_template = """ procedure check_match(
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "") is
variable pass : boolean;
begin
-- pragma translate_off
check_match(default_checker, pass, got, expected, msg, level, path_offset + 1, line_num, file_name);
-- pragma translate_on
end;
procedure check_match(
variable pass : out boolean;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "") is
begin
-- pragma translate_off
check_match(default_checker, pass, got, expected, msg, level, path_offset + 1, line_num, file_name);
-- pragma translate_on
end;
procedure check_match(
constant checker : in checker_t;
variable pass : out boolean;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "") is
begin
-- pragma translate_off
if std_match(got, expected) then
pass := true;
if is_pass_visible(checker) then
passing_check(
checker,
std_msg(
"Match check passed", msg,
"Got " & $got_str & ". " &
"Expected " & $expected_str & "."),
path_offset + 1, line_num, file_name);
else
passing_check(checker);
end if;
else
pass := false;
failing_check(
checker,
std_msg(
"Match check failed", msg,
"Got " & $got_str & ". " &
"Expected " & $expected_str & "."),
level, path_offset + 1, line_num, file_name);
end if;
-- pragma translate_on
end;
procedure check_match(
constant checker : in checker_t;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "") is
variable pass : boolean;
begin
-- pragma translate_off
check_match(checker, pass, got, expected, msg, level, path_offset + 1, line_num, file_name);
-- pragma translate_on
end;
impure function check_match(
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "")
return boolean is
variable pass : boolean;
begin
-- pragma translate_off
check_match(default_checker, pass, got, expected, msg, level, path_offset + 1, line_num, file_name);
-- pragma translate_on
return pass;
end;
impure function check_match(
constant checker : in checker_t;
constant got : in $got_type;
constant expected : in $expected_type;
constant msg : in string := check_result_tag;
constant level : in log_level_t := null_log_level;
constant path_offset : in natural := 0;
constant line_num : in natural := 0;
constant file_name : in string := "")
return boolean is
variable pass : boolean;
begin
-- pragma translate_off
check_match(checker, pass, got, expected, msg, level, path_offset + 1, line_num, file_name);
-- pragma translate_on
return pass;
end;
"""
test_template = """
$first_if run("Test should pass on $left_type matching $right_type") then
get_checker_stat(stat);
check_match($left_pass, $right_pass);
check_match($left_pass_dc, $right_pass);
check_match($left_pass, $right_pass_dc);
check_match($left_pass_dc, $right_pass_dc);
check_match(passed, $left_pass, $right_pass);
assert_true(passed, "Should return pass = true on passing check");
passed := check_match($left_pass, $right_pass);
assert_true(passed, "Should return pass = true on passing check");
verify_passed_checks(stat, 6);
get_checker_stat(my_checker, stat);
check_match(my_checker, $left_pass, $right_pass);
check_match(my_checker, passed, $left_pass, $right_pass);
assert_true(passed, "Should return pass = true on passing check");
passed := check_match(my_checker, $left_pass, $right_pass);
assert_true(passed, "Should return pass = true on passing check");
verify_passed_checks(my_checker,stat, 3);
elsif run("Test pass message for $left_type matching $right_type") then
mock(check_logger);
check_match($left_pass, $right_pass);
check_only_log(check_logger, "Match check passed - Got $pass_str. Expected $pass_str.", pass);
check_match($left_pass_dc, $right_pass, "");
check_only_log(check_logger, "Got $pass_dc_str. Expected $pass_str.", pass);
check_match($left_pass, $right_pass_dc, "Checking my data");
check_only_log(check_logger, "Checking my data - Got $pass_str. Expected $pass_dc_str.", pass);
check_match($left_pass_dc, $right_pass_dc, result("for my data"));
check_only_log(check_logger,
"Match check passed for my data - Got $pass_dc_str. Expected $pass_dc_str.",
pass);
unmock(check_logger);
elsif run("Test should fail on $left_type not matching $right_type") then
get_checker_stat(stat);
mock(check_logger);
check_match($left_pass, $right_fail_dc);
check_only_log(check_logger, "Match check failed - Got $pass_str. Expected $fail_dc_str.", default_level);
check_match($left_pass, $right_fail_dc, "");
check_only_log(check_logger, "Got $pass_str. Expected $fail_dc_str.", default_level);
check_match(passed, $left_pass, $right_fail, "Checking my data");
assert_true(not passed, "Should return pass = false on failing check");
check_only_log(check_logger, "Checking my data - Got $pass_str. Expected $fail_str.", default_level);
passed := check_match($left_pass, $right_fail, result("for my data"));
assert_true(not passed, "Should return pass = false on failing check");
check_only_log(check_logger, "Match check failed for my data - Got $pass_str. Expected $fail_str.",
default_level);
unmock(check_logger);
verify_passed_checks(stat, 0);
verify_failed_checks(stat, 4);
reset_checker_stat;
get_checker_stat(my_checker, stat);
mock(my_logger);
check_match(my_checker, $left_pass, $right_fail);
check_only_log(my_logger, "Match check failed - Got $pass_str. Expected $fail_str.", default_level);
check_match(my_checker, passed, $left_pass, $right_fail);
assert_true(not passed, "Should return pass = false on failing check");
check_only_log(my_logger, "Match check failed - Got $pass_str. Expected $fail_str.", default_level);
passed := check_match(my_checker, $left_pass, $right_fail, result("for my data"));
assert_true(not passed, "Should return pass = false on failing check");
check_only_log(my_logger, "Match check failed for my data - Got $pass_str. Expected $fail_str.",
default_level);
unmock(my_logger);
verify_passed_checks(my_checker, stat, 0);
verify_failed_checks(my_checker, stat, 3);
reset_checker_stat(my_checker);
"""
# One tuple per (got, expected) type pairing fed to the templates above.
# Fields 0-1 are the VHDL got/expected types; the remaining entries supply
# the template's value and message placeholders. Based on the placeholder
# names in test_template they appear to be: left_pass, right_pass,
# left_pass_dc, right_pass_dc, right_fail, right_fail_dc, pass_str,
# fail_str, pass_dc_str, fail_dc_str ("_dc" = contains '-' don't-care
# bits) — confirm against generate_test's substitute() call.
combinations = [
    (
        "unsigned",
        "unsigned",
        """unsigned'(X"A5")""",
        """unsigned'(X"A5")""",
        """unsigned'("1010----")""",
        """unsigned'("1010----")""",
        """unsigned'(X"5A")""",
        """unsigned'("0101----")""",
        "1010_0101 (165)",
        "0101_1010 (90)",
        "1010_---- (NaN)",
        "0101_---- (NaN)",
    ),
    (
        "std_logic_vector",
        "std_logic_vector",
        """std_logic_vector'(X"A5")""",
        """std_logic_vector'(X"A5")""",
        """std_logic_vector'("1010----")""",
        """std_logic_vector'("1010----")""",
        """std_logic_vector'(X"5A")""",
        """std_logic_vector'("0101----")""",
        "1010_0101 (165)",
        "0101_1010 (90)",
        "1010_---- (NaN)",
        "0101_---- (NaN)",
    ),
    (
        "signed",
        "signed",
        """signed'(X"A5")""",
        """signed'(X"A5")""",
        """signed'("1010----")""",
        """signed'("1010----")""",
        """signed'(X"5A")""",
        """signed'("0101----")""",
        "1010_0101 (-91)",
        "0101_1010 (90)",
        "1010_---- (NaN)",
        "0101_---- (NaN)",
    ),
    (
        "std_logic",
        "std_logic",
        "std_logic'('1')",
        "'1'",
        "'-'",
        "'-'",
        "'0'",
        "'0'",
        "1",
        "0",
        "-",
        "0",
    ),
]
def generate_api():
    """Render the check_match API declaration for every supported type
    combination and return the concatenated VHDL source."""
    template = Template(api_template)
    return "".join(
        template.substitute(got_type=combo[0], expected_type=combo[1])
        for combo in combinations
    )
def dual_format(base_type, got_or_expected):
    """Build the VHDL expression that renders an operand in two forms.

    Vector-like types (``unsigned``, ``signed``, ``std_logic_vector``) are
    shown as a nibble string followed by their integer value. Any other
    type is shown with ``to_string`` followed by a nibble string padded to
    the length of the opposite operand.

    Parameters:
        base_type (str): VHDL type of the operand.
        got_or_expected (str): either ``"got"`` or ``"expected"``, naming
            the operand to format.

    Returns:
        str: VHDL source code for the formatting expression.
    """
    opposite = "expected" if got_or_expected == "got" else "got"

    if base_type in ("unsigned", "signed", "std_logic_vector"):
        return (
            'to_nibble_string({v}) & " (" & '
            'to_integer_string({v}) & ")"'
        ).format(v=got_or_expected)

    # Scalars are padded to the other operand's length before nibble
    # formatting; integers need a signed cast, everything else unsigned.
    cast = "signed" if base_type == "integer" else "unsigned"
    return (
        'to_string({v}) & " (" & '
        "to_nibble_string(to_sufficient_{c}({v}, {o}'length)) & "
        '")"'
    ).format(v=got_or_expected, c=cast, o=opposite)
def generate_impl():
    """Render the check_match implementation for every supported type
    combination and return the concatenated VHDL source."""
    vector_types = ("unsigned", "signed", "std_logic_vector")
    impl = ""
    for combo in combinations:
        got_type, expected_type = combo[0], combo[1]
        # Vector operands get the dual "bits (integer)" formatting; plain
        # scalar pairs fall back to simple to_string rendering.
        if got_type in vector_types or expected_type in vector_types:
            got_str = dual_format(got_type, "got")
            expected_str = dual_format(expected_type, "expected")
        else:
            got_str = "to_string(got)"
            expected_str = "to_string(expected)"
        impl += Template(impl_template).substitute(
            got_type=got_type,
            expected_type=expected_type,
            got_str=got_str,
            expected_str=expected_str,
        )
    return impl
def generate_test():
    """Generate the VHDL test bench (tb_check_match) exercising check_match
    for every supported type combination.

    Returns:
        str: complete VHDL source of the generated test bench.

    Note:
        The generated suite header previously claimed to verify the
        "check_equal" checker — a copy/paste slip; it verifies check_match.
    """
    # Static header: license, context clauses, entity and process prologue.
    test = """\
-- This test suite verifies the check_match checker.
--
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this file,
-- You can obtain one at http://mozilla.org/MPL/2.0/.
--
-- Copyright (c) 2014-2021, Lars Asplund lars.anders.asplund@gmail.com

-- vunit: run_all_in_same_sim

library ieee;
use ieee.std_logic_1164.all;
use ieee.numeric_std.all;

library vunit_lib;
use vunit_lib.log_levels_pkg.all;
use vunit_lib.logger_pkg.all;
use vunit_lib.checker_pkg.all;
use vunit_lib.check_pkg.all;
use vunit_lib.run_types_pkg.all;
use vunit_lib.run_pkg.all;

use work.test_support.all;

entity tb_check_match is
  generic (
    runner_cfg : string);
end entity tb_check_match;

architecture test_fixture of tb_check_match is
begin
  check_match_runner : process
    variable stat : checker_stat_t;
    variable my_checker : checker_t := new_checker("my_checker");
    constant my_logger : logger_t := get_logger(my_checker);
    variable passed : boolean;
    constant default_level : log_level_t := error;
  begin
    test_runner_setup(runner, runner_cfg);

    while test_suite loop"""

    # One test case per type combination; only the first uses "if", the
    # remaining cases chain with "elsif".
    for idx, combo in enumerate(combinations):
        t = Template(test_template)
        test += t.substitute(
            first_if="if" if idx == 0 else "elsif",
            left_type=combo[0],
            right_type=combo[1],
            left_pass=combo[2],
            right_pass=combo[3],
            left_pass_dc=combo[4],
            right_pass_dc=combo[5],
            right_fail=combo[6],
            right_fail_dc=combo[7],
            pass_str=combo[8],
            fail_str=combo[9],
            pass_dc_str=combo[10],
            fail_dc_str=combo[11],
        )

    # Static footer: close the dispatch chain and the test process.
    test += """
      end if;
    end loop;

    test_runner_cleanup(runner);
    wait;
  end process;

  test_runner_watchdog(runner, 2 us);
end test_fixture;
"""

    return test
def main():
    """Regenerate the check_match API, implementation and test bench files."""
    src_dir = Path(__file__).parent.parent / "src"
    replace_region("check_match", str(src_dir / "check_api.vhd"), generate_api())
    replace_region("check_match", str(src_dir / "check.vhd"), generate_impl())

    # The test bench is written from scratch rather than region-patched.
    test_file = Path(__file__).parent.parent / "test" / "tb_check_match.vhd"
    with test_file.open("wb") as fptr:
        fptr.write(generate_test().encode())


if __name__ == "__main__":
    main()
| 35.230277
| 120
| 0.604733
|
2e5ba1d17ad984e74356f1269202bb07d05a6d3a
| 6,680
|
py
|
Python
|
src/transformers/models/herbert/tokenization_herbert_fast.py
|
JadeMaveric/transformers
|
fb2b89840bf2ab9f74702bf83af8ddf92b61efb3
|
[
"Apache-2.0"
] | 34
|
2021-07-05T02:44:31.000Z
|
2022-03-28T14:39:57.000Z
|
src/transformers/models/herbert/tokenization_herbert_fast.py
|
JadeMaveric/transformers
|
fb2b89840bf2ab9f74702bf83af8ddf92b61efb3
|
[
"Apache-2.0"
] | 3
|
2021-07-22T15:49:44.000Z
|
2022-03-19T08:46:27.000Z
|
src/transformers/models/herbert/tokenization_herbert_fast.py
|
JadeMaveric/transformers
|
fb2b89840bf2ab9f74702bf83af8ddf92b61efb3
|
[
"Apache-2.0"
] | 6
|
2021-07-05T02:44:32.000Z
|
2022-02-14T10:10:13.000Z
|
# coding=utf-8
# Copyright 2020 The Google AI Language Team Authors, Allegro.pl, Facebook Inc. and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Optional, Tuple
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
from .tokenization_herbert import HerbertTokenizer
logger = logging.get_logger(__name__)

# File names under which the tokenizer vocabulary and BPE merges are stored.
VOCAB_FILES_NAMES = {
    "vocab_file": "vocab.json",
    "merges_file": "merges.txt",
}

# Download locations of the vocabulary files for each pretrained checkpoint.
PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "allegro/herbert-base-cased": "https://huggingface.co/allegro/herbert-base-cased/resolve/main/vocab.json"
    },
    "merges_file": {
        "allegro/herbert-base-cased": "https://huggingface.co/allegro/herbert-base-cased/resolve/main/merges.txt"
    },
}

# Maximum input length (in tokens) supported by each pretrained model.
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {"allegro/herbert-base-cased": 514}
# No checkpoint-specific constructor overrides are required.
PRETRAINED_INIT_CONFIGURATION = {}
class HerbertTokenizerFast(PreTrainedTokenizerFast):
    """
    Construct a "Fast" BPE tokenizer for HerBERT (backed by HuggingFace's `tokenizers` library).

    Peculiarities:

    - uses BERT's pre-tokenizer: BertPreTokenizer splits tokens on spaces, and also on punctuation. Each occurrence of
      a punctuation character will be treated separately.

    This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the methods. Users
    should refer to the superclass for more information regarding methods.

    Args:
        vocab_file (:obj:`str`):
            Path to the vocabulary file.
        merges_file (:obj:`str`):
            Path to the merges file.
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    slow_tokenizer_class = HerbertTokenizer

    def __init__(self, vocab_file, merges_file, tokenizer_file=None, **kwargs):
        # HerBERT uses a fixed set of special tokens; enforce them regardless
        # of what the caller passed in.
        kwargs.update(
            cls_token="<s>",
            unk_token="<unk>",
            pad_token="<pad>",
            mask_token="<mask>",
            sep_token="</s>",
        )
        super().__init__(
            vocab_file,
            merges_file,
            tokenizer_file=tokenizer_file,
            **kwargs,
        )

    def build_inputs_with_special_tokens(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
        adding special tokens. An HerBERT, like BERT sequence has the following format:

        - single sequence: ``<s> X </s>``
        - pair of sequences: ``<s> A </s> B </s>``

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: List of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
        """
        output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
        if token_ids_1 is not None:
            output += token_ids_1 + [self.sep_token_id]
        return output

    def get_special_tokens_mask(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
    ) -> List[int]:
        """
        Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
        special tokens using the tokenizer ``prepare_for_model`` method.

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.
            already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Whether or not the token list is already formatted with special tokens for the model.

        Returns:
            :obj:`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
        """
        if already_has_special_tokens:
            if token_ids_1 is not None:
                raise ValueError(
                    "You should not supply a second sequence if the provided sequence of "
                    "ids is already formatted with special tokens for the model."
                )
            special_ids = {self.sep_token_id, self.cls_token_id}
            return [1 if token in special_ids else 0 for token in token_ids_0]

        mask = [1] + [0] * len(token_ids_0) + [1]
        if token_ids_1 is not None:
            mask += [0] * len(token_ids_1) + [1]
        return mask

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. HerBERT, like
        BERT sequence pair mask has the following format:

        ::

            0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
            | first sequence    | second sequence |

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: List of `token type IDs <../glossary.html#token-type-ids>`_ according to the given
            sequence(s).
        """
        # The first segment is <s> token_ids_0 </s>, hence the +2.
        first_segment_len = len(token_ids_0) + 2
        if token_ids_1 is None:
            return [0] * first_segment_len
        # The second segment is token_ids_1 </s>, hence the +1.
        return [0] * first_segment_len + [1] * (len(token_ids_1) + 1)

    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
        saved_files = self._tokenizer.model.save(save_directory, name=filename_prefix)
        return tuple(saved_files)
| 38.837209
| 119
| 0.642964
|
c743a54a27af11f0962aa19e49053476ab1b8815
| 29,997
|
py
|
Python
|
konrad/cloud.py
|
Stella-B/konrad
|
195e242f9ef20acd6cd8eb6068ae07632b66fe83
|
[
"MIT"
] | null | null | null |
konrad/cloud.py
|
Stella-B/konrad
|
195e242f9ef20acd6cd8eb6068ae07632b66fe83
|
[
"MIT"
] | null | null | null |
konrad/cloud.py
|
Stella-B/konrad
|
195e242f9ef20acd6cd8eb6068ae07632b66fe83
|
[
"MIT"
] | null | null | null |
"""This module contains a choice of clouds, which can be used either in the RCE
simulations or simply for radiative flux or heating rate calculations.
Depending on the choice of cloud, a certain set-up of the RRTMG radiation scheme
must be used.
**In an RCE simulation**
Create an instance of a cloud class, *e.g.* a :py:class:`DirectInputCloud`,
create an appropriate radiation model, and run an RCE simulation:
>>> import konrad
>>> cloudy_cloud = konrad.cloud.DirectInputCloud(
>>> numlevels=..., cloud_fraction=..., lw_optical_thickness=...,
>>> sw_optical_thickness=...)
>>> rt = konrad.radiation.RRTMG(
>>> mcica=True, cloud_optical_properties='direct_input')
>>> rce = konrad.RCE(atmosphere=..., cloud=cloudy_cloud, radiation=rt)
>>> rce.run()
**Calculating radiative fluxes or heating rates**
Create an instance of a cloud class, *e.g.* a :py:class:`PhysicalCloud`,
create an appropriate radiation model and run radiative transfer:
>>> import konrad
>>> another_cloud = konrad.cloud.PhysicalCloud(
>>> numlevels=..., cloud_fraction=..., mass_ice=..., mass_water=...,
>>> ice_particle_size=..., droplet_radius=...)
>>> rt = konrad.radiation.RRTMG(
>>> mcica=True, cloud_optical_properties='liquid_and_ice_clouds')
>>> rt.calc_radiation(atmosphere=..., surface=..., cloud=another_cloud)
"""
import abc
import logging
import numbers
import numpy as np
from scipy.interpolate import interp1d
from sympl import DataArray
from konrad.component import Component
from konrad import utils
from konrad.cloudoptics import EchamCloudOptics
logger = logging.getLogger(__name__)

# Public API of this module.
__all__ = [
    'get_rectangular_profile',
    'Cloud',
    'ClearSky',
    'DirectInputCloud',
    'PhysicalCloud',
    'HighCloud',
    'MidLevelCloud',
    'LowCloud',
    'ConceptualCloud',
    'CloudEnsemble',
]
def get_rectangular_profile(z, value, ztop, depth):
"""Produce a rectangular profile, an array containing zeros and the value
'value' corresponding to a certain height range.
Parameters:
z (ndarray): height
value (int/float): non-zero value / thickness of rectangle
ztop (int/float): height, indicating the top of the rectangle
depth (int/float): height, indicating the depth of the rectangle
ztop - depth gives the base of the rectangle
"""
p = np.zeros(z.shape)
inrectangle = np.logical_and(z < ztop, z > ztop - depth)
p[inrectangle] = value
return p
class Cloud(Component, metaclass=abc.ABCMeta):
    """Base class to define abstract methods for all cloud handlers.
    Default properties include a cloud area fraction equal to zero everywhere
    (ie no cloud)."""

    #: number of longwave bands used in the radiation scheme
    num_longwave_bands = 16

    #: number of shortwave bands used in the radiation scheme
    num_shortwave_bands = 14

    def __init__(self, numlevels, cloud_fraction=0, mass_ice=0, mass_water=0,
                 ice_particle_size=20, droplet_radius=10,
                 lw_optical_thickness=0, sw_optical_thickness=0,
                 forward_scattering_fraction=0, asymmetry_parameter=0.85,
                 single_scattering_albedo=0.9,
                 rrtmg_cloud_optical_properties='liquid_and_ice_clouds',
                 rrtmg_cloud_ice_properties='ebert_curry_two',
                 ):
        """Create a cloud. Which of the input parameters are used and which
        ignored depends on the set-up of the radiation scheme.

        Parameters:
            numlevels (int): Number of atmospheric levels.

            cloud_fraction (float / ndarray / DataArray): cloud area fraction

            mass_ice (float / ndarray / DataArray): mass content of cloud ice
                [kg m-2]

            mass_water (float / ndarray / DataArray): mass content of cloud
                liquid water [kg m-2]

            ice_particle_size (float / ndarray / DataArray): cloud ice particle
                size [micrometers]

            droplet_radius (float / ndarray / DataArray): cloud water droplet
                radius [micrometers]

            lw_optical_thickness (float / DataArray): longwave optical
                thickness of the cloud

            sw_optical_thickness (float / DataArray): shortwave optical
                thickness of the cloud

            forward_scattering_fraction (float / DataArray): cloud forward
                scattering fraction (for the shortwave component of RRTMG)
                This is a scaling factor for the other shortwave parameters,
                if it is set to 0, no scaling is applied.

            asymmetry_parameter (float / DataArray): cloud asymmetry parameter
                (for the shortwave component of RRTMG)

            single_scattering_albedo (float / DataArray): single scattering
                albedo due to cloud (for the shortwave component of RRTMG)

            rrtmg_cloud_optical_properties (str):
                Choose how cloud properties are calculated by RRTMG.

                * :code:`direct_input`
                    Both cloud fraction and optical depth must be
                    input directly to the :py:mod:`konrad.cloud` instance.
                    Other cloud properties are irrelevant.
                * :code:`single_cloud_type`
                    Cloud fraction (1 or 0 at each level) and
                    cloud physical properties are required as input. Ice and
                    liquid water clouds are treated together, with a constant
                    value of cloud absorptivity. Not available with mcica.
                * :code:`liquid_and_ice_clouds`
                    Cloud fraction and cloud physical properties are required
                    as input. Ice and liquid clouds are treated separately.
                    Cloud optical depth is calculated from the cloud ice and
                    water particle sizes and the mass content of cloud and
                    water.

            rrtmg_cloud_ice_properties (str):
                Choose which method is used to calculate the cloud optical
                properties of ice clouds from their physical properties.

                * :code:`ebert_curry_one`
                * :code:`ebert_curry_two`
                * :code:`key_streamer_manual`
                * :code:`fu`
        """
        self.numlevels = numlevels

        # Coordinates shared by all cloud variables stored on this component.
        self.coords = {
            'mid_levels': np.arange(self.numlevels),
            'num_longwave_bands': np.arange(self.num_longwave_bands),
            'num_shortwave_bands': np.arange(self.num_shortwave_bands),
        }

        # Physical cloud properties are resolved per model level only.
        physical_props = {
            'mass_content_of_cloud_liquid_water_in_atmosphere_layer':
                (mass_water, 'kg m^-2'),
            'mass_content_of_cloud_ice_in_atmosphere_layer':
                (mass_ice, 'kg m^-2'),
            'cloud_area_fraction_in_atmosphere_layer':
                (cloud_fraction, 'dimensionless'),
            'cloud_ice_particle_size':
                (ice_particle_size, 'micrometers'),
            'cloud_water_droplet_radius':
                (droplet_radius, 'micrometers'),
        }

        # Register each physical property as a level-resolved DataArray.
        for name, (var, unit) in physical_props.items():
            dataarray = self.get_p_data_array(var, units=unit)
            self[name] = dataarray.dims, dataarray

        # Optical properties are resolved per level *and* spectral band; the
        # boolean flags whether the variable belongs to the shortwave bands.
        cloud_optics = {
            'longwave_optical_thickness_due_to_cloud':
                (lw_optical_thickness, 'dimensionless', False),
            'cloud_forward_scattering_fraction':
                (forward_scattering_fraction, 'dimensionless', True),
            'cloud_asymmetry_parameter':
                (asymmetry_parameter, 'dimensionless', True),
            'shortwave_optical_thickness_due_to_cloud':
                (sw_optical_thickness, 'dimensionless', True),
            'single_scattering_albedo_due_to_cloud':
                (single_scattering_albedo, 'dimensionless', True),
        }

        for name, (var, unit, is_sw) in cloud_optics.items():
            dataarray = self.get_waveband_data_array(var, units=unit, sw=is_sw)
            self[name] = dataarray.dims, dataarray

        self._rrtmg_cloud_optical_properties = rrtmg_cloud_optical_properties
        self._rrtmg_cloud_ice_properties = rrtmg_cloud_ice_properties

    def get_p_data_array(self, values, units='kg m^-2'):
        """Return a level-resolved DataArray built from ``values``.

        Scalars are broadcast over all model levels; arrays must already
        match the number of levels; DataArrays are passed through unchanged.
        """
        if isinstance(values, DataArray):
            return values
        elif isinstance(values, np.ndarray):
            if values.shape != (self.numlevels,):
                raise ValueError(
                    'shape mismatch: Shape of cloud parameter input array '
                    f'{values.shape} is not compatible with number of model '
                    f'levels ({self.numlevels},).'
                )
        elif isinstance(values, numbers.Number):
            values = values * np.ones(self.numlevels,)
        else:
            raise TypeError(
                'Cloud variable input must be a single value, '
                '`numpy.ndarray` or a `sympl.DataArray`'
            )

        return DataArray(values, dims=('mid_levels',), attrs={'units': units})

    def get_waveband_data_array(self, values, units='dimensionless', sw=True):
        """Return a (level, band)-resolved DataArray built from ``values``.

        Scalars are broadcast over levels and bands; 1-D arrays are repeated
        along the missing dimension (levels or bands, inferred from shape);
        DataArrays are passed through unchanged.
        """
        if isinstance(values, DataArray):
            return values

        if sw:
            dims = ('mid_levels', 'num_shortwave_bands')
            numbands = self.num_shortwave_bands
        else:
            dims = ('mid_levels', 'num_longwave_bands')
            numbands = self.num_longwave_bands

        if isinstance(values, numbers.Number):
            values = values * np.ones((self.numlevels, numbands))
        elif isinstance(values, np.ndarray):
            if values.shape == (self.numlevels,):
                # One value per level: repeat across all bands.
                values = np.repeat(
                    values[:, np.newaxis], numbands, axis=1)
            elif values.shape == (numbands,):
                # One value per band: repeat across all levels.
                values = np.repeat(
                    values[np.newaxis, :], self.numlevels, axis=0)
            elif not values.shape == (self.numlevels, numbands):
                raise ValueError(
                    f'shape mismatch: input array of shape {values.shape} '
                    'is not supported. Allowed shapes are: '
                    f'({self.numlevels},), ({numbands},), or '
                    f'({self.numlevels}, {numbands}).'
                )
        else:
            raise TypeError(
                'Cloud variable input must be a single value, '
                '`numpy.ndarray` or a `sympl.DataArray`'
            )

        return DataArray(values, dims=dims, attrs={'units': units})

    @classmethod
    def from_atmosphere(cls, atmosphere, **kwargs):
        """Initialize a cloud component matching the given atmosphere.

        Parameters:
            atmosphere (``konrad.atmosphere.Atmosphere``):
                Atmosphere component.
        """
        return cls(numlevels=atmosphere['plev'].size, **kwargs)

    @abc.abstractmethod
    def update_cloud_profile(self, atmosphere, convection, radiation,
                             **kwargs):
        """Return the cloud parameters for the radiation scheme.

        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): atmosphere model
            convection (konrad.convection): convection scheme
            radiation (konrad.radiation): radiation scheme
        """

    def overcast(self):
        """Set cloud fraction in cloud layers to ``1`` (full overcast)."""
        cloud_fraction = self['cloud_area_fraction_in_atmosphere_layer'][:]
        # Any non-zero fraction becomes exactly 1; clear layers stay 0.
        cloud_mask = (cloud_fraction > 0).astype(float)

        self['cloud_area_fraction_in_atmosphere_layer'][:] = cloud_mask
class ClearSky(Cloud):
    """Cloud-free sky.

    Keeps the default cloud properties (zero cloud fraction everywhere)
    untouched for the whole simulation.
    """
    def update_cloud_profile(self, *args, **kwargs):
        """No-op: a clear sky never changes."""
        return
class PhysicalCloud(Cloud):
    """Cloud defined by its physical bulk properties.

    The cloud is described by its ice and liquid water mass per model level
    together with the respective particle sizes. To be used with
    cloud_optical_properties='liquid_and_ice_clouds' in climt/RRTMG.
    """
    def __init__(self, numlevels, cloud_fraction, mass_water, mass_ice,
                 ice_particle_size, droplet_radius):
        """Initialize a cloud component.

        Parameters:
            numlevels (int): Number of atmospheric levels.

            cloud_fraction (float / ndarray / DataArray): cloud area fraction

            mass_water (float / ndarray / DataArray): mass content of cloud
                liquid water [kg m-2]

            mass_ice (float / ndarray / DataArray): mass content of cloud ice
                [kg m-2]

            ice_particle_size (float / ndarray / DataArray): cloud ice particle
                size [micrometers]

            droplet_radius (float / ndarray / DataArray): cloud water droplet
                radius [micrometers]
        """
        super().__init__(
            numlevels=numlevels,
            cloud_fraction=cloud_fraction,
            mass_water=mass_water,
            mass_ice=mass_ice,
            ice_particle_size=ice_particle_size,
            droplet_radius=droplet_radius,
            rrtmg_cloud_optical_properties='liquid_and_ice_clouds',
        )

    def update_cloud_profile(self, *args, **kwargs):
        """The cloud is fixed in pressure coordinates; nothing to update."""
        return
class DirectInputCloud(Cloud):
    """ To be used with cloud_optical_properties='direct_input' in climt/RRTMG.
    """
    #: Variables that are fed directly into the radiation scheme and are
    #: shifted/superposed by the methods below.
    direct_input_parameters = {
        'cloud_area_fraction_in_atmosphere_layer',
        'longwave_optical_thickness_due_to_cloud',
        'cloud_forward_scattering_fraction',
        'cloud_asymmetry_parameter',
        'shortwave_optical_thickness_due_to_cloud',
        'single_scattering_albedo_due_to_cloud',
    }

    def __init__(self, numlevels, cloud_fraction, lw_optical_thickness,
                 sw_optical_thickness, coupling='convective_top',
                 forward_scattering_fraction=0, asymmetry_parameter=0.85,
                 single_scattering_albedo=0.9, norm_index=None):
        """Define a cloud based on properties that are directly used by the
        radiation scheme, namely cloud optical depth and scattering parameters.

        Parameters:
            numlevels (int): Number of atmospheric levels.

            coupling (str): Mechanism with which the cloud is coupled to the
                atmospheric profile:

                    * 'convective_top': Coupling to the convective top
                    * 'freezing_level': Coupling to the freezing level
                    * 'subsidence_divergence': Coupling to the level of
                      maximum subsidence divergence
                    * 'pressure': Fixed at pressure (no coupling)

            cloud_fraction (float / ndarray / DataArray): cloud area fraction

            lw_optical_thickness (float / DataArray): longwave optical
                thickness of the cloud

            sw_optical_thickness (float / DataArray): shortwave optical
                thickness of the cloud

            forward_scattering_fraction (float / DataArray): cloud forward
                scattering fraction (for the shortwave component of RRTMG)
                This is a scaling factor for the other shortwave parameters,
                if it is set to 0, no scaling is applied.

            asymmetry_parameter (float / DataArray): cloud asymmetry parameter
                (for the shortwave component of RRTMG)

            single_scattering_albedo (float / DataArray): single scattering
                albedo due to cloud (for the shortwave component of RRTMG)

            norm_index (int / None): model level index for coupling the cloud
        """
        super().__init__(
            numlevels=numlevels,
            cloud_fraction=cloud_fraction,
            lw_optical_thickness=lw_optical_thickness,
            sw_optical_thickness=sw_optical_thickness,
            forward_scattering_fraction=forward_scattering_fraction,
            asymmetry_parameter=asymmetry_parameter,
            single_scattering_albedo=single_scattering_albedo,
            rrtmg_cloud_optical_properties='direct_input'
        )

        self._norm_index = norm_index
        # Cache of interpolation functions, one per direct-input variable.
        self._interp_cache = {}

        self.coupling = coupling

    def __add__(self, other):
        """Define the superposition of two clouds in a layer."""
        name_map = (
            ('cloud_fraction',
             'cloud_area_fraction_in_atmosphere_layer'),
            ('lw_optical_thickness',
             'longwave_optical_thickness_due_to_cloud'),
            ('sw_optical_thickness',
             'shortwave_optical_thickness_due_to_cloud'),
            ('forward_scattering_fraction',
             'cloud_forward_scattering_fraction'),
            ('asymmetry_parameter',
             'cloud_asymmetry_parameter'),
            ('single_scattering_albedo',
             'single_scattering_albedo_due_to_cloud'),
        )

        # The superposition of two clouds is implemented following a
        # "The winner takes it all"-approach:
        # For each cloud layer, the properties of the bigger cloud (in terms
        # of cloud fraction) is used.
        other_is_bigger = (
                other['cloud_area_fraction_in_atmosphere_layer']
                > self['cloud_area_fraction_in_atmosphere_layer']
        )

        kwargs = {}
        for kwname, varname in name_map:
            arr = self[varname].values.copy()
            arr[other_is_bigger] = other[varname][other_is_bigger]
            kwargs[kwname] = arr

        summed_cloud = DirectInputCloud(numlevels=self.numlevels, **kwargs)

        return summed_cloud

    def interpolation_function(self, cloud_parameter):
        """ Calculate the interpolation function, to be used to maintain the
        cloud optical properties and keep the cloud attached to a normalisation
        level (self._norm_index). A separate interpolation function is required
        for each cloud parameter that needs to be interpolated.

        Parameters:
            cloud_parameter (DataArray): cloud property to be interpolated

        Returns:
            scipy.interpolate.interpolate.interp1d
        """
        # Express levels relative to the normalisation level so the profile
        # can later be re-evaluated around a new normalisation level.
        normed_levels = np.arange(0, self.numlevels) - self._norm_index

        interpolation_f = interp1d(
            normed_levels,
            cloud_parameter.values,
            fill_value=0,
            bounds_error=False,
            axis=0,
        )
        return interpolation_f

    def shift_property(self, cloud_parameter, interpolation_f, norm_new):
        """Shift the cloud area fraction according to a normalisation level.

        Parameters:
            cloud_parameter (DataArray): cloud property to be shifted

            interpolation_f (scipy.interpolate.interpolate.interp1d):
                interpolation object calculated by interpolation_function

            norm_new (int): normalisation index [model level]

        Returns:
            DataArray: shifted cloud property
        """
        levels = np.arange(0, self.numlevels)
        if not np.isnan(norm_new):
            # Move the cloud to the new normalisation level, if there is one.
            cloud_parameter.values = interpolation_f(levels - norm_new)
        else:
            # Otherwise keep the cloud where it is.
            cloud_parameter.values = interpolation_f(levels - self._norm_index)

        return cloud_parameter

    def shift_cloud_profile(self, norm_new):
        # The first call anchors the cloud at the current normalisation level
        # and fills the interpolation cache; later calls re-use the cache.
        if self._norm_index is None:
            self._norm_index = norm_new

        for varname in self.direct_input_parameters:
            if varname not in self._interp_cache:
                self._interp_cache[varname] = self.interpolation_function(
                    cloud_parameter=self[varname])

            self[varname][:] = self.shift_property(
                cloud_parameter=self[varname],
                interpolation_f=self._interp_cache[varname],
                norm_new=norm_new,
            )

    def update_cloud_profile(self, atmosphere, convection, radiation,
                             **kwargs):
        """Shift the cloud profile according to the coupling mechanism, or
        keep it fixed with model level for 'pressure' coupling."""
        if self.coupling == 'convective_top':
            self.shift_cloud_profile(
                norm_new=convection.get('convective_top_index')[0]
            )
        elif self.coupling == 'freezing_level':
            self.shift_cloud_profile(
                norm_new=atmosphere.get_triple_point_index(),
            )
        elif self.coupling == 'subsidence_divergence':
            # Clear-sky heating rate at the latest time step.
            Qr = radiation['net_htngrt_clr'][-1]
            self.shift_cloud_profile(
                norm_new=atmosphere.get_subsidence_convergence_max_index(Qr),
            )
        elif self.coupling == 'pressure':
            return
        else:
            raise ValueError(
                'The cloud class has been initialized with an invalid '
                'cloud coupling mechanism.'
            )
class HighCloud(DirectInputCloud):
    """A high-level cloud.

    By default the cloud is coupled to the convective top. Coupling to the
    level of maximum diabatic subsidence divergence
    (``"subsidence_divergence"``) is another reasonable choice.
    """
    def __init__(self, *args, coupling='convective_top', **kwargs):
        super().__init__(*args, coupling=coupling, **kwargs)
class MidLevelCloud(DirectInputCloud):
    """A mid-level cloud.

    The cloud is always coupled to the freezing level.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, coupling='freezing_level', **kwargs)
class LowCloud(DirectInputCloud):
    """A low-level cloud.

    The cloud stays fixed in pressure coordinates (no coupling).
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, coupling='pressure', **kwargs)
class ConceptualCloud(DirectInputCloud):
    """Conceptual single-layer cloud.

    The cloud is defined by its top pressure, depth, fraction, water path
    and particle properties; its optical properties are derived with the
    ECHAM cloud-optics parameterization.
    """
    def __init__(
            self,
            atmosphere,
            cloud_top,
            depth,
            cloud_fraction,
            water_path=100e-3,
            particle_size=100.,
            phase='ice',
            coupling='pressure'
    ):
        """Initialize a conceptual cloud.

        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): Atmosphere model.
            cloud_top (float): Pressure at cloud top [Pa].
            depth (float): Cloud depths in pressure units [Pa].
            cloud_fraction (float): Cloud fraction [0-1].
            water_path (float): Integrated water path [g m^-2].
            particle_size (float): Cloud particle size [microns].
            phase (str): Phase of cloud particles, either "ice" or "liquid".
            coupling (str): Mechanism with which the cloud top is coupled to
                the atmosphere profile:

                * "pressure": Fixed at given pressure.
                * "convective_top": Coupled to the convective top.
                * "freezing_level": Coupled to the freezing level.
                * "subsidence_divergence": Coupled to the maximum subsidence
                  divergence.
                * "temperature:TTT": Coupled to the level where the
                  temperature falls below `TTT` K.
        """
        # Optical properties are computed in `update_cloud_profile`; start
        # from NaN placeholders.
        super().__init__(
            numlevels=atmosphere['plev'].size,
            cloud_fraction=np.nan,
            lw_optical_thickness=np.nan,
            sw_optical_thickness=np.nan,
        )

        self["cloud_top"] = ("time",), np.array([cloud_top])
        self["cloud_top_temperature"] = ("time",), np.array([np.nan])

        self.depth = depth
        self.coupling = coupling
        self.cloud_fraction = cloud_fraction
        self.water_path = water_path
        self.particle_size = particle_size
        self.phase = phase

        # Derive the initial profile from the given atmospheric state.
        self.update_cloud_profile(atmosphere)

    def get_cloud_optical_properties(self, water_content):
        """Return the ECHAM optical properties for the given per-layer
        water content and the cloud's particle size and phase."""
        cld_opt_props = EchamCloudOptics()
        return cld_opt_props.get_cloud_properties(
            self.particle_size, water_content, self.phase)

    @classmethod
    def from_atmosphere(cls, atmosphere, **kwargs):
        """Initialize a ConceptualCloud matching the given atmosphere.

        Unlike the base class, ``__init__`` expects the full atmosphere
        component (not the number of levels), so it is passed through
        directly. (Previously the level count was passed as ``atmosphere``,
        which crashed on the first ``atmosphere['plev']`` access.)
        """
        return cls(atmosphere, **kwargs)

    def update_cloud_top_plev(self, atmosphere, convection=None, radiation=None):
        """Determine cloud top pressure depending on coupling mechanism."""
        if self.coupling.lower() == 'pressure':
            # Fixed cloud top: nothing to update.
            return
        elif self.coupling.lower() == 'convective_top':
            if convection is not None:
                # Write into the stored "cloud_top" variable (read by
                # `update_cloud_profile`) instead of shadowing it with a
                # plain instance attribute.
                self["cloud_top"][:] = convection.get('convective_top_plev')[0]
        elif self.coupling.lower() == 'freezing_level':
            self["cloud_top"][:] = atmosphere.get_triple_point_plev()
            self["cloud_top"][:] -= self.depth / 2  # Center around freezing level
        elif self.coupling.lower() == 'subsidence_divergence':
            if radiation is not None:
                # Clear-sky heating rate at the latest time step.
                Qr = radiation['net_htngrt_clr'][-1]
                self["cloud_top"][:] = atmosphere.get_subsidence_convergence_max_plev(Qr)
        elif self.coupling.lower().startswith('temperature'):
            # Retrieve target temperature from keyword ("temperature:TTT").
            threshold = float(self.coupling.split(":")[-1])

            # Because of the atmospheric temperature profile values around 220K
            # are ambiguous. Therefore, we are limiting the possible search
            # range to the troposphere.
            cold_point = atmosphere.get_cold_point_plev()
            is_troposphere = atmosphere["plev"] > cold_point

            # `idx` refers to the masked (tropospheric) subset, so it must
            # index the masked pressure array as well.
            tropo_plev = atmosphere["plev"][is_troposphere]
            idx = np.abs(atmosphere["T"][-1, is_troposphere] - threshold).argmin()
            self["cloud_top"][:] = tropo_plev[idx]
        else:
            raise ValueError(
                'The cloud class has been initialized with an invalid '
                'cloud coupling mechanism.'
            )

    def update_cloud_top_temperature(self, atmosphere):
        """Store the temperature at the model level closest to cloud top."""
        T = atmosphere["T"][-1]
        p = atmosphere["plev"]

        self["cloud_top_temperature"][:] = T[np.abs(p - self["cloud_top"]).argmin()]

    def update_cloud_profile(self, atmosphere, convection=None, radiation=None, **kwargs):
        """Update the cloud profile depending on the atmospheric state."""
        self.update_cloud_top_plev(atmosphere, convection, radiation)
        self.update_cloud_top_temperature(atmosphere)

        # Boolean mask of the model layers inside the cloud.
        is_cloud = np.logical_and(
            atmosphere['plev'] > self["cloud_top"],
            atmosphere['plev'] < self["cloud_top"] + self.depth,
        )

        self['cloud_area_fraction_in_atmosphere_layer'][:] = (
                self.cloud_fraction * is_cloud
        )

        num_cloud_layers = int(np.count_nonzero(is_cloud))
        if num_cloud_layers == 0:
            # No model layer falls inside the cloud boundaries. The cloud
            # fraction is zero everywhere, so stale optical properties have
            # no radiative effect; skip the (zero-division) optics update.
            return

        # Distribute the total water path evenly over the cloudy layers.
        water_content_per_layer = self.water_path / num_cloud_layers

        cloud_optics = self.get_cloud_optical_properties(
            water_content=water_content_per_layer)

        for name in cloud_optics.data_vars:
            self[name][:, :] = 0
            self[name][is_cloud, :] = cloud_optics[name]
class CloudEnsemble(DirectInputCloud):
    """Wrapper to combine several clouds into a cloud ensemble.

    Warning: For now, overlapping clouds are handled very poorly!

    A cloud ensemble can consist of an arbitrary number of clouds.
    After its initialization it is handled like a normal `Cloud`:

    >>> cloud1 = HighCloud(...)
    >>> cloud2 = LowCloud(...)
    >>> cloud_ensemble = CloudEnsemble(cloud1, cloud2)

    >>> cloud_ensemble.cloud_area_fraction_in_atmosphere_layer
    """
    def __init__(self, *args):
        # Only DirectInputClouds define the `__add__` superposition used in
        # `superpose` below.
        if not all([isinstance(a, DirectInputCloud) for a in args]):
            raise ValueError(
                'Only `DirectInputCloud`s can be combined in an ensemble.')
        else:
            self._clouds = np.asarray(args)

        self._superposition = None
        self.superpose()

        self.coords = self._superposition.coords

    def __getattr__(self, name):
        # Delegate attribute access to the superposed cloud; dunder names
        # are excluded to avoid recursion (e.g. during copy/pickle).
        if name.startswith('__'):
            raise AttributeError
        return getattr(self._superposition, name)

    def __getitem__(self, name):
        return self._superposition[name]

    def superpose(self):
        """Update the superposed cloud profile."""
        # `np.sum` folds the clouds pairwise via `DirectInputCloud.__add__`.
        self._superposition = np.sum(self._clouds)

    @property
    def attrs(self):
        """Dictionary containing all attributes."""
        return self._superposition._attrs

    @property
    def data_vars(self):
        """Dictionary containing all data variables and their dimensions."""
        return self._superposition._data_vars

    @property
    def netcdf_subgroups(self):
        """Dynamically create a netCDF subgroup for each cloud."""
        return {f"cloud-{i}": cloud for i, cloud in enumerate(self._clouds)}

    def update_cloud_profile(self, *args, **kwargs):
        """Update every cloud in the cloud ensemble."""
        for cloud in self._clouds:
            cloud.update_cloud_profile(*args, **kwargs)

        self.superpose()

    def get_combinations(self):
        """Get all combinations of overlapping cloud layers."""
        if not all([isinstance(c, ConceptualCloud) for c in self._clouds]):
            raise TypeError(
                'Only `ConceptualCloud`s can be combined.'
            )

        bool_index, combined_weights = utils.calculate_combined_weights(
            weights=[cld.cloud_fraction for cld in self._clouds]
        )

        clouds = []
        for (i, p) in zip(bool_index, combined_weights):
            if not any(i):
                # No member cloud is present in this combination: represent
                # it with an all-clear dummy cloud.
                clouds.append(DirectInputCloud(
                    numlevels=self.coords['mid_levels'].size,
                    cloud_fraction=0.,
                    lw_optical_thickness=0.,
                    sw_optical_thickness=0.,
                ))
            else:
                # Superpose the participating clouds and set their cloudy
                # layers to full overcast.
                composed_clouds = np.sum(self._clouds[i])
                composed_clouds.overcast()
                clouds.append(composed_clouds)

        return combined_weights, clouds
| 39.007802
| 90
| 0.619895
|
8fa3e72cd56d9b3f31d9d7c2df89c23cc23b5974
| 1,605
|
py
|
Python
|
handsome/opencl_api.py
|
bracket/handsome
|
c93d34f94d0eea24f5514efc9bc423eb28b44a6b
|
[
"BSD-2-Clause"
] | null | null | null |
handsome/opencl_api.py
|
bracket/handsome
|
c93d34f94d0eea24f5514efc9bc423eb28b44a6b
|
[
"BSD-2-Clause"
] | null | null | null |
handsome/opencl_api.py
|
bracket/handsome
|
c93d34f94d0eea24f5514efc9bc423eb28b44a6b
|
[
"BSD-2-Clause"
] | null | null | null |
__all__ = [
'fill_micropolygon_mesh'
]
from pathlib import Path
from .util import memoize
import numpy as np
import pyopencl as cl
def fill_micropolygon_mesh(mesh, tile):
    """Rasterize *mesh* into *tile* using the OpenCL kernel.

    Uploads the mesh vertex buffer, the mesh bounds and the tile's
    coordinate image to the device, launches the ``fill_micropolygon_mesh``
    kernel with one work item per tile pixel, and copies the result back
    into ``tile.buffer`` in place.
    """
    # Removed leftover debug instrumentation that dumped every mesh vertex
    # (via capi.generate_numpy_begin/print_vertices) on each call.
    mf = cl.mem_flags
    context = cl_context()
    queue = cl_queue()

    # Read-only inputs are copied from host memory; the tile image is
    # device-side only until the explicit copy-back below.
    mesh_buffer_g = cl.Buffer(context, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=mesh.buffer)
    mesh_bounds_g = cl.Buffer(context, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=mesh.bounds)
    coordinate_g = cl.Buffer(context, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=tile.coordinate_image)
    tile_g = cl.Buffer(context, mf.WRITE_ONLY, tile.buffer.nbytes)

    program = cl_program()
    mesh_rows, mesh_columns = mesh.buffer.shape

    kernel = program.fill_micropolygon_mesh
    # Mesh dimensions are scalar args (int32); buffers are passed as-is.
    kernel.set_scalar_arg_dtypes([None, None, np.int32, np.int32, None, None])
    kernel(
        queue, tile.buffer.shape, None,
        mesh_buffer_g, mesh_bounds_g,
        mesh_rows, mesh_columns,
        coordinate_g, tile_g,
    )

    cl.enqueue_copy(queue, tile.buffer, tile_g)
@memoize
def cl_context():
    # Process-wide OpenCL context, created once thanks to @memoize.
    # NOTE(review): `create_some_context` may prompt for a device choice
    # depending on the environment (PYOPENCL_CTX) — confirm for headless use.
    return cl.create_some_context()
@memoize
def cl_queue():
    # Single shared command queue on the shared context (memoized).
    return cl.CommandQueue(cl_context())
@memoize
def cl_program():
    """Load, build (once) and return this module's OpenCL program."""
    kernel_path = Path(__file__).parent / 'opencl_src' / 'opencl_api.cl'
    kernel_source = kernel_path.read_text()
    return cl.Program(cl_context(), kernel_source).build()
| 22.928571
| 102
| 0.706542
|
d86f3671864ec9ef1ee9d5888433b7c3df8f2c5d
| 1,988
|
py
|
Python
|
examples/spend_p2wsh_to_p2wpkh.py
|
doersf/python-bitcoin-utils
|
ab558513aba706d0215463fffc615772a955a142
|
[
"MIT"
] | null | null | null |
examples/spend_p2wsh_to_p2wpkh.py
|
doersf/python-bitcoin-utils
|
ab558513aba706d0215463fffc615772a955a142
|
[
"MIT"
] | null | null | null |
examples/spend_p2wsh_to_p2wpkh.py
|
doersf/python-bitcoin-utils
|
ab558513aba706d0215463fffc615772a955a142
|
[
"MIT"
] | 2
|
2021-09-20T23:55:53.000Z
|
2021-12-14T16:06:25.000Z
|
# Copyright (C) 2018-2020 The python-bitcoin-utils developers
#
# This file is part of python-bitcoin-utils
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-bitcoin-utils, including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.
from bitcoinutils.setup import setup
from bitcoinutils.utils import to_satoshis
from bitcoinutils.transactions import Transaction, TxInput, TxOutput
from bitcoinutils.keys import PrivateKey, P2wshAddress, P2wpkhAddress
from bitcoinutils.script import Script
def main():
    """Spend a 1-of-2 P2WSH multisig UTXO, paying to a P2WPKH address."""
    # always remember to setup the network
    setup('testnet')

    # the two keys behind the 1-of-2 multisig witness script
    key_a = PrivateKey("cN1XE3ESGgdvr4fWsB7L3BcqXncUauF8Fo8zzv4Sm6WrkiGrsxrG")
    key_b = PrivateKey("cR8AkcbL2pgBswrHp28AftEznHPPLA86HiTog8MpNCibxwrsUcZ4")

    # witness script: OP_1 <pub_a> <pub_b> OP_2 OP_CHECKMULTISIG
    witness_script = Script([
        'OP_1',
        key_a.get_public_key().to_hex(),
        key_b.get_public_key().to_hex(),
        'OP_2',
        'OP_CHECKMULTISIG',
    ])

    from_address = P2wshAddress.from_script(witness_script)
    to_address = P2wpkhAddress.from_address("tb1qtstf97nhk2gycz7vl37esddjpxwt3ut30qp5pn")

    # the UTXO being spent
    utxo_txid = '2042195c40a92353f2ffe30cd0df8d177698560e81807e8bf9174a9c0e98e6c2'
    utxo_vout = 0
    utxo_amount = to_satoshis('0.01')

    # one input, a payment output and a change output (remainder is fee)
    spend_input = TxInput(utxo_txid, utxo_vout)
    payment_output = TxOutput(to_satoshis('0.0001'), to_address.to_script_pub_key())
    change_output = TxOutput(to_satoshis('0.0098'), from_address.to_script_pub_key())

    tx = Transaction([spend_input], [payment_output, change_output], has_segwit=True)

    # a single signature satisfies the 1-of-2 multisig
    sig = key_a.sign_segwit_input(tx, 0, witness_script, utxo_amount)
    tx.witnesses.append(Script(['OP_0', sig, witness_script.to_hex()]))

    # print raw signed transaction ready to be broadcasted
    print("\nRaw signed transaction:\n" + tx.serialize())
    print("\nTxId:", tx.get_txid())


if __name__ == "__main__":
    main()
| 35.5
| 110
| 0.753018
|
2db2955a69b511f0c7b8e3f01bf69012fa848255
| 2,520
|
py
|
Python
|
mpcontribs-api/mpcontribs/api/notebooks/__init__.py
|
rkingsbury/MPContribs
|
8427cb09a389706e4a4249374afa2faa51bb340e
|
[
"MIT"
] | null | null | null |
mpcontribs-api/mpcontribs/api/notebooks/__init__.py
|
rkingsbury/MPContribs
|
8427cb09a389706e4a4249374afa2faa51bb340e
|
[
"MIT"
] | null | null | null |
mpcontribs-api/mpcontribs/api/notebooks/__init__.py
|
rkingsbury/MPContribs
|
8427cb09a389706e4a4249374afa2faa51bb340e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from uuid import uuid1
from flask import current_app
from tornado.escape import json_encode, json_decode
from mpcontribs.api import create_kernel_connection, get_logger
logger = get_logger(__name__)
def run_cells(kernel_id, cid, cells):
    """Execute the code cells of a notebook on a Jupyter kernel.

    Args:
        kernel_id: id of the kernel to connect to.
        cid: contribution id used to tag the execution requests.
        cells: notebook cell dicts; only ``code`` cells are executed.

    Returns:
        dict mapping cell index -> list of output message contents
        (``stream``, ``display_data`` or ``execute_result``).

    Raises:
        ValueError: with the kernel traceback if any cell errors out.
    """
    logger.debug(f"running {cid} on {kernel_id}")
    ws = create_kernel_connection(kernel_id)
    outputs = {}

    try:
        for idx, cell in enumerate(cells):
            if cell["cell_type"] != "code":
                continue

            ws.send(
                json_encode(
                    {
                        "header": {
                            "username": cid,
                            "version": "5.3",
                            "session": "",
                            "msg_id": f"{cid}-{idx}-{uuid1()}",
                            "msg_type": "execute_request",
                        },
                        "parent_header": {},
                        "channel": "shell",
                        "content": {
                            "code": cell["source"],
                            "silent": False,
                            "store_history": False,
                            "user_expressions": {},
                            "allow_stdin": False,
                            "stop_on_error": True,
                        },
                        "metadata": {},
                        "buffers": [],
                    }
                )
            )
            outputs[idx] = []
            status = None

            # Drain messages until the kernel reports idle again and at
            # least one output for this cell has arrived.
            while status is None or status == "busy" or not outputs[idx]:
                msg = json_decode(ws.recv())
                msg_type = msg["msg_type"]
                if msg_type == "status":
                    status = msg["content"]["execution_state"]
                elif msg_type in ["stream", "display_data", "execute_result"]:
                    # display_data/execute_result required fields:
                    # "output_type", "data", "metadata"
                    # stream required fields: "output_type", "name", "text"
                    output = msg["content"]
                    output.pop("transient", None)
                    output["output_type"] = msg_type
                    # msg_id is "<cid>-<idx>-<uuid1>"; a uuid1 string always
                    # has 5 hyphen-separated groups, so the cell index is the
                    # 6th field from the end. (The previous `split("-")[1]`
                    # broke whenever `cid` itself contained a hyphen.)
                    msg_idx = msg["parent_header"]["msg_id"].split("-")[-6]
                    outputs[int(msg_idx)].append(output)
                elif msg_type == "error":
                    tb = msg["content"]["traceback"]
                    raise ValueError(tb)
    finally:
        # Always release the websocket, also when a cell raised above
        # (previously the connection leaked on the error path).
        ws.close()

    return outputs
| 38.181818
| 78
| 0.424206
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.