Dataset schema (one row per source file; value ranges as reported by the dataset viewer, ⌀ = may be null):

hexsha                                     stringlengths  40 .. 40
size                                       int64          3 .. 1.03M
ext                                        stringclasses  10 values
lang                                       stringclasses  1 value
max_stars_repo_path                        stringlengths  3 .. 972
max_stars_repo_name                        stringlengths  6 .. 130
max_stars_repo_head_hexsha                 stringlengths  40 .. 78
max_stars_repo_licenses                    listlengths    1 .. 10
max_stars_count                            int64          1 .. 191k   ⌀
max_stars_repo_stars_event_min_datetime    stringlengths  24 .. 24    ⌀
max_stars_repo_stars_event_max_datetime    stringlengths  24 .. 24    ⌀
max_issues_repo_path                       stringlengths  3 .. 972
max_issues_repo_name                       stringlengths  6 .. 130
max_issues_repo_head_hexsha                stringlengths  40 .. 78
max_issues_repo_licenses                   listlengths    1 .. 10
max_issues_count                           int64          1 .. 116k   ⌀
max_issues_repo_issues_event_min_datetime  stringlengths  24 .. 24    ⌀
max_issues_repo_issues_event_max_datetime  stringlengths  24 .. 24    ⌀
max_forks_repo_path                        stringlengths  3 .. 972
max_forks_repo_name                        stringlengths  6 .. 130
max_forks_repo_head_hexsha                 stringlengths  40 .. 78
max_forks_repo_licenses                    listlengths    1 .. 10
max_forks_count                            int64          1 .. 105k   ⌀
max_forks_repo_forks_event_min_datetime    stringlengths  24 .. 24    ⌀
max_forks_repo_forks_event_max_datetime    stringlengths  24 .. 24    ⌀
content                                    stringlengths  3 .. 1.03M
avg_line_length                            float64        1.13 .. 941k
max_line_length                            int64          2 .. 941k
alphanum_fraction                          float64        0 .. 1
---
hexsha: 8f2ce5cc295cddd6ecc639e0c58afe5b7996aae9 | size: 1,108 | ext: py | lang: Python
repo: heijp06/AoC-2021 @ f6afead5e1fe9a839d608a5792f84e54803742c1
path: day17/lib.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
import re
from math import sqrt


def part1(data):
    _, (y_min, _) = get_positions(data)
    speed = abs(y_min) - 1
    return speed * (speed + 1) // 2
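
# A worked example using the AoC 2021 day 17 sample input:
#   part1("target area: x=20..30, y=-10..-5")  ->  45
# A probe fired upward at speed v passes y=0 again at speed -(v + 1), so the
# steepest shot that can still hit the target is v = |y_min| - 1; its peak
# height is the triangular number v * (v + 1) // 2 (here 9 * 10 // 2 = 45).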

def part2(data):  # sourcery skip: use-assigned-variable
    (x_min, x_max), (y_min, y_max) = get_positions(data)
    vx_min = int(sqrt(2 * x_min))
    vx_max = x_max
    vy_min = y_min
    vy_max = abs(y_min) - 1
    initial_velocities = set()
    for vx0 in range(vx_min, vx_max + 1):
        for vy0 in range(vy_min, vy_max + 1):
            vx = vx0
            vy = vy0
            x, y = 0, 0
            while x <= x_max and y >= y_min:
                if x >= x_min and y <= y_max:
                    initial_velocities.add((vx0, vy0))
                x += vx
                y += vy
                vx = max(0, vx - 1)
                vy -= 1
    return len(initial_velocities)
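
# For the same sample input, part2 counts every launch velocity that visits
# the target area at some step:
#   part2("target area: x=20..30, y=-10..-5")  ->  112
# vx_min = int(sqrt(2 * x_min)) is a safe lower bound: a shot with x-velocity
# v drifts at most v * (v + 1) // 2 before stalling, so anything slower can
# never reach x_min.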

def get_positions(data):
    # escape the dots: an unescaped "." would match any character
    match = re.match(r"target area: x=(\d+)\.\.(\d+), y=-(\d+)\.\.-(\d+)", data)
    x0, x1, y0, y1 = match.groups()
    x_min = int(x0)
    x_max = int(x1)
    y_min = -int(y0)
    y_max = -int(y1)
    return (x_min, x_max), (y_min, y_max)
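
# e.g. get_positions("target area: x=20..30, y=-10..-5") -> ((20, 30), (-10, -5))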
avg_line_length: 27.02439 | max_line_length: 76 | alphanum_fraction: 0.509025

---
hexsha: 02a56f4d47edd71f1d33c4f629a2dd5c30eaf9d4 | size: 6,114 | ext: py | lang: Python
repo: Rezduan83/idb @ a43b499302a37ada164cd183b9edb0e3a4699a1d
path: idb/cli/commands/target.py | licenses: ["MIT"]
max_stars_count: null
max_issues_count: 5 (2021-09-02T15:20:04.000Z .. 2022-02-27T09:50:05.000Z)
max_forks_count: null
content:
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import os
from argparse import SUPPRESS, ArgumentParser, Namespace
from typing import Union
import idb.common.plugin as plugin
from idb.cli.commands.base import CompanionCommand, ManagementCommand
from idb.common.format import human_format_target_info, json_format_target_info
from idb.common.types import Address, IdbClient, IdbException, IdbManagementClient
from idb.common.udid import is_udid
class DestinationCommandException(Exception):
pass
class ConnectCommandException(Exception):
pass
class DisconnectCommandException(Exception):
pass
def get_destination(args: Namespace) -> Union[Address, str]:
if is_udid(args.companion):
return args.companion
elif args.port and args.companion:
return Address(host=args.companion, port=args.port)
else:
raise DestinationCommandException(
"provide either a UDID or the host and port of the companion"
)
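
# For illustration (values hypothetical): get_destination returns a bare UDID
# string untouched, while a host plus port pair becomes an Address, e.g.
# Address(host="example.local", port=10880).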
class TargetConnectCommand(ManagementCommand):
@property
def description(self) -> str:
return "Connect to a companion"
@property
def name(self) -> str:
return "connect"
def add_parser_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument(
            "companion",
            help="Host the companion is running on, or the UDID of the target",
            type=str,
        )
parser.add_argument(
"port",
help="Port the companion is running on",
type=int,
nargs="?",
default=None,
)
# not used and suppressed. remove after the removal of thrift is deployed everywhere
parser.add_argument(
"grpc_port", help=SUPPRESS, type=int, nargs="?", default=None
)
super().add_parser_arguments(parser)
async def run_with_client(
self, args: Namespace, client: IdbManagementClient
) -> None:
try:
destination = get_destination(args=args)
connect_response = await client.connect(
destination=destination,
metadata={
key: value
for (key, value) in plugin.resolve_metadata(self.logger).items()
if isinstance(value, str)
},
)
if connect_response:
if args.json:
print(
json.dumps(
{
"udid": connect_response.udid,
"is_local": connect_response.is_local,
}
)
)
else:
print(
f"udid: {connect_response.udid} is_local: {connect_response.is_local}"
)
except IdbException:
raise ConnectCommandException(
f"""Could not connect to {args.companion:}:{args.port}.
Make sure both host and port are correct and reachable"""
)
class TargetDisconnectCommand(ManagementCommand):
@property
def description(self) -> str:
return "Disconnect a companion"
@property
def name(self) -> str:
return "disconnect"
def add_parser_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"companion",
help="Host the companion is running on or the udid of the target",
type=str,
)
parser.add_argument(
"port",
help="Port the companion is running on",
type=int,
nargs="?",
default=None,
)
super().add_parser_arguments(parser)
async def run_with_client(
self, args: Namespace, client: IdbManagementClient
) -> None:
try:
destination = get_destination(args=args)
await client.disconnect(destination=destination)
except IdbException:
raise DisconnectCommandException(
f"Could not disconnect from {args.companion:}:{args.port}"
)
class TargetDescribeCommand(CompanionCommand):
@property
def description(self) -> str:
return "Describes the Target"
@property
def name(self) -> str:
return "describe"
async def run_with_client(self, args: Namespace, client: IdbClient) -> None:
description = await client.describe()
print(description)
class TargetListCommand(ManagementCommand):
@property
def description(self) -> str:
return "List the connected targets"
@property
def name(self) -> str:
return "list-targets"
async def run_with_client(
self, args: Namespace, client: IdbManagementClient
) -> None:
targets = await client.list_targets()
if len(targets) == 0:
if not args.json:
print("No available targets")
return
targets = sorted(targets, key=lambda target: target.name)
formatter = human_format_target_info
if args.json:
formatter = json_format_target_info
for target in targets:
print(formatter(target))
class TargetBootCommand(ManagementCommand):
def add_parser_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--udid",
help="Udid of target, can also be set with the IDB_UDID env var",
required=True,
default=os.environ.get("IDB_UDID"),
)
super().add_parser_arguments(parser)
@property
def description(self) -> str:
return "Boots a simulator (only works on mac)"
@property
def name(self) -> str:
return "boot"
async def run_with_client(
self, args: Namespace, client: IdbManagementClient
) -> None:
await client.boot(udid=args.udid)
avg_line_length: 30.118227 | max_line_length: 94 | alphanum_fraction: 0.596827

---
hexsha: b55c086bce27dc1e1428db225e66cee23d18b342 | size: 5,072 | ext: py | lang: Python
repo: tonywu7/telescope @ 25791180010a225e493fb68a96e66bfda7a07bce
path: telescope/server/twitch.py | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
# Copyright 2021 Tony Wu +https://github.com/tonywu7/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from contextlib import asynccontextmanager
from typing import List, Optional
import aiohttp
from ..util.urlkit import URLParam
class TwitchApp:
def __init__(self, config, *args, **kwargs):
self.config = config
self.log = logging.getLogger('twitch')
self._session = aiohttp.ClientSession()
        self._token: Optional[AccessToken] = None
self.users = {}
def _helix_endpoint(self, endpoint: str, data=None):
data = data or {}
data = URLParam(data)
query = f'?{data.query_string()}' if data else ''
return f'https://api.twitch.tv/helix{endpoint}{query}'
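    # For illustration (assuming URLParam renders an ordinary query string):
    #   self._helix_endpoint('/users', {'login': 'some_login'})
    #   -> 'https://api.twitch.tv/helix/users?login=some_login'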
@property
def access_token(self):
if not self._token:
raise ValueError('No token is currently available')
if self._token.expired:
raise ValueError('Current token has expired')
return self._token.access
async def close(self):
await self.revoke()
await self._session.close()
async def authenticate(self):
self.log.info('Obtaining access token ...')
async with self._session.post(
url='https://id.twitch.tv/oauth2/token',
data={
'client_id': self.config['CLIENT_ID'],
'client_secret': self.config['CLIENT_SECRET'],
'grant_type': 'client_credentials',
},
) as res:
self._token = AccessToken(await res.json())
self.log.info(f'New access token expires at {self._token.exp}')
async def revoke(self):
self.log.info('Revoking current access token ...')
async with self._session.post(
url='https://id.twitch.tv/oauth2/revoke',
data={
'client_id': self.config['CLIENT_ID'],
'token': self.access_token,
},
):
self._token = None
@asynccontextmanager
async def request(self, endpoint: str, *, method='GET', data=None, query=True):
self.log.debug(f'Fetching {endpoint} with HTTP {method}')
endpoint = self._helix_endpoint(endpoint)
if (method == 'GET' or query) and data:
endpoint = URLParam(data).update_url(endpoint)
data = None
headers = {
'Authorization': f'Bearer {self.access_token}',
'client-id': self.config['CLIENT_ID'],
}
async with self._session.request(
method=method, url=endpoint,
json=data, headers=headers,
) as res:
            # yield the response directly: returning from a finally block here
            # would silently swallow exceptions raised in the caller's
            # "async with" body
            yield res
async def _json_response(self, res: aiohttp.ClientResponse):
if res.status == 401:
raise ValueError('Twitch returned HTTP 401 Unauthorized')
if res.status == 429:
raise ValueError('Twitch returned HTTP 429 Too Many Requests')
data = await res.json()
if 'error' in data:
raise ValueError(data)
return data
async def get_users(self, *, user_ids: Optional[List[int]] = None,
user_logins: Optional[List[str]] = None):
if not user_ids and not user_logins:
            raise ValueError('Must supply user IDs and/or usernames')
user_ids = user_ids or []
user_logins = user_logins or []
params = URLParam()
for k, ls in (('id', user_ids), ('login', user_logins)):
for info in ls:
params.add(k, info)
async with self.request('/users', data=params) as res:
data = (await self._json_response(res))['data']
for user in data:
self.users[user['id']] = user
return data
async def get_games(self, *, game_ids: List[int]):
params = URLParam()
for gid in game_ids:
params.add('id', gid)
async with self.request('/games', data=params) as res:
return (await self._json_response(res))['data']
async def list_subscriptions(self):
async with self.request('/webhooks/subscriptions') as res:
return await res.json()
class AccessToken:
def __init__(self, token):
self.access = token['access_token']
self.iat = time.time()
self.exp = self.iat + token['expires_in']
self.refresh = token.get('refresh_token')
self.scopes = token.get('scopes')
@property
def expired(self):
return time.time() > self.exp
avg_line_length: 34.040268 | max_line_length: 83 | alphanum_fraction: 0.599369

---
hexsha: 490f5857e6f9638342c60099b6817a25cb1f318a | size: 917 | ext: py | lang: Python
repo: esp10mm/pywal @ 2372bc38b7893221ee3d7e1ffe3276d727d884a9
path: pywal/backends/colorz.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
"""
Generate a colorscheme using Colorz.
"""
import logging
import shutil
import subprocess
import sys
from .. import colors
from .. import util
def gen_colors(img):
"""Generate a colorscheme using Colorz."""
cmd = ["colorz", "-n", "6", "--bold", "0", "--no-preview"]
return subprocess.check_output([*cmd, img]).splitlines()
def adjust(cols, light):
"""Create palette."""
bg = util.blend_color("#555555", cols[1])
raw_colors = [bg, *cols, "#FFFFFF",
"#333333", *cols, "#FFFFFF"]
return colors.generic_adjust(raw_colors, light)
def get(img, light=False):
"""Get colorscheme."""
if not shutil.which("colorz"):
logging.error("Colorz wasn't found on your system.")
logging.error("Try another backend. (wal --backend)")
sys.exit(1)
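    # Assumption about colorz's output: each line lists the base hex color as
    # its first whitespace-separated token, so split()[0] keeps it and drops
    # any trailing variants.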
cols = [col.decode('UTF-8').split()[0] for col in gen_colors(img)]
return adjust(cols, light)
avg_line_length: 24.131579 | max_line_length: 70 | alphanum_fraction: 0.623773

---
hexsha: 7ff9312c2d44467c7dbbb26059de0e64b5ca501e | size: 1,759 | ext: py | lang: Python
repo: MelbourneGenomics/anonymise @ 6ab3b6399cd51f23cce2a70861ea6ffb12482b02
path: anonymise/vcf_edit.py | licenses: ["BSD-3-Clause"]
max_stars_count: null
max_issues_count: 1 (2019-03-21T05:14:03.000Z .. 2019-03-21T05:16:58.000Z)
max_forks_count: null
content:
#!/usr/bin/env python
'''
Replace text in the header of a VCF file in certain fields for the purposes
of anonymisation.
Replaces --old with --new in the following places:
- VCF header up until and including the column header line
We assume that only header lines start with '#'.
Usage:
vcf_edit.py --old oldtext --new newtext --input example_input.vcf --output example_output.vcf
Authors: Bernie Pope, Gayle Philip
'''
from argparse import ArgumentParser
def parse_args():
"""Replace old text with new text in the header of a VCF file"""
parser = ArgumentParser(description="Replace old text with new text in the header of a VCF file")
parser.add_argument("--old", required=True, type=str, help="old string (to be replaced)")
parser.add_argument("--new", required=True, type=str, help="new string (to replace old)")
parser.add_argument("--output", required=True, type=str, help="output VCF file path")
parser.add_argument("--input", required=True, type=str, help="input VCF file path")
return parser.parse_args()
def vcf_edit(old, new, input_filename, output_filename):
with open(input_filename) as input_file, \
open(output_filename, "w") as output_file:
for line in input_file:
if line.startswith('#'):
# this replaces all occurrences of old with new
# on the input line
# We assume header lines start with at least one '#'
# character.
new_line = line.replace(old, new)
else:
new_line = line
output_file.write(new_line)
def main():
args = parse_args()
vcf_edit(args.old, args.new, args.input, args.output)
if __name__ == '__main__':
main()
avg_line_length: 33.826923 | max_line_length: 101 | alphanum_fraction: 0.660034

---
hexsha: d38652c22df89f856ab2697377f30f754f08c007 | size: 102 | ext: py | lang: Python
repo: hackersandslackers/django-views-tutorial @ 71cd7fdb0846a61fcff7f1822d58b0e862be352a
path: function_views/apps.py | licenses: ["MIT"]
max_stars_count: 18 (2020-04-03T11:42:30.000Z .. 2021-12-12T07:00:41.000Z)
max_issues_count: 48 (2020-06-05T17:54:00.000Z .. 2021-10-18T19:21:11.000Z)
max_forks_count: 7 (2020-05-29T10:19:13.000Z .. 2022-02-03T09:34:18.000Z)
content:
from django.apps import AppConfig


class FunctionViewsConfig(AppConfig):
    name = "function_views"
avg_line_length: 17 | max_line_length: 37 | alphanum_fraction: 0.784314

---
hexsha: 7d1e273ea7dc7bd0b38ff79094ae6c272437be2d | size: 2,948 | ext: py | lang: Python
repo: ebalduf/nova-backports @ 6bf97ec73467de522d34ab7a17ca0e0874baa7f9
path: nova/tests/unit/api/openstack/compute/legacy_v2/extensions/foxinsocks.py | licenses: ["Apache-2.0"]
max_stars_count: 7 (2015-09-22T11:27:16.000Z .. 2015-11-02T12:33:46.000Z)
max_issues_count: 11 (2017-06-19T01:28:55.000Z .. 2017-06-23T02:01:47.000Z)
max_forks_count: 13 (2015-05-05T09:34:04.000Z .. 2017-11-08T02:03:46.000Z)
content:
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
class FoxInSocksController(object):
def index(self, req):
return "Try to say this Mr. Knox, sir..."
class FoxInSocksServerControllerExtension(wsgi.Controller):
@wsgi.action('add_tweedle')
def _add_tweedle(self, req, id, body):
return "Tweedle Beetle Added."
@wsgi.action('delete_tweedle')
def _delete_tweedle(self, req, id, body):
return "Tweedle Beetle Deleted."
@wsgi.action('fail')
def _fail(self, req, id, body):
raise webob.exc.HTTPBadRequest(explanation='Tweedle fail')
class FoxInSocksFlavorGooseControllerExtension(wsgi.Controller):
@wsgi.extends
def show(self, req, resp_obj, id):
# NOTE: This only handles JSON responses.
# You can use content type header to test for XML.
resp_obj.obj['flavor']['googoose'] = req.GET.get('chewing')
class FoxInSocksFlavorBandsControllerExtension(wsgi.Controller):
@wsgi.extends
def show(self, req, resp_obj, id):
# NOTE: This only handles JSON responses.
# You can use content type header to test for XML.
resp_obj.obj['big_bands'] = 'Pig Bands!'
class Foxinsocks(extensions.ExtensionDescriptor):
"""The Fox In Socks Extension."""
name = "Fox In Socks"
alias = "FOXNSOX"
namespace = "http://www.fox.in.socks/api/ext/pie/v1.0"
updated = "2011-01-22T13:25:27-06:00"
def __init__(self, ext_mgr):
ext_mgr.register(self)
def get_resources(self):
resources = []
resource = extensions.ResourceExtension('foxnsocks',
FoxInSocksController())
resources.append(resource)
return resources
def get_controller_extensions(self):
extension_list = []
extension_set = [
(FoxInSocksServerControllerExtension, 'servers'),
(FoxInSocksFlavorGooseControllerExtension, 'flavors'),
(FoxInSocksFlavorBandsControllerExtension, 'flavors'),
]
for klass, collection in extension_set:
controller = klass()
ext = extensions.ControllerExtension(self, collection, controller)
extension_list.append(ext)
return extension_list
avg_line_length: 31.698925 | max_line_length: 78 | alphanum_fraction: 0.670624

---
hexsha: 78d4269e346e36d6bd4235d182eef09b9c03b79d | size: 2,613 | ext: py | lang: Python
repo: CHRUdeLille/scout @ 0f70bec32e078d1825ebf20237f4a4979585dffb
path: scout/commands/update/user.py | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
import logging
import click
LOG = logging.getLogger(__name__)
@click.command('user', short_help='Update a user')
@click.option('--user-id', '-u',
help="A email adress that identifies the user",
required=True
)
@click.option('--update-role', '-r',
# There will be more roles in the future
type=click.Choice(['admin']),
help="Add a role to the user",
)
@click.option('--remove-admin',
is_flag=True,
help="Remove admin rights from user",
)
@click.option('--add-institute', '-i',
multiple=True,
help="Specify the institutes to add",
)
@click.option('--remove-institute',
multiple=True,
help="Specify the institutes to remove",
)
@click.pass_context
def user(context, user_id, update_role, add_institute, remove_admin, remove_institute):
"""
Update a user in the database
"""
adapter = context.obj['adapter']
user_obj = adapter.user(user_id)
if not user_obj:
LOG.warning("User %s could not be found", user_id)
context.abort()
existing_roles = set(user_obj['roles'])
    if update_role:
        if update_role not in existing_roles:
            existing_roles.add(update_role)
            LOG.info("Adding role %s to user", update_role)
        else:
            LOG.warning("User already has role %s", update_role)
    if remove_admin:
        try:
            existing_roles.remove('admin')
            LOG.info("Removing admin rights from user %s", user_id)
        except KeyError:
            LOG.info("User %s does not have admin rights", user_id)
user_obj['roles'] = list(existing_roles)
existing_institutes = set(user_obj['institutes'])
for institute_id in add_institute:
institute_obj = adapter.institute(institute_id)
if not institute_obj:
LOG.warning("Institute %s could not be found", institute_id)
else:
existing_institutes.add(institute_id)
LOG.info("Adding institute %s to user", institute_id)
for institute_id in remove_institute:
try:
existing_institutes.remove(institute_id)
LOG.info("Removing institute %s from user", institute_id)
        except KeyError:
LOG.info("User does not have access to institute %s", institute_id)
user_obj['institutes'] = list(existing_institutes)
updated_user = adapter.update_user(user_obj)
avg_line_length: 32.6625 | max_line_length: 87 | alphanum_fraction: 0.605434

---
hexsha: 451c04853a69e8eec3c544373eb05e6059f4ecc6 | size: 28,343 | ext: py | lang: Python
repo: OA-DeepGreen/jper @ 042719a790a34f877050a32f896b947ce4407b4e
path: service/models/account.py | licenses: ["Apache-2.0"]
max_stars_count: null
max_issues_count: 1 (2022-02-03T12:35:18.000Z .. 2022-02-03T12:35:18.000Z)
max_forks_count: 3 (2016-07-15T07:29:33.000Z .. 2020-02-03T11:20:34.000Z)
content:
import uuid
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.datastructures import TypeConversionDict
from octopus.core import app
from octopus.lib import dataobj
from service import dao
from esprit import raw
class Account(dataobj.DataObj, dao.AccountDAO, UserMixin):
'''
{
"id" : "<unique persistent account id>",
"created_date" : "<date account created>",
"last_updated" : "<date account last modified>",
"email" : "<account contact email>",
"contact_name" : "<name of key contact>",
"password" : "<hashed password for ui login>",
"api_key" : "<api key for api auth>",
"role" : ["<account role: repository, publisher, admin, passive, active, subject_repository>"],
"repository" : {
"name" : "<name of the repository>",
"url" : "<url for the repository>",
"software" : "<name of the software>",
"bibid": "<bibid for the repository>",
"sigel": ["<seal for the repository>"]
},
"publisher" : {
"name" : "<name of the publisher>",
"url" : "<url for the main publisher weg page>"
},
# "sword_repository" : {
"sword" : {
"username" : "<username for the router to authenticate with the repository>",
"password" : "<reversibly encrypted password for the router to authenticate with the repository>",
"collection" : "<url for deposit collection to receive content from the router>"
},
"packaging" : [
"<identifier - in order of preference - that should be available for this repo. Esp. via sword interface>"
],
"embargo" : {
"duration" : "<length of default embargo>",
"from" : "<reference to field in data to measure embargo from>"
},
"license_ref" : {
"title" : "<license title>",
"type" : "<license type>",
"url" : "<license url>",
"version" : "<license version>"
}
}
'''
# def __init__(self, raw):
# """
# Create a new instance of the Account object, optionally around the
# raw python dictionary.
#
# If supplied, the raw dictionary will be validated against the allowed structure of this
# object, and an exception will be raised if it does not validate
#
# :param raw: python dict object containing the data
# """
# struct = {
# "fields" : {
# "id" : {"coerce" : "unicode"},
# "created_date" : {"coerce" : "unicode"},
# "last_updated" : {"coerce" : "unicode"},
# "email" : {"coerce" : "unicode"},
# "contact_name": {"coerce" : "unicode"},
# "password": {"coerce" : "unicode"},
# "api_key": {"coerce" : "unicode"},
# "repository": {"contains" : "object"},
# "publisher": {"contains": "object"},
# "sword": {"contains": "object"},
# "embargo": {"contains": "object"},
# "license": {"contains": "object"}
# },
# "lists" : {
# "role" : {"contains" : "field", "coerce" : "unicode"},
# "packaging": {"contains" : "field", "coerce" : "unicode"},
# },
# "structs" : {
# "repository" : {
# "fields" : {
# "name" : {"coerce" : "unicode"},
# "url" : {"coerce" : "unicode"},
# "software": {"coerce": "unicode"},
# "bibid": {"coerce": "unicode"},
# "sigel": {"coerce": "unicode"},
# }
# },
# "publisher": {
# "fields": {
# "name": {"coerce": "unicode"},
# "url": {"coerce": "unicode"}
# }
# },
# "sword": {
# "fields": {
# "username": {"coerce": "unicode"},
# "password": {"coerce": "unicode"},
# "collection": {"coerce": "unicode"}
# }
# },
# "embargo": {
# "fields": {
# "duration": {"coerce": "unicode"},
# "from": {"coerce": "unicode"}
# }
# },
# "license": {
# "fields": {
# "title": {"coerce": "unicode"},
# "type": {"coerce": "unicode"},
# "url": {"coerce": "unicode"},
# "version": {"coerce": "unicode"},
# }
# },
# }
# }
# self._add_struct(struct)
# super(Account, self).__init__(raw=raw)
@property
def password(self):
return self._get_single("password", coerce=self._utf8_unicode())
@password.setter
def password(self, val):
coerced = self._utf8_unicode()(val)
self._set_single("password", generate_password_hash(coerced), coerce=self._utf8_unicode())
@property
def hashed_password(self):
return self._get_single("password", coerce=self._utf8_unicode())
@hashed_password.setter
def hashed_password(self, val):
self._set_single("password", val, coerce=self._utf8_unicode())
def set_password(self, password):
coerced = self._utf8_unicode()(password)
self._set_single("password", generate_password_hash(coerced), coerce=self._utf8_unicode())
def check_password(self, password):
coerced = self._utf8_unicode()(password)
existing = self.hashed_password
if existing is None:
return False
return check_password_hash(existing, coerced)
def clear_password(self):
self._delete("password")
@property
def email(self):
return self._get_single("email", coerce=self._utf8_unicode())
@email.setter
def email(self, val):
self._set_single("email", val, coerce=self._utf8_unicode())
@property
def contact_name(self):
return self._get_single("contact_name", coerce=self._utf8_unicode())
@contact_name.setter
def contact_name(self, val):
self._set_single("contact_name", val, coerce=self._utf8_unicode())
@property
def api_key(self):
return self._get_single("api_key", coerce=self._utf8_unicode())
@api_key.setter
def api_key(self, val):
self._set_single("api_key", val, coerce=self._utf8_unicode())
def set_api_key(self, key):
self._set_single("api_key", key, coerce=dataobj.to_unicode())
@property
def role(self):
return self._get_list("role", coerce=self._utf8_unicode())
@role.setter
def role(self, role):
self._set_list("role", role, coerce=self._utf8_unicode())
def add_role(self, role):
# admin, publisher, repository, passive, active, subject_repository
if role in ['admin', 'publisher', 'repository', 'passive', 'active', 'subject_repository']:
self._add_to_list("role", role, coerce=self._utf8_unicode(), unique=True)
def remove_role(self, role):
self._delete_from_list("role", role)
@property
def is_passive(self):
return self.has_role('passive')
def set_active(self):
if self.has_role('passive'):
self.remove_role('passive')
# 2019-06-04 TD : no active support of role 'active'
# (so 'passive' will be more prominent on screen, for example)
# if not self.has_role('active'):
# self.add_role('active')
def set_passive(self):
if self.has_role('active'):
self.remove_role('active')
if not self.has_role('passive'):
self.add_role('passive')
@property
def is_super(self):
return self.has_role(app.config["ACCOUNT_SUPER_USER_ROLE"])
def has_role(self, role):
return role in self.role
@property
def packaging(self):
return self._get_list("packaging", coerce=self._utf8_unicode())
@packaging.setter
def packaging(self, packaging):
self._set_list("packaging", packaging, coerce=self._utf8_unicode())
def add_packaging(self, val):
self._add_to_list("packaging", val, coerce=self._utf8_unicode(), unique=True)
@property
def repository(self):
"""
The repository information for the account
The returned object is as follows:
::
{
"name" : "<name of repository>",
"url" : "<url>",
"software" : "<software>",
"bibid": "<bibid>",
"sigel": ["<seal>"]
}
:return: The repository information as a python dict object
"""
return self._get_single("repository")
@repository.setter
def repository(self, obj):
"""
Set the repository object
The object will be validated and types coerced as needed.
The supplied object should be structured as follows:
::
        {
            "name" : "<name of repository>",
            "url" : "<url>",
            "software" : "<software>",
            "bibid" : "<bibid>",
            "sigel" : ["<seal>"]
        }
:param obj: the repository object as a dict
:return:
"""
# validate the object structure quickly
allowed = ["name", "url", "software", "bibid", "sigel"]
for k in list(obj.keys()):
if k not in allowed:
raise dataobj.DataSchemaException("Repository object must only contain the following keys: {x}".format(x=", ".join(allowed)))
# coerce the values of the keys
uc = dataobj.to_unicode()
allowed.remove('sigel')
for k in allowed:
if obj.get(k, None):
obj[k] = self._coerce(obj[k], uc)
# set list for sigel
if obj.get('sigel', []):
obj['sigel'] = [self._coerce(v, self._utf8_unicode()) for v in obj['sigel'] if v is not None]
# finally write it
self._set_single("repository", obj)
@property
def repository_software(self):
return self._get_single("repository.software", coerce=self._utf8_unicode())
@repository_software.setter
def repository_software(self, val):
self._set_single("repository.software", val, coerce=self._utf8_unicode())
@property
def repository_name(self):
return self._get_single("repository.name", coerce=self._utf8_unicode())
@repository_name.setter
def repository_name(self, val):
self._set_single("repository.name", val, coerce=self._utf8_unicode())
@property
def repository_bibid(self):
return self._get_single("repository.bibid", coerce=self._utf8_unicode())
@property
def publisher(self):
"""
The publisher information for the account
The returned object is as follows:
::
{
"name" : "<name of publisher>",
"url" : "<url>",
}
:return: The publisher information as a python dict object
"""
return self._get_single("publisher")
@publisher.setter
def publisher(self, obj):
"""
Set the publisher object
The object will be validated and types coerced as needed.
The supplied object should be structured as follows:
::
{
"name" : "<name of publisher>",
"url" : "<url>",
}
:param obj: the publisher object as a dict
:return:
"""
# validate the object structure quickly
allowed = ["name", "url"]
for k in list(obj.keys()):
if k not in allowed:
raise dataobj.DataSchemaException("Publisher object must only contain the following keys: {x}".format(x=", ".join(allowed)))
# coerce the values of the keys
uc = dataobj.to_unicode()
for k in allowed:
if k in obj:
obj[k] = self._coerce(obj[k], uc)
# finally write it
self._set_single("publisher", obj)
# 2020-02-20 TD : add convenience setter and getter for extra pub infos
@property
def publisher_name(self):
return self._get_single("publisher.name", coerce=self._utf8_unicode())
@publisher_name.setter
def publisher_name(self, val):
self._set_single("publisher.name", val, coerce=self._utf8_unicode())
@property
def publisher_url(self):
return self._get_single("publisher.url", coerce=self._utf8_unicode())
@publisher_url.setter
def publisher_url(self, val):
self._set_single("publisher.url", val, coerce=self._utf8_unicode())
# 2020-02-20 TD : end of convenience setter and getter for extra pub infos
@property
def sword(self):
"""
The sword information for the repository
The returned object is as follows:
::
{
"username" : "<username>",
"password" : "<password>",
"collection" : "<name of collection>"
}
:return: The sword information as a python dict object
"""
return self._get_single("sword")
@sword.setter
def sword(self, obj):
"""
Set the sword object
The object will be validated and types coerced as needed.
The supplied object should be structured as follows:
::
{
"username" : "<username>",
"password" : "<password>",
"collection" : "<name of collection>"
}
:param obj: the sword object as a dict
:return:
"""
# validate the object structure quickly
allowed = ["username", "password", "collection"]
for k in list(obj.keys()):
if k not in allowed:
raise dataobj.DataSchemaException("Sword object must only contain the following keys: {x}".format(x=", ".join(allowed)))
# coerce the values of the keys
uc = dataobj.to_unicode()
for k in allowed:
if k in obj:
obj[k] = self._coerce(obj[k], uc)
# finally write it
self._set_single("sword", obj)
@property
def sword_collection(self):
return self._get_single("sword.collection", coerce=self._utf8_unicode())
# 2017-05-18 TD : fixed an unnoticed inconsistency up to now: change of "sword_repository" to "sword"
#
@property
def sword_username(self):
return self._get_single("sword.username", coerce=self._utf8_unicode())
# 2017-05-18 TD : fixed an unnoticed inconsistency up to now: change of "sword_repository" to "sword"
#
@property
def sword_password(self):
return self._get_single("sword.password", coerce=self._utf8_unicode())
# 2017-05-18 TD : fixed an unnoticed inconsistency up to now: change of "sword_repository" to "sword"
def add_sword_credentials(self, username, password, collection):
self._set_single("sword.username", username, coerce=self._utf8_unicode())
self._set_single("sword.password", password, coerce=self._utf8_unicode())
self._set_single("sword.collection", collection, coerce=self._utf8_unicode())
@property
def embargo(self):
"""
The embargo information for the work represented by this account
The returned object is as follows:
::
{
"duration" : "<duration>",
"from" : "<the field to start embargo from>"
}
:return: The embargo information as a python dict object
"""
return self._get_single("embargo")
@embargo.setter
def embargo(self, obj):
"""
Set the embargo object
The object will be validated and types coerced as needed.
The supplied object should be structured as follows:
::
{
"duration" : "<duration>",
"from" : "<the field to start embargo from>"
}
:param obj: the embargo object as a dict
:return:
"""
# validate the object structure quickly
allowed = ["duration", "from"]
for k in list(obj.keys()):
if k not in allowed:
raise dataobj.DataSchemaException("embargo object must only contain the following keys: {x}".format(x=", ".join(allowed)))
# coerce the values of the keys
uc = dataobj.to_unicode()
for k in allowed:
if k in obj:
obj[k] = self._coerce(obj[k], uc)
# finally write it
self._set_single("embargo", obj)
@property
def license(self):
"""
The license information for the work represented by this account
The returned object is as follows:
::
{
"title" : "<name of licence>",
"type" : "<type>",
"url" : "<url>",
"version" : "<version>",
}
:return: The license information as a python dict object
"""
return self._get_single("license")
@license.setter
def license(self, obj):
"""
Set the licence object
The object will be validated and types coerced as needed.
The supplied object should be structured as follows:
::
{
"title" : "<name of licence>",
"type" : "<type>",
"url" : "<url>",
"version" : "<version>",
}
:param obj: the license object as a dict
:return:
"""
# validate the object structure quickly
allowed = ["title", "type", "url", "version"]
for k in list(obj.keys()):
if k not in allowed:
raise dataobj.DataSchemaException("License object must only contain the following keys: {x}".format(x=", ".join(allowed)))
# coerce the values of the keys
uc = dataobj.to_unicode()
for k in allowed:
if k in obj:
obj[k] = self._coerce(obj[k], uc)
# finally write it
self._set_single("license", obj)
def add_account(self, account_hash):
account_hash = coerce_account_hash(account_hash)
acc_id = account_hash.get('id', None) or account_hash.get('username', None)
if self.id and acc_id != self.id:
app.logger.warn("Account params have a different id. Ignoring id in params")
elif not self.id and acc_id:
self.id = acc_id
password = account_hash.get('password', None)
if password:
if not self.password:
app.logger.info('Password has been set for account {id}'.format(id=acc_id))
else:
app.logger.warn('Password has been changed for account {id}'.format(id=acc_id))
self.password = password
elif not self.password:
raise dataobj.DataSchemaException("Account has to contain password")
if account_hash.get('email', None):
self.email = account_hash.get('email')
if account_hash.get('contact_name', None):
self.contact_name = account_hash.get('contact_name')
if account_hash.get('api_key', None):
self.api_key = account_hash.get('api_key')
if account_hash.get('role', []):
self.role = account_hash.get('role')
if account_hash.get('packaging', []):
self.packaging = account_hash.get('packaging')
if account_hash.get('repository', {}):
self.repository = account_hash.get('repository')
if account_hash.get('publisher', {}):
self.publisher = account_hash.get('publisher')
if account_hash.get('sword', {}):
self.sword = account_hash.get('sword')
if account_hash.get('embargo', {}):
self.embargo = account_hash.get('embargo')
if account_hash.get('license', {}):
self.license = account_hash.get('license')
def can_log_in(self):
return True
# 2019-03-21 TD : Sometimes, ALL items of a 'key' are wanted ...
@classmethod
def pull_all_by_key(cls,key,value):
res = cls.query(q={"query":{"query_string":{"query":value,"default_field":key,"default_operator":"AND"}}})
n = res.get('hits',{}).get('total',{}).get('value', 0)
# 2019-06-11 TD : re-query necessary as a precautionary measure because len(res) seems
# to be restricted to 10 records only per default...
if n > 10:
res = cls.query(q={"query":{"query_string":{"query":value,"default_field":key,"default_operator":"AND"}}},size=n)
return [ cls.pull( res['hits']['hits'][k]['_source']['id'] ) for k in range(n) ]
@classmethod
def pull_all_repositories(cls):
q = {
"query": {
"bool": {
"must": {
"match": {
"role": "repository"
}
}
}
}
}
conn = cls.__conn__
types = cls.get_read_types(None)
r = raw.search(conn, types, q)
res = r.json()
# res = cls.query(q=q)
n = res.get('hits',{}).get('total',{}).get('value', 0)
if n > 10:
q["size"] = n
r = raw.search(conn, types, q)
res = r.json()
ans = []
for hit in res['hits']['hits']:
ans.append(hit.get('_source', {}).get('repository', {}).get('bibid', u"*****").lstrip('a'))
return ans
@classmethod
    def pull_all_subject_repositories(cls):
        # "must" takes a list of clauses here: a dict literal with a repeated
        # "match" key would keep only the last condition
        q = {
            "query": {
                "bool": {
                    "must": [
                        {"match": {"role": "repository"}},
                        {"match": {"role": "subject_repository"}},
                    ]
                }
            }
        }
conn = cls.__conn__
types = cls.get_read_types(None)
r = raw.search(conn, types, q)
res = r.json()
# res = cls.query(q=q)
n = res.get('hits',{}).get('total',{}).get('value', 0)
if n > 10:
q["size"] = n
r = raw.search(conn, types, q)
res = r.json()
ans = []
for hit in res['hits']['hits']:
ans.append(hit.get('_source', {}).get('repository', {}).get('bibid', u"*****").lstrip('a'))
return ans
@classmethod
def pull_all_non_subject_repositories(cls):
q = {
"query": {
"bool": {
"filter": {
"bool": {
"must_not": [
{
"match": {
"role": "subject_repository"
}
}
]
}
},
"must": {
"match": {
"role": "repository"
}
}
}
}
}
conn = cls.__conn__
types = cls.get_read_types(None)
r = raw.search(conn, types, q)
res = r.json()
# res = cls.query(q=q)
n = res.get('hits',{}).get('total',{}).get('value', 0)
if n > 10:
q["size"] = n
r = raw.search(conn, types, q)
res = r.json()
ans = []
for hit in res['hits']['hits']:
ans.append(hit.get('_source', {}).get('repository', {}).get('bibid', u"*****").lstrip('a'))
return ans
@classmethod
def pull_all_by_email(cls,email):
return cls.pull_all_by_key('email',email)
# 2019-03-21 TD : (* end-of-addition *)
@classmethod
def pull_by_key(cls,key,value):
res = cls.query(q={"query":{"query_string":{"query":value,"default_field":key,"default_operator":"AND"}}})
if res.get('hits',{}).get('total',{}).get('value', 0) == 1:
return cls.pull( res['hits']['hits'][0]['_source']['id'] )
else:
return None
@classmethod
def pull_by_email(cls,email):
return cls.pull_by_key('email',email)
def remove(self):
if self.has_role('publisher'):
un = self.id
try:
import os, subprocess
fl = os.path.dirname(os.path.abspath(__file__)) + '/deleteFTPuser.sh'
subprocess.call(['sudo',fl,un])
app.logger.info(str(self.id) + ' calling deleteFTPuser subprocess')
        except Exception:
app.logger.error(str(self.id) + ' failed deleteFTPuser subprocess')
self.delete()
def become_publisher(self):
# create an FTP user for the account, if it is a publisher
# TODO / NOTE: if the service has to be scaled up to run on multiple machines,
# the ftp users should only be created on the machine that the ftp address points to.
# so the create user scripts should be triggered on that machine. Alternatively the user
# accounts could be created on every machine - but that leaves more potential security holes.
# Better to restrict the ftp upload to one machine that is configured to accept them. Then
# when it runs the schedule, it will check the ftp folder locations and send any to the API
# endpoints, so the heavy lifting would still be distributed across machines.
#un = self.data['email'].replace('@','_')
un = self.id
try:
import os, subprocess
fl = os.path.dirname(os.path.abspath(__file__)) + '/createFTPuser.sh'
print("subprocessing " + fl)
subprocess.call( [ 'sudo', fl, un, self.data['api_key'] ] )
print("creating FTP user for " + un)
        except Exception:
print("could not create an FTP user for " + un)
self.add_role('publisher')
self.save()
def cease_publisher(self):
un = self.id
try:
import os, subprocess
fl = os.path.dirname(os.path.abspath(__file__)) + '/deleteFTPuser.sh'
print("subprocessing " + fl)
subprocess.call(['sudo',fl,un])
print("deleting FTP user for " + un)
        except Exception:
print("could not delete an FTP user for " + un)
self.remove_role('publisher')
self.save()
def coerce_account_hash(account_hash):
if isinstance(account_hash, TypeConversionDict):
account_hash = account_hash.to_dict()
# set api_key if missing
if not account_hash.get('api_key', None):
account_hash['api_key'] = str(uuid.uuid4())
# nested properties
nested_properties = {
'repository': ['repository_name', 'repository_software', 'repository_url', 'repository_bibid', 'repository_sigel'],
'sword': ['sword_username', 'sword_password', 'sword_collection'],
'embargo': ['embargo_duration',],
'license': ['license_title', 'license_type', 'license_url', 'license_version']
}
for parent, props in nested_properties.items():
parent_hash = account_hash.pop(parent, {})
for prop in props:
label = prop.split('_')[-1]
val = account_hash.pop(prop, None)
if not val:
continue
if label == 'bibid':
val = val.upper()
elif label == 'sigel':
val = val.split(',')
parent_hash[label] = val
if parent_hash:
account_hash[parent] = parent_hash
# role
role = account_hash.pop('radio', None)
if role:
account_hash['role'] = [role]
# packaging
packaging = account_hash.pop('packaging', None)
if packaging:
account_hash['packaging'] = packaging.split(',')
return account_hash
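
# For illustration, coerce_account_hash folds flat form-style keys into nested
# objects (hypothetical input):
#   {'password': 'pw', 'repository_name': 'Repo X', 'repository_sigel': 'DE-1,DE-2'}
#   -> {'password': 'pw', 'api_key': '<generated uuid4>',
#       'repository': {'name': 'Repo X', 'sigel': ['DE-1', 'DE-2']}}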
avg_line_length: 34.776687 | max_line_length: 141 | alphanum_fraction: 0.536288

---
hexsha: 9c39f852949b38c74a6145b05cbd330f9acfe84a | size: 442 | ext: py | lang: Python
repo: PacktPublishing/Python-Networking-Cookbook @ 26945c781a51fe72cc01409df6b5c5fa7df53f4c
path: ch03/jinja_filter.py | licenses: ["MIT"]
max_stars_count: 5 (2021-06-11T11:24:04.000Z .. 2022-03-22T03:22:57.000Z)
max_issues_count: null
max_forks_count: 10 (2021-04-18T12:31:14.000Z .. 2022-03-28T07:21:16.000Z)
content:
import ipaddress
from jinja2 import Environment, FileSystemLoader
loader = FileSystemLoader("templates")
environment = Environment(loader=loader)
def to_ipv6(addr):
    # prefix the dotted-quad address with 2002::; IPv6Address accepts the
    # embedded-IPv4 notation and str() renders the normalized hex form
    raw_ipv6 = "2002::" + str(addr)
    ipv6 = ipaddress.IPv6Address(raw_ipv6)
    return str(ipv6)
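
# e.g. to_ipv6("10.10.2.10") -> "2002::a0a:20a"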
environment.filters["toIPv6"] = to_ipv6
tpl = environment.get_template("filter.conf.tpl")
addresses = ["10.10.2.10", "10.10.2.40"]
out = tpl.render(addresses=addresses)
print(out)
avg_line_length: 23.263158 | max_line_length: 49 | alphanum_fraction: 0.751131

---
hexsha: d5b15f630e3d9966827805399ea554d64770bc9a | size: 1,043 | ext: py | lang: Python
repo: arturtamborski/grcpass @ ea799d78900a1942bac891e077d9329af8f3fe96
path: setup.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
from setuptools import setup
setup(
name='grcpass',
version='1.0.1',
    description='Simple script which scrapes a password from www.grc.com/passwords.htm',
url='https://github.com/arturtamborski/grcpass',
download_url='https://github.com/arturtamborski/grcpass/archive/1.0.1.tar.gz',
keywords='password generator',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Security',
],
author='Artur Tamborski',
author_email='tamborskiartur@gmail.com',
license='MIT',
packages=['grcpass'],
entry_points={'console_scripts':['grcpass=grcpass.command_line:main']},
include_package_data=True,
zip_safe=False
)
avg_line_length: 34.766667 | max_line_length: 90 | alphanum_fraction: 0.647172

---
hexsha: a13d2ca8d5639a5137a1f2029e2308f06bfa5926 | size: 10,618 | ext: py | lang: Python
repo: maihde/home-assistant @ 688d70644949a658e0607e52b3896a2c4c8a85e7
path: homeassistant/components/sensor/citybikes.py | licenses: ["Apache-2.0"]
max_stars_count: 2 (2020-02-20T18:47:55.000Z .. 2021-11-09T11:33:28.000Z)
max_issues_count: 5 (2022-03-01T06:31:03.000Z .. 2022-03-31T07:20:45.000Z)
max_forks_count: 3 (2018-08-27T10:08:30.000Z .. 2020-07-04T10:07:03.000Z)
content:
"""
Sensor for the CityBikes data.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.citybikes/
"""
import logging
from datetime import timedelta
import asyncio
import aiohttp
import async_timeout
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS,
ATTR_ATTRIBUTION, ATTR_LOCATION, ATTR_LATITUDE, ATTR_LONGITUDE,
ATTR_FRIENDLY_NAME, STATE_UNKNOWN, LENGTH_METERS, LENGTH_FEET,
ATTR_ID)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util import location, distance
_LOGGER = logging.getLogger(__name__)
ATTR_EMPTY_SLOTS = 'empty_slots'
ATTR_EXTRA = 'extra'
ATTR_FREE_BIKES = 'free_bikes'
ATTR_NAME = 'name'
ATTR_NETWORK = 'network'
ATTR_NETWORKS_LIST = 'networks'
ATTR_STATIONS_LIST = 'stations'
ATTR_TIMESTAMP = 'timestamp'
ATTR_UID = 'uid'
CONF_NETWORK = 'network'
CONF_STATIONS_LIST = 'stations'
DEFAULT_ENDPOINT = 'https://api.citybik.es/{uri}'
DOMAIN = 'citybikes'
MONITORED_NETWORKS = 'monitored-networks'
NETWORKS_URI = 'v2/networks'
REQUEST_TIMEOUT = 5  # In seconds; argument to async_timeout.timeout
SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API
STATIONS_URI = 'v2/networks/{uid}?fields=network.stations'
CITYBIKES_ATTRIBUTION = "Information provided by the CityBikes Project "\
"(https://citybik.es/#about)"
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_RADIUS, CONF_STATIONS_LIST),
PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=''): cv.string,
vol.Optional(CONF_NETWORK): cv.string,
vol.Inclusive(CONF_LATITUDE, 'coordinates'): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, 'coordinates'): cv.longitude,
vol.Optional(CONF_RADIUS, 'station_filter'): cv.positive_int,
vol.Optional(CONF_STATIONS_LIST, 'station_filter'):
vol.All(cv.ensure_list, vol.Length(min=1), [cv.string])
}))
NETWORK_SCHEMA = vol.Schema({
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_LOCATION): vol.Schema({
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
}, extra=vol.REMOVE_EXTRA),
}, extra=vol.REMOVE_EXTRA)
NETWORKS_RESPONSE_SCHEMA = vol.Schema({
vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA],
})
STATION_SCHEMA = vol.Schema({
vol.Required(ATTR_FREE_BIKES): cv.positive_int,
vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None),
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_TIMESTAMP): cv.string,
vol.Optional(ATTR_EXTRA):
vol.Schema({vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA)
}, extra=vol.REMOVE_EXTRA)
STATIONS_RESPONSE_SCHEMA = vol.Schema({
vol.Required(ATTR_NETWORK): vol.Schema({
vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]
}, extra=vol.REMOVE_EXTRA)
})
class CityBikesRequestError(Exception):
"""Error to indicate a CityBikes API request has failed."""
pass
@asyncio.coroutine
def async_citybikes_request(hass, uri, schema):
"""Perform a request to CityBikes API endpoint, and parse the response."""
try:
session = async_get_clientsession(hass)
with async_timeout.timeout(REQUEST_TIMEOUT, loop=hass.loop):
req = yield from session.get(DEFAULT_ENDPOINT.format(uri=uri))
json_response = yield from req.json()
return schema(json_response)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Could not connect to CityBikes API endpoint")
except ValueError:
_LOGGER.error("Received non-JSON data from CityBikes API endpoint")
except vol.Invalid as err:
_LOGGER.error("Received unexpected JSON from CityBikes"
" API endpoint: %s", err)
raise CityBikesRequestError
# pylint: disable=unused-argument
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices,
discovery_info=None):
"""Set up the CityBikes platform."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {MONITORED_NETWORKS: {}}
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
network_id = config.get(CONF_NETWORK)
stations_list = set(config.get(CONF_STATIONS_LIST, []))
radius = config.get(CONF_RADIUS, 0)
name = config.get(CONF_NAME)
if not hass.config.units.is_metric:
radius = distance.convert(radius, LENGTH_FEET, LENGTH_METERS)
if not network_id:
network_id = yield from CityBikesNetwork.get_closest_network_id(
hass, latitude, longitude)
if network_id not in hass.data[DOMAIN][MONITORED_NETWORKS]:
network = CityBikesNetwork(hass, network_id)
hass.data[DOMAIN][MONITORED_NETWORKS][network_id] = network
hass.async_add_job(network.async_refresh)
async_track_time_interval(hass, network.async_refresh,
SCAN_INTERVAL)
else:
network = hass.data[DOMAIN][MONITORED_NETWORKS][network_id]
yield from network.ready.wait()
devices = []
for station in network.stations:
dist = location.distance(latitude, longitude,
station[ATTR_LATITUDE],
station[ATTR_LONGITUDE])
station_id = station[ATTR_ID]
station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, ''))
if radius > dist or stations_list.intersection((station_id,
station_uid)):
devices.append(CityBikesStation(network, station_id, name))
async_add_devices(devices, True)
class CityBikesNetwork:
"""Thin wrapper around a CityBikes network object."""
NETWORKS_LIST = None
NETWORKS_LIST_LOADING = asyncio.Condition()
@classmethod
@asyncio.coroutine
def get_closest_network_id(cls, hass, latitude, longitude):
"""Return the id of the network closest to provided location."""
try:
yield from cls.NETWORKS_LIST_LOADING.acquire()
if cls.NETWORKS_LIST is None:
networks = yield from async_citybikes_request(
hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA)
cls.NETWORKS_LIST = networks[ATTR_NETWORKS_LIST]
networks_list = cls.NETWORKS_LIST
network = networks_list[0]
result = network[ATTR_ID]
minimum_dist = location.distance(
latitude, longitude,
network[ATTR_LOCATION][ATTR_LATITUDE],
network[ATTR_LOCATION][ATTR_LONGITUDE])
for network in networks_list[1:]:
network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE]
network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE]
dist = location.distance(latitude, longitude,
network_latitude, network_longitude)
if dist < minimum_dist:
minimum_dist = dist
result = network[ATTR_ID]
return result
except CityBikesRequestError:
raise PlatformNotReady
finally:
cls.NETWORKS_LIST_LOADING.release()
def __init__(self, hass, network_id):
"""Initialize the network object."""
self.hass = hass
self.network_id = network_id
self.stations = []
self.ready = asyncio.Event()
@asyncio.coroutine
def async_refresh(self, now=None):
"""Refresh the state of the network."""
try:
network = yield from async_citybikes_request(
self.hass, STATIONS_URI.format(uid=self.network_id),
STATIONS_RESPONSE_SCHEMA)
self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST]
self.ready.set()
except CityBikesRequestError:
if now is not None:
self.ready.clear()
else:
raise PlatformNotReady
class CityBikesStation(Entity):
"""CityBikes API Sensor."""
def __init__(self, network, station_id, base_name=''):
"""Initialize the sensor."""
self._network = network
self._station_id = station_id
self._station_data = {}
self._base_name = base_name
@property
def state(self):
"""Return the state of the sensor."""
return self._station_data.get(ATTR_FREE_BIKES, STATE_UNKNOWN)
@property
def name(self):
"""Return the name of the sensor."""
if self._base_name:
return "{} {} {}".format(self._network.network_id, self._base_name,
self._station_id)
return "{} {}".format(self._network.network_id, self._station_id)
@asyncio.coroutine
def async_update(self):
"""Update station state."""
if self._network.ready.is_set():
for station in self._network.stations:
if station[ATTR_ID] == self._station_id:
self._station_data = station
break
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._station_data:
return {
ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION,
ATTR_UID: self._station_data.get(ATTR_EXTRA, {}).get(ATTR_UID),
ATTR_LATITUDE: self._station_data[ATTR_LATITUDE],
ATTR_LONGITUDE: self._station_data[ATTR_LONGITUDE],
ATTR_EMPTY_SLOTS: self._station_data[ATTR_EMPTY_SLOTS],
ATTR_FRIENDLY_NAME: self._station_data[ATTR_NAME],
ATTR_TIMESTAMP: self._station_data[ATTR_TIMESTAMP],
}
return {ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION}
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return 'bikes'
@property
def icon(self):
"""Return the icon."""
return 'mdi:bike'
avg_line_length: 36.115646 | max_line_length: 79 | alphanum_fraction: 0.662743

---
hexsha: badb198fbfbc73c36d9ff46894cfba8bce69f3d5 | size: 1,092 | ext: py | lang: Python
repo: bsod85/pretix @ d86b3a217352f7ad24008685393f9af18fcf6e6c
path: src/pretix/base/services/quotas.py | licenses: ["ECL-2.0", "Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
from datetime import timedelta
from django.db import models
from django.db.models import F, Max, OuterRef, Q, Subquery
from django.dispatch import receiver
from django.utils.timezone import now
from pretix.base.models import LogEntry, Quota
from pretix.celery_app import app
from ..signals import periodic_task
@receiver(signal=periodic_task)
def build_all_quota_caches(sender, **kwargs):
refresh_quota_caches.apply_async()
@app.task
def refresh_quota_caches():
last_activity = LogEntry.objects.filter(
event=OuterRef('event_id'),
).order_by().values('event').annotate(
m=Max('datetime')
).values(
'm'
)
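    # The filter below selects quotas whose cached availability is missing,
    # older than the event's most recent logged activity, or (as a safety
    # net) more than two hours old on events active within the last week.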
quotas = Quota.objects.annotate(
last_activity=Subquery(last_activity, output_field=models.DateTimeField())
).filter(
Q(cached_availability_time__isnull=True) |
Q(cached_availability_time__lt=F('last_activity')) |
Q(cached_availability_time__lt=now() - timedelta(hours=2), last_activity__gt=now() - timedelta(days=7))
).select_related('subevent')
for q in quotas:
q.availability()
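# The annotate-with-Subquery idiom above correlates each quota with the newest
# LogEntry of its event in a single query. A minimal generic sketch of the
# same pattern (model names here are illustrative, not part of this module):
#
#     newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created')
#     Post.objects.annotate(last_comment=Subquery(newest.values('created')[:1]))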
| 29.513514 | 111 | 0.722527 |
95213bbbf8856f1c5b3cb632417d741b05b29eca | 2,177 | py | Python | cdk/permissions_boundary.py | MAAP-Project/ept-tools | 8e051188bc09f144753ccba999daab588709ff23 | ["MIT"] | null | null | null | cdk/permissions_boundary.py | MAAP-Project/ept-tools | 8e051188bc09f144753ccba999daab588709ff23 | ["MIT"] | null | null | null | cdk/permissions_boundary.py | MAAP-Project/ept-tools | 8e051188bc09f144753ccba999daab588709ff23 | ["MIT"] | null | null | null |
from typing import Union
import jsii
from aws_cdk import aws_iam, core
from jsii._reference_map import _refs
from jsii._utils import Singleton
@jsii.implements(core.IAspect)
class PermissionsBoundaryAspect:
"""
    This aspect finds all aws_iam.Role objects in a node (i.e. a CDK stack) and
    sets the permissions boundary to the given ARN.
https://github.com/aws/aws-cdk/issues/3242#issuecomment-553815373
"""
def __init__(self, permission_boundary: Union[aws_iam.ManagedPolicy, str]) -> None:
"""
        :param permission_boundary: Either an aws_iam.ManagedPolicy object or
            a managed policy ARN string
"""
self.permission_boundary = permission_boundary
def visit(self, construct_ref: core.IConstruct) -> None:
"""
construct_ref only contains a string reference to an object. To get the
actual object, we need to resolve it using JSII mapping.
:param construct_ref: ObjRef object with string reference to the actual object.
:return: None
"""
if isinstance(construct_ref, jsii._kernel.ObjRef) and hasattr(
construct_ref, "ref"
):
kernel = Singleton._instances[
jsii._kernel.Kernel
] # The same object is available as: jsii.kernel
resolve = _refs.resolve(kernel, construct_ref)
else:
resolve = construct_ref
def _walk(obj):
if isinstance(obj, aws_iam.Role):
cfn_role = obj.node.find_child("Resource")
policy_arn = (
self.permission_boundary
if isinstance(self.permission_boundary, str)
else self.permission_boundary.managed_policy_arn
)
cfn_role.add_property_override("PermissionsBoundary", policy_arn)
else:
if hasattr(obj, "permissions_node"):
for c in obj.permissions_node.children:
_walk(c)
if hasattr(obj, "node") and obj.node.children:
for c in obj.node.children:
_walk(c)
_walk(resolve)
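# A minimal usage sketch, assuming the CDK v1 Aspects API; the ARN below is a
# placeholder, not part of this module:
#
#     app = core.App()
#     stack = core.Stack(app, "MyStack")
#     core.Aspects.of(stack).add(PermissionsBoundaryAspect(
#         "arn:aws:iam::123456789012:policy/my-boundary"))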
| 37.534483 | 87 | 0.610932 |
8797b1d96b9b68146228240b264dfbb983500bd5 | 16,890 | py | Python | tradenity/resources/shipping_method.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | ["Apache-2.0"] | 1 | 2020-03-19T04:09:17.000Z | 2020-03-19T04:09:17.000Z | tradenity/resources/shipping_method.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | ["Apache-2.0"] | null | null | null | tradenity/resources/shipping_method.py | tradenity/python-sdk | d13fbe23f4d6ff22554c6d8d2deaf209371adaf1 | ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
Tradenity API
Tradenity eCommerce Rest API
Contact: support@tradenity.com
"""
from __future__ import absolute_import
import re
import pprint
# python 2 and python 3 compatibility library
import six
from tradenity.api_client import ApiClient
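# Note: the generated methods below accept an `async` keyword argument, so this
# module targets Python versions before 3.7, where `async` was not yet a
# reserved word; newer OpenAPI generators rename the same flag to `async_req`.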
class ShippingMethod(object):
swagger_types = {
'id': 'str',
'meta': 'InstanceMeta',
'name': 'str',
'slug': 'str',
'message': 'str',
'description': 'str',
'geo_zone': 'GeoZone',
'customer_groups': 'list[CustomerGroup]',
'status': 'str',
'use_discounted_subtotal': 'bool',
'include_taxes': 'bool'
}
attribute_map = {
'id': 'id',
'meta': '__meta',
'name': 'name',
'slug': 'slug',
'message': 'message',
'description': 'description',
'geo_zone': 'geoZone',
'customer_groups': 'customerGroups',
'status': 'status',
'use_discounted_subtotal': 'useDiscountedSubtotal',
'include_taxes': 'includeTaxes'
}
api_client = None
def __init__(self, id=None, meta=None, name=None, slug=None, message=None, description=None, geo_zone=None, customer_groups=None, status=None, use_discounted_subtotal=None, include_taxes=None):
"""ShippingMethod - a model defined in Swagger"""
self._id = id
self._meta = None
self._name = None
self._slug = None
self._message = None
self._description = None
self._geo_zone = None
self._customer_groups = None
self._status = None
self._use_discounted_subtotal = None
self._include_taxes = None
self.discriminator = None
if meta is not None:
self.meta = meta
self.name = name
self.slug = slug
if message is not None:
self.message = message
if description is not None:
self.description = description
self.geo_zone = geo_zone
if customer_groups is not None:
self.customer_groups = customer_groups
self.status = status
if use_discounted_subtotal is not None:
self.use_discounted_subtotal = use_discounted_subtotal
if include_taxes is not None:
self.include_taxes = include_taxes
@property
def id(self):
if self._id:
return self._id
elif self.meta is None:
return None
else:
self._id = self.meta.href.split("/")[-1]
return self._id
@id.setter
def id(self, new_id):
self._id = new_id
@property
def meta(self):
"""Gets the meta of this ShippingMethod.
:return: The meta of this ShippingMethod.
:rtype: InstanceMeta
"""
return self._meta
@meta.setter
def meta(self, meta):
"""Sets the meta of this ShippingMethod.
:param meta: The meta of this ShippingMethod.
:type: InstanceMeta
"""
self._meta = meta
@property
def name(self):
"""Gets the name of this ShippingMethod.
:return: The name of this ShippingMethod.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ShippingMethod.
:param name: The name of this ShippingMethod.
:type: str
"""
self._name = name
@property
def slug(self):
"""Gets the slug of this ShippingMethod.
:return: The slug of this ShippingMethod.
:rtype: str
"""
return self._slug
@slug.setter
def slug(self, slug):
"""Sets the slug of this ShippingMethod.
:param slug: The slug of this ShippingMethod.
:type: str
"""
self._slug = slug
@property
def message(self):
"""Gets the message of this ShippingMethod.
:return: The message of this ShippingMethod.
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this ShippingMethod.
:param message: The message of this ShippingMethod.
:type: str
"""
self._message = message
@property
def description(self):
"""Gets the description of this ShippingMethod.
:return: The description of this ShippingMethod.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this ShippingMethod.
:param description: The description of this ShippingMethod.
:type: str
"""
self._description = description
@property
def geo_zone(self):
"""Gets the geo_zone of this ShippingMethod.
:return: The geo_zone of this ShippingMethod.
:rtype: GeoZone
"""
return self._geo_zone
@geo_zone.setter
def geo_zone(self, geo_zone):
"""Sets the geo_zone of this ShippingMethod.
:param geo_zone: The geo_zone of this ShippingMethod.
:type: GeoZone
"""
self._geo_zone = geo_zone
@property
def customer_groups(self):
"""Gets the customer_groups of this ShippingMethod.
:return: The customer_groups of this ShippingMethod.
:rtype: list[CustomerGroup]
"""
return self._customer_groups
@customer_groups.setter
def customer_groups(self, customer_groups):
"""Sets the customer_groups of this ShippingMethod.
:param customer_groups: The customer_groups of this ShippingMethod.
:type: list[CustomerGroup]
"""
self._customer_groups = customer_groups
@property
def status(self):
"""Gets the status of this ShippingMethod.
:return: The status of this ShippingMethod.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ShippingMethod.
:param status: The status of this ShippingMethod.
:type: str
"""
allowed_values = ["enabled", "disabled"]
if status is not None and status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}"
.format(status, allowed_values)
)
self._status = status
@property
def use_discounted_subtotal(self):
"""Gets the use_discounted_subtotal of this ShippingMethod.
:return: The use_discounted_subtotal of this ShippingMethod.
:rtype: bool
"""
return self._use_discounted_subtotal
@use_discounted_subtotal.setter
def use_discounted_subtotal(self, use_discounted_subtotal):
"""Sets the use_discounted_subtotal of this ShippingMethod.
:param use_discounted_subtotal: The use_discounted_subtotal of this ShippingMethod.
:type: bool
"""
self._use_discounted_subtotal = use_discounted_subtotal
@property
def include_taxes(self):
"""Gets the include_taxes of this ShippingMethod.
:return: The include_taxes of this ShippingMethod.
:rtype: bool
"""
return self._include_taxes
@include_taxes.setter
def include_taxes(self, include_taxes):
"""Sets the include_taxes of this ShippingMethod.
:param include_taxes: The include_taxes of this ShippingMethod.
:type: bool
"""
self._include_taxes = include_taxes
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ShippingMethod, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShippingMethod):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
@classmethod
def get_api_client(cls):
if cls.api_client is None:
cls.api_client = ApiClient.instance()
return cls.api_client
@classmethod
def find_all(cls, **kwargs):
return cls.list_all_shipping_methods(**kwargs)
@classmethod
def find_all_by(cls, **kwargs):
return cls.list_all_shipping_methods(**kwargs)
@classmethod
def find_one_by(cls, **kwargs):
results = cls.list_all_shipping_methods(**kwargs)
if len(results) > 0:
return results[0]
@classmethod
def find_all_for_order(cls, order_id, **kwargs):
"""Find shipping methods for order.
Find all shipping methods suitable for an order.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.find_all_for_order(order_id, async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID to get shipping methods for. (required)
:return: page[ShippingMethod]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._find_all_for_order_with_http_info(order_id, **kwargs)
else:
(data) = cls._find_all_for_order_with_http_info(order_id, **kwargs)
return data
@classmethod
def _find_all_for_order_with_http_info(cls, order_id, **kwargs):
"""Find shipping methods for order.
Find all shipping methods suitable for an order.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.find_all_for_order_with_http_info(order_id, async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID to get shipping methods for. (required)
:return: page[ShippingMethod]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['order_id']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'order_id' is set
if ('order_id' not in params or
params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `find_all_for_order`")
collection_formats = {}
path_params = {}
if 'order_id' in params:
path_params['orderId'] = params['order_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/shippingMethods/order/{orderId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='page[ShippingMethod]',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def list_all_shipping_methods(cls, **kwargs):
"""List ShippingMethods
Return a list of ShippingMethods
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_all_shipping_methods(async=True)
>>> result = thread.get()
:param async bool
:param int page: page number
:param int size: page size
:param str sort: page order
:return: page[ShippingMethod]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._list_all_shipping_methods_with_http_info(**kwargs)
else:
(data) = cls._list_all_shipping_methods_with_http_info(**kwargs)
return data
@classmethod
def _list_all_shipping_methods_with_http_info(cls, **kwargs):
"""List ShippingMethods
Return a list of ShippingMethods
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_all_shipping_methods_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param int page: page number
:param int size: page size
:param str sort: page order
:return: page[ShippingMethod]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'size', 'sort']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
if 'page' in params:
query_params.append(('page', params['page']))
if 'size' in params:
query_params.append(('size', params['size']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/shippingMethods', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='page[ShippingMethod]',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
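# A minimal usage sketch (values are illustrative, not part of this module):
#
#     methods = ShippingMethod.list_all_shipping_methods(page=0, size=10)
#     for_order = ShippingMethod.find_all_for_order("5d9f3a2b1c")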
| 28.72449 | 197 | 0.596744 |
a31df2140d3bde6423c31b6cd430101440570202 | 72,456 | py | Python | intersight/model/boot_cdd_device_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | ["Apache-2.0"] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/boot_cdd_device_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | ["Apache-2.0"] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/boot_cdd_device_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | ["Apache-2.0"] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z |
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
from intersight.model.boot_cdd_device import BootCddDevice
from intersight.model.compute_physical_relationship import ComputePhysicalRelationship
from intersight.model.display_names import DisplayNames
from intersight.model.equipment_fru_relationship import EquipmentFruRelationship
from intersight.model.inventory_device_info_relationship import InventoryDeviceInfoRelationship
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_mo_ref import MoMoRef
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
globals()['BootCddDevice'] = BootCddDevice
globals()['ComputePhysicalRelationship'] = ComputePhysicalRelationship
globals()['DisplayNames'] = DisplayNames
globals()['EquipmentFruRelationship'] = EquipmentFruRelationship
globals()['InventoryDeviceInfoRelationship'] = InventoryDeviceInfoRelationship
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoMoRef'] = MoMoRef
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
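# lazy_import defers these model imports until first use and publishes the
# classes via globals(); the generated models reference one another, so eager
# top-level imports would trigger circular-import failures.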
class BootCddDeviceRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'moid': (str,), # noqa: E501
'selector': (str,), # noqa: E501
'link': (str,), # noqa: E501
'account_moid': (str,), # noqa: E501
'create_time': (datetime,), # noqa: E501
'domain_group_moid': (str,), # noqa: E501
'mod_time': (datetime,), # noqa: E501
'owners': ([str], none_type,), # noqa: E501
'shared_scope': (str,), # noqa: E501
'tags': ([MoTag], none_type,), # noqa: E501
'version_context': (MoVersionContext,), # noqa: E501
'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501
'parent': (MoBaseMoRelationship,), # noqa: E501
'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501
'display_names': (DisplayNames,), # noqa: E501
'device_mo_id': (str,), # noqa: E501
'dn': (str,), # noqa: E501
'rn': (str,), # noqa: E501
'model': (str,), # noqa: E501
'presence': (str,), # noqa: E501
'revision': (str,), # noqa: E501
'serial': (str,), # noqa: E501
'vendor': (str,), # noqa: E501
'previous_fru': (EquipmentFruRelationship,), # noqa: E501
'name': (str,), # noqa: E501
'order': (int,), # noqa: E501
'state': (str,), # noqa: E501
'type': (str,), # noqa: E501
'compute_physical': (ComputePhysicalRelationship,), # noqa: E501
'inventory_device_info': (InventoryDeviceInfoRelationship,), # noqa: E501
'registered_device': (AssetDeviceRegistrationRelationship,), # noqa: E501
'object_type': (str,), # noqa: E501
}
@cached_property
def discriminator():
lazy_import()
val = {
'boot.CddDevice': BootCddDevice,
'mo.MoRef': MoMoRef,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'moid': 'Moid', # noqa: E501
'selector': 'Selector', # noqa: E501
'link': 'link', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
'device_mo_id': 'DeviceMoId', # noqa: E501
'dn': 'Dn', # noqa: E501
'rn': 'Rn', # noqa: E501
'model': 'Model', # noqa: E501
'presence': 'Presence', # noqa: E501
'revision': 'Revision', # noqa: E501
'serial': 'Serial', # noqa: E501
'vendor': 'Vendor', # noqa: E501
'previous_fru': 'PreviousFru', # noqa: E501
'name': 'Name', # noqa: E501
'order': 'Order', # noqa: E501
'state': 'State', # noqa: E501
'type': 'Type', # noqa: E501
'compute_physical': 'ComputePhysical', # noqa: E501
'inventory_device_info': 'InventoryDeviceInfo', # noqa: E501
'registered_device': 'RegisteredDevice', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""BootCddDeviceRelationship - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
moid (str): The Moid of the referenced REST resource.. [optional] # noqa: E501
selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional] # noqa: E501
link (str): A URL to an instance of the 'mo.MoRef' class.. [optional] # noqa: E501
account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501
create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501
domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501
mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501
owners ([str], none_type): [optional] # noqa: E501
shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501
tags ([MoTag], none_type): [optional] # noqa: E501
version_context (MoVersionContext): [optional] # noqa: E501
ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
parent (MoBaseMoRelationship): [optional] # noqa: E501
permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
display_names (DisplayNames): [optional] # noqa: E501
device_mo_id (str): The database identifier of the registered device of an object.. [optional] # noqa: E501
dn (str): The Distinguished Name unambiguously identifies an object in the system.. [optional] # noqa: E501
rn (str): The Relative Name uniquely identifies an object within a given context.. [optional] # noqa: E501
model (str): This field identifies the model of the given component.. [optional] # noqa: E501
presence (str): This field identifies the presence (equipped) or absence of the given component.. [optional] # noqa: E501
revision (str): This field identifies the revision of the given component.. [optional] # noqa: E501
serial (str): This field identifies the serial of the given component.. [optional] # noqa: E501
vendor (str): This field identifies the vendor of the given component.. [optional] # noqa: E501
previous_fru (EquipmentFruRelationship): [optional] # noqa: E501
name (str): The name of the boot device configured in the boot policy.. [optional] # noqa: E501
order (int): The order of the boot device configured in the boot policy.. [optional] # noqa: E501
state (str): The state of the boot device configured in the boot policy.. [optional] # noqa: E501
type (str): The type of the boot device configured in the boot policy.. [optional] # noqa: E501
compute_physical (ComputePhysicalRelationship): [optional] # noqa: E501
inventory_device_info (InventoryDeviceInfoRelationship): [optional] # noqa: E501
registered_device (AssetDeviceRegistrationRelationship): [optional] # noqa: E501
object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "mo.MoRef")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
BootCddDevice,
MoMoRef,
none_type,
],
}
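# A minimal construction sketch: the discriminator above resolves
# class_id "mo.MoRef" to MoMoRef; the moid value is a placeholder.
if __name__ == "__main__":
    ref = BootCddDeviceRelationship(
        class_id="mo.MoRef",
        object_type="boot.CddDevice",
        moid="000000000000000000000000",
    )
    print(ref)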
| 62.732468
| 1,678
| 0.657792
|
665eeec04039b49ff248d53ec5ffb732ec8f927e
| 690
|
py
|
Python
|
deepr/jobs/cleanup_checkpoints.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 50
|
2020-05-19T17:29:44.000Z
|
2022-01-15T20:50:50.000Z
|
deepr/jobs/cleanup_checkpoints.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 75
|
2020-05-20T16:53:37.000Z
|
2022-01-12T15:53:46.000Z
|
deepr/jobs/cleanup_checkpoints.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 17
|
2020-05-25T13:23:03.000Z
|
2022-02-21T11:22:08.000Z
|
"""Cleanup Checkpoints in path_model"""
from dataclasses import dataclass
import logging
from deepr.jobs import base
from deepr.io.path import Path
LOGGER = logging.getLogger(__name__)
@dataclass
class CleanupCheckpoints(base.Job):
"""Cleanup Checkpoints in path_model"""
path_model: str
path_checkpoints: str = "checkpoints"
def run(self):
LOGGER.info(f"Cleanup checkpoints in {self.path_model}/{self.path_checkpoints}")
checkpoint_files = Path(self.path_model, self.path_checkpoints).glob("model.ckpt-*")
for checkpoint_file in checkpoint_files:
LOGGER.info(f"- Deleting {checkpoint_file}")
checkpoint_file.delete()
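# A minimal usage sketch; "path/to/model" is a placeholder path.
if __name__ == "__main__":
    CleanupCheckpoints(path_model="path/to/model").run()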
| 26.538462
| 92
| 0.718841
|
1fc689e2e7241cf464d6a83666bcb5b9a3737845
| 5,375
|
py
|
Python
|
src/pretix/control/context.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-04-25T00:11:00.000Z
|
2020-04-25T00:11:00.000Z
|
src/pretix/control/context.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/control/context.py
|
NicsTr/pretix
|
e6d2380d9ed1836cc64a688b2be20d00a8500eab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import sys
from importlib import import_module
from django.conf import settings
from django.db.models import Q
from django.urls import Resolver404, get_script_prefix, resolve
from django.utils.translation import get_language
from django_scopes import scope
from pretix.base.models.auth import StaffSession
from pretix.base.settings import GlobalSettingsObject
from pretix.control.navigation import (
get_event_navigation, get_global_navigation, get_organizer_navigation,
)
from ..helpers.i18n import (
get_javascript_format, get_javascript_output_format, get_moment_locale,
)
from ..multidomain.urlreverse import get_event_domain
from .signals import html_head, nav_topbar
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
def contextprocessor(request):
"""
Adds data to all template contexts
"""
if not hasattr(request, '_pretix_control_default_context'):
request._pretix_control_default_context = _default_context(request)
return request._pretix_control_default_context
def _default_context(request):
try:
url = resolve(request.path_info)
except Resolver404:
return {}
if not request.path.startswith(get_script_prefix() + 'control'):
return {}
ctx = {
'url_name': url.url_name,
'settings': settings,
'django_settings': settings,
'DEBUG': settings.DEBUG,
}
_html_head = []
if hasattr(request, 'event') and request.user.is_authenticated:
for receiver, response in html_head.send(request.event, request=request):
_html_head.append(response)
ctx['html_head'] = "".join(_html_head)
_js_payment_weekdays_disabled = '[]'
if getattr(request, 'event', None) and hasattr(request, 'organizer') and request.user.is_authenticated:
ctx['nav_items'] = get_event_navigation(request)
if request.event.settings.get('payment_term_weekdays'):
_js_payment_weekdays_disabled = '[0,6]'
ctx['has_domain'] = get_event_domain(request.event, fallback=True) is not None
if not request.event.testmode:
with scope(organizer=request.organizer):
complain_testmode_orders = request.event.cache.get('complain_testmode_orders')
if complain_testmode_orders is None:
complain_testmode_orders = request.event.orders.filter(testmode=True).exists()
request.event.cache.set('complain_testmode_orders', complain_testmode_orders, 30)
ctx['complain_testmode_orders'] = complain_testmode_orders
else:
ctx['complain_testmode_orders'] = False
if not request.event.live and ctx['has_domain']:
child_sess = request.session.get('child_session_{}'.format(request.event.pk))
s = SessionStore()
if not child_sess or not s.exists(child_sess):
s['pretix_event_access_{}'.format(request.event.pk)] = request.session.session_key
s.create()
ctx['new_session'] = s.session_key
request.session['child_session_{}'.format(request.event.pk)] = s.session_key
request.session['event_access'] = True
else:
ctx['new_session'] = child_sess
request.session['event_access'] = True
if request.GET.get('subevent', ''):
# Do not use .get() for lazy evaluation
ctx['selected_subevents'] = request.event.subevents.filter(pk=request.GET.get('subevent'))
elif getattr(request, 'organizer', None) and request.user.is_authenticated:
ctx['nav_items'] = get_organizer_navigation(request)
elif request.user.is_authenticated:
ctx['nav_items'] = get_global_navigation(request)
ctx['js_payment_weekdays_disabled'] = _js_payment_weekdays_disabled
_nav_topbar = []
if request.user.is_authenticated:
for receiver, response in nav_topbar.send(request, request=request):
_nav_topbar += response
ctx['nav_topbar'] = sorted(_nav_topbar, key=lambda n: n['label'])
ctx['js_datetime_format'] = get_javascript_format('DATETIME_INPUT_FORMATS')
ctx['js_date_format'] = get_javascript_format('DATE_INPUT_FORMATS')
ctx['js_long_date_format'] = get_javascript_output_format('DATE_FORMAT')
ctx['js_time_format'] = get_javascript_format('TIME_INPUT_FORMATS')
ctx['js_locale'] = get_moment_locale()
ctx['select2locale'] = get_language()[:2]
ctx['warning_update_available'] = False
ctx['warning_update_check_active'] = False
gs = GlobalSettingsObject()
ctx['global_settings'] = gs.settings
if request.user.is_staff:
if gs.settings.update_check_result_warning:
ctx['warning_update_available'] = True
if not gs.settings.update_check_ack and 'runserver' not in sys.argv:
ctx['warning_update_check_active'] = True
if request.user.is_authenticated:
ctx['staff_session'] = request.user.has_active_staff_session(request.session.session_key)
ctx['staff_need_to_explain'] = (
StaffSession.objects.filter(user=request.user, date_end__isnull=False).filter(
Q(comment__isnull=True) | Q(comment="")
)
if request.user.is_staff and settings.PRETIX_ADMIN_AUDIT_COMMENTS else StaffSession.objects.none()
)
return ctx
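# A registration sketch, assuming the stock Django TEMPLATES setting; the
# surrounding settings snippet shown here is hypothetical:
#
#   TEMPLATES = [{
#       'BACKEND': 'django.template.backends.django.DjangoTemplates',
#       'OPTIONS': {
#           'context_processors': [
#               'pretix.control.context.contextprocessor',
#           ],
#       },
#   }]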
| 41.666667
| 110
| 0.693209
|
af22fd74e5e8f54aba78a52b0a0a7069fc8205fd
| 29
|
py
|
Python
|
NGUI/__init__.py
|
TanyaAdams1/WechatEncrypt
|
e3437a465275de51c2f2b26ca46d249ed3a48d97
|
[
"Apache-2.0"
] | 1
|
2017-08-09T06:58:44.000Z
|
2017-08-09T06:58:44.000Z
|
NGUI/__init__.py
|
TanyaAdams1/WechatEncrypt
|
e3437a465275de51c2f2b26ca46d249ed3a48d97
|
[
"Apache-2.0"
] | null | null | null |
NGUI/__init__.py
|
TanyaAdams1/WechatEncrypt
|
e3437a465275de51c2f2b26ca46d249ed3a48d97
|
[
"Apache-2.0"
] | null | null | null |
from NGUI.interface import *
| 14.5
| 28
| 0.793103
|
86143f23f413ca0c9f8904af83b44b2d8be66ecf
| 94
|
py
|
Python
|
dell_projector/__init__.py
|
Abhiseshan/dell-projector
|
ef869afc9c33a6907160948c6548cad2d96e7341
|
[
"MIT"
] | null | null | null |
dell_projector/__init__.py
|
Abhiseshan/dell-projector
|
ef869afc9c33a6907160948c6548cad2d96e7341
|
[
"MIT"
] | null | null | null |
dell_projector/__init__.py
|
Abhiseshan/dell-projector
|
ef869afc9c33a6907160948c6548cad2d96e7341
|
[
"MIT"
] | null | null | null |
"""Python library to control Dell projector."""
from dell_projector.main import Projector
| 23.5
| 48
| 0.765957
|
2b397ec5a3f2dcd8313e37d77ef93ab113b14084
| 4,469
|
py
|
Python
|
wsgi/myproject/myproject/settings.py
|
ccltw/openshift-nginx-django-channels
|
2d3f02f5b3f34b23e08bfb1cf71b2f679a8fb4f1
|
[
"MIT"
] | null | null | null |
wsgi/myproject/myproject/settings.py
|
ccltw/openshift-nginx-django-channels
|
2d3f02f5b3f34b23e08bfb1cf71b2f679a8fb4f1
|
[
"MIT"
] | null | null | null |
wsgi/myproject/myproject/settings.py
|
ccltw/openshift-nginx-django-channels
|
2d3f02f5b3f34b23e08bfb1cf71b2f679a8fb4f1
|
[
"MIT"
] | null | null | null |
"""
Django settings for myproject project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
WSGI_DIR = os.path.dirname(BASE_DIR)
REPO_DIR = os.path.dirname(WSGI_DIR)
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR', BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'o0kw%gy1bpipf%e%ut+(68a_53k84wmpbre@(@3!xz$#9%n=rn'
# import sys
# sys.path.append(os.path.join(REPO_DIR, 'libs'))
# import secrets
# SECRETS = secrets.getter(os.path.join(DATA_DIR, 'secrets.json'))
# SECRET_KEY = SECRETS['secret_key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
from socket import gethostname
ALLOWED_HOSTS = [
gethostname(), # For internal OpenShift load balancer security purposes.
os.environ.get('OPENSHIFT_APP_DNS'), # Dynamically map to the OpenShift gear name.
#'example.com', # First DNS alias (set up in the app)
#'www.example.com', # Second DNS alias (set up in the app)
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'channels',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'myproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(DATA_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-Hant'
TIME_ZONE = 'Asia/Taipei'
# LANGUAGE_CODE = 'en-us'
# TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(WSGI_DIR, 'static')
# CHANNEL_LAYERS = {
# "default": {
# "BACKEND": "asgiref.inmemory.ChannelLayer",
# "ROUTING": "myproject.routing.channel_routing",
# },
# }
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgi_redis.RedisChannelLayer',
'CONFIG': {
'hosts': [("redis://:{}@{}:{}").format(
os.environ.get('REDIS_PASSWORD', ''),
os.environ.get('OPENSHIFT_REDIS_HOST', 'localhost'),
os.environ.get('OPENSHIFT_REDIS_PORT', '6379')
)],
},
'ROUTING': 'myproject.routing.channel_routing',
}
}
| 27.084848
| 91
| 0.67599
|
5cd77b6b42bb3310e3d3dad9b73ef72ba099a0f4
| 472
|
py
|
Python
|
departments/models.py
|
masrufjaman/central-shop
|
c65f80a0f48cf1e07133bad8d6c0d48fdc548226
|
[
"MIT"
] | 1
|
2021-07-04T13:55:07.000Z
|
2021-07-04T13:55:07.000Z
|
departments/models.py
|
masrufjaman/central-shop
|
c65f80a0f48cf1e07133bad8d6c0d48fdc548226
|
[
"MIT"
] | null | null | null |
departments/models.py
|
masrufjaman/central-shop
|
c65f80a0f48cf1e07133bad8d6c0d48fdc548226
|
[
"MIT"
] | null | null | null |
from django.db import models
class Product(models.Model):
name = models.CharField(max_length=200, null=True)
price = models.FloatField(null=True)
digital = models.BooleanField(default=False, null=True, blank=False)
image = models.ImageField(null=True, blank=True)
def __str__(self):
return self.name
@property
def imageURL(self):
try:
url = self.image.url
        except ValueError:
            # ImageField raises ValueError when no file is attached.
            url = ''
return url
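# A minimal usage sketch (inside a configured Django project); the field
# values are placeholders:
#
#   p = Product(name='Mug', price=9.99)
#   p.imageURL  # '' until an image file is attached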
| 23.6
| 72
| 0.627119
|
775d61dd4f248a959a94f3fe34aa8c495c4852d2
| 1,373
|
py
|
Python
|
mariamalia/urls.py
|
marie0901/portfolio-django-api
|
eeacc4f75ae7d0ecfe573d5098cc97dbc822b565
|
[
"MIT"
] | null | null | null |
mariamalia/urls.py
|
marie0901/portfolio-django-api
|
eeacc4f75ae7d0ecfe573d5098cc97dbc822b565
|
[
"MIT"
] | null | null | null |
mariamalia/urls.py
|
marie0901/portfolio-django-api
|
eeacc4f75ae7d0ecfe573d5098cc97dbc822b565
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.conf.urls import url, include
from django.contrib import admin
from django.views.static import serve
from rest_framework import routers
from blog import views as bv
from projects import views as pv
from tags import views as tv
from learning import views as lv
from sliders import views as sv
from about import views as av
app_name = "mariamalia"
router = routers.SimpleRouter()
router.register(r'tags', tv.TagViewSet)
router.register(r'projects', pv.ProjectViewSet)
router.register(r'blog/posts', bv.PostViewSet)
router.register(r'learning/courses', lv.CourseViewSet)
router.register(r'learning/schools', lv.SchoolViewSet)
router.register(r'learning/books', lv.BookViewSet)
router.register(r'learning/quotes', lv.QuoteViewSet)
router.register(r'sliders', sv.SlideViewSet)
router.register(r'about/sliders', av.AboutSlideViewSet)
urlpatterns = [
url(r'^markdownx/', include('markdownx.urls')),
url(r'^admin/', admin.site.urls),
url(r'^api/v1/auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/v1/', include(router.urls)),
]
# Django should not serve media files in production;
# they should be served by the web server instead.
if settings.DEBUG:
urlpatterns += [
url(r'^media/(?P<path>.*)$', serve, {
'document_root': settings.MEDIA_ROOT,
}),
]
| 28.020408
| 86
| 0.7378
|
c24ab1246a29abafa3cfa540f41af74e4dee608e
| 1,103
|
py
|
Python
|
py3/scrapy/scrapy_lamyoung_title_multi_xpath_csv.py
|
baiyuwubing/python-exercise
|
4dc703368ac385d4d412587bc672de09a33a0381
|
[
"MIT"
] | 11
|
2019-12-06T07:13:39.000Z
|
2022-02-22T10:16:22.000Z
|
py3/scrapy/scrapy_lamyoung_title_multi_xpath_csv.py
|
baiyuwubing/python-exercise
|
4dc703368ac385d4d412587bc672de09a33a0381
|
[
"MIT"
] | 1
|
2020-03-31T06:39:17.000Z
|
2020-03-31T06:39:17.000Z
|
py3/scrapy/scrapy_lamyoung_title_multi_xpath_csv.py
|
baiyuwubing/python-exercise
|
4dc703368ac385d4d412587bc672de09a33a0381
|
[
"MIT"
] | 16
|
2020-03-12T08:31:50.000Z
|
2021-09-18T11:05:28.000Z
|
# author: lamyoung
import requests
from multiprocessing.dummy import Pool
import lxml.html
import csv
def scrapy(index):
    # Pages beyond the first live under /pageN/.
    page_url = ''
    if index > 1:
        page_url = f'page{index}/'
    url = f'http://lamyoung.com/{page_url}'
    print(url)
    html = requests.get(url)
    if html.status_code != 200:
        return []
    selector = lxml.html.fromstring(html.content)
    all_items = selector.xpath('//div[@class="post-preview"]')
    write_content = []
    for item in all_items:
        # Each post preview holds one link and one title heading.
        links = item.xpath('a/@href')
        title = item.xpath('a/h2[@class="post-title"]/text()')
        write_content.append({'title': title[0].strip(),
                              'link': f'http://lamyoung.com{links[0]}'})
    return write_content
# Scrape pages 1-9 concurrently with a pool of three worker threads.
pool = Pool(3)
result = pool.map(scrapy, range(1, 10))
with open('lamyoung_title_multi_xpath_out.csv', 'w', newline='') as csvfile:
    fieldnames = ['title', 'link']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for write_content in result:
        for _content in write_content:
            writer.writerow(_content)
| 26.902439
| 86
| 0.69447
|
065a4bc766565fdf6986cbf930b1d3e424768f85
| 800
|
py
|
Python
|
2_multi_line_command.py
|
leo-gal/pyplus_exercise
|
223d3c16fe485a0ee99c3ab7d161a758975a9d7b
|
[
"Apache-2.0"
] | null | null | null |
2_multi_line_command.py
|
leo-gal/pyplus_exercise
|
223d3c16fe485a0ee99c3ab7d161a758975a9d7b
|
[
"Apache-2.0"
] | null | null | null |
2_multi_line_command.py
|
leo-gal/pyplus_exercise
|
223d3c16fe485a0ee99c3ab7d161a758975a9d7b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from netmiko import ConnectHandler
from getpass import getpass
device1 = {
"host": "cisco3.lasthop.io",
"username": "pyclass",
"password": getpass(),
"device_type": "cisco_ios"
# "session_log": session_log.txt
}
net_connect = ConnectHandler(**device1)
print(net_connect.find_prompt())
command = 'delete flash:/cisco3-cfg-May--6-11-15-42.192-60'
# The delete command prompts twice (filename confirmation, then delete
# confirmation), so chain send_command() calls with custom expect_string
# patterns instead of waiting for the usual device prompt.
output = net_connect.send_command(command, expect_string=r'cisco3-cfg-May', strip_prompt=False, strip_command=False)
output += net_connect.send_command('\n', expect_string=r'confirm', strip_prompt=False, strip_command=False)
output += net_connect.send_command('y', expect_string=r'#', strip_prompt=False, strip_command=False)
print(output)
| 36.363636
| 133
| 0.68125
|
200e6635c46a81741e58fad09f96297d05a17e89
| 4,290
|
py
|
Python
|
Particulates.py
|
Nano2PlastProject/InteractiveFullMulti
|
492d89ef8244d4bdbebfc8be2692308da36ba735
|
[
"MIT"
] | null | null | null |
Particulates.py
|
Nano2PlastProject/InteractiveFullMulti
|
492d89ef8244d4bdbebfc8be2692308da36ba735
|
[
"MIT"
] | null | null | null |
Particulates.py
|
Nano2PlastProject/InteractiveFullMulti
|
492d89ef8244d4bdbebfc8be2692308da36ba735
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 30 16:10:20 2019
@author: AntoniaPraetorius
#PradoDomercq added some modifications for Size bins
"""
import math
from GlobalConstants import *
#define Particulates class
class Particulates:
"This is a class to create Particulate objects"
#class attribute
species = "particulate"
#constructor
def __init__(self, plastic_prop, MP_index):
self.name = plastic_prop.name.loc[MP_index]
self.composition = plastic_prop.composition.loc[MP_index]
self.density_kg_m3 = plastic_prop.density_kg_m3.loc[MP_index]
self.shape = plastic_prop.MPshape.loc[MP_index]
self.diameter_um = plastic_prop.diameter_um.loc[MP_index] #for spherical MPs and fibres. Should be 0 for all others.
self.diameter_m = self.diameter_um*10**-6 #for spherical MPs and fibres. Should be 0 for all others.
self.radius_m = self.diameter_um*10**-6/2
self.length_a_um = plastic_prop.length_a_um.loc[MP_index] #longest length (for nonspherical MPs)
self.length_a_m = self.length_a_um*10**-6
self.length_b_um = plastic_prop.length_b_um.loc[MP_index] #intermediate length (for nonspherical MPs)
self.length_b_m = self.length_b_um*10**-6
self.length_c_um = plastic_prop.length_c_um.loc[MP_index] #shortest length (for nonspherical MPs)
self.length_c_m = self.length_c_um*10**-6
#methods
#volume calculation
#different formulas for different particle shapes.
#currently defined for spheres, fibres, cylinders, pellets and irregular fragments
def calc_volume(self):
if self.shape == "sphere":
self.volume_m3 = 4/3*math.pi*(self.radius_m)**3
#calculates volume (in m3) of spherical particles from MP radius
self.CSF = 1
#calculate corey shape factor (CSF)
#(Waldschlaeger 2019, doi:10.1021/acs.est.8b06794)
#particle number concentration calculation
elif self.shape == "fibre" or self.shape == "fiber" or self.shape == "cylinder":
self.volume_m3 = math.pi*(self.radius_m)**2*self.length_a_m
#calculates volume (in m3) of fibres or cylinders from diameter and
#length assuming cylindrical shape
self.CSF = self.radius_m/math.sqrt(self.length_a_m*self.radius_m)
#calculate corey shape factor (CSF)
#(Waldschlaeger 2019, doi:10.1021/acs.est.8b06794)
#particle number concentration calculation
elif self.shape == "pellet" or self.shape == "fragment":
self.volume_m3 = self.length_a_m*self.length_b_m*self.length_c_m
#approximate volume calculation for irregular fragments
#approximated as a cuboid using longest, intermediate and shortest length
            #!! Note: not sure if pellets fit best here or rather as sphere/cylinder;
            #might adjust later!!
self.CSF = self.length_c_m/math.sqrt(self.length_a_m*self.length_b_m)
#calculate corey shape factor (CSF)
#(Waldschlaeger 2019, doi:10.1021/acs.est.8b06794)
#particle number concentration calculation
else:
print("Error: unknown shape")
#print error message for shapes other than spheres
#(to be removed when other volume calculations are implemented)
def calc_numConc(self, concMass_mg_L, concNum_part_L):
if concNum_part_L == 0:
self.concNum_part_m3 = concMass_mg_L/1000/self.density_kg_m3/self.volume_m3
#if mass concentration is given, it is converted to number concentration
else:
self.concNum_part_m3 = concNum_part_L*1000
#if number concentration is given, it is converted from part/L to part/m3
#degradation estimations
""" relates only to MP & NPs. Full degradation probably extremely slow
possibly not significant for most simulations. But add anyway for scenario
analysis or biodegradable polymers. Values currently placeholders
! Add a size relation?!"""
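# A minimal usage sketch, assuming plastic_prop is a pandas DataFrame with
# the columns read in __init__; all values below are placeholders.
if __name__ == "__main__":
    import pandas as pd
    plastic_prop = pd.DataFrame({
        "name": ["PE_sphere"], "composition": ["PE"], "density_kg_m3": [950.0],
        "MPshape": ["sphere"], "diameter_um": [100.0],
        "length_a_um": [0.0], "length_b_um": [0.0], "length_c_um": [0.0],
    })
    mp = Particulates(plastic_prop, 0)
    mp.calc_volume()
    print(mp.volume_m3, mp.CSF)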
| 43.333333
| 124
| 0.649417
|
2be8d5bc85267f7441c31112a08a04577518f17f
| 3,463
|
py
|
Python
|
xlsxwriter/test/worksheet/test_write_sheet_views2.py
|
yxwlr995/-Python-Pandas-XlsxWriter
|
cd28c1b968795b67f3013c49a0e02ffda5898163
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/worksheet/test_write_sheet_views2.py
|
yxwlr995/-Python-Pandas-XlsxWriter
|
cd28c1b968795b67f3013c49a0e02ffda5898163
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/worksheet/test_write_sheet_views2.py
|
yxwlr995/-Python-Pandas-XlsxWriter
|
cd28c1b968795b67f3013c49a0e02ffda5898163
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2020-04-12T16:44:58.000Z
|
2020-04-12T16:44:58.000Z
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org
#
import unittest
from ..compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteSheetViews(unittest.TestCase):
"""
Test the Worksheet _write_sheet_views() method.
"""
def setUp(self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_write_sheet_views1(self):
"""Test the _write_sheet_views() method with freeze panes"""
self.worksheet.select()
self.worksheet.freeze_panes(1, 0)
self.worksheet._write_sheet_views()
exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane ySplit="1" topLeftCell="A2" activePane="bottomLeft" state="frozen"/><selection pane="bottomLeft"/></sheetView></sheetViews>'
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_sheet_views2(self):
"""Test the _write_sheet_views() method with freeze panes"""
self.worksheet.select()
self.worksheet.freeze_panes(0, 1)
self.worksheet._write_sheet_views()
exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane xSplit="1" topLeftCell="B1" activePane="topRight" state="frozen"/><selection pane="topRight"/></sheetView></sheetViews>'
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_sheet_views3(self):
"""Test the _write_sheet_views() method with freeze panes"""
self.worksheet.select()
self.worksheet.freeze_panes(1, 1)
self.worksheet._write_sheet_views()
exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane xSplit="1" ySplit="1" topLeftCell="B2" activePane="bottomRight" state="frozen"/><selection pane="topRight" activeCell="B1" sqref="B1"/><selection pane="bottomLeft" activeCell="A2" sqref="A2"/><selection pane="bottomRight"/></sheetView></sheetViews>'
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_sheet_views4(self):
"""Test the _write_sheet_views() method with freeze panes"""
self.worksheet.select()
self.worksheet.freeze_panes('G4')
self.worksheet._write_sheet_views()
exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane xSplit="6" ySplit="3" topLeftCell="G4" activePane="bottomRight" state="frozen"/><selection pane="topRight" activeCell="G1" sqref="G1"/><selection pane="bottomLeft" activeCell="A4" sqref="A4"/><selection pane="bottomRight"/></sheetView></sheetViews>'
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_sheet_views5(self):
"""Test the _write_sheet_views() method with freeze panes"""
self.worksheet.select()
self.worksheet.freeze_panes(3, 6, 3, 6, 1)
self.worksheet._write_sheet_views()
exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane xSplit="6" ySplit="3" topLeftCell="G4" activePane="bottomRight" state="frozenSplit"/><selection pane="topRight" activeCell="G1" sqref="G1"/><selection pane="bottomLeft" activeCell="A4" sqref="A4"/><selection pane="bottomRight"/></sheetView></sheetViews>'
got = self.fh.getvalue()
self.assertEqual(got, exp)
if __name__ == '__main__':
unittest.main()
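# The public API exercised by these tests, as a short sketch; the file
# name is a placeholder:
#
#   import xlsxwriter
#   workbook = xlsxwriter.Workbook('frozen.xlsx')
#   worksheet = workbook.add_worksheet()
#   worksheet.freeze_panes(1, 0)  # freeze the first row, as in test case 1
#   workbook.close()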
| 35.701031
| 333
| 0.66041
|
41359b97aa04732fee7fe8c66cc9e3ab057b06ba
| 11,392
|
py
|
Python
|
Tools/BuildSlaveSupport/build.webkit.org-config/factories.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 6
|
2021-07-05T16:09:39.000Z
|
2022-03-06T22:44:42.000Z
|
Tools/BuildSlaveSupport/build.webkit.org-config/factories.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 7
|
2022-03-15T13:25:39.000Z
|
2022-03-15T13:25:44.000Z
|
Tools/BuildSlaveSupport/build.webkit.org-config/factories.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (C) 2017 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from buildbot.process import factory
from buildbot.steps import trigger
from steps import *
class Factory(factory.BuildFactory):
def __init__(self, platform, configuration, architectures, buildOnly, additionalArguments, SVNMirror, device_model):
factory.BuildFactory.__init__(self)
self.addStep(ConfigureBuild(platform=platform, configuration=configuration, architecture=" ".join(architectures), buildOnly=buildOnly, additionalArguments=additionalArguments, SVNMirror=SVNMirror, device_model=device_model))
if SVNMirror:
self.addStep(WaitForSVNServer())
self.addStep(CheckOutSource(SVNMirror=SVNMirror))
        if platform != "jsc-only":
self.addStep(KillOldProcesses())
self.addStep(CleanBuildIfScheduled())
self.addStep(DeleteStaleBuildFiles())
if platform == "win":
self.addStep(InstallWin32Dependencies())
if platform == "gtk" and "--no-experimental-features" not in (additionalArguments or []):
self.addStep(InstallGtkDependencies())
if platform == "wpe":
self.addStep(InstallWpeDependencies())
class BuildFactory(Factory):
ShouldRunJSCBundleStep = False
def __init__(self, platform, configuration, architectures, triggers=None, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, True, additionalArguments, SVNMirror, device_model)
if platform == "win" or platform.startswith("playstation"):
self.addStep(CompileWebKit(timeout=2 * 60 * 60))
else:
self.addStep(CompileWebKit())
if triggers:
self.addStep(ArchiveBuiltProduct())
self.addStep(UploadBuiltProduct())
if platform.startswith('mac') or platform.startswith('ios-simulator') or platform.startswith('tvos-simulator') or platform.startswith('watchos-simulator'):
self.addStep(ArchiveMinifiedBuiltProduct())
self.addStep(UploadMinifiedBuiltProduct())
if self.ShouldRunJSCBundleStep:
self.addStep(GenerateJSCBundle())
self.addStep(TransferToS3())
self.addStep(trigger.Trigger(schedulerNames=triggers))
class TestFactory(Factory):
JSCTestClass = RunJavaScriptCoreTests
LayoutTestClass = RunWebKitTests
ShouldRunJSCBundleStep = False
def getProduct(self):
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model, **kwargs)
self.getProduct()
if platform == 'wincairo':
self.addStep(InstallWinCairoDependencies())
if platform.startswith('mac') or platform.startswith('ios-simulator'):
self.addStep(WaitForCrashCollection())
if self.JSCTestClass:
self.addStep(self.JSCTestClass())
if self.LayoutTestClass:
self.addStep(self.LayoutTestClass())
if platform.startswith('win') or platform.startswith('mac') or platform.startswith('ios-simulator'):
self.addStep(RunAPITests())
if platform.startswith('mac'):
self.addStep(RunLLDBWebKitTests())
self.addStep(RunWebKitPyTests())
self.addStep(RunPerlTests())
self.addStep(RunBindingsTests())
self.addStep(RunBuiltinsTests())
if not platform.startswith('win'):
self.addStep(RunDashboardTests())
if platform.startswith('mac') or platform.startswith('ios-simulator'):
self.addStep(TriggerCrashLogSubmission())
if self.LayoutTestClass:
self.addStep(ArchiveTestResults())
self.addStep(UploadTestResults())
self.addStep(ExtractTestResults())
if self.ShouldRunJSCBundleStep:
self.addStep(GenerateJSCBundle())
if platform == "gtk":
self.addStep(RunGtkAPITests())
if additionalArguments and "--display-server=wayland" in additionalArguments:
self.addStep(RunWebDriverTests())
if platform == "wpe":
self.addStep(RunWPEAPITests())
self.addStep(RunWebDriverTests())
class BuildAndTestFactory(TestFactory):
def getProduct(self):
self.addStep(CompileWebKit())
def __init__(self, platform, configuration, architectures, triggers=None, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
TestFactory.__init__(self, platform, configuration, architectures, additionalArguments, SVNMirror, device_model, **kwargs)
if triggers:
self.addStep(ArchiveBuiltProduct())
self.addStep(UploadBuiltProduct())
self.addStep(trigger.Trigger(schedulerNames=triggers))
class BuildAndTestLLINTCLoopFactory(Factory):
def __init__(self, platform, configuration, architectures, triggers=None, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model, **kwargs)
self.addStep(CompileLLINTCLoop())
self.addStep(RunLLINTCLoopTests())
class BuildAndTest32bitJSCFactory(Factory):
def __init__(self, platform, configuration, architectures, triggers=None, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model, **kwargs)
self.addStep(Compile32bitJSC())
self.addStep(Run32bitJSCTests())
class BuildAndNonLayoutTestFactory(BuildAndTestFactory):
LayoutTestClass = None
class BuildAndRemoteJSCTestsFactory(Factory):
def __init__(self, platform, configuration, architectures, triggers=None, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(CompileJSCOnly(timeout=60 * 60))
self.addStep(RunRemoteJavaScriptCoreTests(timeout=60 * 60))
class TestWebKit1LeaksFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunWebKit1LeakTests())
self.addStep(ArchiveTestResults())
self.addStep(UploadTestResults())
self.addStep(ExtractTestResultsAndLeaks())
class TestAllButJSCFactory(TestFactory):
JSCTestClass = None
class BuildAndGenerateJSCBundleFactory(BuildFactory):
ShouldRunJSCBundleStep = True
class BuildAndNonLayoutTestAndGenerateJSCBundleFactory(BuildAndNonLayoutTestFactory):
ShouldRunJSCBundleStep = True
class TestJSCFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunJavaScriptCoreTests())
class Test262Factory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunTest262Tests())
class TestJSFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunJavaScriptCoreTests())
self.addStep(RunTest262Tests())
class TestWebDriverFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunWebDriverTests())
class TestWebKit1Factory(TestFactory):
LayoutTestClass = RunWebKit1Tests
class TestWebKit1AllButJSCFactory(TestWebKit1Factory):
JSCTestClass = None
class BuildAndPerfTestFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model, **kwargs)
self.addStep(CompileWebKit())
self.addStep(RunAndUploadPerfTests())
if platform == "gtk":
self.addStep(RunBenchmarkTests(timeout=2000))
class DownloadAndPerfTestFactory(Factory):
def __init__(self, platform, configuration, architectures, additionalArguments=None, SVNMirror=None, device_model=None, **kwargs):
Factory.__init__(self, platform, configuration, architectures, False, additionalArguments, SVNMirror, device_model, **kwargs)
self.addStep(DownloadBuiltProduct())
self.addStep(ExtractBuiltProduct())
self.addStep(RunAndUploadPerfTests())
if platform == "gtk":
self.addStep(RunBenchmarkTests(timeout=2000))
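# A construction sketch; the platform, architecture, and scheduler names
# below are placeholders, not entries from the real master configuration:
#
#   factory = BuildFactory(platform='mac-mojave', configuration='release',
#                          architectures=['x86_64'],
#                          triggers=['mac-mojave-release-tests'])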
| 46.688525
| 232
| 0.729898
|
d3969a9c4aa70708827d15bfe7946268d6cfe4b6
| 1,351
|
py
|
Python
|
nova/db/sqlalchemy/migrate_repo/versions/136_add_index_to_instances.py
|
bopopescu/nova_audit
|
1cd2901802f82d39411adfa04cf2f432ff3bf280
|
[
"Apache-2.0"
] | 2
|
2015-11-05T04:52:34.000Z
|
2016-03-07T03:00:06.000Z
|
nova/db/sqlalchemy/migrate_repo/versions/136_add_index_to_instances.py
|
bopopescu/nova_audit
|
1cd2901802f82d39411adfa04cf2f432ff3bf280
|
[
"Apache-2.0"
] | 1
|
2018-01-19T07:50:49.000Z
|
2018-01-19T07:50:49.000Z
|
nova/db/sqlalchemy/migrate_repo/versions/136_add_index_to_instances.py
|
bopopescu/nova_audit
|
1cd2901802f82d39411adfa04cf2f432ff3bf280
|
[
"Apache-2.0"
] | 1
|
2020-07-24T09:15:58.000Z
|
2020-07-24T09:15:58.000Z
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Index
INDEX_NAME = 'instances_host_node_deleted_idx'
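# Composite index matching the instance_get_all_host_and_node query; roughly
# equivalent to (illustrative SQL):
#   CREATE INDEX instances_host_node_deleted_idx ON instances (host, node, deleted)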
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
instances = Table('instances', meta, autoload=True)
# Based on instance_get_all_host_and_node
# from: nova/db/sqlalchemy/api.py
index = Index(INDEX_NAME,
instances.c.host, instances.c.node, instances.c.deleted)
index.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
instances = Table('instances', meta, autoload=True)
index = Index(INDEX_NAME,
instances.c.host, instances.c.node, instances.c.deleted)
index.drop(migrate_engine)
| 32.166667
| 78
| 0.712805
|
5dc636b3c78e517e29ad6990d3b7d08df79fcb8e
| 1,104
|
py
|
Python
|
bcs-ui/backend/templatesets/release/urls.py
|
laodiu/bk-bcs
|
2a956a42101ff6487ff521fb3ef429805bfa7e26
|
[
"Apache-2.0"
] | 599
|
2019-06-25T03:20:46.000Z
|
2022-03-31T12:14:33.000Z
|
bcs-ui/backend/templatesets/release/urls.py
|
laodiu/bk-bcs
|
2a956a42101ff6487ff521fb3ef429805bfa7e26
|
[
"Apache-2.0"
] | 537
|
2019-06-27T06:03:44.000Z
|
2022-03-31T12:10:01.000Z
|
bcs-ui/backend/templatesets/release/urls.py
|
laodiu/bk-bcs
|
2a956a42101ff6487ff521fb3ef429805bfa7e26
|
[
"Apache-2.0"
] | 214
|
2019-06-25T03:26:05.000Z
|
2022-03-31T07:52:03.000Z
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.conf.urls import url
from .views import ReleaseViewSet
urlpatterns = [
url(r'^$', ReleaseViewSet.as_view({'post': 'create', 'get': 'list'})),
url(r'^(?P<release_id>\d+)/$', ReleaseViewSet.as_view({'get': 'get', 'put': 'update', 'delete': 'delete'})),
url(r'^-/manifests/preview/$', ReleaseViewSet.as_view({'post': 'preview_manifests'})),
]
| 46
| 115
| 0.735507
|
3d9176f57a1bfa7e2d45ff910e8d619921eb8473
| 395
|
py
|
Python
|
core/recc/crypto/password.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | 3
|
2021-06-20T02:24:10.000Z
|
2022-01-26T23:55:33.000Z
|
core/recc/crypto/password.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
core/recc/crypto/password.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from hashlib import pbkdf2_hmac
from recc.variables.crypto import (
DEFAULT_PBKDF2_HMAC_HASH_NAME,
DEFAULT_PBKDF2_HMAC_ITERATIONS,
)
def encrypt_password(hashed_user_pw: str, salt: bytes) -> bytes:
return pbkdf2_hmac(
DEFAULT_PBKDF2_HMAC_HASH_NAME,
bytes.fromhex(hashed_user_pw),
salt,
DEFAULT_PBKDF2_HMAC_ITERATIONS,
)
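# Illustrative usage sketch (names hypothetical): the caller supplies a
# client-side SHA-256 hex digest plus a random per-user salt.
#   import os, hashlib
#   salt = os.urandom(16)
#   digest = encrypt_password(hashlib.sha256(b"password").hexdigest(), salt)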
| 23.235294
| 64
| 0.713924
|
e873e0d88b780e527db72e53446357ba24f60492
| 837
|
py
|
Python
|
supermethod.py
|
pawankumarsharm/Pythoncoding
|
f0e5f6c1d22b101e109088529640326dd5405a6a
|
[
"bzip2-1.0.6"
] | null | null | null |
supermethod.py
|
pawankumarsharm/Pythoncoding
|
f0e5f6c1d22b101e109088529640326dd5405a6a
|
[
"bzip2-1.0.6"
] | null | null | null |
supermethod.py
|
pawankumarsharm/Pythoncoding
|
f0e5f6c1d22b101e109088529640326dd5405a6a
|
[
"bzip2-1.0.6"
] | null | null | null |
class Person:
def __init__(self,name,age):
self.name=name
self.age=age
def display(self):
print('name:',self.name)
print('age:',self.age)
class Student(Person):
def __init__(self,name,age,rollno,marks):
super().__init__(name,age)
self.rollno=rollno
self.marks=marks
def display(self):
super().display()
print('Roll Number:',self.rollno)
print('Marks:',self.marks)
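# In Student.display above, super().display() resolves through the MRO, so
# Person.display prints name/age first and the subclass then adds its own
# fields; Teacher below follows the same pattern.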
class Teacher(Person):
def __init__(self,name,age,salary,subject):
super().__init__(name,age)
self.salary=salary
self.subject=subject
def display(self):
super().display()
print('Salary:',self.salary)
print('Subject:',self.subject)
s=Student('Ravi',23,101,90)
p=Teacher('Durga',62,10000,'Python')
s.display()
p.display()
'''name: Ravi
age: 23
Roll Number: 101
Marks: 90
name: Durga
age: 62
Salary: 10000
Subject: Python'''
| 19.465116
| 44
| 0.694146
|
b9e6d18de9a44296cf67467f38c01fcdfab211df
| 1,295
|
py
|
Python
|
asyncstdlib/__init__.py
|
mgorny/asyncstdlib
|
55c00379a0a9897124c0d5588c364744ac14d4d4
|
[
"MIT"
] | null | null | null |
asyncstdlib/__init__.py
|
mgorny/asyncstdlib
|
55c00379a0a9897124c0d5588c364744ac14d4d4
|
[
"MIT"
] | null | null | null |
asyncstdlib/__init__.py
|
mgorny/asyncstdlib
|
55c00379a0a9897124c0d5588c364744ac14d4d4
|
[
"MIT"
] | null | null | null |
"""The missing async toolbox"""
from .builtins import (
anext,
zip,
map,
filter,
enumerate,
iter,
all,
any,
max,
min,
sum,
list,
dict,
set,
tuple,
sorted,
)
from .functools import reduce, lru_cache, cache, cached_property
from .contextlib import closing, contextmanager, nullcontext, ExitStack
from .itertools import (
accumulate,
cycle,
chain,
compress,
dropwhile,
islice,
takewhile,
starmap,
tee,
pairwise,
zip_longest,
groupby,
)
from .asynctools import borrow, scoped_iter, await_each, apply
__version__ = "3.9.1"
__all__ = [
"anext",
"zip",
"map",
"filter",
"enumerate",
"iter",
"all",
"any",
"max",
"min",
"sum",
"list",
"dict",
"set",
"tuple",
"sorted",
# functools
"reduce",
"lru_cache",
"cache",
"cached_property",
# contextlib
"closing",
"contextmanager",
"nullcontext",
"ExitStack",
# itertools
"accumulate",
"cycle",
"chain",
"compress",
"dropwhile",
"takewhile",
"islice",
"starmap",
"tee",
"pairwise",
"zip_longest",
"groupby",
# asynctools
"borrow",
"scoped_iter",
"await_each",
"apply",
]
| 15.05814
| 71
| 0.542857
|
3a8b82de973f923651b7e5e57d5ab073e3a97e92
| 7,230
|
py
|
Python
|
cupti_trace/cupti_make_report.py
|
grate-driver/envytools
|
237d859ed4307524ed99f9a3601e3ef40a7d0814
|
[
"MIT"
] | 7
|
2017-01-26T10:30:42.000Z
|
2022-01-12T19:58:34.000Z
|
cupti_trace/cupti_make_report.py
|
skeggsb/envytools
|
43528b2b84c6f9e2b87d284fab409bd4ed83e6ea
|
[
"MIT"
] | 6
|
2017-02-05T12:04:16.000Z
|
2017-04-18T13:45:07.000Z
|
cupti_trace/cupti_make_report.py
|
skeggsb/envytools
|
43528b2b84c6f9e2b87d284fab409bd4ed83e6ea
|
[
"MIT"
] | 3
|
2017-01-23T10:12:05.000Z
|
2017-09-06T20:25:15.000Z
|
#!/usr/bin/python3
# Copyright (C) 2015 Samuel Pitoiset <samuel.pitoiset@gmail.com>
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import getopt
import os
import re
import subprocess
import shutil
import sys
def get_cupti_path():
return os.getenv("CUPTI_PATH", "/opt/cuda/extras/CUPTI")
def cupti_query_parse_output(output, token):
data = []
for line in output.splitlines():
if re.search(token, line):
b = line.find('=') + 2
data.append(line[b:])  # value after "= "; splitlines() already removed '\n'
return data
def cupti_get_domain_ids(device):
return cupti_query_parse_output(cupti_query_domains(device), "^Id")
def cupti_query(device, opts):
cmd = get_cupti_path() + "/sample/cupti_query/cupti_query "
cmd += "-device " + device + " " + opts
proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
if not proc.returncode == 0:
return proc.returncode
return stdout.decode()
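# For example, with the default CUPTI path and device "0", cupti_query_domains
# below ends up invoking (illustrative):
#   /opt/cuda/extras/CUPTI/sample/cupti_query/cupti_query -device 0 -getdomains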
def cupti_query_domains(device):
return cupti_query(device, "-getdomains")
def cupti_query_events_by_domain(device, domain):
return cupti_query(device, "-domain " + str(domain) + " -getevents")
def cupti_query_metrics(device):
return cupti_query(device, "-getmetrics")
def cupti_save_domains_list(device):
f = open("list_domains.txt", "w")
f.write(cupti_query_domains(device))
f.close()
def cupti_save_events_list(device):
domain_ids = cupti_get_domain_ids(device)
for domain_id in domain_ids:
f = open("domain_" + str(domain_id) + ".txt", "w")
f.write(cupti_query_events_by_domain(device, domain_id))
f.close()
def cupti_save_metrics_list(device):
f = open("list_metrics.txt", "w")
f.write(cupti_query_metrics(device))
f.close()
def cupti_trace(device, chipset, opts):
cmd = "cupti_trace -a " + chipset + " -d " + device + " " + opts
proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
if not proc.returncode == 0:
return proc.returncode
return stdout.decode()
def cupti_get_event_names(device, domain_id):
output = cupti_query_events_by_domain(device, domain_id)
return cupti_query_parse_output(output, "^Name")
def cupti_trace_all_events(device, chipset):
f = open("report_events.txt", "w")
domain_ids = cupti_get_domain_ids(device)
for domain_id in domain_ids:
print ("Domain #" + str(domain_id))
event_names = cupti_get_event_names(device, domain_id)
for event_name in event_names:
print ("Event " + event_name)
f.write(cupti_trace(device, chipset, "-e " + event_name))
f.close()
def cupti_get_metric_names(device):
return cupti_query_parse_output(cupti_query_metrics(device), "^Name")
def cupti_trace_all_metrics(device, chipset):
f = open("report_metrics.txt", "w")
metric_names = cupti_get_metric_names(device)
for metric_name in metric_names:
print ("Metric " + metric_name)
f.write(cupti_trace(device, chipset, "-m " + metric_name))
f.close()
def dry_run_valgrind_mmt():
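# Returns truthy only when a valgrind build that ships the mmt tool is
# installed: invoking `valgrind --tool=mmt` with no program should exit with
# code 1 and complain "valgrind: no program specified".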
cmd = "valgrind --tool=mmt"
proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if not proc.returncode == 1:
return proc.returncode
lines = stderr.decode().splitlines()
if not lines[0] == "valgrind: no program specified":
return 0
return 1
def main():
try:
long_opts = ["chipset=",
"device=",
"overwrite"]
opts, args = getopt.getopt(sys.argv[1:], "a:d:o", long_opts)
except getopt.GetoptError as err:
print (str(err))
sys.exit(2)
device = "0"
chipset = None
overwrite = False
for opt, arg in opts:
if opt in ("-a", "--chipset"):
chipset = str(arg)
elif opt in ("-d", "--device"):
device = str(arg)
elif opt in ("-o", "--overwrite"):
overwrite = True
else:
assert False, "Unknown option!"
if chipset is None:
print ("Must specify a chipset (-a)")
sys.exit(2)
output_dir = "nv" + chipset + "_cupti_report"
if os.path.exists(output_dir):
if not overwrite:
print ("Output directory already exists, try --overwrite!")
sys.exit(2)
else:
shutil.rmtree(output_dir, ignore_errors=True)
os.mkdir(output_dir)
os.chdir(output_dir)
if not dry_run_valgrind_mmt():
print ("You are not running valgrind-mmt!")
sys.exit(2)
if not shutil.which("demmt"):
print ("Failed to find demt!")
# Check CUPTI samples
path = get_cupti_path() + "/sample/cupti_query/cupti_query"
if not os.path.exists(path):
print ("Failed to find cupti_query!")
path = get_cupti_path() + "/sample/callback_event/callback_event"
if not os.path.exists(path):
print ("Failed to find callback_event!")
path = get_cupti_path() + "/sample/callback_metric/callback_metric"
if not os.path.exists(path):
print ("Failed to find callback_metric!")
print ("Assuming device Id: " + device)
cupti_save_domains_list(device)
cupti_save_events_list(device)
cupti_save_metrics_list(device)
cupti_trace_all_events(device, chipset)
cupti_trace_all_metrics(device, chipset)
print ("Creating a tarball...")
os.chdir("../")
if shutil.which("tar"):
archive_name = output_dir + ".tar.gz"
cmd = "tar -czf " + archive_name + " " + output_dir
proc = subprocess.Popen(cmd.split())
stdout, stderr = proc.communicate()
if not proc.returncode == 0:
return proc.returncode
if shutil.which("xz"):
cmd = "xz " + archive_name
proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
if not proc.returncode == 0:
return proc.returncode
print ("Thanks for running cupti_trace! :-)")
if __name__ == "__main__":
main()
| 34.103774
| 76
| 0.657676
|
f47574a281b658cfe25ab5accf6ec394394cfbdf
| 2,162
|
py
|
Python
|
multisplice.py
|
stgn/multisplice
|
f9701f3190fd0bbcf82fb3bb029a23b150e892e9
|
[
"MIT"
] | null | null | null |
multisplice.py
|
stgn/multisplice
|
f9701f3190fd0bbcf82fb3bb029a23b150e892e9
|
[
"MIT"
] | null | null | null |
multisplice.py
|
stgn/multisplice
|
f9701f3190fd0bbcf82fb3bb029a23b150e892e9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os, re
import click
from subprocess import call
from collections import defaultdict
from datetime import timedelta
from fractions import Fraction
@click.command()
@click.option('--fps', '-f', default='30000/1001', metavar='FRAMERATE',
help='Frame rate to convert frame numbers to timestamps at '
'as a float or fraction. Defaults to 30000/1001.')
@click.option('--dirty', '-d', is_flag=True,
help='Don\'t delete intermediate split files.')
@click.option('--output', '-o', metavar='FILENAME',
help='Output filename of spliced result.')
@click.argument('manifest', type=click.File())
def multisplice(manifest, fps, output, dirty):
"""Splices together portions of audio from multiple source files, as specified by a MANIFEST file."""
tmpl = manifest.readline().rstrip('\r\n')
out = output or os.path.splitext(tmpl.format('spliced'))[0] + '.mka'
parts = []
for line in manifest:
m = re.search(r'(\S+) (\d+) (\d+)', line)
if m:
p = m.groups()
parts.append((tmpl.format(p[0]), (int(p[1]), int(p[2]) + 1)))
else:
raise Exception('Unable to parse manifest')
agg = defaultdict(list)
for f, r in parts:
agg[f].append(r)
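# Predict the names mkvmerge gives the split outputs: when a source file
# contributes several ranges, mkvmerge apparently numbers them
# _<file>-001.mka, _<file>-002.mka, ...; a single range keeps plain _<file>.mka.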
files = [('_{}' + '-{:03d}' * (len(agg[f]) > 1) + '.mka').format(f, agg[f].index(r) + 1) for f, r in parts]
try:
ft = 1 / Fraction(fps)
for f, fr in agg.items():  # .items() works on Python 3; the original .iteritems() was Python 2 only
arg = ','.join('-'.join(str(timedelta(seconds=float(ft * t))) for t in r) for r in fr)
if call(['mkvmerge', '-q', f, '-o', '_{}.mka'.format(f), '--split', 'parts:' + arg]) > 1:
raise Exception('{}: Split failed'.format(f))
join_files = ['+' * (i > 0) + f for i, f in enumerate(files)]
if call(['mkvmerge', '-q', '-o', out] + join_files) > 1:
raise Exception('Splice failed')
finally:
if not dirty:
for f in filter(os.path.isfile, files):
os.remove(f)
if __name__ == '__main__':
multisplice()
| 37.929825
| 111
| 0.552729
|
a6dfb13d6cba9db9753585429e1eac019c01c390
| 1,785
|
py
|
Python
|
Experimental setup/pressure_flow_regression.py
|
leonpliner/PhD
|
06319fbcfa15c586b7315a900f0e199d47974f7e
|
[
"Apache-2.0"
] | null | null | null |
Experimental setup/pressure_flow_regression.py
|
leonpliner/PhD
|
06319fbcfa15c586b7315a900f0e199d47974f7e
|
[
"Apache-2.0"
] | null | null | null |
Experimental setup/pressure_flow_regression.py
|
leonpliner/PhD
|
06319fbcfa15c586b7315a900f0e199d47974f7e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Fluigent Pressure - Flow experiment data processing.
The aim of this experiment was to determine the pressure-flow relationship.
This will be done by fitting a polynomial regression model to the obtained
dataset.
Since there was an issue with the liquid refill which has contaminated the
original dataset with non-representative flow measurements, a threshold will be
applied at the value P=224 mbar.
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
threshold = 11200  # row cutoff excluding the contaminated tail (the P=224 mbar threshold from the docstring)
dataset = pd.read_csv('pressure_flow_23_Mar_2021.csv')
P = dataset.P[:threshold]
Q = dataset.Q[:threshold]
P = P.to_numpy()[:, np.newaxis]  # column vectors, as scikit-learn expects
Q = Q.to_numpy()[:, np.newaxis]
#%%
plt.figure()
plt.scatter(Q,P,s=0.1)
plt.xlabel('Measured flow Q, ul/min')
plt.ylabel('Measured pressure P, mbar')
#%%
from sklearn.model_selection import train_test_split
Q_train, Q_test, P_train, P_test = train_test_split(Q,P,test_size=0.2,random_state=0)
#%%
from sklearn.preprocessing import PolynomialFeatures
poly = PolynomialFeatures(degree=2)
x_poly= poly.fit_transform(Q_train)
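# x_poly now holds the columns [1, Q, Q^2], so the linear regression below
# effectively fits P = b0 + b1*Q + b2*Q^2 (printed further down).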
#%%
from sklearn.linear_model import LinearRegression
reg=LinearRegression()
reg.fit(x_poly,P_train)
y_pred=reg.predict(x_poly)
#%%
print ('The model equation: \nP = ',str(reg.intercept_[0]),' + ',str(reg.coef_[0,1])+'*Q + ',str(reg.coef_[0,2])+'*Q^2')
#%%
import operator
sort_axis = operator.itemgetter(0)
sorted_zip = sorted(zip(Q_train,y_pred), key=sort_axis)
Q_train, y_pred = zip(*sorted_zip)
#%%
plt.figure()
plt.scatter(Q, P, color = 'blue', s=0.1, label='Measured values')
plt.plot(Q_train, y_pred, color = 'red', linewidth=2, markersize=2, label='Degree-2 polynomial model')
plt.xlabel('Flow Q, ul/min')
plt.ylabel('Pressure P, mbar')
plt.legend(loc='lower right')
plt.show()
| 23.486842
| 120
| 0.738936
|
ff244276960fe3cc640c0a9401ed49b849dff17a
| 797
|
py
|
Python
|
config.py
|
ProjoyRoy/flask-microblog
|
99ec2b005e35406541ad4ece2ed9bf378544acd6
|
[
"MIT"
] | null | null | null |
config.py
|
ProjoyRoy/flask-microblog
|
99ec2b005e35406541ad4ece2ed9bf378544acd6
|
[
"MIT"
] | null | null | null |
config.py
|
ProjoyRoy/flask-microblog
|
99ec2b005e35406541ad4ece2ed9bf378544acd6
|
[
"MIT"
] | null | null | null |
import os
import secrets  # project-local secrets.py holding credentials (shadows the stdlib "secrets" module)
from datetime import timedelta
WTF_CSRF_ENABLED = True
SECRET_KEY = secrets.SECRET_KEY
REMEMBER_COOKIE_DURATION = timedelta(days=365)
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
OAUTH_CREDENTIALS = secrets.OAUTH_CREDENTIALS
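# secrets.py (kept out of version control) is expected to define the names
# referenced in this file, e.g. (illustrative values only):
#   SECRET_KEY = 'change-me'
#   OAUTH_CREDENTIALS = {...}
#   MAIL_USERNAME = '...'
#   MAIL_PASSWORD = '...'
#   ADMINS = ['you@example.com']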
# mail server settings
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 465
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = secrets.MAIL_USERNAME
MAIL_PASSWORD = secrets.MAIL_PASSWORD
# administrator list
ADMINS = secrets.ADMINS
# pagination
POSTS_PER_PAGE_INDEX = 3
POSTS_PER_PAGE_PROFILE = 5
# WHOOSH database
WHOOSH_BASE = os.path.join(basedir, 'search.db')
MAX_SEARCH_RESULTS = 50
| 23.441176
| 72
| 0.800502
|
d99e29e26c57c805122ab136000f4fca8c112543
| 6,814
|
py
|
Python
|
kapitan/inputs/jinja2_filters.py
|
anirban1c/kapitan
|
cea275f4e2c2b62fe16d6a919f71e8c6cca83af4
|
[
"Apache-2.0"
] | null | null | null |
kapitan/inputs/jinja2_filters.py
|
anirban1c/kapitan
|
cea275f4e2c2b62fe16d6a919f71e8c6cca83af4
|
[
"Apache-2.0"
] | 1
|
2019-03-29T10:38:51.000Z
|
2019-03-29T10:38:51.000Z
|
kapitan/inputs/jinja2_filters.py
|
anirban1c/kapitan
|
cea275f4e2c2b62fe16d6a919f71e8c6cca83af4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2019 The Kapitan Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import base64
import glob
import os
import datetime
import time
import re
import types
import logging
from six import string_types
from random import Random, shuffle
from importlib import util
from kapitan.errors import CompileError
from kapitan import utils, cached
logger = logging.getLogger(__name__)
# default path from where user defined custom filters are read
DEFAULT_JINJA2_FILTERS_PATH = os.path.join('lib', 'jinja2_filters.py')
def load_jinja2_filters(env):
"""Load Jinja2 custom filters into env"""
env.filters['sha256'] = utils.sha256_string
env.filters['b64encode'] = base64_encode
env.filters['b64decode'] = base64_decode
env.filters['yaml'] = to_yaml
env.filters['fileglob'] = fileglob
env.filters['bool'] = to_bool
env.filters['to_datetime'] = to_datetime
env.filters['strftime'] = strftime
env.filters['regex_replace'] = regex_replace
env.filters['regex_escape'] = regex_escape
env.filters['regex_search'] = regex_search
env.filters['regex_findall'] = regex_findall
env.filters['reveal_maybe'] = reveal_maybe
env.filters['ternary'] = ternary
env.filters['shuffle'] = randomize_list
def load_module_from_path(env, path):
"""
Loads a python module from the provided path and registers each of its
top-level functions as a jinja2 filter; the filter name is the function name
"""
try:
module_name = os.path.basename(path).split('.')[0]
custom_filter_spec = util.spec_from_file_location(module_name, path)
custom_filter_module = util.module_from_spec(custom_filter_spec)
custom_filter_spec.loader.exec_module(custom_filter_module)
for function in dir(custom_filter_module):
if isinstance(getattr(custom_filter_module, function),
types.FunctionType):
logger.debug("custom filter loaded from {}".format(path))
env.filters[function] = getattr(custom_filter_module, function)
except Exception as e:
logger.debug("failed to find custom filter from path {}".format(path))
raise IOError("jinja2 failed to render, could not load filter at {}: {}".format(path, e))
def load_jinja2_filters_from_file(env, jinja2_filters):
"""
if the filters path is the default file and it doesn't exist, proceed silently with no error;
otherwise try to load the module (which raises if the file does not exist)
"""
jinja2_filters = os.path.normpath(jinja2_filters)
if jinja2_filters == DEFAULT_JINJA2_FILTERS_PATH:
if not os.path.isfile(jinja2_filters):
return
load_module_from_path(env, jinja2_filters)
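# A minimal custom filter module a user could place at lib/jinja2_filters.py
# (hypothetical): every top-level function in it becomes a filter of the same
# name via load_module_from_path above.
#
#   def shout(value):
#       return str(value).upper()
#
# Templates could then use {{ name | shout }}.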
# Custom filters
def reveal_maybe(ref_tag):
"Will reveal ref_tag if valid and --reveal flag is used"
if cached.args['compile'].reveal:
return cached.revealer_obj.reveal_raw(ref_tag)
else:
return ref_tag
def base64_encode(string):
return base64.b64encode(string.encode("UTF-8")).decode("UTF-8")
def base64_decode(string):
return base64.b64decode(string).decode("UTF-8")
def to_yaml(obj):
return yaml.safe_dump(obj, default_flow_style=False)
# Following filters are from https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/filter/core.py
def fileglob(pathname):
"""return list of matched regular files for glob"""
return [g for g in glob.glob(pathname) if os.path.isfile(g)]
def to_bool(a):
"""return a bool for the arg"""
if a is None or isinstance(a, bool):
return a
if isinstance(a, string_types):
a = a.lower()
if a in ('yes', 'on', '1', 'true', 1):
return True
return False
def to_datetime(string, format="%Y-%m-%d %H:%M:%S"):
return datetime.datetime.strptime(string, format)
def strftime(string_format, second=None):
"""return current date string for format. See https://docs.python.org/3/library/time.html#time.strftime for format"""
if second is not None:
try:
second = int(second)
except Exception:
raise CompileError('Invalid value for epoch value ({})'.format(second))
return time.strftime(string_format, time.localtime(second))
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
"""Perform a `re.sub` returning a string"""
if ignorecase:
flags = re.I
else:
flags = 0
_re = re.compile(pattern, flags=flags)
return _re.sub(replacement, value)
def regex_escape(string):
"""Escape all regular expressions special characters from STRING."""
return re.escape(string)
def regex_search(value, regex, *args, **kwargs):
"""Perform re.search and return the list of matches or a backref"""
groups = list()
for arg in args:
if arg.startswith('\\g'):
match = re.match(r'\\g<(\S+)>', arg).group(1)
groups.append(match)
elif arg.startswith('\\'):
match = int(re.match(r'\\(\d+)', arg).group(1))
groups.append(match)
else:
raise CompileError('Unknown argument')
flags = 0
if kwargs.get('ignorecase'):
flags |= re.I
if kwargs.get('multiline'):
flags |= re.M
match = re.search(regex, value, flags)
if match:
if not groups:
return match.group()
else:
items = list()
for item in groups:
items.append(match.group(item))
return items
def regex_findall(value, regex, multiline=False, ignorecase=False):
"""Perform re.findall and return the list of matches"""
flags = 0
if ignorecase:
flags |= re.I
if multiline:
flags |= re.M
return re.findall(regex, value, flags)
def ternary(value, true_val, false_val, none_val=None):
"""value ? true_val : false_val"""
if value is None and none_val is not None:
return none_val
elif bool(value):
return true_val
else:
return false_val
def randomize_list(mylist, seed=None):
try:
mylist = list(mylist)
if seed:
r = Random(seed)
r.shuffle(mylist)
else:
shuffle(mylist)
except Exception:
pass
return mylist
| 31.114155
| 121
| 0.668036
|
99e8e20cab58e09f84db36050d6c43d25e8128ed
| 6,833
|
py
|
Python
|
python_modules/libraries/dagster-k8s/dagster_k8s_tests/test_job_spec.py
|
JPeer264/dagster-fork
|
32cc87a36134be7c442fa85d6867eb1d3301aea0
|
[
"Apache-2.0"
] | 1
|
2020-09-19T16:35:59.000Z
|
2020-09-19T16:35:59.000Z
|
python_modules/libraries/dagster-k8s/dagster_k8s_tests/test_job_spec.py
|
JPeer264/dagster-fork
|
32cc87a36134be7c442fa85d6867eb1d3301aea0
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-k8s/dagster_k8s_tests/test_job_spec.py
|
JPeer264/dagster-fork
|
32cc87a36134be7c442fa85d6867eb1d3301aea0
|
[
"Apache-2.0"
] | null | null | null |
import os
import yaml
from dagster_k8s import construct_dagster_graphql_k8s_job
from dagster_k8s.job import K8S_RESOURCE_REQUIREMENTS_KEY, get_k8s_resource_requirements
from dagster_test.test_project import (
get_test_project_external_pipeline,
test_project_docker_image,
test_project_environments_path,
)
from dagster import __version__ as dagster_version
from dagster import seven
from dagster.core.definitions.utils import validate_tags
from dagster.core.storage.pipeline_run import PipelineRun
from dagster.core.test_utils import create_run_for_test
from dagster.utils import load_yaml_from_path
from .utils import image_pull_policy, remove_none_recursively, wait_for_job_and_get_logs
EXPECTED_JOB_SPEC = '''
api_version: batch/v1
kind: Job
metadata:
labels:
app.kubernetes.io/component: runmaster
app.kubernetes.io/instance: dagster
app.kubernetes.io/name: dagster
app.kubernetes.io/part-of: dagster
app.kubernetes.io/version: {dagster_version}
name: dagster-run-{run_id}
spec:
backoff_limit: 4
template:
metadata:
labels:
app.kubernetes.io/component: runmaster
app.kubernetes.io/instance: dagster
app.kubernetes.io/name: dagster
app.kubernetes.io/part-of: dagster
app.kubernetes.io/version: {dagster_version}
name: dagster-run-{run_id}
spec:
containers:
- args:
- -p
- executeRunInProcess
- -v
- '{{"runId": "{run_id}"}}'
command:
- dagster-graphql
env:
- name: DAGSTER_HOME
value: /opt/dagster/dagster_home
- name: DAGSTER_PG_PASSWORD
value_from:
secret_key_ref:
key: postgresql-password
name: dagster-postgresql-secret
env_from:
- config_map_ref:
name: dagster-pipeline-env
- config_map_ref:
name: test-env-configmap
- secret_ref:
name: test-env-secret
image: {job_image}
image_pull_policy: {image_pull_policy}
name: dagster-run-{run_id}{resources}
volume_mounts:
- mount_path: /opt/dagster/dagster_home/dagster.yaml
name: dagster-instance
sub_path: dagster.yaml
image_pull_secrets:
- name: element-dev-key
restart_policy: Never
service_account_name: dagit-admin
volumes:
- config_map:
name: dagster-instance
name: dagster-instance
ttl_seconds_after_finished: 86400
'''
def test_valid_job_format(run_launcher):
docker_image = test_project_docker_image()
environment_dict = load_yaml_from_path(
os.path.join(test_project_environments_path(), 'env.yaml')
)
pipeline_name = 'demo_pipeline'
run = PipelineRun(pipeline_name=pipeline_name, environment_dict=environment_dict)
job_name = 'dagster-run-%s' % run.run_id
pod_name = 'dagster-run-%s' % run.run_id
job = construct_dagster_graphql_k8s_job(
run_launcher.job_config,
args=['-p', 'executeRunInProcess', '-v', seven.json.dumps({'runId': run.run_id}),],
job_name=job_name,
pod_name=pod_name,
component='runmaster',
)
assert (
yaml.dump(remove_none_recursively(job.to_dict()), default_flow_style=False).strip()
== EXPECTED_JOB_SPEC.format(
run_id=run.run_id,
job_image=docker_image,
image_pull_policy=image_pull_policy(),
dagster_version=dagster_version,
resources='',
).strip()
)
def test_valid_job_format_with_resources(run_launcher):
docker_image = test_project_docker_image()
environment_dict = load_yaml_from_path(
os.path.join(test_project_environments_path(), 'env.yaml')
)
pipeline_name = 'demo_pipeline'
run = PipelineRun(pipeline_name=pipeline_name, environment_dict=environment_dict)
tags = validate_tags(
{
K8S_RESOURCE_REQUIREMENTS_KEY: (
{
'requests': {'cpu': '250m', 'memory': '64Mi'},
'limits': {'cpu': '500m', 'memory': '2560Mi'},
}
)
}
)
resources = get_k8s_resource_requirements(tags)
job_name = 'dagster-run-%s' % run.run_id
pod_name = 'dagster-run-%s' % run.run_id
job = construct_dagster_graphql_k8s_job(
run_launcher.job_config,
args=['-p', 'executeRunInProcess', '-v', seven.json.dumps({'runId': run.run_id}),],
job_name=job_name,
resources=resources,
pod_name=pod_name,
component='runmaster',
)
assert (
yaml.dump(remove_none_recursively(job.to_dict()), default_flow_style=False).strip()
== EXPECTED_JOB_SPEC.format(
run_id=run.run_id,
job_image=docker_image,
image_pull_policy=image_pull_policy(),
dagster_version=dagster_version,
resources='''
resources:
limits:
cpu: 500m
memory: 2560Mi
requests:
cpu: 250m
memory: 64Mi''',
).strip()
)
def test_k8s_run_launcher(dagster_instance, helm_namespace):
environment_dict = load_yaml_from_path(
os.path.join(test_project_environments_path(), 'env.yaml')
)
pipeline_name = 'demo_pipeline'
run = create_run_for_test(
dagster_instance,
pipeline_name=pipeline_name,
environment_dict=environment_dict,
mode='default',
)
dagster_instance.launch_run(run.run_id, get_test_project_external_pipeline(pipeline_name))
result = wait_for_job_and_get_logs(
job_name='dagster-run-%s' % run.run_id, namespace=helm_namespace
)
assert not result.get('errors')
assert result['data']
assert (
result['data']['executeRunInProcess']['__typename'] == 'ExecuteRunInProcessSuccess'
), 'no match, result: {}'.format(result)
def test_failing_k8s_run_launcher(dagster_instance, helm_namespace):
environment_dict = {'blah blah this is wrong': {}}
pipeline_name = 'demo_pipeline'
run = create_run_for_test(
dagster_instance, pipeline_name=pipeline_name, environment_dict=environment_dict
)
dagster_instance.launch_run(run.run_id, get_test_project_external_pipeline(pipeline_name))
result = wait_for_job_and_get_logs(
job_name='dagster-run-%s' % run.run_id, namespace=helm_namespace
)
assert not result.get('errors')
assert result['data']
assert result['data']['executeRunInProcess']['__typename'] == 'PipelineConfigValidationInvalid'
assert len(result['data']['executeRunInProcess']['errors']) == 2
assert set(error['reason'] for error in result['data']['executeRunInProcess']['errors']) == {
'FIELD_NOT_DEFINED',
'MISSING_REQUIRED_FIELD',
}
| 32.383886
| 99
| 0.662813
|
f73c80973a7d2cfbcb7f86532731fb3427a0ed23
| 146,585
|
py
|
Python
|
ion/services/sa/observatory/observatory_management_service.py
|
ooici/coi-services
|
43246f46a82e597345507afd7dfc7373cb346afa
|
[
"BSD-2-Clause"
] | 3
|
2016-09-20T09:50:06.000Z
|
2018-08-10T01:41:38.000Z
|
ion/services/sa/observatory/observatory_management_service.py
|
ooici/coi-services
|
43246f46a82e597345507afd7dfc7373cb346afa
|
[
"BSD-2-Clause"
] | null | null | null |
ion/services/sa/observatory/observatory_management_service.py
|
ooici/coi-services
|
43246f46a82e597345507afd7dfc7373cb346afa
|
[
"BSD-2-Clause"
] | 2
|
2016-03-16T22:25:49.000Z
|
2016-11-26T14:54:21.000Z
|
#!/usr/bin/env python
"""Service managing marine facility sites and deployments"""
import string
import time
import logging
from collections import defaultdict
from pyon.core.governance import ORG_MANAGER_ROLE, DATA_OPERATOR, OBSERVATORY_OPERATOR, INSTRUMENT_OPERATOR, GovernanceHeaderValues, has_org_role
from ooi.logging import log
from pyon.core.exception import NotFound, BadRequest, Inconsistent
from pyon.public import CFG, IonObject, RT, PRED, LCS, LCE, OT
from pyon.ion.resource import ExtendedResourceContainer
from ion.services.dm.utility.test.parameter_helper import ParameterHelper
from ion.services.dm.utility.granule import RecordDictionaryTool
from ion.services.sa.instrument.status_builder import AgentStatusBuilder
from ion.services.sa.observatory.deployment_activator import DeploymentPlanner
from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient
from ion.services.sa.observatory.observatory_util import ObservatoryUtil
from ion.services.sa.observatory.asset_tracking import AssetTracking
from ion.services.sa.observatory.deployment_util import DeploymentUtil
from ion.services.sa.product.data_product_management_service import DataProductManagementService
from ion.processes.event.device_state import DeviceStateManager
from ion.util.geo_utils import GeoUtils
from ion.util.related_resources_crawler import RelatedResourcesCrawler
from ion.util.datastore.resources import ResourceRegistryUtil
from interface.services.sa.iobservatory_management_service import BaseObservatoryManagementService
from interface.objects import OrgTypeEnum, ComputedValueAvailability, ComputedIntValue, ComputedListValue, ComputedDictValue, AggregateStatusType, DeviceStatusType, TemporalBounds, DatasetWindow
from interface.objects import MarineFacilityOrgExtension, NegotiationStatusEnum, NegotiationTypeEnum, ProposalOriginatorEnum, GeospatialBounds
from datetime import datetime
import calendar
INSTRUMENT_OPERATOR_ROLE = 'INSTRUMENT_OPERATOR'
OBSERVATORY_OPERATOR_ROLE = 'OBSERVATORY_OPERATOR'
DATA_OPERATOR_ROLE = 'DATA_OPERATOR'
STATUS_UNKNOWN = {1:1, 2:1, 3:1, 4:1}
class ObservatoryManagementService(BaseObservatoryManagementService):
def on_init(self):
self.override_clients(self.clients)
self.agent_status_builder = AgentStatusBuilder(process=self)
self.HIERARCHY_DEPTH = {RT.InstrumentSite: 3,
RT.PlatformSite: 2,
RT.Subsite: 1,
RT.Observatory: 0,
}
self.HIERARCHY_LOOKUP = [RT.Observatory,
RT.Subsite,
RT.PlatformSite,
RT.InstrumentSite]
#todo: add lcs methods for these??
# # set up all of the policy interceptions
# if self.container and self.container.governance_controller:
# reg_precondition = self.container.governance_controller.register_process_operation_precondition
# reg_precondition(self, 'execute_observatory_lifecycle',
# self.RR2.policy_fn_lcs_precondition("observatory_id"))
# reg_precondition(self, 'execute_subsite_lifecycle',
# self.RR2.policy_fn_lcs_precondition("subsite_id"))
# reg_precondition(self, 'execute_platform_site_lifecycle',
# self.RR2.policy_fn_lcs_precondition("platform_site_id"))
# reg_precondition(self, 'execute_instrument_site_lifecycle',
# self.RR2.policy_fn_lcs_precondition("instrument_site_id"))
def override_clients(self, new_clients):
"""
Replaces the service clients with a new set and re-binds the shortcut attributes below to the right places
"""
self.RR2 = EnhancedResourceRegistryClient(new_clients.resource_registry)
# shortcut names for the imported sub-services
if hasattr(new_clients, "resource_registry"):
self.RR = new_clients.resource_registry
if hasattr(new_clients, "instrument_management"):
self.IMS = new_clients.instrument_management
if hasattr(new_clients, "data_process_management"):
self.PRMS = new_clients.data_process_management
def _calc_geospatial_point_center(self, site):
siteTypes = [RT.Site, RT.Subsite, RT.Observatory, RT.PlatformSite, RT.InstrumentSite]
if site and site.type_ in siteTypes:
# if the geospatial_bounds is set then calculate the geospatial_point_center
for constraint in site.constraint_list:
if constraint.type_ == OT.GeospatialBounds:
site.geospatial_point_center = GeoUtils.calc_geospatial_point_center(constraint)
##########################################################################
#
# CRUD OPS
#
##########################################################################
def create_marine_facility(self, org=None):
"""Create an Org (domain of authority) that realizes a marine facility. This Org will have
set up roles for a marine facility. Shared resources, such as a device, can only be
registered in one marine facility Org, and additionally in many virtual observatory Orgs. The
marine facility operators will have more extensive permissions and will supersede virtual
observatory commands.
@param org Org
@retval org_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
log.debug("ObservatoryManagementService.create_marine_facility(): %s", org)
# create the org
org.org_type = OrgTypeEnum.MARINE_FACILITY
org_id = self.clients.org_management.create_org(org)
#Instantiate initial set of User Roles for this marine facility
instrument_operator_role = IonObject(RT.UserRole,
governance_name=INSTRUMENT_OPERATOR_ROLE,
name='Facility Operator', #previously Instrument Operator
description='Operate and post events related to Facility Platforms and Instruments')
self.clients.org_management.add_user_role(org_id, instrument_operator_role)
observatory_operator_role = IonObject(RT.UserRole,
governance_name=OBSERVATORY_OPERATOR_ROLE,
name='Facility Manager', # previously Observatory Operator
description='Change Facility configuration, post Site-related events')
self.clients.org_management.add_user_role(org_id, observatory_operator_role)
data_operator_role = IonObject(RT.UserRole,
governance_name=DATA_OPERATOR_ROLE,
name='Facility Data Operator', # previously Data Operator
description='Manipulate and post events related to Facility Data products')
self.clients.org_management.add_user_role(org_id, data_operator_role)
return org_id
def create_virtual_observatory(self, org=None):
"""Create an Org (domain of authority) that realizes a virtual observatory. This Org will have
set up roles for a virtual observatory. Shared resources, such as a device, can only be
registered in one marine facility Org, and additionally in many virtual observatory Orgs. The
marine facility operators will have more extensive permissions and will supersede virtual
observatory commands.
@param org Org
@retval org_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
log.debug("ObservatoryManagementService.create_virtual_observatory(): %s", org)
# create the org
org.org_type = OrgTypeEnum.VIRTUAL_OBSERVATORY
org_id = self.clients.org_management.create_org(org)
return org_id
def create_observatory(self, observatory=None, org_id=""):
"""Create a Observatory resource. An observatory is coupled
with one Org. The Org is created and associated as part of this call.
@param observatory Observatory
@retval observatory_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(observatory)
# create the marine facility
observatory_id = self.RR2.create(observatory, RT.Observatory)
if org_id:
self.assign_resource_to_observatory_org(observatory_id, org_id)
return observatory_id
def read_observatory(self, observatory_id=''):
"""Read a Observatory resource
@param observatory_id str
@retval observatory Observatory
@throws NotFound object with specified id does not exist
"""
return self.RR2.read(observatory_id, RT.Observatory)
def update_observatory(self, observatory=None):
"""Update a Observatory resource
@param observatory Observatory
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(observatory)
return self.RR2.update(observatory, RT.Observatory)
def delete_observatory(self, observatory_id=''):
"""Delete a Observatory resource
@param observatory_id str
@throws NotFound object with specified id does not exist
"""
return self.RR2.lcs_delete(observatory_id, RT.Observatory)
def force_delete_observatory(self, observatory_id=''):
return self.RR2.force_delete(observatory_id, RT.Observatory)
def create_subsite(self, subsite=None, parent_id=''):
"""Create a Subsite resource. A subsite is a frame of reference within an observatory. Its parent is
either the observatory or another subsite.
@param subsite Subsite
@param parent_id str
@retval subsite_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(subsite)
subsite_id = self.RR2.create(subsite, RT.Subsite)
if parent_id:
self.assign_site_to_site(subsite_id, parent_id)
return subsite_id
def read_subsite(self, subsite_id=''):
"""Read a Subsite resource
@param subsite_id str
@retval subsite Subsite
@throws NotFound object with specified id does not exist
"""
return self.RR2.read(subsite_id, RT.Subsite)
def update_subsite(self, subsite=None):
"""Update a Subsite resource
@param subsite Subsite
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(subsite)
return self.RR2.update(subsite, RT.Subsite)
def delete_subsite(self, subsite_id=''):
"""Delete a subsite resource, removes assocations to parents
@param subsite_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.lcs_delete(subsite_id, RT.Subsite)
def force_delete_subsite(self, subsite_id=''):
self.RR2.force_delete(subsite_id, RT.Subsite)
def create_platform_site(self, platform_site=None, parent_id=''):
"""Create a PlatformSite resource. A platform_site is a frame of reference within an observatory. Its parent is
either the observatory or another platform_site.
@param platform_site PlatformSite
@param parent_id str
@retval platform_site_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(platform_site)
platform_site_id = self.RR2.create(platform_site, RT.PlatformSite)
if parent_id:
self.RR2.assign_site_to_one_site_with_has_site(platform_site_id, parent_id)
return platform_site_id
def read_platform_site(self, platform_site_id=''):
"""Read a PlatformSite resource
@param platform_site_id str
@retval platform_site PlatformSite
@throws NotFound object with specified id does not exist
"""
return self.RR2.read(platform_site_id, RT.PlatformSite)
def update_platform_site(self, platform_site=None):
"""Update a PlatformSite resource
@param platform_site PlatformSite
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(platform_site)
return self.RR2.update(platform_site, RT.PlatformSite)
def delete_platform_site(self, platform_site_id=''):
"""Delete a PlatformSite resource, removes assocations to parents
@param platform_site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.lcs_delete(platform_site_id, RT.PlatformSite)
def force_delete_platform_site(self, platform_site_id=''):
self.RR2.force_delete(platform_site_id, RT.PlatformSite)
def create_instrument_site(self, instrument_site=None, parent_id=''):
"""Create a InstrumentSite resource. A instrument_site is a frame of reference within an observatory. Its parent is
either the observatory or another instrument_site.
@param instrument_site InstrumentSite
@param parent_id str
@retval instrument_site_id str
@throws BadRequest if object does not have _id or _rev attribute
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(instrument_site)
instrument_site_id = self.RR2.create(instrument_site, RT.InstrumentSite)
if parent_id:
self.RR2.assign_site_to_one_site_with_has_site(instrument_site_id, parent_id)
return instrument_site_id
def read_instrument_site(self, instrument_site_id=''):
"""Read a InstrumentSite resource
@param instrument_site_id str
@retval instrument_site InstrumentSite
@throws NotFound object with specified id does not exist
"""
return self.RR2.read(instrument_site_id, RT.InstrumentSite)
def update_instrument_site(self, instrument_site=None):
"""Update a InstrumentSite resource
@param instrument_site InstrumentSite
@throws NotFound object with specified id does not exist
"""
# if the geospatial_bounds is set then calculate the geospatial_point_center
self._calc_geospatial_point_center(instrument_site)
return self.RR2.update(instrument_site, RT.InstrumentSite)
def delete_instrument_site(self, instrument_site_id=''):
"""Delete a InstrumentSite resource, removes assocations to parents
@param instrument_site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.lcs_delete(instrument_site_id, RT.InstrumentSite)
def force_delete_instrument_site(self, instrument_site_id=''):
self.RR2.force_delete(instrument_site_id, RT.InstrumentSite)
def create_deployment(self, deployment=None, site_id="", device_id=""):
"""
Create a Deployment resource. Represents a (possibly open-ended) time interval
grouping one or more resources within a given context, such as an instrument
deployment on a platform at an observatory site.
"""
deployment_id = self.RR2.create(deployment, RT.Deployment)
#Verify that site and device exist, add links if they do
if site_id:
site_obj = self.RR2.read(site_id)
if site_obj:
self.assign_site_to_deployment(site_id=site_id, deployment_id=deployment_id)
if device_id:
device_obj = self.RR2.read(device_id)
if device_obj:
self.assign_device_to_deployment(device_id=device_id, deployment_id=deployment_id)
return deployment_id
def update_deployment(self, deployment=None):
# Overwrite Deployment object
self.RR2.update(deployment, RT.Deployment)
def read_deployment(self, deployment_id=''):
deployment_obj = self.RR2.read(deployment_id, RT.Deployment)
return deployment_obj
def delete_deployment(self, deployment_id=''):
"""
Delete a Deployment resource
"""
self.RR2.lcs_delete(deployment_id, RT.Deployment)
def force_delete_deployment(self, deployment_id=''):
self.RR2.force_delete(deployment_id, RT.Deployment)
############################
#
# ASSOCIATIONS
#
############################
def assign_site_to_site(self, child_site_id='', parent_site_id=''):
"""Connects a child site (any subtype) to a parent site (any subtype)
@param child_site_id str
@param parent_site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.assign_site_to_site_with_has_site(child_site_id, parent_site_id)
def unassign_site_from_site(self, child_site_id='', parent_site_id=''):
"""Disconnects a child site (any subtype) from a parent site (any subtype)
@param child_site_id str
@param parent_site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.unassign_site_from_site_with_has_site(child_site_id, parent_site_id)
def assign_device_to_site(self, device_id='', site_id=''):
"""Connects a device (any type) to a site (any subtype)
@param device_id str
@param site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.assign_device_to_site_with_has_device(device_id, site_id)
def unassign_device_from_site(self, device_id='', site_id=''):
"""Disconnects a device (any type) from a site (any subtype)
@param device_id str
@param site_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.unassign_device_from_site_with_has_device(device_id, site_id)
def _update_device_add_geo_add_temporal(self, device_id='', site_id='', deployment_obj=''):
"""Assigns to device:
temporal extent from deployment
geo location from site
@param device_id str
@param site_id str
@param deployment_obj Deployment
@throws NotFound object with specified id does not exist
"""
device_obj = self.RR.read(device_id)
site_obj = self.RR.read(site_id)
for constraint in site_obj.constraint_list:
if constraint.type_ == OT.GeospatialBounds:
device_obj.geospatial_bounds = GeoUtils.calc_geo_bounds_for_geo_bounds_list(
[device_obj.geospatial_bounds, constraint])
for constraint in deployment_obj.constraint_list:
if constraint.type_ == OT.TemporalBounds:
device_obj.temporal_bounds = GeoUtils.calc_temp_bounds_for_temp_bounds_list(
[device_obj.temporal_bounds, constraint])
self.RR.update(device_obj)
def _update_device_remove_geo_update_temporal(self, device_id='', temporal_constraint=None):
"""Remove the geo location and update temporal extent (end) from the device
@param device_id str
@param temporal_constraint TemporalBounds
@throws NotFound object with specified id does not exist
"""
device_obj = self.RR.read(device_id)
bounds = GeospatialBounds(geospatial_latitude_limit_north=float(0),
geospatial_latitude_limit_south=float(0),
geospatial_longitude_limit_west=float(0),
geospatial_longitude_limit_east=float(0),
geospatial_vertical_min=float(0),
geospatial_vertical_max=float(0))
device_obj.geospatial_bounds = bounds
if temporal_constraint:
device_obj.temporal_bounds.end_datetime = GeoUtils.calc_temp_bounds_for_temp_bounds_list(
[device_obj.temporal_bounds, temporal_constraint])
self.RR.update(device_obj)
def _get_bounds_from_object(self, obj=''):
temporal = None
geographic = None
for constraint in obj.constraint_list:
if constraint.type_ == OT.TemporalBounds:
temporal = constraint
if constraint.type_ == OT.GeospatialBounds:
geographic = constraint
return temporal, geographic
def assign_device_to_network_parent(self, child_device_id='', parent_device_id=''):
"""Connects a device (any type) to parent in the RSN network
@param child_device_id str
@param parent_device_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.assign_device_to_one_device_with_has_network_parent(parent_device_id, child_device_id)
def unassign_device_from_network_parent(self, child_device_id='', parent_device_id=''):
"""Disconnects a child device (any type) from parent in the RSN network
@param child_device_id str
@param parent_device_id str
@throws NotFound object with specified id does not exist
"""
self.RR2.unassign_device_from_device_with_has_network_parent(parent_device_id, child_device_id)
def assign_instrument_model_to_instrument_site(self, instrument_model_id='', instrument_site_id=''):
self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id, instrument_site_id)
def unassign_instrument_model_from_instrument_site(self, instrument_model_id='', instrument_site_id=''):
self.RR2.unassign_instrument_model_from_instrument_site_with_has_model(instrument_model_id, instrument_site_id)
def assign_platform_model_to_platform_site(self, platform_model_id='', platform_site_id=''):
self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id, platform_site_id)
def unassign_platform_model_from_platform_site(self, platform_model_id='', platform_site_id=''):
self.RR2.unassign_platform_model_from_platform_site_with_has_model(platform_model_id, platform_site_id)
def assign_resource_to_observatory_org(self, resource_id='', org_id=''):
if not org_id:
raise BadRequest("Org id not given")
if not resource_id:
raise BadRequest("Resource id not given")
#log.trace("assign_resource_to_observatory_org: org_id=%s, resource_id=%s ", org_id, resource_id)
self.clients.org_management.share_resource(org_id, resource_id)
def unassign_resource_from_observatory_org(self, resource_id='', org_id=''):
if not org_id:
raise BadRequest("Org id not given")
if not resource_id:
raise BadRequest("Resource id not given")
self.clients.org_management.unshare_resource(org_id, resource_id)
##########################################################################
#
# DEPLOYMENTS
#
##########################################################################
def _get_deployment_assocs(self, deployment_id):
res_ids, assocs = self.RR.find_subjects(predicate=PRED.hasDeployment, object=deployment_id, id_only=True)
assoc_by_type = dict(Site=[], Device=[])
for a in assocs:
if a.st not in assoc_by_type:
assoc_by_type[a.st] = []
assoc_by_type[a.st].append(a)
if a.st.endswith("Device"):
assoc_by_type["Device"].append(a)
if a.st.endswith("Site"):
assoc_by_type["Site"].append(a)
return assoc_by_type
def assign_device_to_deployment(self, device_id='', deployment_id=''):
device = self.RR.read(device_id)
dep_assocs = self._get_deployment_assocs(deployment_id)
if dep_assocs["Device"]:
raise BadRequest("Deployment %s - Cannot have more than 1 Device" % deployment_id)
if device.type_ == RT.InstrumentDevice:
self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, device_id)
if dep_assocs["Site"] and dep_assocs["Site"][0].st != RT.InstrumentSite:
raise BadRequest("Deployment %s - Device %s (%s) incompatible with associated Site %s (%s)" % (
deployment_id, device_id, device.type_, dep_assocs["Site"][0].s, dep_assocs["Site"][0].st))
elif device.type_ == RT.PlatformDevice:
self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, device_id)
if dep_assocs["Site"] and dep_assocs["Site"][0].st != RT.PlatformSite:
raise BadRequest("Deployment %s - Device %s (%s) incompatible with associated Site %s (%s)" % (
deployment_id, device_id, device.type_, dep_assocs["Site"][0].s, dep_assocs["Site"][0].st))
else:
raise BadRequest("Illegal resource type to assign to Deployment: %s" % device.type_)
def unassign_device_from_deployment(self, device_id='', deployment_id=''):
device = self.RR.read(device_id)
if device.type_ == RT.InstrumentDevice:
self.RR2.unassign_deployment_from_instrument_device_with_has_deployment(deployment_id, device_id)
elif device.type_ == RT.PlatformDevice:
self.RR2.unassign_deployment_from_platform_device_with_has_deployment(deployment_id, device_id)
else:
raise BadRequest("Illegal resource type to assign to Deployment: %s" % device.type_)
def assign_site_to_deployment(self, site_id='', deployment_id=''):
site = self.RR.read(site_id)
dep_assocs = self._get_deployment_assocs(deployment_id)
if dep_assocs["Site"]:
raise BadRequest("Deployment %s - Cannot have more than 1 Site" % deployment_id)
if site.type_ == RT.InstrumentSite:
self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, site_id)
if dep_assocs["Device"] and dep_assocs["Device"][0].st != RT.InstrumentDevice:
raise BadRequest("Deployment %s - Site %s (%s) incompatible with associated Device %s (%s)" % (
deployment_id, site_id, site.type_, dep_assocs["Device"][0].s, dep_assocs["Device"][0].st))
elif site.type_ == RT.PlatformSite:
self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, site_id)
if dep_assocs["Device"] and dep_assocs["Device"][0].st != RT.PlatformDevice:
raise BadRequest("Deployment %s - Site %s (%s) incompatible with associated Device %s (%s)" % (
deployment_id, site_id, site.type_, dep_assocs["Device"][0].s, dep_assocs["Device"][0].st))
else:
raise BadRequest("Illegal resource type to assign to Deployment: %s" % site.type_)
def unassign_site_from_deployment(self, site_id='', deployment_id=''):
site = self.RR.read(site_id)
if site.type_ == RT.InstrumentSite:
self.RR2.unassign_deployment_from_instrument_site_with_has_deployment(deployment_id, site_id)
elif site.type_ == RT.PlatformSite:
self.RR2.unassign_deployment_from_platform_site_with_has_deployment(deployment_id, site_id)
else:
raise BadRequest("Illegal resource type to assign to Deployment: %s" % site.type_)
def activate_deployment(self, deployment_id='', activate_subscriptions=False):
"""
Make the devices on this deployment the primary devices for the sites
"""
dep_util = DeploymentUtil(self.container)
# Verify that the deployment exists
deployment_obj = self.RR2.read(deployment_id)
log.info("Activating deployment %s '%s'", deployment_id, deployment_obj.name)
# Find an existing primary deployment
dep_site_id, dep_dev_id = dep_util.get_deployment_relations(deployment_id)
active_dep = dep_util.get_site_primary_deployment(dep_site_id)
if active_dep and active_dep._id == deployment_id:
raise BadRequest("Deployment %s already active for site %s" % (deployment_id, dep_site_id))
self.deploy_planner = DeploymentPlanner(self.clients)
pairs_to_remove, pairs_to_add = self.deploy_planner.prepare_activation(deployment_obj)
log.debug("activate_deployment pairs_to_add: %s", pairs_to_add)
log.debug("activate_deployment pairs_to_remove: %s", pairs_to_remove)
if not pairs_to_add:
log.warning('No Site and Device pairs were added to activate this deployment')
temp_constraint = dep_util.get_temporal_constraint(deployment_obj)
# process any removals
for site_id, device_id in pairs_to_remove:
log.info("Unassigning hasDevice; device '%s' from site '%s'", device_id, site_id)
self.unassign_device_from_site(device_id, site_id)
log.info("Removing geo and updating temporal attrs for device '%s'", device_id)
self._update_device_remove_geo_update_temporal(device_id, temp_constraint)
# Sever the connection between dev/site and the primary deployment
assocs = self.clients.resource_registry.find_associations(device_id, PRED.hasPrimaryDeployment, deployment_id)
for assoc in assocs:
self.RR.delete_association(assoc)
assocs = self.clients.resource_registry.find_associations(site_id, PRED.hasPrimaryDeployment, deployment_id)
for assoc in assocs:
self.RR.delete_association(assoc)
# process the additions
for site_id, device_id in pairs_to_add:
log.info("Setting primary device '%s' for site '%s'", device_id, site_id)
self.assign_device_to_site(device_id, site_id)
log.info("Adding geo and updating temporal attrs for device '%s'", device_id)
self._update_device_add_geo_add_temporal(device_id, site_id, deployment_obj)
site_obj = self.RR2.read(site_id)
dev_obj = self.RR2.read(device_id)
# Make this deployment Primary for every device and site
self.RR.create_association(subject=device_id, predicate=PRED.hasPrimaryDeployment, object=deployment_id, assoc_type=RT.Deployment)
self.RR.create_association(subject=site_id, predicate=PRED.hasPrimaryDeployment, object=deployment_id, assoc_type=RT.Deployment)
# Add a withinDeployment association from Device to Deployment
# so the entire history of a Device can be found.
self.RR.create_association(subject=device_id, predicate=PRED.withinDeployment, object=deployment_id, assoc_type=RT.Deployment)
sdps, _ = self.RR.find_objects(subject=site_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=False)
sdps_ids = [s._id for s in sdps] # Get a list of Site Data Product IDs
sdps_streams, _ = self.RR.find_objects_mult(subjects=sdps_ids, predicate=PRED.hasStream, id_only=False)
dpds, _ = self.RR.find_objects(subject=device_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=False)
dps_ids = [d._id for d in dpds] # Get a list of device data product ids
dps_streams, _ = self.RR.find_objects_mult(subjects=dps_ids, predicate=PRED.hasStream, id_only=False)
# Match SDPs to DDPs to get dataset_id and update the dataset_windows.
if not sdps_ids and log.isEnabledFor(logging.DEBUG):
log.debug("Not updating data_windows on Site '%s'... no SiteDataProducts were found." % site_id)
for sdp in sdps:
if not sdp.ingest_stream_name:
log.warning("Unable to pair site data product %s without an ingest stream name", sdp.name)
continue # Ingest stream name isn't defined
for dpd in dpds:
if sdp.ingest_stream_name == dpd.ingest_stream_name:
# Update the window list in the resource
site_dataset_id = self.RR2.find_object(sdp._id, PRED.hasDataset, id_only=True)
device_dataset_id = self.RR2.find_object(dpd._id, PRED.hasDataset, id_only=True)
bounds = TemporalBounds(start_datetime=temp_constraint.start_datetime, end_datetime=str(calendar.timegm(datetime(2038,1,1).utctimetuple())))
window = DatasetWindow(dataset_id=device_dataset_id, bounds=bounds)
sdp.dataset_windows.append(window)
self.clients.data_product_management.update_data_product(sdp)
                        # TODO: Once coverages support None for open intervals on complex
                        # datasets we'll switch to that; in the meantime, 2038 is far enough
                        # out to serve as an arbitrary open-ended point in the future.
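                        # 2208988800 is the number of seconds between the NTP epoch
                        # (1900-01-01) and the Unix epoch (1970-01-01); adding it
                        # converts Unix timestamps to the NTP-based time used by
                        # the dataset windows.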
start = int(temp_constraint.start_datetime) + 2208988800
end = calendar.timegm(datetime(2038,1,1).utctimetuple()) + 2208988800
self.clients.dataset_management.add_dataset_window_to_complex(device_dataset_id, (start, end), site_dataset_id)
dp_params = self.clients.data_product_management.get_data_product_parameters(dpd._id, id_only=False)
for param in dp_params:
if 'lat' in param.name and param.parameter_type == 'sparse':
# Update sparse lat/lon data with site lat/lon
site_obj = self.RR.read(site_id)
# Search for GeospatialBounds bbox constraint
for constraint in site_obj.constraint_list:
if constraint.type_ == OT.GeospatialBounds:
# Get the midpoint of the site geospatial bounds
mid_point = GeoUtils.calc_geospatial_point_center(constraint)
# Create granule using midpoint
stream_def_id, _ = self.RR.find_objects(subject=dpd, predicate=PRED.hasStreamDefinition, id_only=True)
rdt = RecordDictionaryTool(stream_definition_id=stream_def_id[0])
rdt['time'] = [start]
rdt['lat'] = [mid_point['lat']]
rdt['lon'] = [mid_point['lon']]
ParameterHelper.publish_rdt_to_data_product(dpd, rdt)
if deployment_obj.lcstate != LCS.DEPLOYED:
self.RR.execute_lifecycle_transition(deployment_id, LCE.DEPLOY)
else:
log.warn("Deployment %s was already DEPLOYED when activated", deployment_obj._id)
if active_dep:
log.info("activate_deployment(): Deactivating prior Deployment %s at site %s" % (active_dep._id, dep_site_id))
# Set Deployment end date
olddep_tc = dep_util.get_temporal_constraint(active_dep)
newdep_tc = dep_util.get_temporal_constraint(deployment_obj)
if float(olddep_tc.end_datetime) > float(newdep_tc.start_datetime):
# Set to new deployment start date
dep_util.set_temporal_constraint(active_dep, end_time=newdep_tc.start_datetime)
self.RR.update(active_dep)
# Change LCS
if active_dep.lcstate == LCS.DEPLOYED:
self.RR.execute_lifecycle_transition(active_dep._id, LCE.INTEGRATE)
else:
log.warn("Prior Deployment %s was not in DEPLOYED lcstate", active_dep._id)
def deactivate_deployment(self, deployment_id=''):
"""Remove the primary device designation for the deployed devices at the sites
@param deployment_id str
@throws NotFound object with specified id does not exist
@throws BadRequest if devices can not be undeployed
"""
#Verify that the deployment exists
deployment_obj = self.RR2.read(deployment_id)
dep_util = DeploymentUtil(self.container)
if deployment_obj.lcstate != LCS.DEPLOYED:
log.warn("deactivate_deployment(): Deployment %s is not DEPLOYED" % deployment_id)
# raise BadRequest("This deployment is not active")
# get all associated components
self.deploy_planner = DeploymentPlanner(self.clients)
site_ids, device_ids = self.deploy_planner.get_deployment_sites_devices(deployment_obj)
dep_util.set_temporal_constraint(deployment_obj, end_time=DeploymentUtil.DATE_NOW)
self.RR.update(deployment_obj)
temp_constraint = dep_util.get_temporal_constraint(deployment_obj)
# delete only associations where both site and device have passed the filter
for s in site_ids:
dataset_ids = []
ds, _ = self.RR.find_objects(s, PRED.hasDevice, id_only=True)
for d in ds:
if d in device_ids:
a = self.RR.get_association(s, PRED.hasDevice, d)
self.RR.delete_association(a)
log.info("Removing geo and updating temporal attrs for device '%s'", d)
self._update_device_remove_geo_update_temporal(d, temp_constraint)
try:
self.RR.execute_lifecycle_transition(d, LCE.INTEGRATE)
except BadRequest:
log.warn("Could not set device %s lcstate to INTEGRATED", d)
primary_d = self.RR.find_associations(subject=d, predicate=PRED.hasPrimaryDeployment, object=deployment_id)
if primary_d:
self.RR.delete_association(primary_d[0])
primary_s = self.RR.find_associations(subject=s, predicate=PRED.hasPrimaryDeployment, object=deployment_id)
if primary_s:
self.RR.delete_association(primary_s[0])
# Get Dataset IDs for a Device
dps, _ = self.RR.find_objects(subject=d, predicate=PRED.hasOutputProduct, id_only=True)
dataset_ids, _ = self.RR.find_objects_mult(subjects=dps, predicate=PRED.hasDataset, id_only=True)
dataset_ids = list(set(dataset_ids))
# Get the Deployment time bounds as datetime objects
            temporal, geographic = self._get_bounds_from_object(obj=deployment_obj)
# Set the ending of the appropriate dataset_windows. Have to search by dataset_id because we are
# not creating any new resources for the dataset_window logic!
site_dps, _ = self.RR.find_objects(s, PRED.hasOutputProduct, id_only=True)
for dp in site_dps:
site_data_product = self.RR.read(dp)
                # This assumes that dataset_windows is ALWAYS kept in ascending order.
                # There should NEVER be two dataset_window attributes missing an
                # 'ending' value; if there are, a prior deployment was not
                # deactivated properly.
for window in site_data_product.dataset_windows:
if window.dataset_id in dataset_ids:
# Set up the tuples of start and stops
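                        # As in activate_deployment, 2208988800 shifts Unix
                        # timestamps onto the NTP epoch (1900-01-01) used by
                        # the dataset windows.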
old_start = int(window.bounds.start_datetime) + 2208988800
old_end = int(window.bounds.end_datetime) + 2208988800
new_start = old_start
new_end = int(temporal.end_datetime) + 2208988800
# Update the data product resource
window.bounds.end_datetime = temporal.end_datetime
site_dataset_id = self.RR2.find_object(site_data_product._id, PRED.hasDataset, id_only=True)
device_dataset_id = window.dataset_id
# Update the dataset
self.clients.dataset_management.update_dataset_window_for_complex(device_dataset_id, (old_start, old_end), (new_start, new_end), site_dataset_id)
break
self.clients.data_product_management.update_data_product(site_data_product)
# This should set the deployment resource to retired.
# Michael needs to fix the RR retire logic so it does not
# retire all associations before we can use it. Currently we switch
# back to INTEGRATE.
#self.RR.execute_lifecycle_transition(deployment_id, LCE.RETIRE)
        # mark the deployment as no longer deployed (INTEGRATED seems appropriate)
if deployment_obj.lcstate == LCS.DEPLOYED:
self.RR.execute_lifecycle_transition(deployment_id, LCE.INTEGRATE)
else:
log.warn("Deployment %s was not in DEPLOYED lcstate", deployment_id)
def prepare_deployment_support(self, deployment_id=''):
extended_resource_handler = ExtendedResourceContainer(self)
resource_data = extended_resource_handler.create_prepare_resource_support(deployment_id, OT.DeploymentPrepareSupport)
        #Fill out service request information for creating a deployment
extended_resource_handler.set_service_requests(resource_data.create_request, 'observatory_management',
'create_deployment', { "deployment": "$(deployment)" })
        #Fill out service request information for updating a deployment
extended_resource_handler.set_service_requests(resource_data.update_request, 'observatory_management',
'update_deployment', { "deployment": "$(deployment)" })
        #Fill out service request information for assigning an InstrumentDevice
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasInstrumentDevice'].assign_request, 'observatory_management',
'assign_device_to_deployment', {"device_id": "$(instrument_device_id)",
"deployment_id": deployment_id })
#Fill out service request information for assigning a PlatformDevice
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasPlatformDevice'].assign_request, 'observatory_management',
'assign_device_to_deployment', {"device_id": "$(platform_device_id)",
"deployment_id": deployment_id })
        #Fill out service request information for unassigning an InstrumentDevice
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasInstrumentDevice'].unassign_request, 'observatory_management',
'unassign_device_from_deployment', {"device_id": "$(instrument_device_id)",
"deployment_id": deployment_id })
#Fill out service request information for unassigning a PlatformDevice
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasPlatformDevice'].unassign_request, 'observatory_management',
'unassign_device_from_deployment', {"device_id": "$(platform_device_id)",
"deployment_id": deployment_id })
        #Fill out service request information for assigning an InstrumentSite
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasInstrumentSite'].assign_request, 'observatory_management',
'assign_site_to_deployment', {"site_id": "$(instrument_site_id)",
"deployment_id": deployment_id })
#Fill out service request information for assigning a PlatformSite
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasPlatformSite'].assign_request, 'observatory_management',
'assign_site_to_deployment', {"site_id": "$(platform_site_id)",
"deployment_id": deployment_id })
        #Fill out service request information for unassigning an InstrumentSite
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasInstrumentSite'].unassign_request, 'observatory_management',
'unassign_site_from_deployment', {"site_id": "$(instrument_site_id)",
"deployment_id": deployment_id })
#Fill out service request information for unassigning a PlatformSite
extended_resource_handler.set_service_requests(resource_data.associations['DeploymentHasPlatformSite'].unassign_request, 'observatory_management',
'unassign_site_from_deployment', {"site_id": "$(platform_site_id)",
"deployment_id": deployment_id })
return resource_data
##########################################################################
#
# FIND OPS
#
##########################################################################
def find_org_by_observatory(self, observatory_id=''):
"""
"""
orgs,_ = self.RR.find_subjects(RT.Org, PRED.hasResource, observatory_id, id_only=False)
return orgs
def find_related_frames_of_reference(self, input_resource_id='', output_resource_type_list=None):
# use the related resources crawler
finder = RelatedResourcesCrawler()
# generate the partial function (cached association list)
get_assns = finder.generate_related_resources_partial(self.RR, [PRED.hasSite])
# run 2 searches allowing all site-based resource types: one down (subj-obj), one up (obj-subj)
full_crawllist = [RT.InstrumentSite, RT.PlatformSite, RT.Subsite, RT.Observatory]
search_down = get_assns({PRED.hasSite: (True, False)}, full_crawllist)
search_up = get_assns({PRED.hasSite: (False, True)}, full_crawllist)
# the searches return a list of association objects, so compile all the ids by extracting them
retval_ids = set([])
# we want only those IDs that are not the input resource id
for a in search_down(input_resource_id, -1) + search_up(input_resource_id, -1):
if a.o not in retval_ids and a.o != input_resource_id:
retval_ids.add(a.o)
if a.s not in retval_ids and a.s != input_resource_id:
retval_ids.add(a.s)
log.trace("converting retrieved ids to objects = %s" % retval_ids)
#initialize the dict
retval = dict((restype, []) for restype in output_resource_type_list)
#workaround for read_mult problem
all_res = []
if retval_ids: all_res = self.RR.read_mult(list(retval_ids))
#all_res = self.RR.read_mult(retval_ids)
# put resources in the slot based on their type
for resource in all_res:
typename = type(resource).__name__
if typename in output_resource_type_list:
retval[typename].append(resource)
# display a count of how many resources we retrieved
log.debug("got these resources: %s", dict([(k, len(v)) for k, v in retval.iteritems()]))
return retval
def find_related_sites(self, parent_resource_id='', exclude_site_types=None, include_parents=False,
include_devices=False, id_only=False):
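        # A hedged usage sketch (comment only): fetch the full site tree of an
        # Org, including devices; `oms` is an assumed client for this service
        # and `org_id` is assumed to name an existing Org:
        #
        #     sites, children, site_devs, devs = oms.find_related_sites(
        #         org_id, include_parents=True, include_devices=True)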
        if not parent_resource_id:
            raise BadRequest("Must provide a parent_resource_id")
exclude_site_types = exclude_site_types or []
if not isinstance(exclude_site_types, list):
raise BadRequest("exclude_site_types mut be a list, is: %s" % type(exclude_site_types))
parent_resource = self.RR.read(parent_resource_id)
org_id, site_id = None, None
if parent_resource.type_ == RT.Org:
org_id = parent_resource_id
elif RT.Site in parent_resource._get_extends():
site_id = parent_resource_id
else:
raise BadRequest("Illegal parent_resource_id type. Expected Org/Site, given:%s" % parent_resource.type_)
RR2 = EnhancedResourceRegistryClient(self.RR)
RR2.cache_resources(RT.Observatory)
RR2.cache_resources(RT.PlatformSite)
RR2.cache_resources(RT.InstrumentSite)
if include_devices:
RR2.cache_resources(RT.PlatformDevice)
RR2.cache_resources(RT.InstrumentDevice)
outil = ObservatoryUtil(self, enhanced_rr=RR2)
site_resources, site_children = outil.get_child_sites(site_id, org_id,
exclude_types=exclude_site_types, include_parents=include_parents, id_only=id_only)
site_devices, device_resources = None, None
if include_devices:
site_devices = outil.get_device_relations(site_children.keys())
device_list = list({tup[1] for key,dev_list in site_devices.iteritems() if dev_list for tup in dev_list})
device_resources = RR2.read_mult(device_list)
            # HACK: fold the device resources into site_resources so callers get a single id->resource lookup
dev_by_id = {dev._id: dev for dev in device_resources}
site_resources.update(dev_by_id)
return site_resources, site_children, site_devices, device_resources
def get_sites_devices_status(self, parent_resource_ids=None, include_sites=False, include_devices=False, include_status=False):
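        # A hedged usage sketch (comment only): roll up status for one or more
        # parents, with `oms` an assumed client for this service. The result is
        # keyed by parent resource id; each value may carry "site_resources",
        # "site_children", "site_status", "site_aggregate_status" and
        # "device_aggregate_status" depending on the include_* flags:
        #
        #     status = oms.get_sites_devices_status([site_id], include_status=True)
        #     agg = status[site_id]["site_aggregate_status"]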
        if not parent_resource_ids:
            raise BadRequest("Must provide parent_resource_ids")
result_dict = {}
RR2 = EnhancedResourceRegistryClient(self.RR)
RR2.cache_resources(RT.Observatory)
RR2.cache_resources(RT.PlatformSite)
RR2.cache_resources(RT.InstrumentSite)
RR2.cache_resources(RT.PlatformDevice)
RR2.cache_resources(RT.InstrumentDevice)
outil = ObservatoryUtil(self, enhanced_rr=RR2, device_status_mgr=DeviceStateManager())
parent_resource_objs = RR2.read_mult(parent_resource_ids)
res_by_id = dict(zip(parent_resource_ids, parent_resource_objs))
        # Loop through all the provided site ids and create the result structure
for parent_resource_id in parent_resource_ids:
parent_resource = res_by_id[parent_resource_id]
org_id, site_id = None, None
if parent_resource.type_ == RT.Org:
org_id = parent_resource_id
elif RT.Site in parent_resource._get_extends():
site_id = parent_resource_id
site_result_dict = {}
site_resources, site_children = outil.get_child_sites(site_id, org_id, include_parents=True, id_only=False)
if include_sites:
site_result_dict["site_resources"] = site_resources
site_result_dict["site_children"] = site_children
all_device_statuses = {}
if include_devices or include_status:
RR2.cache_predicate(PRED.hasSite)
RR2.cache_predicate(PRED.hasDevice)
all_device_statuses = outil.get_status_roll_ups(parent_resource_id)
if include_status:
                # TODO: grab the master status table to pass into the get_status_roll_ups calculation
log.debug('get_sites_devices_status site master_status_table: %s ', all_device_statuses)
site_result_dict["site_status"] = all_device_statuses
                #create the aggregate_status for each device and site
log.debug("calculate site aggregate status")
site_status = [all_device_statuses.get(x,{}).get('agg',DeviceStatusType.STATUS_UNKNOWN) for x in site_children.keys()]
site_status_dict = dict(zip(site_children.keys(), site_status))
log.debug('get_sites_devices_status site_status_dict: %s ', site_status_dict)
site_result_dict["site_aggregate_status"] = site_status_dict
if include_devices:
log.debug("calculate device aggregate status")
inst_status = [all_device_statuses.get(x,{}).get('agg',DeviceStatusType.STATUS_UNKNOWN) for x in all_device_statuses.keys()]
device_agg_status_dict = dict(zip(all_device_statuses.keys(), inst_status))
log.debug('get_sites_devices_status device_agg_status_dict: %s ', device_agg_status_dict)
site_result_dict["device_aggregate_status"] = device_agg_status_dict
result_dict[parent_resource_id] = site_result_dict
return result_dict
def find_site_data_products(self, parent_resource_id='', include_sites=False, include_devices=False,
include_data_products=False):
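        # A hedged usage sketch (comment only), with `oms` an assumed client
        # for this service:
        #
        #     res = oms.find_site_data_products(site_id, include_sites=True,
        #                                       include_data_products=True)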
        if not parent_resource_id:
            raise BadRequest("Must provide a parent_resource_id")
outil = ObservatoryUtil(self)
res_dict = outil.get_site_data_products(parent_resource_id, include_sites=include_sites,
include_devices=include_devices,
include_data_products=include_data_products)
return res_dict
# -------------------------------------------------------------------------
# Marine Asset Management RESOURCES (start)
# -------------------------------------------------------------------------
# AssetType
def create_asset_type(self, asset_type=None):
"""Create a AssetType resource.
@param asset_type RT.AssetType
@retval asset_type_id str
@throws: BadRequest 'asset_type object is empty'
"""
if not asset_type:
raise BadRequest('asset_type object is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
asset_type_id = at.create_asset_type(asset_type)
return asset_type_id
def read_asset_type(self, asset_type_id=''):
"""Read an AssetType resource.
@param asset_type_id str
@retval asset_type RT.AssetType
@throws: BadRequest 'asset_type_id parameter is empty'
"""
if not asset_type_id:
raise BadRequest('asset_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
asset_type = at.read_asset_type(asset_type_id)
return asset_type
def update_asset_type(self, asset_type=None):
"""Update an AssetType resource.
@param asset_type RT.AssetType
@throws: BadRequest 'asset_type object is empty'
"""
if not asset_type:
raise BadRequest('asset_type object is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
obj = at.update_asset_type(asset_type)
return obj
def delete_asset_type(self, asset_type_id=''):
"""Delete an AssetType resource.
@param asset_type_id str
@throws: BadRequest 'asset_type_id parameter is empty'
"""
if not asset_type_id:
raise BadRequest('asset_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.delete_asset_type(asset_type_id)
def force_delete_asset_type(self, asset_type_id=''):
"""Force delete an AssetType resource
@param asset_type_id str
@throws: BadRequest 'asset_type_id parameter is empty'
"""
if not asset_type_id:
raise BadRequest('asset_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.force_delete_asset_type(asset_type_id)
def update_attribute_specifications(self, resource_id='', spec_dict=None):
""" Update attribute_specifications of resource using spec_dict provided.
@param resource_id str # id of RT.Asset or RT.EventDurationType
        @param spec_dict {} # dictionary of attribute specification(s)
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'spec_dict parameter is empty'
@throws Inconsistent unable to process resource of this type
"""
        # TODO NOTE: Must abide by the state restriction model.
        # Updating attribute_specifications is dependent on state (i.e. if in an
        # integrated or deployed state, updates are not permitted unless the
        # operator has privileges to do so).
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not spec_dict:
raise BadRequest('spec_dict parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_attribute_specifications(resource_id, spec_dict)
def delete_attribute_specification(self, resource_id='', attr_spec_names=None):
"""Delete attribute_specifications in list of attr_spec_names and return the
TypeResource attribute_specifications dictionary for resource_id.
@param resource_id str # id of RT.Asset or RT.EventDurationType
@param attr_spec_names [] # list of attribute specification name(s)
@retval r_obj {} # dictionary of attribute specification(s)
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'attr_spec_names parameter is empty'
"""
# TODO NOTE: Must abide by state restriction model
# Delete attribute_specifications in list of attr_spec_names and return the
# TypeResource attribute_specifications dictionary for resource_id.
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not attr_spec_names:
raise BadRequest('attr_spec_names parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
r_obj = at.delete_attribute_specification(resource_id, attr_spec_names)
return r_obj
#
# Asset
#
def create_asset(self, asset=None, asset_type_id=''):
"""Create an Asset resource. If alt_ids provided verify well formed and unique
in namespace RT.Asset. An Asset is coupled with an AssetType. The AssetType is
created and associated within this call if asset_type_id provided.
@param asset RT.Asset
@param asset_type_id str # optional
        @retval asset_id str
@throws BadRequest 'asset object is empty'
@throws Inconsistent 'multiple alt_ids not permitted for Asset resources'
@throws Inconsistent 'malformed alt_ids provided for Asset; required format \'Asset:asset_name\''
@throws BadRequest 'resource instance already exists (\'Asset\') with this altid: %s'
@throws Inconsistent 'Invalid asset object'
"""
if not asset:
raise BadRequest('asset object is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
asset_id = at.create_asset(asset, asset_type_id)
return asset_id
def read_asset(self, asset_id=''):
"""Read an Asset resource
@param asset_id str
@retval asset RT.Asset
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_id:
raise BadRequest('asset_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
obj = at.read_asset(asset_id)
return obj
def update_asset(self, asset=None):
"""Update an Asset resource. Ensure alt_ids value (if provided) is well formed and
unique in namespace. The asset object provided shall have asset_attrs defined and shall also have
an association (PRED.implementsAssetType) defined or method shall fail. asset.asset_attrs and
the association are required to perform validation and constraint checks prior to update.
@param asset RT.Asset
@throws BadRequest 'asset object is empty'
        @throws NotFound '_id is empty'
@throws BadRequest 'asset (id=%s) does not have association (PRED.implementsAssetType) defined'
@throws BadRequest 'asset (id=%s) has more than one association (PRED.implementsAssetType) defined'
@throws BadRequest 'asset type (id: \'%s\') does not have attribute_specifications'
@throws BadRequest 'asset_update requires asset_attrs to be provided'
@throws BadRequest 'attribute (\'%s\') not found in AssetType (id=\'%s\') AttributeSpecification '
@throws BadRequest 'update_asset: altid returned: %s; instance using current_altid_exists: %s'
@throws BadRequest (numerous error messages from lower methods inside update_asset)
@throws BadRequest 'update_asset failed'
"""
try:
if not asset:
raise BadRequest('asset object is empty')
if not asset._id:
raise NotFound('_id is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_asset(asset)
except BadRequest, Arguments:
raise BadRequest('update_asset: %s' % Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound('update_asset: %s' % Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent('update_asset: %s' % Arguments.get_error_message())
except:
raise BadRequest('update_asset failed')
return
def delete_asset(self, asset_id=''):
"""Delete an Asset resource
@param asset_id str
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_id:
raise BadRequest('asset_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.delete_asset(asset_id)
return
def force_delete_asset(self, asset_id=''):
""" Force delete an Asset resource
@param asset_id str
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_id:
raise BadRequest('asset_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.force_delete_asset(asset_id)
def get_asset_extension(self, asset_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Returns an AssetExtension object containing additional related information
@param asset_id str
@param ext_associations dict
@param ext_exclude list
@param user_id str
@retval extended_asset AssetExtension
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_id:
raise BadRequest('asset_id parameter is empty')
extended_resource_handler = ExtendedResourceContainer(self)
extended_asset = extended_resource_handler.create_extended_resource_container(
extended_resource_type=OT.AssetExtension,
resource_id=asset_id,
computed_resource_type=OT.BaseComputedAttributes,
ext_associations=ext_associations,
ext_exclude=ext_exclude,
user_id=user_id)
from ion.util.extresource import strip_resource_extension, get_matchers, matcher_UserInfo, matcher_MarineAsset,\
matcher_DataProduct, matcher_DeviceModel, matcher_Device
matchers = get_matchers([matcher_MarineAsset, matcher_UserInfo])
strip_resource_extension(extended_asset, matchers=matchers)
return extended_asset
def prepare_asset_support(self, asset_id=''):
"""Asset prepare support for UI (create, update).
@param asset_id str
@retval resource_data resource_schema
"""
extended_resource_handler = ExtendedResourceContainer(self)
resource_data = extended_resource_handler.create_prepare_resource_support(asset_id, OT.AssetPrepareSupport)
        #Fill out service request information for creating an asset
extended_resource_handler.set_service_requests(resource_data.create_request, 'observatory_management',
'create_asset', { "asset": "$(asset)" })
        #Fill out service request information for updating an asset
extended_resource_handler.set_service_requests(resource_data.update_request, 'observatory_management',
'update_asset', { "asset": "$(asset)" })
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# assign event to asset (LocationEvent, OperabilityEvent, VerificationEvent, IntegrationEvent)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#Fill out service request information for assigning an EventDuration to Asset (LocationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasLocationEvent'].assign_request,
'observatory_management', 'assign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
#Fill out service request information for assigning an EventDuration to Asset (OperabilityEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasOperabilityEvent'].assign_request,
'observatory_management', 'assign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
#Fill out service request information for assigning an EventDuration to Asset (VerificationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasVerificationEvent'].assign_request,
'observatory_management', 'assign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
#Fill out service request information for assigning an EventDuration to Asset (IntegrationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasAssemblyEvent'].assign_request,
'observatory_management', 'assign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # unassign event from asset (LocationEvent, OperabilityEvent, VerificationEvent, IntegrationEvent)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        #Fill out service request information for unassigning an EventDuration from Asset (LocationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasLocationEvent'].unassign_request,
'observatory_management', 'unassign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
        #Fill out service request information for unassigning an EventDuration from Asset (OperabilityEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasOperabilityEvent'].unassign_request,
'observatory_management', 'unassign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
        #Fill out service request information for unassigning an EventDuration from Asset (VerificationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasVerificationEvent'].unassign_request,
'observatory_management', 'unassign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
        #Fill out service request information for unassigning an EventDuration from Asset (IntegrationEvent)
extended_resource_handler.set_service_requests(resource_data.associations['AssetHasAssemblyEvent'].unassign_request,
'observatory_management', 'unassign_event_duration_to_asset',
{"event_duration_id": "$(event_duration_id)", "asset_id": asset_id })
return resource_data
def assign_asset_type_to_asset(self, asset_type_id='',asset_id=''):
""" Link an Asset to an AssetType
@param asset_type_id str
@param asset_id str
@throws BadRequest 'asset_type_id parameter is empty'
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_type_id:
raise BadRequest('asset_type_id parameter is empty')
if not asset_id:
raise BadRequest('asset_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.assign_asset_type_to_asset(asset_type_id, asset_id)
def unassign_asset_type_from_asset(self, asset_type_id='', asset_id=''):
"""Remove link of Asset from AssetType.
@param asset_type_id str
@param asset_id str
@throws BadRequest 'asset_type_id parameter is empty'
@throws BadRequest 'asset_id parameter is empty'
"""
if not asset_type_id:
raise BadRequest('asset_type_id parameter is empty')
if not asset_id:
raise BadRequest('asset_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.unassign_asset_type_from_asset(asset_type_id, asset_id)
#
# EventDurationType
#
def create_event_duration_type(self, event_duration_type=None):
"""Create a EventDurationType resource.
@param event_duration_type RT.EventDurationType
@retval event_duration_type_id str
@throws: BadRequest 'event_duration_type parameter is empty'
"""
if not event_duration_type:
raise BadRequest('event_duration_type parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
        event_duration_type_id = at.create_event_duration_type(event_duration_type)
        return event_duration_type_id
def read_event_duration_type(self, event_duration_type_id=''):
"""Read an EventDurationType resource.
@param event_duration_type_id str
@retval event_duration_type RT.EventDurationType
@throws: BadRequest 'event_duration_type_id parameter is empty'
"""
if not event_duration_type_id:
raise BadRequest('event_duration_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
event_duration_type = at.read_event_duration_type(event_duration_type_id)
return event_duration_type
def update_event_duration_type(self, event_duration_type=None):
"""Update an EventDurationType resource.
@param event_duration_type RT.EventDurationType
@throws: BadRequest 'event_duration_type parameter is empty'
"""
if not event_duration_type:
raise BadRequest('event_duration_type parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_event_duration_type(event_duration_type)
return
def delete_event_duration_type(self, event_duration_type_id=''):
"""Delete an EventDurationType resource.
@param event_duration_type_id str
@throws: BadRequest 'event_duration_type_id parameter is empty'
"""
if not event_duration_type_id:
raise BadRequest('event_duration_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.delete_event_duration_type(event_duration_type_id)
return
def force_delete_event_duration_type(self, event_duration_type_id=''):
"""Force delete an EventDurationType resource.
        @param event_duration_type_id str
@throws: BadRequest 'event_duration_type_id parameter is empty'
"""
if not event_duration_type_id:
raise BadRequest('event_duration_type_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.force_delete_event_duration_type(event_duration_type_id)
#
# EventDuration
#
def create_event_duration(self, event_duration=None, event_duration_type_id=''):
"""Create a EventDuration resource.
An EventDuration is created and is coupled with an EventDurationType if
the optional event_duration_type_id is provided.
@param event_duration RT.EventDuration
@param event_duration_type_id str # optional
@retval event_duration_id str
@throws BadRequest 'event_duration parameter is empty'
@throws Inconsistent 'multiple alt_ids not permitted for EventDuration resources'
        @throws Inconsistent 'malformed EventDuration.alt_ids provided; required format empty or \'EventDuration:event_name\''
@throws Inconsistent 'invalid namespace (%s) provided for EventDuration resource'
@throws BadRequest 'resource instance already exists (\'EventDuration\') with this altid: %s'
@throws Inconsistent 'Invalid event_duration object'
"""
if not event_duration:
raise BadRequest('event_duration parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
event_duration_id = at.create_event_duration(event_duration, event_duration_type_id)
return event_duration_id
def read_event_duration(self, event_duration_id=''):
"""Read an EventDuration resource.
@param event_duration_id str
@retval event_duration RT.EventDuration
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
event_duration = at.read_event_duration(event_duration_id)
return event_duration
def update_event_duration(self, event_duration=None):
"""Update an EventDuration resource and ensure alt_ids value (if provided) is well formed and
unique in namespace. The event_duration object provided shall have event_duration_attrs
defined and shall also have an association (PRED.implementsEventDurationType) defined or
method shall fail. event_duration.event_duration_attrs and the association are required
to perform validation and constraint checks prior to update.
@param event_duration RT.EventDuration
@throws BadRequest 'update_event_duration failed'
@throws BadRequest 'event_duration parameter is empty'
@throws BadRequest 'event_duration (id=%s) does not have association (PRED.implementsEventDurationType) defined'
@throws BadRequest 'event_duration (id=%s) has more than one association (PRED.implementsEventDurationType) defined'
@throws BadRequest 'event_duration_update requires event_duration_attrs to be provided'
@throws BadRequest 'event_duration_update: altid returned: %s and current_altid_exists: %s'
@throws BadRequest 'update_event_duration failed'
"""
try:
if not event_duration:
raise BadRequest('event_duration parameter is empty')
if not event_duration._id:
raise NotFound('_id is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_event_duration(event_duration)
except BadRequest, Arguments:
raise BadRequest('update_event_duration: %s' % Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound('update_event_duration: %s' % Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent('update_event_duration: %s' % Arguments.get_error_message())
except:
raise BadRequest('update_event_duration failed')
return
def delete_event_duration(self, event_duration_id=''):
"""Delete an EventDuration resource.
@param event_duration_id str
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.delete_event_duration(event_duration_id)
return
def force_delete_event_duration(self, event_duration_id=''):
""" Force delete an EventDuration resource.
@param event_duration_id str
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.force_delete_event_duration(event_duration_id)
def assign_event_duration_type_to_event_duration(self, event_duration_type_id='', event_duration_id=''):
""" Link an EventDuration to an EventDurationType.
@param event_duration_type_id str
@param event_duration_id str
@throws BadRequest 'event_duration_type_id parameter is empty'
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_type_id:
raise BadRequest('event_duration_type_id parameter is empty')
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.assign_event_duration_type_to_event_duration(event_duration_type_id, event_duration_id)
def unassign_event_duration_type_from_event_duration(self, event_duration_type_id='', event_duration_id=''):
"""Remove link of EventDuration from EventDurationType.
@param event_duration_type_id str
@param event_duration_id str
@throws BadRequest 'event_duration_type_id parameter is empty'
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_type_id:
raise BadRequest('event_duration_type_id parameter is empty')
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.unassign_event_duration_type_from_event_duration(event_duration_type_id, event_duration_id)
def get_event_duration_extension(self, event_duration_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Returns an EventDurationExtension object containing additional related information
@param event_duration_id str
@param ext_associations dict
@param ext_exclude list
@param user_id str
@retval extended_event_duration EventDurationExtension
@throws BadRequest 'event_duration_id parameter is empty'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
extended_resource_handler = ExtendedResourceContainer(self)
extended_event_duration = extended_resource_handler.create_extended_resource_container(
extended_resource_type=OT.EventDurationExtension,
resource_id=event_duration_id,
computed_resource_type=OT.BaseComputedAttributes,
ext_associations=ext_associations,
ext_exclude=ext_exclude,
user_id=user_id)
from ion.util.extresource import strip_resource_extension, get_matchers, matcher_UserInfo, matcher_MarineAsset, \
matcher_DataProduct, matcher_DeviceModel, matcher_Device
matchers = get_matchers([matcher_MarineAsset, matcher_UserInfo])
strip_resource_extension(extended_event_duration, matchers=matchers)
return extended_event_duration
def prepare_event_duration_support(self, event_duration_id=''):
"""EventDuration prepare support for UI (create, update).
@param event_duration_id str
@retval resource_data resource_schema
"""
extended_resource_handler = ExtendedResourceContainer(self)
resource_data = extended_resource_handler.create_prepare_resource_support(event_duration_id, OT.EventDurationPrepareSupport)
        #Fill out service request information for creating an event duration
extended_resource_handler.set_service_requests(resource_data.create_request, 'observatory_management',
'create_event_duration', { "event_duration": "$(event_duration)" })
        #Fill out service request information for updating an event duration
extended_resource_handler.set_service_requests(resource_data.update_request, 'observatory_management',
'update_event_duration', { "event_duration": "$(event_duration)" })
"""
#Fill out service request information for assigning an EventDurationType from EventDuration
extended_resource_handler.set_service_requests(resource_data.associations['EventDurationHasEventDurationType'].assign_request, 'observatory_management',
'assign_event_duration_type_from_event_duration', {"event_duration_type_id": "$(event_duration_type_id)",
"event_duration_id": event_duration_id })
#Fill out service request information for unassigning an EventDurationType from EventDuration
extended_resource_handler.set_service_requests(resource_data.associations['EventDurationHasEventDurationType'].unassign_request, 'observatory_management',
'unassign_event_duration_type_from_event_duration', {"event_duration_type_id": "$(event_duration_type_id)",
"event_duration_id": event_duration_id })
"""
return resource_data
def assign_event_duration_to_asset(self, event_duration_id='', asset_id=''):
""" Link an EventDuration to an Asset.
@param event_duration_id str
@param asset_id str
@throws BadRequest 'event_duration_id parameter is empty'
@throws BadRequest 'asset_id parameter is empty'
@throws NotFound 'asset instance not found'
@throws Inconsistent 'this event duration has multiple event duration types'
@throws BadRequest 'this event duration does not have associated event duration type'
@throws BadRequest 'unknown EventCategoryEnum value for association category'
@throws BadRequest 'an association (%s) already exists, cannot assign more than one association of the same type'
@throws BadRequest 'unknown association category predicate (Event to Asset)'
        @throws BadRequest 'failed to assign association (%s)'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
if not asset_id:
raise BadRequest('asset_id parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.assign_event_duration_to_asset(event_duration_id, asset_id)
except BadRequest, Arguments:
raise BadRequest(Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound(Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent(Arguments.get_error_message())
except:
raise BadRequest('failed to assign association event duration to asset')
def unassign_event_duration_to_asset(self, event_duration_id='', asset_id=''):
"""Remove link of EventDuration from Asset.
@param event_duration_id str
@param asset_id str
@throws BadRequest 'event_duration_id parameter is empty'
@throws BadRequest 'asset_id parameter is empty'
@throws Inconsistent 'this event duration implements multiple event duration types'
@throws BadRequest 'this event duration does not have associated event duration type'
@throws Inconsistent 'this event duration has multiple associations with asset'
@throws BadRequest 'this event duration is not associated with asset'
"""
if not event_duration_id:
raise BadRequest('event_duration_id parameter is empty')
if not asset_id:
raise BadRequest('asset_id parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.unassign_event_duration_to_asset(event_duration_id, asset_id)
except BadRequest, Arguments:
raise BadRequest(Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound(Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent(Arguments.get_error_message())
except:
raise BadRequest('failed to unassign association (event duration from asset)')
#
# Asset associations to resource
# (not used; remove here AND from observatory_management_service.yml)
#
def assign_asset_to_resource(self, asset_id='',resource_id=''):
        # Link an asset to a resource (deprecated)
#@param asset_id str
#@param resource_id str
#@throws NotFound object with specified id does not exist
#@throws BadRequest if object with specified id does not have_id or_rev attribute
#
if not asset_id:
raise BadRequest('asset_id parameter is empty')
if not resource_id:
raise BadRequest('resource_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.assign_asset_to_resource(asset_id, resource_id)
def unassign_asset_from_resource(self, asset_id='', resource_id=''):
        #Remove link of asset from resource. (deprecated)
#@param asset_id str
#@param resource_id str
#@throws BadRequest if object with specified id does not have_id or_rev attribute
#
if not asset_id:
raise BadRequest('asset_id parameter is empty')
if not resource_id:
raise BadRequest('resource_id parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.unassign_asset_from_resource(asset_id, resource_id)
#
# CodeSpace
#
def create_code_space(self, code_space=None):
"""Create a CodeSpace resource.
@param code_space RT.CodeSpace
        @retval code_space_id str
@throws: BadRequest 'code_space object is empty'
@throws: Inconsistent 'invalid code_space object'
"""
if not code_space:
raise BadRequest('code_space object is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
            code_space_id = at.create_code_space(code_space)
except BadRequest, Arguments:
raise BadRequest(Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent(Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound(Arguments.get_error_message())
except:
raise Inconsistent('invalid code_space object')
        return code_space_id
def read_code_space(self, resource_id=''):
"""Read an CodeSpace resource.
@param resource_id str
@retval code_space RT.CodeSpace
@throws BadRequest 'resource_id parameter is empty'
@throws NotFound 'object with specified id does not exist'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
obj = at.read_code_space(resource_id)
except:
raise NotFound('object with specified id does not exist.')
return obj
def update_code_space(self, code_space=None):
"""Update an CodeSpace resource.
@param code_space RT.CodeSpace
@throws BadRequest 'code_space object is empty'
"""
if not code_space:
raise BadRequest('code_space object is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
obj = at.update_code_space(code_space)
return obj
def delete_code_space(self, resource_id=''):
"""Delete a CodeSpace resource.
@param resource_id str
@throws BadRequest 'resource_id parameter is empty'
@throws NotFound 'object with specified id does not exist.'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.delete_code_space(resource_id)
except:
raise NotFound('object with specified id does not exist.')
return
def force_delete_code_space(self, resource_id=''):
""" Force delete a CodeSpace resource.
@param resource_id str
@throws BadRequest 'resource_id parameter is empty'
@throws NotFound 'object with specified id does not exist.'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.force_delete_code_space(resource_id)
except:
raise NotFound('object with specified id does not exist.')
        return
def read_codesets_by_name(self, resource_id='', names=None):
"""Read CodeSpace (id=resource_id) for list of codeset name(s); return list of CodeSets.
@param resource_id str
@param names []
@throws: BadRequest 'resource_id parameter is empty'
@throws: BadRequest 'names parameter is empty'
@throws NotFound 'object with specified resource_id (type RT.CodeSpace) does not exist'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not names:
raise BadRequest('names parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
codesets = at.read_codesets_by_name(resource_id, names)
return codesets
def read_codes_by_name(self, resource_id='', names=None, id_only=False):
"""Read CodeSpace with resource_id and for list of Code name(s); return list of Codes.
@param resource_id str
@param names []
        @param id_only bool # optional
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'names parameter is empty'
@throws NotFound 'object with specified resource_id (type RT.CodeSpace) does not exist'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not names:
raise BadRequest('names parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
codes = at.read_codes_by_name(resource_id, names, id_only)
return codes
def update_codes(self, resource_id='', codes=None):
"""Read CodeSpace with resource_id, update Codes identified in dictionary of codes.
@param resource_id str
@param codes {}
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'codes parameter is empty'
@throws NotFound 'object with specified resource_id and type=RT.CodeSpace does not exist'
        @throws NotFound 'code not found in CodeSpace (with id=resource_id).'
@throws NotFound 'code provided for update with empty name.'
@throws NotFound 'codes not found in CodeSpace (with id=resource_id).'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not codes:
raise BadRequest('codes parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_codes(resource_id, codes)
def update_codesets(self, resource_id='', codesets=None):
"""Read CodeSpace, with resource_id, and update codesets as identified in
the dictionary codesets.
@param resource_id str
@param codesets {}
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'codesets parameter is empty'
@throws NotFound 'object with specified resource_id and type=RT.CodeSpace does not exist'
@throws NotFound 'CodeSet not found in CodeSpace.'
@throws NotFound 'CodeSet provided for update with empty name.'
@throws NotFound 'CodeSpace codesets is empty.'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not codesets:
raise BadRequest('codesets parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
at.update_codesets(resource_id, codesets)
def delete_codes(self, resource_id='', names=None):
"""Delete Codes (identified in names list) from CodeSpace; return list of Codes in CodeSpace.
        Check if code is used by a code_set; if so, remove the code from the code_set, update the code_set and then
delete the code.
@param resource_id str
@param names []
@retval codes_list []
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'names parameter is empty'
        @throws NotFound 'object with resource_id and type RT.CodeSpace does not exist'
"""
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not names:
raise BadRequest('names parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
codes_list = at.delete_codes(resource_id, names)
return codes_list
def delete_codesets(self, resource_id='', names=None):
"""Delete CodeSets identified in list names; return list of CodeSets in CodeSpace.
@param resource_id str
@param names []
@retval codeset_list []
@throws BadRequest 'resource_id parameter is empty'
@throws BadRequest 'names parameter is empty'
        @throws NotFound 'object with resource_id and type RT.CodeSpace does not exist'
"""
        # TODO: return value scheduled to change
if not resource_id:
raise BadRequest('resource_id parameter is empty')
if not names:
raise BadRequest('names parameter is empty')
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
codeset_list = at.delete_codesets(resource_id, names)
return codeset_list
############################
#
# START - Services for Marine Asset Management
#
############################
def declare_asset_tracking_resources(self, content='', content_type='', content_encoding=''):
"""Read content which defines asset management resources, instantiate resources;
return dictionary of resource ids by category of resource type.
@param content encoded blob # binascii.b2a_hex(content)
@param content_type file_descriptor.mimetype # file descriptor type
@param content_encoding 'b2a_hex' # encoding (set to binascii.b2a_hex)
@retval response {} # dict of resource ids by category of resource type
@throws BadRequest 'content parameter is empty'
@throws BadRequest 'declare_asset_tracking_resources error'
@throws BadRequest (from _process_xls)
@throws NotFound (from _process_xls)
@throws Inconsistent (from _process_xls)
"""
if not content:
raise BadRequest('content parameter is empty')
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
response = at._process_xls(content, content_type, content_encoding)
except BadRequest, Arguments:
raise BadRequest(Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound(Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent(Arguments.get_error_message())
except:
raise BadRequest('declare_asset_tracking_resources error')
return response
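# Hedged caller-side sketch (file name, client handle, and mimetype are hypothetical):
# the docstring expects hex-encoded content via binascii.b2a_hex, e.g.
#   import binascii
#   with open('resources.xls', 'rb') as f:
#       blob = binascii.b2a_hex(f.read())
#   response = oms_client.declare_asset_tracking_resources(blob,
#       content_type='application/vnd.ms-excel', content_encoding='b2a_hex')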
def asset_tracking_report(self):
"""Query system instances of marine tracking resources (CodeSpaces,Codes,CodeSets, Assets, AssetTypes, EventDurations,
EventDurationTypes) produce xls workbook and return encoded content.
@retval content binascii.b2a_hex(xls workbook)
@throws BadRequest 'asset tracking report failed to produce xls'
@throws BadRequest (from _download_xls)
@throws NotFound (from _download_xls)
@throws Inconsistent (from _download_xls)
"""
try:
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
content = at._download_xls()
except BadRequest, Arguments:
raise BadRequest(Arguments.get_error_message())
except NotFound, Arguments:
raise NotFound(Arguments.get_error_message())
except Inconsistent, Arguments:
raise Inconsistent(Arguments.get_error_message())
except:
raise BadRequest('asset tracking report failed to produce xls')
return content
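# Hedged caller-side sketch (client handle and output path are hypothetical): the returned
# blob is hex-encoded, so reverse it with binascii.a2b_hex before writing to disk:
#   import binascii
#   with open('asset_report.xls', 'wb') as f:
#       f.write(binascii.a2b_hex(oms_client.asset_tracking_report()))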
# Deprecated - helper picklists for altids (Asset and Event[Duration]s)
def get_altids(self, res_type=''):
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
picklist = at.get_altids(res_type)
return picklist
# helper picklists for altids (Asset and Event[Duration]s)
def get_assets_picklist(self, id_only=''):
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
picklist = at.get_picklist(RT.Asset, id_only)
return picklist
def get_events_picklist(self, id_only=''):
at = AssetTracking(self,container=self.container, enhanced_rr=self.RR2, rr=self.RR, node=self.container.node)
picklist = at.get_picklist(RT.EventDuration, id_only)
return picklist
# -------------------------------------------------------------------------
# Marine Asset Management RESOURCES (end)
# -------------------------------------------------------------------------
############################
#
# EXTENDED RESOURCES
#
############################
# TODO: Make every incoming call to this one
def get_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
site_extension = None
# Make a case decision on what to do based on the site type
site_obj = self.RR2.read(site_id)
site_type = site_obj._get_type()
if site_type == RT.InstrumentSite:
site_extension = self._get_instrument_site_extension(site_id, ext_associations, ext_exclude, user_id)
elif site_type in (RT.Observatory, RT.Subsite, RT.PlatformSite):
site_extension = self._get_platform_site_extension(site_id, ext_associations, ext_exclude, user_id)
else:
raise BadRequest("Unknown site type '%s' for site %s" % (site_type, site_id))
from ion.util.extresource import strip_resource_extension, get_matchers, matcher_DataProduct, matcher_DeviceModel, \
matcher_Device, matcher_UserInfo
matchers = get_matchers([matcher_DataProduct, matcher_DeviceModel, matcher_Device, matcher_UserInfo])
strip_resource_extension(site_extension, matchers=matchers)
return site_extension
# TODO: Redundant, remove operation and use get_site_extension
def get_observatory_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
return self.get_site_extension(site_id, ext_associations, ext_exclude, user_id)
# TODO: Redundant, remove operation and use get_site_extension
def get_platform_station_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
return self.get_site_extension(site_id, ext_associations, ext_exclude, user_id)
# TODO: Redundant, remove operation and use get_site_extension
def get_platform_assembly_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
return self.get_site_extension(site_id, ext_associations, ext_exclude, user_id)
# TODO: Redundant, remove operation and use get_site_extension
def get_platform_component_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
return self.get_site_extension(site_id, ext_associations, ext_exclude, user_id)
# TODO: Redundant, remove operation and use get_site_extension
def get_instrument_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
return self.get_site_extension(site_id, ext_associations, ext_exclude, user_id)
def _get_site_device(self, site_id, device_relations):
site_devices = [tup[1] for tup in device_relations.get(site_id, []) if tup[2] in (RT.InstrumentDevice, RT.PlatformDevice)]
if len(site_devices) > 1:
log.error("Inconsistent: Site %s has multiple devices: %s", site_id, site_devices)
if not site_devices:
return None
return site_devices[0]
def _get_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Returns a site extension object containing common information, plus some helper objects
@param site_id str
@param ext_associations dict
@param ext_exclude list
@retval TBD
@throws BadRequest A parameter is missing
@throws NotFound An object with the specified observatory_id does not exist
"""
try:
if not site_id:
raise BadRequest("The site_id parameter is empty")
extended_resource_handler = ExtendedResourceContainer(self)
extended_site = extended_resource_handler.create_extended_resource_container(
extended_resource_type=OT.SiteExtension,
resource_id=site_id,
computed_resource_type=OT.SiteComputedAttributes,
ext_associations=ext_associations,
ext_exclude=ext_exclude,
user_id=user_id)
RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)
outil = ObservatoryUtil(self, enhanced_rr=RR2, device_status_mgr=DeviceStateManager())
# Find all subsites and devices
site_resources, site_children = outil.get_child_sites(parent_site_id=site_id, include_parents=False, id_only=False)
site_ids = site_resources.keys() + [site_id] # IDs of this site and all child sites
device_relations = outil.get_device_relations(site_ids)
# Set parent immediate child sites
parent_site_ids = [a.s for a in RR2.filter_cached_associations(PRED.hasSite, lambda a: a.p == PRED.hasSite and a.o == site_id)]
if parent_site_ids:
extended_site.parent_site = RR2.read(parent_site_ids[0])
else:
extended_site.parent_site = None
extended_site.sites = [site_resources[ch_id] for ch_id in site_children[site_id]] if site_children.get(site_id, None) is not None else []
# Set all nested child devices, remove any dups
instrument_device_ids = list( set( [tup[1] for (parent,dlst) in device_relations.iteritems() for tup in dlst if tup[2] == RT.InstrumentDevice] ) )
platform_device_ids = list( set( [tup[1] for (parent,dlst) in device_relations.iteritems() for tup in dlst if tup[2] == RT.PlatformDevice] ) )
device_ids = list(set(instrument_device_ids + platform_device_ids))
device_objs = self.RR2.read_mult(device_ids)
devices_by_id = dict(zip(device_ids, device_objs))
extended_site.instrument_devices = [devices_by_id[did] for did in instrument_device_ids]
extended_site.platform_devices = [devices_by_id[did] for did in platform_device_ids]
# Set primary device at immediate child sites
extended_site.sites_devices = []
for ch_site in extended_site.sites:
device_id = self._get_site_device(ch_site._id, device_relations)
extended_site.sites_devices.append(devices_by_id.get(device_id, None))
extended_site.portal_instruments = extended_site.sites_devices # ALIAS
# Set deployments
RR2.cache_predicate(PRED.hasDeployment)
deployment_assocs = RR2.filter_cached_associations(PRED.hasDeployment, lambda a: a.s in site_ids)
deployment_ids = [a.o for a in deployment_assocs]
deployment_objs = RR2.read_mult(list(set(deployment_ids)))
extended_site.deployments = deployment_objs
# Get current active deployment. May be site or parent sites
dep_util = DeploymentUtil(self.container)
extended_site.deployment = dep_util.get_active_deployment(site_id, is_site=True, rr2=RR2)
# Set data products
RR2.cache_predicate(PRED.hasSource)
dataproduct_assocs = RR2.filter_cached_associations(PRED.hasSource, lambda a: a.o in site_ids)
dataproduct_ids = [a.s for a in dataproduct_assocs]
dataproduct_objs = RR2.read_mult(list(set(dataproduct_ids)))
extended_site.data_products = dataproduct_objs
log.debug("Building list of model objs")
# Build a lookup for device models via hasModel predicates.
# lookup is a 2d associative array of [subject type][subject id] -> object id
RR2.cache_predicate(PRED.hasModel)
lookup = {rt : {} for rt in [RT.InstrumentDevice, RT.PlatformDevice]}
for a in RR2.filter_cached_associations(PRED.hasModel, lambda assn: assn.st in lookup):
lookup[a.st][a.s] = a.o
def retrieve_model_objs(rsrc_list, object_type):
# rsrc_list is devices that need models looked up; object_type is the device resource type.
# Not all devices have models (represented as None), which breaks read_mult, so extract
# the model ids, read them all at once, then rebuild the output in the original order.
model_list = [lookup[object_type].get(r._id) for r in rsrc_list]
model_uniq = list(set([m for m in model_list if m is not None]))
model_objs = self.RR2.read_mult(model_uniq)
model_dict = dict(zip(model_uniq, model_objs))
return [model_dict.get(m) for m in model_list]
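# Worked example (hypothetical ids): with lookup[RT.InstrumentDevice] == {'d1': 'm1'}
# and rsrc_list holding devices d1 and d2, model_list is ['m1', None]; only 'm1' is
# read from the registry and the result is [<m1 object>, None], preserving order.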
extended_site.instrument_models = retrieve_model_objs(extended_site.instrument_devices, RT.InstrumentDevice)
extended_site.platform_models = retrieve_model_objs(extended_site.platform_devices, RT.PlatformDevice)
primary_device_id = self._get_site_device(site_id, device_relations)
# Filtered subsites by type/alt type
def fs(resource_type, filter_fn):
both = lambda s: ((resource_type == s._get_type()) and filter_fn(s))
return filter(both, site_resources.values())
extended_site.platform_station_sites = fs(RT.PlatformSite, lambda s: s.alt_resource_type == "StationSite")
extended_site.platform_component_sites = fs(RT.PlatformSite, lambda s: s.alt_resource_type == "PlatformComponentSite")
extended_site.platform_assembly_sites = fs(RT.PlatformSite, lambda s: s.alt_resource_type == "PlatformAssemblySite")
extended_site.instrument_sites = fs(RT.InstrumentSite, lambda _: True)
context = dict(
extended_site=extended_site,
enhanced_RR=RR2,
site_device_id=primary_device_id,
site_resources=site_resources,
site_children=site_children,
device_relations=device_relations,
outil=outil
)
return context
except:
log.error('_get_site_extension failed', exc_info=True)
raise
def _get_platform_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Creates a SiteExtension and status for platforms and higher level sites"""
log.debug("_get_platform_site_extension")
context = self._get_site_extension(site_id, ext_associations, ext_exclude, user_id)
extended_site, RR2, platform_device_id, site_resources, site_children, device_relations, outil = \
context["extended_site"], context["enhanced_RR"], context["site_device_id"], \
context["site_resources"], context["site_children"], context["device_relations"], context["outil"]
statuses = outil.get_status_roll_ups(site_id, include_structure=True)
portal_status = []
if extended_site.portal_instruments:
for x in extended_site.portal_instruments:
if x:
portal_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
portal_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.portal_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=portal_status)
else:
extended_site.computed.portal_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
site_status = []
if extended_site.sites:
for x in extended_site.sites:
if x:
site_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
site_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.site_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=site_status)
else:
extended_site.computed.site_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
# create the list of station status from the overall status list
subset_status = []
for site in extended_site.platform_station_sites:
if not extended_site.sites.count(site):
log.error("Platform Site does not exist in the full list of sites. id: %s", site._id)
continue  # skip the missing site rather than aborting the remaining station list
idx = extended_site.sites.index(site)
subset_status.append(site_status[idx])
extended_site.computed.station_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=subset_status)
inst_status = []
if extended_site.instrument_devices:
for x in extended_site.instrument_devices:
if x:
inst_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
inst_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=inst_status)
else:
extended_site.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
plat_status = []
if extended_site.platform_devices:
for x in extended_site.platform_devices:
if x:
plat_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
plat_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.platform_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=plat_status)
else:
extended_site.computed.platform_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
comms_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_COMMS,DeviceStatusType.STATUS_UNKNOWN)
power_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_POWER,DeviceStatusType.STATUS_UNKNOWN)
data_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_DATA,DeviceStatusType.STATUS_UNKNOWN)
location_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_LOCATION,DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.communications_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=comms_rollup)
extended_site.computed.data_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=data_rollup)
extended_site.computed.location_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=location_rollup)
extended_site.computed.power_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=power_rollup)
dep_util = DeploymentUtil(self.container)
extended_site.deployment_info = dep_util.describe_deployments(extended_site.deployments,
status_map=statuses)
return extended_site
def _get_instrument_site_extension(self, site_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Creates a SiteExtension and status for instruments"""
context = self._get_site_extension(site_id, ext_associations, ext_exclude, user_id)
extended_site, RR2, inst_device_id, site_resources, site_children, device_relations, outil = \
context["extended_site"], context["enhanced_RR"], context["site_device_id"], \
context["site_resources"], context["site_children"], context["device_relations"], context["outil"]
statuses = outil.get_status_roll_ups(site_id, include_structure=True)
comms_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_COMMS,DeviceStatusType.STATUS_UNKNOWN)
power_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_POWER,DeviceStatusType.STATUS_UNKNOWN)
data_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_DATA,DeviceStatusType.STATUS_UNKNOWN)
location_rollup = statuses.get(site_id,{}).get(AggregateStatusType.AGGREGATE_LOCATION,DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.communications_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=comms_rollup)
extended_site.computed.data_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=data_rollup)
extended_site.computed.location_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=location_rollup)
extended_site.computed.power_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=power_rollup)
instrument_status = []
if extended_site.instrument_devices:
for x in extended_site.instrument_devices:
if x:
instrument_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
instrument_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_site.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=instrument_status)
else:
extended_site.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
extended_site.computed.platform_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=[])
extended_site.computed.site_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=[])
extended_site.computed.portal_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=[])
dep_util = DeploymentUtil(self.container)
extended_site.deployment_info = dep_util.describe_deployments(extended_site.deployments,
status_map=statuses)
return extended_site
def get_deployment_extension(self, deployment_id='', ext_associations=None, ext_exclude=None, user_id=''):
if not deployment_id:
raise BadRequest("The deployment_id parameter is empty")
extended_resource_handler = ExtendedResourceContainer(self)
extended_deployment = extended_resource_handler.create_extended_resource_container(
extended_resource_type=OT.DeploymentExtension,
resource_id=deployment_id,
computed_resource_type=OT.DeploymentComputedAttributes,
ext_associations=ext_associations,
ext_exclude=ext_exclude,
user_id=user_id)
if not extended_deployment.device or not extended_deployment.site \
or not hasattr(extended_deployment.device, '_id') \
or not hasattr(extended_deployment.site, '_id'):
return extended_deployment
#raise Inconsistent('deployment %s should be associated with a device and a site' % deployment_id)
log.debug('have device: %r\nand site: %r', extended_deployment.device.__dict__, extended_deployment.site.__dict__)
RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)
finder = RelatedResourcesCrawler()
get_assns = finder.generate_related_resources_partial(RR2, [PRED.hasDevice])
# search from PlatformDevice to subplatform or InstrumentDevice
search_down = get_assns({PRED.hasDevice: (True, False)}, [RT.InstrumentDevice, RT.PlatformDevice])
# collect ids of devices below deployment target
platform_device_ids = set()
instrument_device_ids = set()
# make sure main device in deployment is in the list
if extended_deployment.device.type_==RT.InstrumentDevice:
instrument_device_ids.add(extended_deployment.device._id)
else:
platform_device_ids.add(extended_deployment.device._id)
for a in search_down(extended_deployment.device._id, -1):
if a.o != extended_deployment.device._id:
if a.ot == RT.InstrumentDevice:
instrument_device_ids.add(a.o)
else: # a.ot == RT.PlatformDevice:
platform_device_ids.add(a.o)
# get sites (portals)
extended_deployment.computed.portals = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=[])
subsite_ids = set()
device_by_site = { extended_deployment.site._id: extended_deployment.device._id }
for did in platform_device_ids:
related_sites = RR2.find_platform_site_ids_by_platform_device_using_has_device(did)
for sid in related_sites:
subsite_ids.add(sid)
device_by_site[sid] = did
for did in instrument_device_ids:
related_sites = RR2.find_instrument_site_ids_by_instrument_device_using_has_device(did)
for sid in related_sites:
subsite_ids.add(sid)
device_by_site[sid] = did
# sort the objects into the lists to be displayed
ids = list(platform_device_ids|instrument_device_ids|subsite_ids)
device_by_id = { extended_deployment.device._id: extended_deployment.device }
objs = self.RR.read_mult(ids)
for obj in objs:
if obj.type_==RT.InstrumentDevice:
extended_deployment.instrument_devices.append(obj)
elif obj.type_==RT.PlatformDevice:
extended_deployment.platform_devices.append(obj)
else: # InstrumentSite or PlatformSite
extended_deployment.computed.portals.value.append(obj)
# get associated models for all devices
devices = list(platform_device_ids|instrument_device_ids)
assocs = self.RR.find_associations(anyside=list(devices), id_only=False)
## WORKAROUND: find_associations doesn't support anyside + predicate,
# so fetch with anyside and filter for the predicate here
assocs = [a for a in assocs if a.p == PRED.hasModel]
## end workaround
model_id_by_device = { a.s: a.o for a in assocs }
model_ids = set( [ a.o for a in assocs ])
models = self.RR.read_mult( list(model_ids) )
model_by_id = { o._id: o for o in models }
extended_deployment.instrument_models = [ model_by_id[model_id_by_device[d._id]] for d in extended_deployment.instrument_devices ]
extended_deployment.platform_models = [ model_by_id[model_id_by_device[d._id]] for d in extended_deployment.platform_devices ]
for p in extended_deployment.computed.portals.value:
if p._id in device_by_site and device_by_site[p._id] in device_by_id:
extended_deployment.portal_instruments.append( device_by_id[device_by_site[p._id]] )
# TODO -- all status values
#
#status: !ComputedIntValue
## combined list of sites and their status
##@ResourceType=InstrumentSite,PlatformSite
#portal_status: !ComputedListValue
## status of device lists
#instrument_status: !ComputedListValue
#platform_status: !ComputedListValue
from ion.util.extresource import strip_resource_extension, get_matchers, matcher_DataProduct, matcher_DeviceModel, \
matcher_Device, matcher_UserInfo
matchers = get_matchers([matcher_DataProduct, matcher_DeviceModel, matcher_Device, matcher_UserInfo])
strip_resource_extension(extended_deployment, matchers=matchers)
return extended_deployment
#-----------------------------------------------
# COMPUTED RESOURCES
#-----------------------------------------------
def get_marine_facility_extension(self, org_id='', ext_associations=None, ext_exclude=None, user_id=''):
"""Returns an MarineFacilityOrgExtension object containing additional related information
@param org_id str
@param ext_associations dict
@param ext_exclude list
@retval observatory ObservatoryExtension
@throws BadRequest A parameter is missing
@throws NotFound An object with the specified observatory_id does not exist
"""
if not org_id:
raise BadRequest("The org_id parameter is empty")
extended_resource_handler = ExtendedResourceContainer(self)
extended_org = extended_resource_handler.create_extended_resource_container(
extended_resource_type=OT.MarineFacilityOrgExtension,
resource_id=org_id,
computed_resource_type=OT.MarineFacilityOrgComputedAttributes,
ext_associations=ext_associations,
ext_exclude=ext_exclude,
user_id=user_id,
negotiation_status=NegotiationStatusEnum.OPEN)
RR2 = EnhancedResourceRegistryClient(self.RR)
RR2.cache_predicate(PRED.hasModel)
RR2.cache_predicate(PRED.hasDevice)
outil = ObservatoryUtil(self, enhanced_rr=RR2, device_status_mgr=DeviceStateManager())
#Fill out service request information for requesting data products
extended_org.data_products_request.service_name = 'resource_registry'
extended_org.data_products_request.service_operation = 'find_objects'
extended_org.data_products_request.request_parameters = {
'subject': org_id,
'predicate': 'hasResource',
'object_type': 'DataProduct',
'id_only': False,
'limit': 10,
'skip': 0
}
#Fill out service request information for requesting marine tracking resources - Assets
extended_org.assets_request.service_name = 'resource_registry'
extended_org.assets_request.service_operation = 'find_objects'
extended_org.assets_request.request_parameters = {
'subject': org_id,
'predicate': 'hasResource',
'object_type': 'Asset',
'id_only': False,
'limit': 10,
'skip': 0
}
#Fill out service request information for requesting marine tracking resources - AssetTypes
extended_org.asset_types_request.service_name = 'resource_registry'
extended_org.asset_types_request.service_operation = 'find_objects'
extended_org.asset_types_request.request_parameters = {
'subject': org_id,
'predicate': 'hasResource',
'object_type': 'AssetType',
'id_only': False,
'limit': 10,
'skip': 0
}
#Fill out service request information for requesting marine tracking resources - EventDuration
extended_org.event_durations_request.service_name = 'resource_registry'
extended_org.event_durations_request.service_operation = 'find_objects'
extended_org.event_durations_request.request_parameters = {
'subject': org_id,
'predicate': 'hasResource',
'object_type': 'EventDuration',
'id_only': False,
'limit': 10,
'skip': 0
}
#Fill out service request information for requesting marine tracking resources - EventDurationTypes
extended_org.event_duration_types_request.service_name = 'resource_registry'
extended_org.event_duration_types_request.service_operation = 'find_objects'
extended_org.event_duration_types_request.request_parameters = {
'subject': org_id,
'predicate': 'hasResource',
'object_type': 'EventDurationType',
'id_only': False,
'limit': 10,
'skip': 0
}
# extended object contains list of member ActorIdentity, so need to change to user info
rr_util = ResourceRegistryUtil(self.container)
extended_org.members = rr_util.get_actor_users(extended_org.members)
#Convert Negotiations to OrgUserNegotiationRequest
extended_org.open_requests = self._convert_negotiations_to_requests(extended_org, extended_org.open_requests)
extended_org.closed_requests = self._convert_negotiations_to_requests(extended_org, extended_org.closed_requests)
# lookup all hasModel predicates
# lookup is a 2d associative array of [subject type][subject id] -> object id (model)
lookup = dict([(rt, {}) for rt in [RT.InstrumentDevice, RT.PlatformDevice]])
for a in RR2.filter_cached_associations(PRED.hasModel, lambda assn: assn.st in lookup):
if a.st in lookup:
lookup[a.st][a.s] = a.o
def retrieve_model_objs(rsrc_list, object_type):
# rsrc_list is devices that need models looked up; object_type is the device resource type.
# Not all devices have models (represented as None), which breaks read_mult, so extract
# the model ids, read them all at once, then rebuild the output in the original order.
model_list = [lookup[object_type].get(r._id) for r in rsrc_list]
model_uniq = list(set([m for m in model_list if m is not None]))
model_objs = self.clients.resource_registry.read_mult(model_uniq)
model_dict = dict(zip(model_uniq, model_objs))
return [model_dict.get(m) for m in model_list]
extended_org.instrument_models = retrieve_model_objs(extended_org.instruments, RT.InstrumentDevice)
extended_org.platform_models = retrieve_model_objs(extended_org.platforms, RT.PlatformDevice)
statuses = outil.get_status_roll_ups(org_id, include_structure=True)
site_status = []
if extended_org.sites:
for x in extended_org.sites:
if x:
site_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
site_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_org.computed.site_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=site_status)
else:
extended_org.computed.site_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
inst_status = []
if extended_org.instruments:
for x in extended_org.instruments:
if x:
inst_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
inst_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_org.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=inst_status)
else:
extended_org.computed.instrument_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
plat_status = []
if extended_org.platforms:
for x in extended_org.platforms:
if x:
plat_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
plat_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_org.computed.platform_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=plat_status)
else:
extended_org.computed.platform_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
extended_org.station_sites = [site for site in extended_org.station_sites
if site.alt_resource_type == 'StationSite']
station_status = []
if extended_org.station_sites:
for x in extended_org.station_sites:
if x:
station_status.append(statuses.get(x._id,{}).get("agg", DeviceStatusType.STATUS_UNKNOWN))
else:
station_status.append(DeviceStatusType.STATUS_UNKNOWN)
extended_org.computed.station_status = ComputedListValue(status=ComputedValueAvailability.PROVIDED, value=station_status)
else:
extended_org.computed.station_status = ComputedListValue(status=ComputedValueAvailability.NOTAVAILABLE)
comms_rollup = statuses.get(org_id,{}).get(AggregateStatusType.AGGREGATE_COMMS,DeviceStatusType.STATUS_UNKNOWN)
power_rollup = statuses.get(org_id,{}).get(AggregateStatusType.AGGREGATE_POWER,DeviceStatusType.STATUS_UNKNOWN)
data_rollup = statuses.get(org_id,{}).get(AggregateStatusType.AGGREGATE_DATA,DeviceStatusType.STATUS_UNKNOWN)
location_rollup = statuses.get(org_id,{}).get(AggregateStatusType.AGGREGATE_LOCATION,DeviceStatusType.STATUS_UNKNOWN)
extended_org.computed.communications_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=comms_rollup)
extended_org.computed.data_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=data_rollup)
extended_org.computed.location_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=location_rollup)
extended_org.computed.power_status_roll_up = ComputedIntValue(status=ComputedValueAvailability.PROVIDED, value=power_rollup)
dep_util = DeploymentUtil(self.container)
extended_org.deployment_info = dep_util.describe_deployments(extended_org.deployments,
status_map=statuses)
from ion.util.extresource import strip_resource_extension, get_matchers, matcher_DataProduct, matcher_DeviceModel, \
matcher_Device, matcher_UserInfo, matcher_MarineAsset
matchers = get_matchers([matcher_DataProduct, matcher_DeviceModel, matcher_Device, matcher_UserInfo, matcher_MarineAsset])
strip_resource_extension(extended_org, matchers=matchers)
return extended_org
def _get_root_platforms(self, RR2, platform_device_list):
# get all relevant association objects
filter_fn = lambda a: a.o in platform_device_list
# get child -> parent dict
lookup = dict([(a.o, a.s) for a in RR2.filter_cached_associations(PRED.hasDevice, filter_fn)])
# root platforms have no parent, or a parent that's not in our list
return [r for r in platform_device_list if (r not in lookup or (lookup[r] not in platform_device_list))]
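# Worked example (hypothetical ids): with hasDevice associations p1->p2 and p1->p3,
# lookup == {'p2': 'p1', 'p3': 'p1'}, so from ['p1', 'p2', 'p3'] only 'p1' has no
# parent in the list and is returned as a root platform.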
# return a table of device statuses for all given device ids
def _get_master_status_table(self, RR2, site_tree_ids):
platformdevice_tree_ids = []
for s in site_tree_ids:
platformdevice_tree_ids += RR2.find_objects(s, PRED.hasDevice, RT.PlatformDevice, True)
plat_roots = self._get_root_platforms(RR2, platformdevice_tree_ids)
# build id -> aggstatus lookup table
master_status_table = {}
for plat_root_id in plat_roots:
agg_status, _ = self.agent_status_builder.get_cumulative_status_dict(plat_root_id)
if agg_status is None:
log.warn("Can't get agg status for platform %s, ignoring", plat_root_id)
else:
for k, v in agg_status.iteritems():
master_status_table[k] = v
return master_status_table
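# Shape of the returned table (illustrative, ids hypothetical):
# {'device_1': {AggregateStatusType.AGGREGATE_COMMS: DeviceStatusType.STATUS_OK, ...}, ...}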
# based on ALL the site ids in this tree, return a site rollup list corresponding to each site in the site_id_list
def _get_site_rollup_list(self, RR2, master_status_table, site_id_list):
# get rollup for each site
master_status_rollup_list = []
for s in site_id_list:
#_, underlings = self.outil.get_child_sites(parent_site_id=s, id_only=True)
master_status_rollup_list.append(self.agent_status_builder._crush_status_dict(
self._get_site_rollup_dict(RR2, master_status_table, s)))
return master_status_rollup_list
# based on the master status table, return a rollup dict of aggregate statuses for the given site (including all of its child sites)
def _get_site_rollup_dict(self, RR2, master_status_table, site_id):
outil = ObservatoryUtil(self, enhanced_rr=RR2)
attr1, underlings = outil.get_child_sites(parent_site_id=site_id, id_only=True)
def collect_all_children(site_id, child_site_struct, child_list):
#walk the tree of site children and put all site ids (all the way down the hierarchy) into one list
children = child_site_struct.get(site_id, [])
for child in children:
child_list.append(child)
#see if this child has children
more_children = child_site_struct.get(child, [])
if more_children:
collect_all_children(child, child_site_struct, child_list)
log.debug('collect_all_children child_list: %s', child_list)
child_list = list( set(child_list ) )
return child_list
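# Worked example (hypothetical ids): with child_site_struct == {'s1': ['s2'], 's2': ['s3']},
# collect_all_children('s1', child_site_struct, ['s1']) returns ['s1', 's2', 's3']
# (order may vary because of the set() dedup).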
all_site_ids = collect_all_children(site_id, underlings, [site_id])
site_aggregate = {}
#all_site_ids = underlings.keys()
all_device_ids = []
for s in all_site_ids:
all_device_ids += RR2.find_objects(s, PRED.hasDevice, RT.PlatformDevice, True)
all_device_ids += RR2.find_objects(s, PRED.hasDevice, RT.InstrumentDevice, True)
log.debug("Calculating cumulative rollup values for all_device_ids = %s", all_device_ids)
for k, v in AggregateStatusType._str_map.iteritems():
aggtype_list = [master_status_table.get(d, {}).get(k, DeviceStatusType.STATUS_UNKNOWN) for d in all_device_ids]
log.debug("aggtype_list for %s is %s", v, zip(all_device_ids, aggtype_list))
site_aggregate[k] = self.agent_status_builder._crush_status_list(aggtype_list)
return site_aggregate
def _get_platform_rollup_list(self, RR2, master_status_table, platform_id_list):
finder = RelatedResourcesCrawler()
get_assns = finder.generate_related_resources_partial(RR2, [PRED.hasDevice])
full_crawllist = [RT.InstrumentDevice, RT.PlatformDevice]
search_down = get_assns({PRED.hasDevice: (True, False)}, full_crawllist)
# get rollup for each platform device
master_status_rollup_list = []
for p in platform_id_list:
# the searches return a list of association objects, so compile all the ids by extracting them
underlings = set([])
# we want only those IDs that are not the input resource id
for a in search_down(p, -1):
underlings.add(a.o)
underlings.add(p)
master_status_rollup_list.append(self.agent_status_builder._crush_status_list(
[self.agent_status_builder._crush_status_dict(master_status_table.get(k, {})) for k in underlings]
))
return master_status_rollup_list
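# Worked example (hypothetical ids): for platform 'p' whose crawl finds children 'i1'
# and 'i2', underlings == {'p', 'i1', 'i2'}; each member's status dict is crushed to a
# single value and those values are crushed again into one rollup entry for 'p'.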
def _convert_negotiations_to_requests(self, extended_marine_facility=None, negotiations=None):
assert isinstance(extended_marine_facility, MarineFacilityOrgExtension)
assert isinstance(negotiations, list)
#Get all associations for user info
assoc_list = self.clients.resource_registry.find_associations(predicate=PRED.hasInfo, id_only=False)
ret_list = []
followup_list = defaultdict(list)
for neg in negotiations:
request = IonObject(OT.OrgUserNegotiationRequest, ts_updated=neg.ts_updated, negotiation_id=neg._id,
negotiation_type=NegotiationTypeEnum._str_map[neg.negotiation_type],
negotiation_status=NegotiationStatusEnum._str_map[neg.negotiation_status],
originator=ProposalOriginatorEnum._str_map[neg.proposals[-1].originator],
request_type=neg.proposals[-1].type_,
description=neg.description, reason=neg.reason,
org_id=neg.proposals[-1].provider)
# since this is a proxy for the Negotiation object, simulate its id to help the UI deal with it
request._id = neg._id
actor_assoc = [ a for a in assoc_list if a.s == neg.proposals[-1].consumer ]
if actor_assoc:
member_assoc = [ m for m in extended_marine_facility.members if m._id == actor_assoc[0].o ]
if member_assoc:
request.user_id = member_assoc[0]._id
request.name = member_assoc[0].name
else:
followup_list[actor_assoc[0].o].append(request)
ret_list.append(request)
# assign names/user_ids to any requests that weren't in the members list, likely enroll requests
if len(followup_list):
user_infos = self.clients.resource_registry.read_mult(followup_list.keys())
udict = {}
for u in user_infos:
udict[u._id] = u
for k, v in followup_list.iteritems():
for request in v:
request.user_id = k
request.name = udict[k].name
return ret_list
def check_deployment_activation_policy(self, process, message, headers):
try:
gov_values = GovernanceHeaderValues(headers=headers, process=process, resource_id_required=False)
except Inconsistent, ex:
return False, ex.message
resource_id = message.get("deployment_id", None)
if not resource_id:
return False, '%s(%s) has been denied - no deployment_id argument provided' % (process.name, gov_values.op)
# Allow actor to activate/deactivate deployment in an org where the actor has the appropriate role
orgs,_ = self.clients.resource_registry.find_subjects(subject_type=RT.Org, predicate=PRED.hasResource, object=resource_id, id_only=False)
for org in orgs:
if (has_org_role(gov_values.actor_roles, org.org_governance_name, [ORG_MANAGER_ROLE, OBSERVATORY_OPERATOR])):
log.error("returning true: "+str(gov_values.actor_roles))
return True, ''
return False, '%s(%s) has been denied since the user does not have an appropriate role in any org to which the deployment id %s belongs' % (process.name, gov_values.op, resource_id)
| 50.097403 | 194 | 0.666514 |
e7cedf4188ce9ca58e72fa7f32495b90fd4d3032 | 13,448 | py | Python |
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_fc.py | traghavendra/cinder-train | 49af592c61da3216c04f5771b8ebf0927c5ce1c8 | ["Apache-2.0"] | null | null | null |
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_fc.py | traghavendra/cinder-train | 49af592c61da3216c04f5771b8ebf0927c5ce1c8 | ["Apache-2.0"] | 28 | 2017-08-17T14:46:05.000Z | 2022-03-29T12:42:12.000Z |
cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_fc.py | alokchandra11/cinder | 121d9f512b4a6d1afe6a690effb7c2b379040a7b | ["Apache-2.0"] | 3 | 2017-04-27T16:11:40.000Z | 2020-02-12T21:27:00.000Z |
# Copyright (c) 2017-2019 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import test
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_data as tpd)
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_fake_objects as tpfo)
from cinder.volume.drivers.dell_emc.powermax import common
from cinder.volume.drivers.dell_emc.powermax import fc
from cinder.volume.drivers.dell_emc.powermax import rest
from cinder.volume import volume_utils
from cinder.zonemanager import utils as fczm_utils
class PowerMaxFCTest(test.TestCase):
def setUp(self):
self.data = tpd.PowerMaxData()
super(PowerMaxFCTest, self).setUp()
volume_utils.get_max_over_subscription_ratio = mock.Mock()
self.configuration = tpfo.FakeConfiguration(
None, 'FCTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='smc',
san_api_port=8443, vmax_port_groups=[self.data.port_group_name_i])
rest.PowerMaxRest._establish_rest_session = mock.Mock(
return_value=tpfo.FakeRequestsSession())
driver = fc.PowerMaxFCDriver(configuration=self.configuration)
self.driver = driver
self.common = self.driver.common
self.masking = self.common.masking
self.utils = self.common.utils
self.utils.get_volumetype_extra_specs = (
mock.Mock(return_value=self.data.vol_type_extra_specs))
def test_create_volume(self):
with mock.patch.object(self.common, 'create_volume') as mock_create:
self.driver.create_volume(self.data.test_volume)
mock_create.assert_called_once_with(self.data.test_volume)
def test_create_volume_from_snapshot(self):
volume = self.data.test_clone_volume
snapshot = self.data.test_snapshot
with mock.patch.object(
self.common, 'create_volume_from_snapshot') as mock_create:
self.driver.create_volume_from_snapshot(volume, snapshot)
mock_create.assert_called_once_with(volume, snapshot)
def test_create_cloned_volume(self):
volume = self.data.test_clone_volume
src_volume = self.data.test_volume
with mock.patch.object(
self.common, 'create_cloned_volume') as mock_create:
self.driver.create_cloned_volume(volume, src_volume)
mock_create.assert_called_once_with(volume, src_volume)
def test_delete_volume(self):
with mock.patch.object(self.common, 'delete_volume') as mock_delete:
self.driver.delete_volume(self.data.test_volume)
mock_delete.assert_called_once_with(self.data.test_volume)
def test_create_snapshot(self):
with mock.patch.object(self.common, 'create_snapshot') as mock_create:
self.driver.create_snapshot(self.data.test_snapshot)
mock_create.assert_called_once_with(
self.data.test_snapshot, self.data.test_snapshot.volume)
def test_delete_snapshot(self):
with mock.patch.object(self.common, 'delete_snapshot') as mock_delete:
self.driver.delete_snapshot(self.data.test_snapshot)
mock_delete.assert_called_once_with(
self.data.test_snapshot, self.data.test_snapshot.volume)
def test_initialize_connection(self):
with mock.patch.object(
self.common, 'initialize_connection',
return_value=self.data.fc_device_info) as mock_initialize:
with mock.patch.object(
self.driver, 'populate_data') as mock_populate:
self.driver.initialize_connection(
self.data.test_volume, self.data.connector)
mock_initialize.assert_called_once_with(
self.data.test_volume, self.data.connector)
mock_populate.assert_called_once_with(
self.data.fc_device_info, self.data.test_volume,
self.data.connector)
def test_populate_data(self):
with mock.patch.object(self.driver, '_build_initiator_target_map',
return_value=([], {})) as mock_build:
ref_data = {
'driver_volume_type': 'fibre_channel',
'data': {'target_lun': self.data.fc_device_info['hostlunid'],
'target_discovered': True,
'target_wwn': [],
'initiator_target_map': {}}}
data = self.driver.populate_data(self.data.fc_device_info,
self.data.test_volume,
self.data.connector)
self.assertEqual(ref_data, data)
mock_build.assert_called_once_with(
self.data.test_volume, self.data.connector)
def test_terminate_connection(self):
with mock.patch.object(
self.common, 'terminate_connection') as mock_terminate:
self.driver.terminate_connection(
self.data.test_volume, self.data.connector)
mock_terminate.assert_called_once_with(
self.data.test_volume, self.data.connector)
def test_terminate_connection_no_zoning_mappings(self):
with mock.patch.object(self.driver, '_get_zoning_mappings',
return_value=None):
with mock.patch.object(
self.common, 'terminate_connection') as mock_terminate:
self.driver.terminate_connection(self.data.test_volume,
self.data.connector)
mock_terminate.assert_not_called()
def test_get_zoning_mappings(self):
ref_mappings = self.data.zoning_mappings
zoning_mappings = self.driver._get_zoning_mappings(
self.data.test_volume, self.data.connector)
self.assertEqual(ref_mappings, zoning_mappings)
# Legacy vol
zoning_mappings2 = self.driver._get_zoning_mappings(
self.data.test_legacy_vol, self.data.connector)
self.assertEqual(ref_mappings, zoning_mappings2)
def test_get_zoning_mappings_no_mv(self):
with mock.patch.object(self.common, 'get_masking_views_from_volume',
return_value=(None, False)):
zoning_mappings = self.driver._get_zoning_mappings(
self.data.test_volume, self.data.connector)
self.assertEqual({}, zoning_mappings)
@mock.patch.object(
common.PowerMaxCommon, 'get_masking_views_from_volume',
return_value=([tpd.PowerMaxData.masking_view_name_f], True))
def test_get_zoning_mappings_metro(self, mock_mv):
ref_mappings = self.data.zoning_mappings_metro
zoning_mappings = self.driver._get_zoning_mappings(
self.data.test_volume, self.data.connector)
self.assertEqual(ref_mappings, zoning_mappings)
def test_cleanup_zones_other_vols_mapped(self):
ref_data = {'driver_volume_type': 'fibre_channel',
'data': {}}
data = self.driver._cleanup_zones(self.data.zoning_mappings)
self.assertEqual(ref_data, data)
def test_cleanup_zones_no_vols_mapped(self):
zoning_mappings = self.data.zoning_mappings
ref_data = {'driver_volume_type': 'fibre_channel',
'data': {'target_wwn': zoning_mappings['target_wwns'],
'initiator_target_map':
zoning_mappings['init_targ_map']}}
with mock.patch.object(self.common, 'get_common_masking_views',
return_value=[]):
data = self.driver._cleanup_zones(self.data.zoning_mappings)
self.assertEqual(ref_data, data)
def test_build_initiator_target_map(self):
ref_target_map = {'123456789012345': ['543210987654321'],
'123456789054321': ['123450987654321']}
with mock.patch.object(fczm_utils, 'create_lookup_service',
return_value=tpfo.FakeLookupService()):
driver = fc.PowerMaxFCDriver(configuration=self.configuration)
with mock.patch.object(driver.common,
'get_target_wwns_from_masking_view',
return_value=(self.data.target_wwns, [])):
targets, target_map = driver._build_initiator_target_map(
self.data.test_volume, self.data.connector)
self.assertEqual(ref_target_map, target_map)
def test_extend_volume(self):
with mock.patch.object(self.common, 'extend_volume') as mock_extend:
self.driver.extend_volume(self.data.test_volume, '3')
mock_extend.assert_called_once_with(self.data.test_volume, '3')
def test_get_volume_stats(self):
with mock.patch.object(
self.driver, 'update_volume_stats') as mock_update:
# no refresh
self.driver.get_volume_stats()
mock_update.assert_not_called()
# with refresh
self.driver.get_volume_stats(True)
mock_update.assert_called_once_with()
def test_update_volume_stats(self):
with mock.patch.object(self.common, 'update_volume_stats',
return_value={}) as mock_update:
self.driver.update_volume_stats()
mock_update.assert_called_once_with()
def test_check_for_setup_error(self):
self.driver.check_for_setup_error()
def test_ensure_export(self):
self.driver.ensure_export('context', 'volume')
def test_create_export(self):
self.driver.create_export('context', 'volume', 'connector')
def test_remove_export(self):
self.driver.remove_export('context', 'volume')
def test_check_for_export(self):
self.driver.check_for_export('context', 'volume_id')
def test_manage_existing(self):
with mock.patch.object(self.common, 'manage_existing',
return_value={}) as mock_manage:
external_ref = {u'source-name': u'00002'}
self.driver.manage_existing(self.data.test_volume, external_ref)
mock_manage.assert_called_once_with(
self.data.test_volume, external_ref)
def test_manage_existing_get_size(self):
with mock.patch.object(self.common, 'manage_existing_get_size',
return_value='1') as mock_manage:
external_ref = {u'source-name': u'00002'}
self.driver.manage_existing_get_size(
self.data.test_volume, external_ref)
mock_manage.assert_called_once_with(
self.data.test_volume, external_ref)
def test_unmanage_volume(self):
with mock.patch.object(self.common, 'unmanage',
return_value={}) as mock_unmanage:
self.driver.unmanage(self.data.test_volume)
mock_unmanage.assert_called_once_with(
self.data.test_volume)
def test_retype(self):
host = {'host': self.data.new_host}
new_type = {'extra_specs': {}}
with mock.patch.object(self.common, 'retype',
return_value=True) as mck_retype:
self.driver.retype({}, self.data.test_volume, new_type, '', host)
mck_retype.assert_called_once_with(
self.data.test_volume, new_type, host)
def test_failover_host(self):
with mock.patch.object(
self.common, 'failover_host',
return_value=(self.data.remote_array, [], [])) as mock_fo:
self.driver.failover_host(self.data.ctx, [self.data.test_volume])
mock_fo.assert_called_once_with([self.data.test_volume], None,
None)
def test_enable_replication(self):
with mock.patch.object(
self.common, 'enable_replication') as mock_er:
self.driver.enable_replication(
self.data.ctx, self.data.test_group, [self.data.test_volume])
mock_er.assert_called_once()
def test_disable_replication(self):
with mock.patch.object(
self.common, 'disable_replication') as mock_dr:
self.driver.disable_replication(
self.data.ctx, self.data.test_group, [self.data.test_volume])
mock_dr.assert_called_once()
def test_failover_replication(self):
with mock.patch.object(
self.common, 'failover_replication') as mock_fo:
self.driver.failover_replication(
self.data.ctx, self.data.test_group, [self.data.test_volume])
mock_fo.assert_called_once()
| 46.857143 | 78 | 0.643516 |
dedc0a7bd10477c179469843e8d6a3702dbb7a01 | 1,795 | py | Python |
matching_test.py | MichaelChuai/modelzoo | 4744f555784873192168c65e2c10b49b982cb1f1 | ["Apache-2.0"] | null | null | null |
matching_test.py | MichaelChuai/modelzoo | 4744f555784873192168c65e2c10b49b982cb1f1 | ["Apache-2.0"] | null | null | null |
matching_test.py | MichaelChuai/modelzoo | 4744f555784873192168c65e2c10b49b982cb1f1 | ["Apache-2.0"] | null | null | null |
import numpy as np
import torch
import torch.nn as nn  # nn is used in loss_func below; import it explicitly rather than relying on the star import
from meta.matching import *
import dlutil as dl
shots = 2
ways = 5
G = Embedding(1, 10)
context_embedding_network = MatchingNetwork.build_context_embedding_network(10, 64, 1)
network = MatchingNetwork(G, context_embedding_network, ways).cuda(0)
optimizer = torch.optim.Adam(network.parameters())
ckpt = dl.Checkpoint('results/matching/omniglot1', max_to_keep=10, device=0, save_best_only=True, saving_metric='test_acc')
acc = dl.MetricAccuracy(name='acc', device=0)
root = '/data/examples/omniglot'
batch_size = 32
def trans(bxs, bys):
bx = bxs[0]
by = bys[0]
bx = bx.astype(np.float32) / 255.
bx = np.expand_dims(bx, axis=1)
by = np.squeeze(by.astype(np.int64))
classes = sorted(list(set(by.tolist())))
for i, c in enumerate(classes):
by[by==c] = i
inp_x = bx[:ways]
sup_x = bx[ways:]
inp_y = by[:ways]
sup_y = by[ways:]
bxs = [inp_x, sup_x, sup_y]
bys = inp_y
return (bxs, bys)
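# Worked example (hedged, values hypothetical): if by == [7, 7, 3, 3, 9, ...], the sorted
# classes [3, 7, 9] are relabeled in place to [1, 1, 0, 0, 2, ...], i.e. contiguous ids in
# 0..ways-1; the first `ways` samples become the query batch and the rest the support set.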
train_file = f'{root}/omniglot_bg.h5'
dstr = dl.DataReader(train_file, num_workers=5, transform_func=trans)
gntr = dstr.few_shot_reader(batch_size, shots+1, ways)
test_file = f'{root}/omniglot_eval.h5'
dste = dl.DataReader(test_file, num_workers=5, transform_func=trans)
gnte = dste.few_shot_seq_reader(batch_size * 2, shots=shots+1, selected_classes=[0,1,2,3,4])
gnte1 = dste.few_shot_seq_reader(batch_size * 2, shots=shots+1, selected_classes=[5,6,7,8,9])
listeners = [dl.Listener('test', gnte, [acc]), dl.Listener('test1', gnte1, [acc])]
def loss_func(y_, y):
return nn.CrossEntropyLoss()(y_.transpose(-2, -1), y)
dlmodel = dl.DlModel(network, ckpt)
dlmodel.train(gntr, loss_func, optimizer, total_steps=200000, ckpt_steps=100, summ_steps=100, metrics=[acc], listeners=listeners, from_scratch=True)
| 32.053571 | 148 | 0.708635 |
4ac1d035a027da9a558d8a416dfeb227c1134d7b | 4,424 | py | Python |
boise/mnist_multiple_exploration_var_amp_v6.py | nikhil-garg/VDSP_ocl | 906867f8cd8a899a1ce309c5ec843fa1ce865373 | ["MIT"] | null | null | null |
boise/mnist_multiple_exploration_var_amp_v6.py | nikhil-garg/VDSP_ocl | 906867f8cd8a899a1ce309c5ec843fa1ce865373 | ["MIT"] | null | null | null |
boise/mnist_multiple_exploration_var_amp_v6.py | nikhil-garg/VDSP_ocl | 906867f8cd8a899a1ce309c5ec843fa1ce865373 | ["MIT"] | 1 | 2021-03-17T20:04:08.000Z | 2021-03-17T20:04:08.000Z |
import itertools
import random
import logging
import numpy as np
import matplotlib.pyplot as plt
import os
from mnist_vdsp_multiple_var_amp import *
from utilis import *
from args_mnist import args as my_args
# from ax import optimize
import pandas as pd
from itertools import product
import time
if __name__ == '__main__':
args = my_args()
print(args.__dict__)
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
# Fix the seed of all random number generators
seed = 50
random.seed(seed)
np.random.seed(seed)
df = pd.DataFrame({ "vprog":[],
"input_nbr":[],
"tau_in" :[],
"tau_out":[],
"gain_in":[],
"gain_out":[],
"bias_in":[],
"bias_out":[],
"thr_out":[],
"inhibition_time":[],
"lr":[],
"presentation_time":[],
"amp_var":[],
"seed":[],
"accuracy":[],
})
if args.log_file_path is None:
pwd = os.getcwd()
log_dir = pwd+'/log_dir/'
else :
log_dir = args.log_file_path
df.to_csv(log_dir+'test.csv', index=False)
parameters = dict(
vprog = [-0.75]
,input_nbr=[60000]
,tau_in = [0.06]
,tau_out = [0.06]
,gain_in = [2]
,gain_out = [2]
,bias_in = [0]
,bias_out = [0]
,thr_out = [1]
,inhibition_time = [10]
, lr = [0.1]
, presentation_time = [0.35]
, amp_var = [0.8,0.9,1,1.1]
, seed = [700]
)
param_values = [v for v in parameters.values()]
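# product(*param_values) enumerates every combination of the lists above; here every list
# has length 1 except amp_var (4 values), so the sweep below runs 4 configurations.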
now = time.strftime("%Y%m%d-%H%M%S")
folder = os.getcwd()+"/MNIST_VDSP_exploration"+now
os.mkdir(folder)
for args.vprog,args.input_nbr,args.tau_in,args.tau_out,args.gain_in,args.gain_out,args.bias_in,args.bias_out,args.thr_out,args.inhibition_time,args.lr,args.presentation_time,args.amp_var,args.seed in product(*param_values):
# args.filename = 'vprog-'+str(args.vprog)+'-g_max-'+str(args.g_max)+'-tau_in-'+str(args.tau_in)+'-tau_out-'+str(args.tau_out)+'-lr-'+str(args.lr)+'-presentation_time-'+str(args.presentation_time)
timestr = time.strftime("%Y%m%d-%H%M%S")
log_file_name = 'accuracy_log'+str(timestr)+'.csv'
pwd = os.getcwd()
# args.vthn = args.vthp
accuracy, weights = evaluate_mnist_multiple_var_amp(args)
df = df.append({ "vprog":args.vprog,
"input_nbr":args.input_nbr,
"tau_in":args.tau_in,
"tau_out": args.tau_out,
"gain_in":args.gain_in,
"gain_out":args.gain_out,
"bias_in":args.bias_in,
"bias_out":args.bias_out,
"thr_out":args.thr_out,
"inhibition_time":args.inhibition_time,
"lr": args.lr,
"presentation_time":args.presentation_time,
"amp_var":args.amp_var,
"seed":args.seed,
"accuracy":accuracy
},ignore_index=True)
plot = False
if plot :
print('accuracy', accuracy)
print(args.filename)
# weights = weights[-1]#Taking only the last weight for plotting
columns = int(args.n_neurons/5)
fig, axes = plt.subplots(int(args.n_neurons/columns), int(columns), figsize=(20,25))
for i in range(0,(args.n_neurons)):
axes[int(i/columns)][int(i%columns)].matshow(np.reshape(weights[i],(28,28)),interpolation='nearest', vmax=1, vmin=0)
plt.tight_layout()
# fig, axes = plt.subplots(1,1, figsize=(3,3))
# fig = plt.figure()
# ax1 = fig.add_subplot()
# cax = ax1.matshow(np.reshape(weights[0],(28,28)),interpolation='nearest', vmax=1, vmin=0)
# fig.colorbar(cax)
# plt.tight_layout()
fig.savefig(folder+'/weights'+str(args.filename)+'.png')
plt.close()
# plt.figure(figsize=(12,10))
# plt.subplot(2, 1, 1)
# plt.title('Input neurons')
# rasterplot(time_points, p_input_layer)
# plt.xlabel("Time [s]")
# plt.ylabel("Neuron index")
# plt.subplot(2, 1, 2)
# plt.title('Output neurons')
# rasterplot(time_points, p_layer_1)
# plt.xlabel("Time [s]")
# plt.ylabel("Neuron index")
# plt.tight_layout()
# plt.savefig(folder+'/raster'+str(args.filename)+'.png')
timestr = time.strftime("%Y%m%d-%H%M%S")
log_file_name = 'accuracy_log'+'.csv'
pwd = os.getcwd()
if args.log_file_path is None:
log_dir = pwd+'/log_dir/'
else :
log_dir = args.log_file_path
df.to_csv(log_dir+log_file_name, index=False)
df.to_csv(log_file_name, index=False)
logger.info('All done.')
| 26.650602 | 224 | 0.617315 |
0b8f3854b111c1f6239aac81eeb1b554f9117a32 | 14,870 | py | Python |
nssrc/com/citrix/netscaler/nitro/resource/config/cmp/cmppolicy.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | ["Apache-2.0"] | null | null | null |
nssrc/com/citrix/netscaler/nitro/resource/config/cmp/cmppolicy.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | ["Apache-2.0"] | null | null | null |
nssrc/com/citrix/netscaler/nitro/resource/config/cmp/cmppolicy.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | ["Apache-2.0"] | null | null | null |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cmppolicy(base_resource) :
""" Configuration for compression policy resource. """
def __init__(self) :
self._name = ""
self._rule = ""
self._resaction = ""
self._newname = ""
self._expressiontype = ""
self._reqaction = ""
self._hits = 0
self._txbytes = 0
self._rxbytes = 0
self._clientttlb = 0
self._clienttransactions = 0
self._serverttlb = 0
self._servertransactions = 0
self._description = ""
self._builtin = []
self._isdefault = False
self.___count = 0
@property
def name(self) :
"""Name of the HTTP compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the policy is created.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the HTTP compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the policy is created.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def rule(self) :
"""Expression that determines which HTTP requests or responses match the compression policy. Can be a classic expression or a default-syntax expression.
Note:
Maximum length of a string literal in the expression is 255 characters. A longer string can be split into smaller strings of up to 255 characters each, and the smaller strings concatenated with the + operator. For example, you can create a 500-character string as follows: '"<string of 255 characters>" + "<string of 245 characters>"'
The following requirements apply only to the NetScaler CLI:
* If the expression includes one or more spaces, enclose the entire expression in double quotation marks.
* If the expression itself includes double quotation marks, escape the quotations by using the \ character.
* Alternatively, you can use single quotation marks to enclose the rule, in which case you do not have to escape the double quotation marks.
"""
try :
return self._rule
except Exception as e:
raise e
@rule.setter
def rule(self, rule) :
"""Expression that determines which HTTP requests or responses match the compression policy. Can be a classic expression or a default-syntax expression.
Note:
Maximum length of a string literal in the expression is 255 characters. A longer string can be split into smaller strings of up to 255 characters each, and the smaller strings concatenated with the + operator. For example, you can create a 500-character string as follows: '"<string of 255 characters>" + "<string of 245 characters>"'
The following requirements apply only to the NetScaler CLI:
* If the expression includes one or more spaces, enclose the entire expression in double quotation marks.
* If the expression itself includes double quotation marks, escape the quotations by using the \ character.
* Alternatively, you can use single quotation marks to enclose the rule, in which case you do not have to escape the double quotation marks.
"""
try :
self._rule = rule
except Exception as e:
raise e
@property
def resaction(self) :
"""The built-in or user-defined compression action to apply to the response when the policy matches a request or response.<br/>Minimum length = 1.
"""
try :
return self._resaction
except Exception as e:
raise e
@resaction.setter
def resaction(self, resaction) :
"""The built-in or user-defined compression action to apply to the response when the policy matches a request or response.<br/>Minimum length = 1
"""
try :
self._resaction = resaction
except Exception as e:
raise e
@property
def newname(self) :
"""New name for the compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Choose a name that reflects the function that the policy performs.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1.
"""
try :
return self._newname
except Exception as e:
raise e
@newname.setter
def newname(self, newname) :
"""New name for the compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Choose a name that reflects the function that the policy performs.
The following requirement applies only to the NetScaler CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1
"""
try :
self._newname = newname
except Exception as e:
raise e
@property
def expressiontype(self) :
"""Type of policy (Classic/Advanced) .<br/>Possible values = Classic Policy, Advanced Policy.
"""
try :
return self._expressiontype
except Exception as e:
raise e
@property
def reqaction(self) :
"""The compression action to be performed on requests.
"""
try :
return self._reqaction
except Exception as e:
raise e
@property
def hits(self) :
"""Number of hits.
"""
try :
return self._hits
except Exception as e:
raise e
@property
def txbytes(self) :
"""Number of bytes transferred.
"""
try :
return self._txbytes
except Exception as e:
raise e
@property
def rxbytes(self) :
"""Number of bytes received.
"""
try :
return self._rxbytes
except Exception as e:
raise e
@property
def clientttlb(self) :
"""Total client TTLB value.
"""
try :
return self._clientttlb
except Exception as e:
raise e
@property
def clienttransactions(self) :
"""Number of client transactions.
"""
try :
return self._clienttransactions
except Exception as e:
raise e
@property
def serverttlb(self) :
"""Total server TTLB value.
"""
try :
return self._serverttlb
except Exception as e:
raise e
@property
def servertransactions(self) :
"""Number of server transactions.
"""
try :
return self._servertransactions
except Exception as e:
raise e
@property
def description(self) :
"""Description of the policy.
"""
try :
return self._description
except Exception as e:
raise e
@property
def builtin(self) :
"""Flag to determine if compression policy is builtin or not.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE, PARTITION_ALL.
"""
try :
return self._builtin
except Exception as e:
raise e
@property
def isdefault(self) :
"""A value of true is returned if it is a default policy.
"""
try :
return self._isdefault
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(cmppolicy_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.cmppolicy
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
""" Use this API to add cmppolicy.
"""
try :
if type(resource) is not list :
addresource = cmppolicy()
addresource.name = resource.name
addresource.rule = resource.rule
addresource.resaction = resource.resaction
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].rule = resource[i].rule
addresources[i].resaction = resource[i].resaction
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
""" Use this API to delete cmppolicy.
"""
try :
if type(resource) is not list :
deleteresource = cmppolicy()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
""" Use this API to update cmppolicy.
"""
try :
if type(resource) is not list :
updateresource = cmppolicy()
updateresource.name = resource.name
updateresource.rule = resource.rule
updateresource.resaction = resource.resaction
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].rule = resource[i].rule
updateresources[i].resaction = resource[i].resaction
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def rename(cls, client, resource, new_name) :
""" Use this API to rename a cmppolicy resource.
"""
try :
renameresource = cmppolicy()
if type(resource) == cls :
renameresource.name = resource.name
else :
renameresource.name = resource
return renameresource.rename_resource(client,new_name)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
""" Use this API to fetch all the cmppolicy resources that are configured on netscaler.
"""
try :
if not name :
obj = cmppolicy()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = cmppolicy()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [cmppolicy() for _ in range(len(name))]
obj = [cmppolicy() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = cmppolicy()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
""" Use this API to fetch filtered set of cmppolicy resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = cmppolicy()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
""" Use this API to count the cmppolicy resources configured on NetScaler.
"""
try :
obj = cmppolicy()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
""" Use this API to count filtered the set of cmppolicy resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = cmppolicy()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class Expressiontype:
Classic_Policy = "Classic Policy"
Advanced_Policy = "Advanced Policy"
class Builtin:
MODIFIABLE = "MODIFIABLE"
DELETABLE = "DELETABLE"
IMMUTABLE = "IMMUTABLE"
PARTITION_ALL = "PARTITION_ALL"
class cmppolicy_response(base_response) :
def __init__(self, length=1) :
self.cmppolicy = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.cmppolicy = [cmppolicy() for _ in range(length)]
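# --- Usage sketch (not part of the generated SDK) ---
# A minimal, hypothetical driver for the resource class above. It assumes an
# already-authenticated nitro_service session is passed in; the policy name,
# rule and action below are placeholders.
def _example_add_policy(client):
	policy = cmppolicy()
	policy.name = 'my_cmp_policy'
	policy.rule = 'HTTP.REQ.URL.SUFFIX.EQ("html")'
	policy.resaction = 'COMPRESS'
	cmppolicy.add(client, policy)
	return cmppolicy.get(client, 'my_cmp_policy')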
| 32.326087
| 336
| 0.70195
|
c61e24c422bd328c7616aff0909249e3bea1024c
| 5,684
|
py
|
Python
|
topic_analysis/feature_extraction/__init__.py
|
surajiyer/topic-analysis
|
80dff3be2077f4eb859472651ad80653adaaaa26
|
[
"MIT"
] | null | null | null |
topic_analysis/feature_extraction/__init__.py
|
surajiyer/topic-analysis
|
80dff3be2077f4eb859472651ad80653adaaaa26
|
[
"MIT"
] | null | null | null |
topic_analysis/feature_extraction/__init__.py
|
surajiyer/topic-analysis
|
80dff3be2077f4eb859472651ad80653adaaaa26
|
[
"MIT"
] | null | null | null |
from .bngrams import BngramsVectorizer
from .noun_phrases import NounPhraseMatcher
import numpy as np
import pandas as pd
import psutil
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.utils.sparsefuncs import min_max_axis, _get_median, csc_median_axis_0
from topic_analysis.utils import _logging
import warnings
# Global variables
_default_n_jobs = 1
_default_batch_size = 1000
logger = _logging.get_logger()
def get_ranked_phrases(
nlp, raw_documents, timestamps=None, *, include_verb_phrases=False,
minlen=1, maxlen=8, n_jobs=_default_n_jobs, batch_size=_default_batch_size,
stop_phrases=[], vectorizer='bngram', aggfunc='sum', **vectorizer_kws):
"""
Get phrases ranked by either TF-IDF (importance) score or BNgram (novelty) score.
Parameters
----------
nlp : spacy.language.Language
Spacy language model
raw_documents : Iterable[str]
An iterable which yields either str objects.
timestamps : Iterable[str]
timestamp of the documents. An iterable which
yields datetime objects. Only used when
`vectorizer='bngram'`.
include_verb_phrases : bool, default=False
Indicator to include verb phrases also.
minlen : int, default=1
Minimum length of extracted multi-word phrases.
Used for tokenizing the text.
maxlen : int, default=8
Maximum length of extracted multi-word phrases.
Used for tokenizing the text.
    n_jobs : int, default=1
Number of processes to get noun phrases in parallel
from documents.
* -1: Use one process per available CPU cores
* >0: Use `n_jobs` processes
batch_size : int, default=1000
Batch size for tokenizing, tagging and extracting
noun phrases. Use smaller batch sizes on large
number of large texts and vice-versa.
stop_phrases : List[str], default=[]
List of phrases to remove.
vectorizer : str, default='bngram'
One of ('bngram', 'tfidf').
aggfunc : Union[str, callable, NoneType], default='sum'
Function to aggregate over the scores per document
for a single phrase to rank. One of ('sum', 'mean',
'max', 'median', 'median_ignore_0', callable that
accepts sparse matrix, None). If None, this function
will return the vectorized documents and the vectorizer
directly.
vectorizer_kws : dict
Keyword arguments for TfidfVectorizer
Returns
-------
ranked_phrases : Union[pandas.DataFrame, Tuple[array[N, M], vectorizer]]
If aggfunc is not None, returns the dataframe with the extracted
n-gram / phrase and sorted descending by the aggregated bngram /
td-idf scores, else returns the vectorized documents (where
N=len(raw_documents) and M=len(phrases)) and the vectorizer object,
"""
assert vectorizer in ('bngram', 'tfidf')
stop_phrases = set(stop_phrases)
# get candidate phrases
nlp.add_pipe(NounPhraseMatcher(
lowercase=True, lemmatize=True,
include_verb_phrases=include_verb_phrases,
minlen=minlen, maxlen=maxlen))
# extract phrases
def process_chunk(texts):
return list(nlp.pipe(texts))
    logger.info('Tokenizing, tagging and extracting noun phrases '
                'per document with spacy')
n_jobs = psutil.cpu_count(logical=False)\
if n_jobs == -1 else n_jobs
raw_documents = list(nlp.pipe(
raw_documents, batch_size=batch_size, n_process=n_jobs))
# vectorize the texts
if 'norm' in vectorizer_kws and aggfunc is not None:
warnings.warn("'vectorizer_kws' should not contain 'norm'. "
"'vectorizer_kws['norm']' will be replaced.", UserWarning)
vectorizer_kws['norm'] = None
if 'analyzer' in vectorizer_kws:
warnings.warn("'vectorizer_kws' should not contain 'analyzer'. "
"'vectorizer_kws['analyzer']' will be replaced.", UserWarning)
vectorizer_kws['analyzer'] = lambda doc: [p for p in doc._.noun_phrases if p not in stop_phrases]
if vectorizer == 'bngram':
if timestamps is None:
raise ValueError('Parameter `timestamps` cannot be None if `vectorizer=bngram`.')
vectorizer = BngramsVectorizer(**vectorizer_kws)
logger.info('Vectorizing documents with BNgrams')
X = vectorizer.fit_transform(raw_documents, timestamps)
elif vectorizer == 'tfidf':
vectorizer = TfidfVectorizer(**vectorizer_kws)
logger.info('Vectorizing documents with TF-IDF')
X = vectorizer.fit_transform(raw_documents)
else:
raise ValueError(f'Unknown vectorizer={vectorizer} given.')
logger.info('Scoring phrases')
if aggfunc == 'sum':
scores = np.array(X.tocsc().sum(0))[0]
elif aggfunc == 'mean':
scores = np.array(X.tocsc().mean(0))[0]
elif aggfunc == 'max':
scores = min_max_axis(X.tocsc(), axis=0, ignore_nan=True)[1]
elif aggfunc == 'median':
scores = csc_median_axis_0(X.tocsc())
elif aggfunc == 'median_ignore_0':
scores = _get_median(X.tocsc(), 0)
elif callable(aggfunc):
scores = aggfunc(X.tocsc())
elif aggfunc is None:
return X, vectorizer
else:
raise ValueError(f'Unknown method: {aggfunc}')
logger.info('Rank phrases based on score')
ranked_phrases = pd.DataFrame(
list(zip(vectorizer.get_feature_names(), scores)),
columns=['phrase', 'score'])
ranked_phrases = ranked_phrases\
.sort_values('score', ascending=False)\
.reset_index(drop=True)
return ranked_phrases
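# --- Usage sketch (illustrative only; the spaCy model name, documents and
# timestamps below are assumptions, not part of this package) ---
if __name__ == '__main__':
    import datetime
    import spacy

    nlp = spacy.load('en_core_web_sm')
    docs = ['Revenue grew strongly this quarter.',
            'Operating costs also rose sharply this quarter.']
    stamps = [datetime.datetime(2021, 1, 1), datetime.datetime(2021, 1, 2)]
    ranked = get_ranked_phrases(nlp, docs, timestamps=stamps,
                                vectorizer='bngram', aggfunc='sum')
    print(ranked.head())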
| 36.909091
| 101
| 0.669775
|
5aaa6bca7099437d39ca0c5eab091d3cb1eb3f31
| 5,124
|
py
|
Python
|
meiduo/meiduo/apps/goods/models.py
|
q934744153/meiduo_mall
|
6f5b0ae38cc80c21ddca161c428fea09584d4b95
|
[
"MIT"
] | null | null | null |
meiduo/meiduo/apps/goods/models.py
|
q934744153/meiduo_mall
|
6f5b0ae38cc80c21ddca161c428fea09584d4b95
|
[
"MIT"
] | null | null | null |
meiduo/meiduo/apps/goods/models.py
|
q934744153/meiduo_mall
|
6f5b0ae38cc80c21ddca161c428fea09584d4b95
|
[
"MIT"
] | null | null | null |
from django.db import models

from meiduo.utils.models import BaseModel

# Create your models here.
class GoodsCategory(BaseModel):
"""商品类别"""
name = models.CharField(max_length=10, verbose_name='名称')
parent = models.ForeignKey('self', related_name='subs', null=True, blank=True, on_delete=models.CASCADE, verbose_name='父类别')
class Meta:
db_table = 'tb_goods_category'
def __str__(self):
return self.name
class GoodsChannelGroup(BaseModel):
"""商品频道组"""
name = models.CharField(max_length=20, verbose_name='频道组名')
class Meta:
db_table = 'tb_channel_group'
def __str__(self):
return self.name
class GoodsChannel(BaseModel):
"""商品频道"""
group = models.ForeignKey(GoodsChannelGroup, on_delete=models.CASCADE, verbose_name='频道组名')
category = models.ForeignKey(GoodsCategory, on_delete=models.CASCADE, verbose_name='顶级商品类别')
url = models.CharField(max_length=50, verbose_name='频道页面链接')
sequence = models.IntegerField(verbose_name='组内顺序')
class Meta:
db_table = 'tb_goods_channel'
def __str__(self):
return self.category.name
class Brand(BaseModel):
"""品牌"""
name = models.CharField(max_length=20, verbose_name='名称')
logo = models.ImageField(verbose_name='Logo图片')
first_letter = models.CharField(max_length=1, verbose_name='品牌首字母')
class Meta:
db_table = 'tb_brand'
def __str__(self):
return self.name
class SPU(BaseModel):
"""商品SPU"""
name = models.CharField(max_length=50, verbose_name='名称')
brand = models.ForeignKey(Brand, on_delete=models.PROTECT, verbose_name='品牌')
category1 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat1_spu', verbose_name='一级类别')
category2 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat2_spu', verbose_name='二级类别')
category3 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat3_spu', verbose_name='三级类别')
sales = models.IntegerField(default=0, verbose_name='销量')
comments = models.IntegerField(default=0, verbose_name='评价数')
desc_detail = models.TextField(default='', verbose_name='详细介绍')
desc_pack = models.TextField(default='', verbose_name='包装信息')
desc_service = models.TextField(default='', verbose_name='售后服务')
class Meta:
db_table = 'tb_spu'
def __str__(self):
return self.name
class SKU(BaseModel):
"""商品SKU"""
name = models.CharField(max_length=50, verbose_name='名称')
caption = models.CharField(max_length=100, verbose_name='副标题')
spu = models.ForeignKey(SPU, on_delete=models.CASCADE, verbose_name='商品')
category = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, verbose_name='从属类别')
price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='单价')
cost_price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='进价')
market_price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='市场价')
stock = models.IntegerField(default=0, verbose_name='库存')
sales = models.IntegerField(default=0, verbose_name='销量')
comments = models.IntegerField(default=0, verbose_name='评价数')
is_launched = models.BooleanField(default=True, verbose_name='是否上架销售')
default_image = models.ImageField(max_length=200, default='', null=True, blank=True, verbose_name='默认图片')
class Meta:
db_table = 'tb_sku'
def __str__(self):
return '%s: %s' % (self.id, self.name)
class SKUImage(BaseModel):
"""SKU图片"""
sku = models.ForeignKey(SKU, on_delete=models.CASCADE, verbose_name='sku')
image = models.ImageField(verbose_name='图片')
class Meta:
db_table = 'tb_sku_image'
def __str__(self):
return '%s %s' % (self.sku.name, self.id)
class SPUSpecification(BaseModel):
"""商品SPU规格"""
spu = models.ForeignKey(SPU, on_delete=models.CASCADE, related_name='specs', verbose_name='商品SPU')
name = models.CharField(max_length=20, verbose_name='规格名称')
class Meta:
db_table = 'tb_spu_specification'
def __str__(self):
return '%s: %s' % (self.spu.name, self.name)
class SpecificationOption(BaseModel):
"""规格选项"""
spec = models.ForeignKey(SPUSpecification, related_name='options', on_delete=models.CASCADE, verbose_name='规格')
value = models.CharField(max_length=20, verbose_name='选项值')
class Meta:
db_table = 'tb_specification_option'
def __str__(self):
return '%s - %s' % (self.spec, self.value)
class SKUSpecification(BaseModel):
"""SKU具体规格"""
sku = models.ForeignKey(SKU, related_name='specs', on_delete=models.CASCADE, verbose_name='sku')
spec = models.ForeignKey(SPUSpecification, on_delete=models.PROTECT, verbose_name='规格名称')
option = models.ForeignKey(SpecificationOption, on_delete=models.PROTECT, verbose_name='规格值')
class Meta:
db_table = 'tb_sku_specification'
def __str__(self):
return '%s: %s - %s' % (self.sku, self.spec.name, self.option.value)
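# --- Query sketch (illustrative; assumes migrations applied and data loaded,
# and a hypothetical category id) ---
#
#     # On-sale SKUs of one category, cheapest first:
#     skus = SKU.objects.filter(category_id=115, is_launched=True).order_by('price')
#     # Walk one SKU's concrete specification options:
#     for spec in skus[0].specs.all():
#         print(spec.spec.name, '->', spec.option.value)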
| 34.621622
| 128
| 0.702381
|
c3224a2d754f30d6f1a074ff381603106d8a7c90
| 783
|
py
|
Python
|
pythonfuzz/dictionnary.py
|
MJ-SEO/py_fuzz
|
789fbfea21bf644ba4d00554fe4141694b0a190a
|
[
"Apache-2.0"
] | null | null | null |
pythonfuzz/dictionnary.py
|
MJ-SEO/py_fuzz
|
789fbfea21bf644ba4d00554fe4141694b0a190a
|
[
"Apache-2.0"
] | null | null | null |
pythonfuzz/dictionnary.py
|
MJ-SEO/py_fuzz
|
789fbfea21bf644ba4d00554fe4141694b0a190a
|
[
"Apache-2.0"
] | null | null | null |
import random
import re
import os
class Dictionary:
line_re = re.compile('"(.+)"$')
def __init__(self, dict_path=None):
if not dict_path or not os.path.exists(dict_path):
self._dict = list()
return
_dict = set()
with open(dict_path) as f:
for line in f:
line = line.lstrip()
if line.startswith('#'):
continue
word = self.line_re.search(line)
if word:
_dict.add(word.group(1))
print("[Dict] ", word, " Added")
self._dict = list(_dict)
def get_word(self):
if not self._dict:
return None
return bytearray(random.choice(self._dict), encoding="utf-8")
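# --- Usage sketch (hypothetical dictionary path; AFL-style entries such as
# kw="GET" match line_re above) ---
if __name__ == '__main__':
    d = Dictionary('fuzz.dict')
    print(d.get_word())  # a random entry as a bytearray, or None if empty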
| 27
| 69
| 0.503193
|
6acc8cedaa92e903aecea32f5a64ad9b60dc9146
| 215
|
py
|
Python
|
src/utils/message_id.py
|
Abdul-Muiz-Iqbal/PostMan
|
31c94f751c87ac6f3079b014da7402a4bbf22cc3
|
[
"Apache-2.0"
] | null | null | null |
src/utils/message_id.py
|
Abdul-Muiz-Iqbal/PostMan
|
31c94f751c87ac6f3079b014da7402a4bbf22cc3
|
[
"Apache-2.0"
] | 3
|
2021-05-05T16:05:26.000Z
|
2021-05-08T10:24:18.000Z
|
src/utils/message_id.py
|
Abdul-Muiz-Iqbal/PostMan
|
31c94f751c87ac6f3079b014da7402a4bbf22cc3
|
[
"Apache-2.0"
] | 1
|
2021-05-01T10:14:40.000Z
|
2021-05-01T10:14:40.000Z
|
from bson.objectid import ObjectId
from dataclasses import dataclass
@dataclass
class MessageId:
"""Id of a Message. Is always unique and created automatically by MongoDb, or a Server"""
_id: ObjectId
| 30.714286
| 94
| 0.748837
|
062bb227a07042c615f18ba4572f9a919a275589
| 575
|
py
|
Python
|
setup/ssh_install.py
|
garstka/idact-test-environment
|
a9b430636f5198de7f3c5de517ab93b9b57c6ca6
|
[
"MIT"
] | null | null | null |
setup/ssh_install.py
|
garstka/idact-test-environment
|
a9b430636f5198de7f3c5de517ab93b9b57c6ca6
|
[
"MIT"
] | null | null | null |
setup/ssh_install.py
|
garstka/idact-test-environment
|
a9b430636f5198de7f3c5de517ab93b9b57c6ca6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Installs the ssh daemon in the container."""
import subprocess
INSTALL_COMMAND = ['yum', '-y', 'install', 'openssh-server']
SSH_INTERNAL_PORTS = [22, 8022, 8023, 8024, 8025]
def get_append_port_command(port: int):
return ['sed', '-i', r"$ a\Port {port}".format(port=port),
'/etc/ssh/sshd_config']
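# Example of the command built above: get_append_port_command(8022) returns
# ['sed', '-i', r'$ a\Port 8022', '/etc/ssh/sshd_config'], i.e. append the
# line "Port 8022" at the end of sshd_config.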
def main():
"""Main script function."""
subprocess.check_call(INSTALL_COMMAND)
for port in SSH_INTERNAL_PORTS:
subprocess.check_call(get_append_port_command(port=port))
if __name__ == '__main__':
main()
| 23
| 65
| 0.669565
|
d6b295ea7aa03a7d1580f536945ed9642861a0f0
| 6,831
|
py
|
Python
|
model/sentiment_analysics_model/sentiment_analysis.py
|
ayanchoudhary/digialpha-inter-IIT
|
4a683830949b44029ddfd73ebd52f27d441ad8ab
|
[
"MIT"
] | null | null | null |
model/sentiment_analysics_model/sentiment_analysis.py
|
ayanchoudhary/digialpha-inter-IIT
|
4a683830949b44029ddfd73ebd52f27d441ad8ab
|
[
"MIT"
] | 3
|
2022-03-19T17:37:30.000Z
|
2022-03-26T07:47:33.000Z
|
model/sentiment_analysics_model/sentiment_analysis.py
|
ayanchoudhary/digialpha-inter-IIT
|
4a683830949b44029ddfd73ebd52f27d441ad8ab
|
[
"MIT"
] | 1
|
2022-03-16T11:32:26.000Z
|
2022-03-16T11:32:26.000Z
|
# Import the required libraries
import os
import re
import pandas as pd
import json
import nltk
nltk.download('punkt')
from nltk.tokenize import sent_tokenize
from collections import Counter
from tqdm.auto import tqdm
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from transformers import pipeline
def get_file_paths():
"""
Gets the list of paths of all the filings required
"""
file_paths = []
# set the base path for the data
base_path = os.path.join('data', 'sec-edgar-filings')
for company in os.listdir(base_path):
comp_dir = os.path.join(base_path, company)
for filing in os.listdir(comp_dir):
filing_dir = os.path.join(comp_dir, filing)
for text in os.listdir(filing_dir):
file_path = os.path.join(filing_dir, text, 'full-submission.txt')
file_paths.append(file_path)
return file_paths
def clean_filing(input_filename, filing_type, output_filename):
"""
Cleans a 8-K, 10-K or 10-Q filing. All arguments take strings as input
input_filename: name of the file to be cleaned
filing_type: either 8-K or 10-K or 10-Q
output_filename: name of output file
"""
# open file and get rid of all lines
    with open(input_filename, 'r') as f:
data = f.read().replace('\n', ' ')
# get text in between the appropriate 10-K tags
search_tag = re.search("(?s)(?m)<TYPE>{}.*?(</TEXT>)".format(filing_type), data)
try:
data_processed = search_tag.group(0)
# delete formatting text used to identify the section as its not relevant
data_processed = re.sub(pattern="((?i)<TYPE>).*?(?=<)", repl='', string=data_processed)
# Five more formatting tags are deleted
data_processed = re.sub(pattern="((?i)<SEQUENCE>).*?(?=<)", repl='', string=data_processed)
data_processed = re.sub(pattern="((?i)<FILENAME>).*?(?=<)", repl='', string=data_processed)
data_processed = re.sub(pattern="((?i)<DESCRIPTION>).*?(?=<)", repl='', string=data_processed)
data_processed = re.sub(pattern="(?s)(?i)<head>.*?</head>", repl='', string=data_processed)
data_processed = re.sub(pattern="(?s)(?i)<(table).*?(</table>)", repl='', string=data_processed)
# Tags each section of the financial statement with prefix '°Item' for future analysis
data_processed = re.sub(pattern="(?s)(?i)(?m)> +Item|>Item|^Item", repl=">°Item", string=data_processed, count=0)
# Removes all HTML tags
data_processed = re.sub(pattern="(?s)<.*?>", repl=" ", string=data_processed, count=0)
# Replaces all Unicode strings
data_processed = re.sub(pattern="&(.{2,6});", repl=" ", string=data_processed, count=0)
# Replaces multiple spaces with a single space
data_processed = re.sub(pattern="\s+", repl=" ", string=data_processed, count=0)
# Remove multiple continuous underscores
data_processed = re.compile(r'_{1,}\s*').sub('', data_processed)
with open(output_filename, 'w+') as output:
output.write(data_processed)
except BaseException as e:
print('{} could not be cleaned. Exception: {}'.format(input_filename, e))
pass
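# Illustration of the extraction step above on a toy string (not a real
# filing):
#
#     data = '<TYPE>10-K ... filing body ... </TEXT> trailing exhibits'
#     m = re.search("(?s)(?m)<TYPE>{}.*?(</TEXT>)".format('10-K'), data)
#     m.group(0)  # -> '<TYPE>10-K ... filing body ... </TEXT>'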
def clean_all_files():
"""
Cleans all 8-K or 10-K or 10-Q filings for the selected companies
"""
cleaned_paths = []
file_paths = get_file_paths()
for input_file in file_paths:
file_path = os.path.normpath(input_file)
parts = file_path.split(os.sep)
# get the filing type
if parts[3] == '10-K':
filing_type = '10-K'
elif parts[3] == '10-Q':
filing_type = '10-Q'
else:
filing_type = '8-K'
# change the file name to denote the cleaned one
output_file = input_file[:-19] + "cleaned_submission.txt"
if os.path.exists(output_file):
os.remove(output_file)
clean_filing(input_file, filing_type, output_file)
cleaned_paths.append(output_file)
return cleaned_paths
def get_sentiment(txt_path, generator):
"""
Gets the sentiment for the given filing using the FinBERT model.
All arguments take strings as input
txt_path: path of the file whose sentiment is to be determined
generator: the generator object used for the model pipeline
"""
with open(txt_path, "r") as f:
# tokenize individual sentences so as to fit the pretrained model params
sentences = sent_tokenize(f.read())
sentences = [sentence for sentence in sentences if len(sentence) < 512]
outputs = generator(sentences)
        sa_dict = Counter(out['label'] for out in outputs)  # Counter returns 0 for labels that never occur
if sa_dict['negative'] > sa_dict['positive']:
return "negative"
elif sa_dict['positive'] > sa_dict['negative']:
return "positive"
else:
return "neutral"
def get_all_sentiments(paths, generator):
"""
Gets the sentiment for all the files. All arguments take strings as input
paths: paths of the files whose sentiment are to be determined
generator: the generator object used for the model pipeline
"""
sentiments = []
for path in tqdm(paths):
sentiment = get_sentiment(path, generator)
sentiments.append(sentiment)
return sentiments
def create_json(generator):
"""
Creates a json object consisting of the companies and their sentiments.
All arguments take strings as input
generator: the generator object used for the model pipeline
"""
cleaned_paths = clean_all_files()
sentiments = get_all_sentiments(cleaned_paths, generator)
companies = []
for doc_file in cleaned_paths:
file_path = os.path.normpath(doc_file)
parts = file_path.split(os.sep)
companies.append(parts[2])
main_data = {'Company': companies,
'Sentiment': sentiments}
sentiment_dict = {}
for i in range(len(main_data['Company'])):
comp = main_data['Company'][i]
st = main_data['Sentiment'][i]
val = -1 if st == "negative" else 1
if comp in sentiment_dict.keys():
sentiment_dict[comp] += val
else:
sentiment_dict[comp] = val
for k, v in sentiment_dict.items():
st_val = "negative" if v < 0 else "positive"
sentiment_dict[k] = st_val
    with open('sentiments.json', 'w') as f:
        json.dump(sentiment_dict, f)
if __name__ == "__main__":
# Load the pretrained tokenizer and model into a pipeline
tokenizer = AutoTokenizer.from_pretrained("ProsusAI/finbert")
model = AutoModelForSequenceClassification.from_pretrained("ProsusAI/finbert")
generator = pipeline(task="sentiment-analysis", model=model, tokenizer=tokenizer)
create_json(generator)
| 36.529412
| 122
| 0.648661
|
8d4ffcd76b2f6051f1c970f14418e8f910844702
| 2,384
|
py
|
Python
|
flint/optim/adadelta.py
|
Renovamen/tinyark
|
da536e8f8132ef531c5bef3feebd3178c1877fce
|
[
"MIT"
] | 15
|
2021-02-08T16:01:52.000Z
|
2021-02-10T07:49:26.000Z
|
flint/optim/adadelta.py
|
Renovamen/tinyark
|
da536e8f8132ef531c5bef3feebd3178c1877fce
|
[
"MIT"
] | null | null | null |
flint/optim/adadelta.py
|
Renovamen/tinyark
|
da536e8f8132ef531c5bef3feebd3178c1877fce
|
[
"MIT"
] | 2
|
2021-05-10T06:40:45.000Z
|
2021-05-10T14:47:03.000Z
|
import numpy as np
from .optimizer import Optimizer
class Adadelta(Optimizer):
"""
Implementation of Adadelta algorithm proposed in [1].
    .. math::
        h_t = \\rho h_{t-1} + (1 - \\rho) g_t^2

    .. math::
        g'_t = \\sqrt{\\frac{\\Delta \\theta_{t-1} + \\epsilon}{h_t + \\epsilon}} \\cdot g_t

    .. math::
        \\Delta \\theta_t = \\rho \\Delta \\theta_{t-1} + (1 - \\rho) (g'_t)^2

    .. math::
        \\theta_t = \\theta_{t-1} - \\text{lr} \\cdot g'_t
where :math:`h` is the moving average of the squared gradients,
:math:`\epsilon` is for improving numerical stability.
Parameters
----------
params : iterable
An iterable of Tensor
    rho : float, optional, default=0.99
Coefficient used for computing a running average of squared gradients
eps : float, optional, default=1e-6
Term added to the denominator to improve numerical stability
lr : float, optional, default=1.0
Coefficient that scale delta before it is applied to the parameters
weight_decay : float, optional, default=0
Weight decay (L2 penalty)
References
----------
1. "`ADADELTA: An Adaptive Learning Rate Method. Matthew D. Zeiler. <https://arxiv.org/abs/1212.5701>`_" arxiv 2012.
"""
def __init__(
self,
params = None,
rho: float = 0.99,
eps: float = 1e-6,
lr: float = 1.0,
weight_decay: float = 0.
):
super(Adadelta, self).__init__(params, lr, weight_decay)
self.eps = eps
self.rho = rho
self.h = [np.zeros_like(p.data) for p in self.params]
self.delta = [np.zeros_like(p.data) for p in self.params]
def step(self):
for i, (h, delta, p) in enumerate(zip(self.h, self.delta, self.params)):
if p.requires_grad:
# l2 penalty
p_grad = p.grad + self.weight_decay * p.data
                # moving average of the squared (weight-decayed) gradients
                h = self.rho * h + (1 - self.rho) * (p_grad ** 2)
                self.h[i] = h
                # compute g'_t and delta_t
                g_ = np.sqrt(delta + self.eps) / np.sqrt(h + self.eps) * p_grad
delta = self.rho * delta + (1 - self.rho) * (g_ ** 2)
self.delta[i] = delta
# update parameters
p.data -= self.lr * g_
super(Adadelta, self).step()
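# --- Usage sketch (illustrative; assumes a flint module exposing
# .parameters() and a scalar loss Tensor with .backward()) ---
#
#     opt = Adadelta(model.parameters(), rho=0.99, lr=1.0, weight_decay=1e-4)
#     for x, y in loader:
#         loss = criterion(model(x), y)
#         loss.backward()
#         opt.step()        # applies the update derived above
#         opt.zero_grad()   # hypothetical helper; clears grads between steps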
| 33.111111
| 120
| 0.55495
|
f06514df3a3fc4024ff761cb93855aa182c95b5f
| 7,751
|
py
|
Python
|
test/integration/local/test_container.py
|
akhilmehra/sagemaker-tensorflow-serving-container
|
e7f37f0f25bd8e57f5518794c2ffe66a52a4b158
|
[
"Apache-2.0"
] | null | null | null |
test/integration/local/test_container.py
|
akhilmehra/sagemaker-tensorflow-serving-container
|
e7f37f0f25bd8e57f5518794c2ffe66a52a4b158
|
[
"Apache-2.0"
] | null | null | null |
test/integration/local/test_container.py
|
akhilmehra/sagemaker-tensorflow-serving-container
|
e7f37f0f25bd8e57f5518794c2ffe66a52a4b158
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import json
import os
import subprocess
import sys
import time
import pytest
import requests
BASE_URL = 'http://localhost:8080/invocations'
@pytest.fixture(scope='session', autouse=True)
def volume():
try:
model_dir = os.path.abspath('test/resources/models')
subprocess.check_call(
'docker volume create --name model_volume --opt type=none '
'--opt device={} --opt o=bind'.format(model_dir).split())
yield model_dir
finally:
subprocess.check_call('docker volume rm model_volume'.split())
@pytest.fixture(scope='module', autouse=True, params=[True, False])
def container(request, docker_base_name, tag, runtime_config):
try:
if request.param:
batching_config = ' -e SAGEMAKER_TFS_ENABLE_BATCHING=true'
else:
batching_config = ''
command = (
'docker run {}--name sagemaker-tensorflow-serving-test -p 8080:8080'
' --mount type=volume,source=model_volume,target=/opt/ml/model,readonly'
' -e SAGEMAKER_TFS_DEFAULT_MODEL_NAME=half_plus_three'
' -e SAGEMAKER_TFS_NGINX_LOGLEVEL=info'
' -e SAGEMAKER_BIND_TO_PORT=8080'
' -e SAGEMAKER_SAFE_PORT_RANGE=9000-9999'
' {}'
' {}:{} serve'
).format(runtime_config, batching_config, docker_base_name, tag)
proc = subprocess.Popen(command.split(), stdout=sys.stdout, stderr=subprocess.STDOUT)
        attempts = 0
        while attempts < 5:
            time.sleep(3)
            try:
                requests.get('http://localhost:8080/ping')
                break
            except requests.exceptions.ConnectionError:
                attempts += 1
yield proc.pid
finally:
subprocess.check_call('docker rm -f sagemaker-tensorflow-serving-test'.split())
def make_request(data, content_type='application/json', method='predict'):
headers = {
'Content-Type': content_type,
'X-Amzn-SageMaker-Custom-Attributes':
'tfs-model-name=half_plus_three,tfs-method=%s' % method
}
response = requests.post(BASE_URL, data=data, headers=headers)
return json.loads(response.content.decode('utf-8'))
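# Example: make_request('{"instances": [1.0]}') POSTs to BASE_URL with the
# tfs-model-name/tfs-method custom-attributes header set and decodes the JSON
# body, yielding {'predictions': [3.5]} for the half_plus_three model.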
def test_predict():
x = {
'instances': [1.0, 2.0, 5.0]
}
y = make_request(json.dumps(x))
assert y == {'predictions': [3.5, 4.0, 5.5]}
def test_predict_twice():
x = {
'instances': [1.0, 2.0, 5.0]
}
y = make_request(json.dumps(x))
z = make_request(json.dumps(x))
assert y == {'predictions': [3.5, 4.0, 5.5]}
assert z == {'predictions': [3.5, 4.0, 5.5]}
def test_predict_two_instances():
x = {
'instances': [[1.0, 2.0, 5.0], [1.0, 2.0, 5.0]]
}
y = make_request(json.dumps(x))
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_jsons_json_content_type():
x = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
y = make_request(x)
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_jsonlines():
x = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
y = make_request(x, 'application/jsonlines')
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_jsons():
x = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
y = make_request(x, 'application/jsons')
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_jsons_2():
x = '{"x": [1.0, 2.0, 5.0]}\n{"x": [1.0, 2.0, 5.0]}'
y = make_request(x)
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_generic_json():
x = [1.0, 2.0, 5.0]
y = make_request(json.dumps(x))
assert y == {'predictions': [[3.5, 4.0, 5.5]]}
def test_predict_generic_json_two_instances():
x = [[1.0, 2.0, 5.0], [1.0, 2.0, 5.0]]
y = make_request(json.dumps(x))
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_csv():
x = '1.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [3.5]}
def test_predict_csv_with_zero():
x = '0.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [3.0]}
def test_predict_csv_one_instance_three_values_with_zero():
x = '0.0,2.0,5.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [[3.0, 4.0, 5.5]]}
def test_predict_csv_one_instance_three_values():
x = '1.0,2.0,5.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [[3.5, 4.0, 5.5]]}
def test_predict_csv_two_instances_three_values():
x = '1.0,2.0,5.0\n1.0,2.0,5.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
def test_predict_csv_three_instances():
x = '1.0\n2.0\n5.0'
y = make_request(x, 'text/csv')
assert y == {'predictions': [3.5, 4.0, 5.5]}
def test_predict_csv_wide_categorical_input():
x = ('0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0\n' # noqa
'0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,6.0,0.0\n') # noqa
y = make_request(x, 'text/csv')
predictions = y['predictions']
assert 2 == len(predictions)
assert 30 == len(predictions[0])
assert 97 == sum(predictions[0]) # half_plus_three with row sum 14 and n = 30
assert 100 == sum(predictions[1]) # half_plus_three with row sum 20 and n = 30
def test_regress():
x = {
'signature_name': 'tensorflow/serving/regress',
'examples': [{'x': 1.0}, {'x': 2.0}]
}
y = make_request(json.dumps(x), method='regress')
assert y == {'results': [3.5, 4.0]}
def test_regress_one_instance():
# tensorflow serving docs indicate response should have 'result' key,
# but it is actually 'results'
# this test will catch if they change api to match docs (unlikely)
x = {
'signature_name': 'tensorflow/serving/regress',
'examples': [{'x': 1.0}]
}
y = make_request(json.dumps(x), method='regress')
assert y == {'results': [3.5]}
def test_predict_bad_input():
y = make_request('whatever')
assert 'error' in y
def test_predict_bad_input_instances():
x = json.dumps({'junk': 'data'})
y = make_request(x)
assert y['error'].startswith('Failed to process element: 0 key: junk of \'instances\' list.')
def test_predict_no_custom_attributes_header():
x = {
'instances': [1.0, 2.0, 5.0]
}
headers = {
'Content-Type': 'application/json'
}
response = requests.post(BASE_URL, data=json.dumps(x), headers=headers)
y = json.loads(response.content.decode('utf-8'))
assert y == {'predictions': [3.5, 4.0, 5.5]}
def test_predict_with_jsonlines():
x = {
'instances': [1.0, 2.0, 5.0]
}
headers = {
'Content-Type': 'application/json',
'Accept': 'application/jsonlines'
}
response = requests.post(BASE_URL, data=json.dumps(x), headers=headers)
assert response.headers['Content-Type'] == 'application/jsonlines'
assert response.content.decode('utf-8') == '{ "predictions": [3.5, 4.0, 5.5 ]}'
| 30.159533
| 141
| 0.598632
|
28878c2c0482b1d4835ee4a77877e571c6d09867
| 2,915
|
py
|
Python
|
CIM16/IEC61970/Dynamics/SynchronousMachineTimeConstantReactance.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM16/IEC61970/Dynamics/SynchronousMachineTimeConstantReactance.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM16/IEC61970/Dynamics/SynchronousMachineTimeConstantReactance.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 1
|
2021-04-02T18:04:49.000Z
|
2021-04-02T18:04:49.000Z
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# Modified by Gustav Holm (guholm@kth.se) & Francis J. Gomez (fragom@kth.se)
# Modified date: 05/06/2017
from CIM16.IEC61970.Dynamics.SynchronousMachineDetailed import SynchronousMachineDetailed
class SynchronousMachineTimeConstantReactance(SynchronousMachineDetailed):
def __init__(self, ks=0.0, xDirectSync=0.0, xDirectTrans=0.0, xDirectSubtrans=0.0, xQuadSync=0.0, xQuadTrans=0.0, xQuadSubtrans=0.0, tpdo=0.0, tppdo=0.0, tpqo=0.0, tppqo=0.0, tc=0.0, rotorType=None, modelType=None, *args, **kw_args):
self.ks = ks
self.xDirectSync = xDirectSync
self.xDirectTrans = xDirectTrans
self.xDirectSubtrans = xDirectSubtrans
self.xQuadSync = xQuadSync
        self.xQuadTrans = xQuadTrans
self.xQuadSubtrans = xQuadSubtrans
self.tpdo = tpdo
self.tppdo = tppdo
self.tpqo = tpqo
self.tppqo = tppqo
self.tc = tc
self.rotorType = rotorType
self.modelType = modelType
super(SynchronousMachineTimeConstantReactance, self).__init__(*args, **kw_args)
_attrs = ["ks", "xDirectSync", "xDirectTrans", "xDirectSubtrans", "xQuadSync", "xQuadTrans", "xQuadSubtrans", "tpdo", "tppdo", "tpqo", "tppqo", "tc"]
_attr_types = {"ks": float, "xDirectSync": float, "xDirectTrans": float, "xDirectSubtrans": float, "xQuadSync": float, "xQuadTrans": float, "xQuadSubtrans": float, "tpdo": float, "tppdo": float, "tpqo": float, "tppqo": float, "tc": float}
_defaults = {"ks": 0.0, "xDirectSync": 0.0, "xDirectTrans": 0.0, "xDirectSubtrans": 0.0, "xQuadSync": 0.0, "xQuadTrans": 0.0, "xQuadSubtrans": 0.0, "tpdo": 0.0, "tppdo": 0.0, "tpqo": 0.0, "tppqo": 0.0, "tc": 0.0}
_enums = {"rotorType": "RotorKind", "modelType": "SynchronousMachineModelKind"}
_refs = []
_many_refs = []
| 42.867647
| 242
| 0.706003
|
1fa2187703d2a10356fd74f51a3dc0b0e60a0f56
| 1,188
|
py
|
Python
|
plots_and_images/barplot_tasks.py
|
princeton-nlp/MultilingualAnalysis
|
b0d61c93c0c020a698a06264897dde14c9db471c
|
[
"MIT"
] | 6
|
2021-11-03T05:10:35.000Z
|
2022-03-25T20:28:06.000Z
|
plots_and_images/barplot_tasks.py
|
princeton-nlp/MultilingualAnalysis
|
b0d61c93c0c020a698a06264897dde14c9db471c
|
[
"MIT"
] | null | null | null |
plots_and_images/barplot_tasks.py
|
princeton-nlp/MultilingualAnalysis
|
b0d61c93c0c020a698a06264897dde14c9db471c
|
[
"MIT"
] | 1
|
2022-01-24T15:07:35.000Z
|
2022-01-24T15:07:35.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
sns.set(style="dark")
# labels = ['Inversion', 'Permutation', 'Syntax', 'Transliteration']
labels = ['Inversion', 'Permutation', 'Syntax']
# Data
xnli = [10.2, 3.6, 0.9]
ner = [49.1, 26.3, 14.6]
pos = [30.2, 11.2, 4.4]
xquad = [32.8, 0, 0]
# Label location
x = np.arange(len(labels)) # the label locations
width = 0.20 # the width of the bars
# The numbers that need to be plotted
fig, ax = plt.subplots()
rects1 = ax.bar(x - 1.5 * width, xnli, width, label='XNLI')
rects2 = ax.bar(x - 0.5 * width, ner, width, label='NER')
rects3 = ax.bar(x + 0.5 * width, pos, width, label='POS')
rects4 = ax.bar(x + 1.5 * width, xquad, width, label='XQuAD')
# Add some text for labels, title and custom x-axis tick labels, etc.
# ax.set_ylabel('Scores')
ax.set_xlabel(r'$\mathbf{-\Delta_{\,SUP}}$'+' for different tasks')
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend()
# ax.set_ylim(top=110, bottom=-5)
# Add numbers on top of the bars
ax.bar_label(rects1)
ax.bar_label(rects2)
ax.bar_label(rects3)
ax.bar_label(rects4)
fig.tight_layout()
# Default DPI is 100
plt.savefig('images/barplot_tasks.png', dpi=100)
| 26.4
| 69
| 0.680135
|
81a508426f93bb316511d0f49fdd684c3712bc16
| 752
|
py
|
Python
|
setynuco/core/djangomodels.py
|
ahmad88me/setynuco
|
703322dac088554b2ec4af37fda1a3d151a00e9f
|
[
"Apache-2.0"
] | 1
|
2018-09-04T09:08:17.000Z
|
2018-09-04T09:08:17.000Z
|
setynuco/core/djangomodels.py
|
ahmad88me/setynuco
|
703322dac088554b2ec4af37fda1a3d151a00e9f
|
[
"Apache-2.0"
] | null | null | null |
setynuco/core/djangomodels.py
|
ahmad88me/setynuco
|
703322dac088554b2ec4af37fda1a3d151a00e9f
|
[
"Apache-2.0"
] | null | null | null |
#################################################################
# TO make this app compatible with Django #
#################################################################
import os
import sys
proj_path = (os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
venv_python = os.path.join(proj_path, '..', '.venv', 'bin', 'python')
# This is so Django knows where to find stuff.
sys.path.append(os.path.join(proj_path, '..'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "setynuco.settings")
sys.path.append(proj_path)
# This is so my local_settings.py gets loaded.
os.chdir(proj_path)
# This is so models get loaded.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
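# --- Usage sketch ---
# Import this module first in a standalone script so Django gets configured:
#
#     import setynuco.core.djangomodels  # noqa: F401  (side-effect import)
#     from someapp.models import SomeModel  # hypothetical app model
#     print(SomeModel.objects.count())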
| 35.809524
| 82
| 0.599734
|
8d259a956d4fe657a8d2d444e7fc5a6dcd8bb2a0
| 2,631
|
py
|
Python
|
mmcv/parallel/collate.py
|
iasawseen/mmcv
|
70d457f74008710da2eb9a398650e88a7e2ae027
|
[
"Apache-2.0"
] | 54
|
2021-11-05T02:15:15.000Z
|
2022-03-23T13:40:43.000Z
|
mmcv/parallel/collate.py
|
Luodian/Refitting_mmdet
|
46245c499ce92d87c2724dd0f5033ad8c7a58533
|
[
"Apache-2.0"
] | 8
|
2019-06-13T06:00:08.000Z
|
2021-07-24T05:25:33.000Z
|
mmcv/parallel/collate.py
|
Luodian/Refitting_mmdet
|
46245c499ce92d87c2724dd0f5033ad8c7a58533
|
[
"Apache-2.0"
] | 6
|
2021-11-09T02:26:38.000Z
|
2022-03-05T01:38:30.000Z
|
import collections.abc
import torch
import torch.nn.functional as F
from torch.utils.data.dataloader import default_collate
from .data_container import DataContainer
def collate(batch, samples_per_gpu=1):
"""Puts each data field into a tensor/DataContainer with outer dimension
batch size.
Extend default_collate to add support for
:type:`~mmcv.parallel.DataContainer`. There are 3 cases.
1. cpu_only = True, e.g., meta data
2. cpu_only = False, stack = True, e.g., images tensors
3. cpu_only = False, stack = False, e.g., gt bboxes
"""
    if not isinstance(batch, collections.abc.Sequence):
        raise TypeError("{} is not supported.".format(type(batch)))
if isinstance(batch[0], DataContainer):
assert len(batch) % samples_per_gpu == 0
stacked = []
if batch[0].cpu_only:
for i in range(0, len(batch), samples_per_gpu):
stacked.append(
[sample.data for sample in batch[i:i + samples_per_gpu]])
return DataContainer(
stacked, batch[0].stack, batch[0].padding_value, cpu_only=True)
elif batch[0].stack:
for i in range(0, len(batch), samples_per_gpu):
assert isinstance(batch[i].data, torch.Tensor)
# TODO: handle tensors other than 3d
assert batch[i].dim() == 3
c, h, w = batch[i].size()
for sample in batch[i:i + samples_per_gpu]:
assert c == sample.size(0)
h = max(h, sample.size(1))
w = max(w, sample.size(2))
padded_samples = [
F.pad(
sample.data,
(0, w - sample.size(2), 0, h - sample.size(1)),
value=sample.padding_value)
for sample in batch[i:i + samples_per_gpu]
]
stacked.append(default_collate(padded_samples))
else:
for i in range(0, len(batch), samples_per_gpu):
stacked.append(
[sample.data for sample in batch[i:i + samples_per_gpu]])
return DataContainer(stacked, batch[0].stack, batch[0].padding_value)
    elif isinstance(batch[0], collections.abc.Sequence):
transposed = zip(*batch)
return [collate(samples, samples_per_gpu) for samples in transposed]
    elif isinstance(batch[0], collections.abc.Mapping):
return {
key: collate([d[key] for d in batch], samples_per_gpu)
for key in batch[0]
}
else:
return default_collate(batch)
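# --- Usage sketch (illustrative shapes) ---
if __name__ == '__main__':
    batch = [DataContainer(torch.rand(3, 4, 4), stack=True) for _ in range(4)]
    out = collate(batch, samples_per_gpu=2)
    # out.data is a list with one stacked tensor per GPU, each of shape
    # (2, 3, 4, 4) after padding every sample to a common height/width
    print([t.shape for t in out.data])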
| 39.268657
| 79
| 0.574686
|
fc58c9f2ece7d428df2d5b475b68c93579a6b4f6
| 11,428
|
py
|
Python
|
apps/training/views.py
|
Houston-ARTCC/zhuartcc.org
|
82c73e66c74181ea7679fd633f8a66c2c156cbe6
|
[
"MIT"
] | 8
|
2020-07-20T08:04:21.000Z
|
2020-10-23T14:21:39.000Z
|
apps/training/views.py
|
MikeRomaa/zhuartcc.org
|
82c73e66c74181ea7679fd633f8a66c2c156cbe6
|
[
"MIT"
] | 33
|
2020-09-05T03:30:00.000Z
|
2020-12-03T16:49:46.000Z
|
apps/training/views.py
|
Houston-ARTCC/zhuartcc.org
|
82c73e66c74181ea7679fd633f8a66c2c156cbe6
|
[
"MIT"
] | 2
|
2020-09-20T00:14:46.000Z
|
2020-10-23T14:20:51.000Z
|
import json
import os
import pytz
import requests
from datetime import datetime, timedelta
from discord_webhook import DiscordEmbed, DiscordWebhook
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.views.decorators.http import require_POST
from zhuartcc.decorators import require_member, require_mentor, require_staff_or_mentor
from zhuartcc.overrides import send_mail
from .models import TrainingSession, TrainingRequest
from ..administration.models import ActionLog
from ..event.models import Event
from ..user.models import User
@require_member
def view_training_center(request):
sessions = request.user_obj.student_sessions.all()
return render(request, 'training_center.html', {
'page_title': 'Training Center',
'user': request.user_obj,
'training_time': sum([session.duration for session in sessions.filter(status=1)], timedelta()),
'sessions_json': json.dumps({session.id: session.start.isoformat() for session in sessions}),
})
@require_member
def view_session(request, session_id):
session = TrainingSession.objects.get(id=session_id)
if (
request.user_obj.cid == session.student.cid
or request.user_obj.is_mentor or request.user_obj.is_staff
):
return render(request, 'session.html', {
'page_title': 'View Session',
'session': session,
})
else:
return HttpResponse('You are unauthorized to view somebody else\'s training session!', status=401)
def modify_session(session, request):
session.instructor = User.objects.get(id=request.POST.get('instructor'))
session.start = pytz.utc.localize(datetime.fromisoformat(request.POST.get('start')))
session.end = pytz.utc.localize(datetime.fromisoformat(request.POST.get('end')))
session.movements = request.POST.get('movements')
session.progress = request.POST.get('progress')
session.position = request.POST.get('position')
session.type = request.POST.get('type')
session.level = request.POST.get('level')
session.status = request.POST.get('status', 1)
session.ots_status = request.POST.get('ots_status')
session.notes = request.POST.get('notes')
    session.solo_granted = request.POST.get('solo_granted', False)
session.save()
# Visitors and oceanic sessions don't get training records posted to VATUSA CTRS
if session.student.main_role == 'HC' and session.level != 7 and session.status == 1:
post_ctrs(session)
def post_ctrs(session):
hours, remainder = divmod(session.duration.total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
data = {
'apikey': os.getenv('API_KEY'),
'instructor_id': session.instructor.cid,
'session_date': session.start.strftime('%Y-%m-%d %H:%M'),
'position': session.position,
'duration': f'{int(hours):02}:{int(minutes):02}',
'movements': session.movements,
'score': session.progress,
'notes': 'No notes provided.' if session.notes == '' else session.notes,
'location': 1 if session.type == 2 else 2 if session.type == 1 else 0,
'ots_status': session.ots_status,
}
if session.ctrs_id is not None:
requests.put(f'https://api.vatusa.net/v2/training/record/{session.ctrs_id}', data=data)
else:
response = requests.post(f'https://api.vatusa.net/v2/user/{session.student.cid}/training/record', data=data)
if response.json().get('data').get('status') == 'OK':
session.ctrs_id = response.json().get('data').get('id')
session.save()
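# Example of the duration formatting above: a 1 h 35 m session
# (duration.total_seconds() == 5700) gives hours=1, minutes=35, so the
# CTRS payload carries duration '01:35'.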
@require_staff_or_mentor
def file_session(request, session_id):
session = TrainingSession.objects.get(id=session_id)
if session.status == 0:
if request.method == 'POST':
modify_session(session, request)
return redirect(reverse('view_session', args=[session.id]))
return render(request, 'file_session.html', {
'page_title': 'File Session',
'session': session,
'instructors': User.objects.filter(training_role='INS'),
'mentors': User.objects.filter(training_role='MTR'),
})
else:
return HttpResponse('You cannot file a completed, cancelled, or no-show session!', status=401)
@require_staff_or_mentor
def edit_session(request, session_id):
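    """Edit an existing training session; staff and mentors only."""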
session = TrainingSession.objects.get(id=session_id)
if request.method == 'POST':
modify_session(session, request)
return redirect(reverse('view_session', args=[session.id]))
return render(request, 'edit_session.html', {
'page_title': 'Edit Session',
'session': session,
'instructors': User.objects.filter(training_role='INS'),
'mentors': User.objects.filter(training_role='MTR'),
})
@require_member
def request_training(request):
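    """Handle a member's training request: validate it, save it, email the student, and post to Discord."""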
if request.method == 'POST':
start = pytz.utc.localize(datetime.fromisoformat(request.POST.get('start')))
end = pytz.utc.localize(datetime.fromisoformat(request.POST.get('end')))
if start < end:
training_request = TrainingRequest(
student=request.user_obj,
start=pytz.utc.localize(datetime.fromisoformat(request.POST.get('start'))),
end=pytz.utc.localize(datetime.fromisoformat(request.POST.get('end'))),
type=int(request.POST.get('type')),
level=int(request.POST.get('level')),
remarks=request.POST.get('remarks', None)
)
training_request.save()
send_mail(
'Training Request Received',
render_to_string('emails/request_received.html', {'request': training_request}),
os.getenv('NO_REPLY'),
[training_request.student.email],
)
            time_format = '%b %d, %Y @ %H%Mz'  # avoid shadowing the built-in format()
webhook = DiscordWebhook(url=os.getenv('TRAINING_WEBHOOK_URL'))
embed = DiscordEmbed(
title=':pencil: New Training Request!',
description='See all requests at https://www.zhuartcc.org/training/requests.',
color=2966946
)
embed.add_embed_field(
name='User',
value=f'[{request.user_obj.cid}] {request.user_obj.full_name}',
inline=False,
)
embed.add_embed_field(
name='Availability',
                value=f'{training_request.start.strftime(time_format)} - {training_request.end.strftime(time_format)}',
inline=False,
)
embed.add_embed_field(name='Level', value=training_request.get_level_display())
embed.add_embed_field(name='Type', value=training_request.get_type_display())
embed.add_embed_field(
name='Remarks',
value=training_request.remarks if training_request.remarks != '' else 'No Remarks Provided',
inline=False,
)
webhook.add_embed(embed)
webhook.execute()
else:
return HttpResponse('The start time must be before the end time.', status=400)
return redirect(reverse('training'))
else:
return render(request, 'request_training.html', {
'page_title': 'Request Training',
'events': Event.objects.all().filter(hidden=False),
'sessions': TrainingSession.objects.all(),
'training_requests': request.user_obj.training_requests.all(),
'types': TrainingRequest._meta.get_field('type').choices,
'levels': TrainingRequest._meta.get_field('level').choices,
})
@require_staff_or_mentor
def view_mentor_history(request):
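    """List each mentor's completed sessions from the last 30 days."""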
mentors = User.objects.filter(training_role__in=['MTR', 'INS'])
return render(request, 'mentor_history.html', {
'page_title': 'Mentor History',
'mentors': [(
mentor.full_name,
mentor.instructor_sessions.filter(start__gte=timezone.now() - timedelta(days=30)).filter(status=1)
) for mentor in mentors]
})
@require_staff_or_mentor
def view_scheduled_sessions(request):
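    """List all sessions that are still scheduled (status 0)."""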
sessions = TrainingSession.objects.filter(status=0)
return render(request, 'scheduled_sessions.html', {
'page_title': 'Scheduled Sessions',
'sessions': sessions,
'sessions_json': json.dumps({session.id: session.start.isoformat() for session in sessions}),
})
@require_staff_or_mentor
def view_student_profile(request, cid):
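    """Show a student's profile with their session history and total training time."""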
student = User.objects.get(cid=cid)
sessions = student.student_sessions.all()
return render(request, 'student_profile.html', {
'page_title': student.full_name,
'student': student,
'training_time': sum([session.duration for session in sessions.filter(status=1)], timedelta()),
'sessions_json': json.dumps({session.id: session.start.isoformat() for session in sessions}),
})
@require_member
def view_training_requests(request):
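    """List upcoming training requests; staff and mentors only."""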
if request.user_obj.is_mentor or request.user_obj.is_staff:
return render(request, 'training_requests.html', {
'page_title': 'Training Requests',
'requests': TrainingRequest.objects.filter(end__gt=timezone.now()).order_by('start'),
})
else:
return HttpResponse(status=403)
@require_POST
@require_mentor
def accept_training_request(request, request_id):
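    """Turn a training request into a scheduled session and notify the student and instructor by email."""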
training_request = TrainingRequest.objects.get(id=request_id)
training_session = TrainingSession(
student=training_request.student,
instructor=request.user_obj,
start=pytz.utc.localize(datetime.strptime(request.POST.get('start'), '%Y-%m-%dT%H:%M:%S.%f')),
end=pytz.utc.localize(datetime.strptime(request.POST.get('end'), '%Y-%m-%dT%H:%M:%S.%f')),
type=training_request.type,
level=training_request.level,
)
training_session.save()
send_mail(
'Training Scheduled!',
render_to_string('emails/request_accepted.html', {'session': training_session}),
os.getenv('NO_REPLY'),
[training_session.student.email, training_session.instructor.email],
)
ActionLog(action=f'{request.user_obj} accepted {training_request.student.full_name}\'s training request.').save()
training_request.delete()
return redirect(reverse('training_requests'))
@require_POST
@require_mentor
def reject_training_request(request, request_id):
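    """Reject a training request and notify the student by email."""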
training_request = TrainingRequest.objects.get(id=request_id)
send_mail(
'Training Request Rejected',
render_to_string('emails/request_rejected.html', {'request': training_request}),
os.getenv('NO_REPLY'),
[training_request.student.email],
)
ActionLog(action=f'{request.user_obj} rejected {training_request.student.full_name}\'s training request.').save()
training_request.delete()
return redirect(reverse('training_requests'))
@require_POST
def cancel_training_request(request, request_id):
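    """Let a student cancel their own pending training request."""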
training_request = TrainingRequest.objects.get(id=request_id)
if request.user_obj == training_request.student:
training_request.delete()
return HttpResponse(status=200)
else:
return HttpResponse('You are unauthorized to perform this action!', status=403)
| 38.738983
| 117
| 0.663546
|
804c5bc2cd8611168cfe6ca8f5c2380936188227
| 578
|
py
|
Python
|
bin/image_resizer.py
|
smrmkt/keras-modeling
|
735c579deb722b4be080c4362fe2696787ab6378
|
[
"BSD-3-Clause"
] | null | null | null |
bin/image_resizer.py
|
smrmkt/keras-modeling
|
735c579deb722b4be080c4362fe2696787ab6378
|
[
"BSD-3-Clause"
] | null | null | null |
bin/image_resizer.py
|
smrmkt/keras-modeling
|
735c579deb722b4be080c4362fe2696787ab6378
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from PIL import Image
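# Resize every image in data/images/original into a 64x64 thumbnail in data/images/resized.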
size = 64, 64
data_path = '../data/images'
in_path = os.path.join(data_path, 'original')
out_path = os.path.join(data_path, 'resized')
in_files = os.listdir(in_path)
for in_file in in_files:
    in_file_path = os.path.join(in_path, in_file)
    out_file = os.path.join(out_path, in_file)
    if in_file_path != out_file:
        try:
            im = Image.open(in_file_path)
            im = im.resize(size, Image.ANTIALIAS)
            im.save(out_file, 'JPEG', quality=100, optimize=True)
        except IOError:
            print("cannot create thumbnail for '%s'" % in_file)
| 27.52381
| 65
| 0.641869
|
e851cd9458d0c949a83c7aa2bac50223a50e557f
| 2,122
|
py
|
Python
|
my_happy_pandas/core/window/numba_.py
|
ggservice007/my-happy-pandas
|
63145d54e452177f7d5b2fc8fdbc1fdf37dd5b16
|
[
"Apache-2.0"
] | null | null | null |
my_happy_pandas/core/window/numba_.py
|
ggservice007/my-happy-pandas
|
63145d54e452177f7d5b2fc8fdbc1fdf37dd5b16
|
[
"Apache-2.0"
] | null | null | null |
my_happy_pandas/core/window/numba_.py
|
ggservice007/my-happy-pandas
|
63145d54e452177f7d5b2fc8fdbc1fdf37dd5b16
|
[
"Apache-2.0"
] | null | null | null |
from typing import Any, Callable, Dict, Optional, Tuple
import numpy as np
from my_happy_pandas._typing import Scalar
from my_happy_pandas.compat._optional import import_optional_dependency
from my_happy_pandas.core.util.numba_ import (
check_kwargs_and_nopython,
get_jit_arguments,
jit_user_function,
)
def generate_numba_apply_func(
args: Tuple,
kwargs: Dict[str, Any],
func: Callable[..., Scalar],
engine_kwargs: Optional[Dict[str, bool]],
):
"""
Generate a numba jitted apply function specified by values from engine_kwargs.
1. jit the user's function
2. Return a rolling apply function with the jitted function inline
Configurations specified in engine_kwargs apply to both the user's
function _AND_ the rolling apply function.
Parameters
----------
args : tuple
*args to be passed into the function
kwargs : dict
**kwargs to be passed into the function
func : function
function to be applied to each window and will be JITed
engine_kwargs : dict
dictionary of arguments to be passed into numba.jit
Returns
-------
Numba function
"""
nopython, nogil, parallel = get_jit_arguments(engine_kwargs)
check_kwargs_and_nopython(kwargs, nopython)
numba_func = jit_user_function(func, nopython, nogil, parallel)
numba = import_optional_dependency("numba")
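    # numba.prange parallelizes the loop over windows when parallel=True;
    # otherwise fall back to the plain built-in range.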
if parallel:
loop_range = numba.prange
else:
loop_range = range
@numba.jit(nopython=nopython, nogil=nogil, parallel=parallel)
def roll_apply(
values: np.ndarray, begin: np.ndarray, end: np.ndarray, minimum_periods: int,
) -> np.ndarray:
result = np.empty(len(begin))
for i in loop_range(len(result)):
start = begin[i]
stop = end[i]
window = values[start:stop]
count_nan = np.sum(np.isnan(window))
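            # Mirror min_periods semantics: only apply the jitted user function
            # when the window holds enough non-NaN values.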
if len(window) - count_nan >= minimum_periods:
result[i] = numba_func(window, *args)
else:
result[i] = np.nan
return result
return roll_apply
| 28.293333
| 85
| 0.661169
|
70456aed0b5a8816948efc60d5669a914749e169
| 3,362
|
py
|
Python
|
qcengine/programs/terachem_pbs.py
|
MolSSI/dqm_compute
|
c171e80c51afc5bc08ac8a84971b526fd33671d3
|
[
"BSD-3-Clause"
] | 105
|
2018-08-15T14:47:27.000Z
|
2022-02-14T01:53:28.000Z
|
qcengine/programs/terachem_pbs.py
|
MolSSI/dqm_compute
|
c171e80c51afc5bc08ac8a84971b526fd33671d3
|
[
"BSD-3-Clause"
] | 338
|
2018-08-18T15:48:25.000Z
|
2022-03-30T09:02:40.000Z
|
qcengine/programs/terachem_pbs.py
|
MolSSI/dqm_compute
|
c171e80c51afc5bc08ac8a84971b526fd33671d3
|
[
"BSD-3-Clause"
] | 74
|
2018-08-28T04:37:04.000Z
|
2022-03-31T06:57:51.000Z
|
"""
Calls TeraChem in its "server mode" via a protobuf interface.
"""
import logging
import os
from typing import TYPE_CHECKING, Optional
from qcelemental.models import AtomicResult
from qcelemental.util import which_import
from .model import ProgramHarness
if TYPE_CHECKING:
from qcelemental.models import AtomicInput
from ..config import TaskConfig
logger = logging.getLogger(__name__)
class TeraChemPBSHarness(ProgramHarness):
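    """Harness for TeraChem running in "server mode" via the tcpb protobuf client."""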
_defaults = {
"name": "terachem_pbs",
"scratch": False,
"thread_safe": False,
"thread_parallel": False,
"node_parallel": False,
"managed_memory": True,
}
class Config(ProgramHarness.Config):
pass
@staticmethod
def found(raise_error: bool = False) -> bool:
"""Whether TeraChemPBS harness is ready for operation.
Parameters
----------
raise_error: bool
Passed on to control negative return between False and ModuleNotFoundError raised.
Returns
-------
bool
        If the tcpb package is found and the server is available, returns True.
        If raise_error is False and the tcpb package is missing and/or the server is unavailable, returns False.
        If raise_error is True and the tcpb package is missing and/or the server is unavailable, the error message is raised.
"""
tcpb_pkg_available = which_import(
"tcpb",
return_bool=True,
raise_error=raise_error,
raise_msg="TeraChem protobuf client package (tcpb) not found. Please install tcpb>=0.7.0.",
)
if not tcpb_pkg_available:
return False
from tcpb.exceptions import ServerError
from tcpb.tcpb import TCProtobufClient
try:
with TCProtobufClient(
host=os.getenv("TERACHEM_PBS_HOST"), port=int(os.getenv("TERACHEM_PBS_PORT"))
) as client:
return client.is_available()
except TypeError as e:
# TERACHEM_PBS_HOST/PORT environment variables unset
msg = "Environment variables 'TERACHEM_PBS_HOST' and 'TERACHEM_PBS_PORT' must be set!"
logger.error(msg)
if raise_error:
raise ValueError(msg) from e
except ServerError as e:
msg = (
f"Unable to connect to TeraChem server at "
f"{os.getenv('TERACHEM_PBS_HOST')}:{os.getenv('TERACHEM_PBS_PORT')}"
)
logger.error(msg)
if raise_error:
raise OSError(msg) from e
return False
    def get_version(self) -> Optional[str]:
"""Returns version of TeraChem Protocol Buffer Server"""
try:
import tcpb
except ModuleNotFoundError:
return None
else:
try:
return tcpb.__version__
except AttributeError:
return None
def compute(self, input_model: "AtomicInput", config: "TaskConfig" = None) -> "AtomicResult":
"""
Submit AtomicInput to TeraChem running in "server mode"
"""
self.found(raise_error=True)
from tcpb.tcpb import TCProtobufClient
with TCProtobufClient(host=os.getenv("TERACHEM_PBS_HOST"), port=int(os.getenv("TERACHEM_PBS_PORT"))) as client:
return client.compute(input_model)
| 32.326923
| 119
| 0.616597
|
882b1842b298522477635d60fa3a851a3bd4d65b
| 137
|
py
|
Python
|
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
print "2 + 3 =", 2+3, "!"
print "9/2 =", 9/2
print "9.0/2 =", 9.0/2
print "9%2 =", 9%2
print "5 > -2?", 5 > -2
print "5 <= -2?", 5 <= -2
| 19.571429
| 25
| 0.408759
|
a9c503a3c7cb8989d9ad5e15e01d8b34d6bcf8d3
| 5,477
|
py
|
Python
|
convertor/formats.py
|
mugoh/audioConvertor
|
c462aec96fffe281d438fac12ce6b046693f15e1
|
[
"Apache-2.0"
] | 7
|
2020-08-05T12:27:45.000Z
|
2022-02-08T23:48:55.000Z
|
convertor/formats.py
|
mugoh/audioConvertor
|
c462aec96fffe281d438fac12ce6b046693f15e1
|
[
"Apache-2.0"
] | null | null | null |
convertor/formats.py
|
mugoh/audioConvertor
|
c462aec96fffe281d438fac12ce6b046693f15e1
|
[
"Apache-2.0"
] | 2
|
2021-04-23T13:39:04.000Z
|
2021-09-01T06:59:54.000Z
|
"""
This module holds the class that makes
subprocess calls to ffmpeg with the received
CLI commands.
"""
import subprocess
import os
import platform
from click import echo, style
from convertor.utils.file_types import require_ffmepg, check_is_video
class Convertor:
"""
Makes calls to subprocesses with arguments
and commands received from the CLI.
"""
def to_audio(self, _in, _out, bitrate, file_format):
"""
Converts input file to audio format
"""
# Default output parameter
# If not current directory, append '/'
if os.path.isdir(_out):
_out = '' if _out == '.' else _out + '/'
_out += self.get_name_from_path(_in,
replace=True) + '.' + file_format
_out = _out.replace('//', '/')
self.out = _out
# File format unchecked for single inputs
if not check_is_video(_in):
msg = " is not a supported media type"
self.abort_conversion(
self.get_name_from_path(_in) + msg)
"""
else:
base_name = os.path.basename(_out)
ext = os.path.splitext(base_name)[1]
_out = _out.replace(ext, '.mp3')
"""
commands = ['ffmpeg', '-i', _in,
'-vn', '-ar', '44100',
'-ac', '2', '-ab',
bitrate, _out]
try:
self.run_convert_commands(commands)
        except FileNotFoundError:
            res = require_ffmepg()
            if not res:
                self.abort_conversion("Dependency not installed.")
def get_name_from_path(self, file_path, replace=False):
"""
Extracts file name from absolute file path.
"""
if replace:
base_name = os.path.basename(file_path)
ext = os.path.splitext(base_name)[1]
_out = base_name.replace(ext, '')
return _out
head, tail = os.path.split(file_path)
return tail or os.path.basename(head)
def run_convert_commands(self, cmds):
"""
Invokes subprocess with commands
required to process a user input call.
"""
try:
subprocess.check_output(cmds)
except subprocess.CalledProcessError as er:
print("Unable to complete conversion\n", er)
else:
echo(style("\nConversion Complete\n", fg='green'))
echo("Saved: " + cmds[len(cmds) - 1])
def is_video(self, given_file):
"""
Checks if given file has a video format based on the
file extension.
"""
        video_extensions = ['mp4', 'flv', 'avi', 'mp3', 'flac']
if not isinstance(given_file, str):
try: # iter in play cmd
given_file = given_file[0]
except TypeError:
given_file = given_file
return any([ext for ext in video_extensions
if given_file.endswith(ext)])
def show_process_message(self):
"""
Displays conversion process start to the user.
"""
return "Converting"
def get_file_save_path(self):
"""
Gives the location of converted files
"""
return self.out
def convert_multiple(self, video_files, out, brate, _format):
"""
Converts all files specified in directory.
"""
for video in video_files:
self.to_audio(os.path.abspath(video),
out, brate, _format)
def load_player(self, playitems, preferred_player):
"""
Opens up audio files in user audio player.
"""
error = False
current_platform = platform.system()
        if preferred_player:
            try:
                open_status = self.open_player(preferred_player, playitems)
                if not open_status:
                    error = True
            except Exception:
                echo(f'Player {preferred_player} missing. '
                     'Try installing it or use something different.')
                error = True
            return error
if current_platform == 'Linux':
self.open_player('xdg-open', playitems)
elif current_platform == 'Darwin':
self.open_player('open', playitems)
elif current_platform == 'Windows':
self.open_player(play_items=playitems)
    def open_player(self, cmd=None, play_items=None):
        """
        Opens user audio player depending on present
        system architecture.
        """
        # Avoid mutable default arguments; only prepend the opener command
        # when one was actually given.
        play_items = play_items or []
        commands = ([cmd] if cmd else []) + play_items
        try:
            subprocess.check_call(commands)
        except subprocess.CalledProcessError:
            return False
        else:
            return True
def abort_conversion(self, message=''):
"""
Terminates app process with message.
"""
exit_message = ' Aborted'
message += exit_message
echo(style('\n' + message, fg='red'))
os.abort()
def split_input_dirs(self, paths):
"""
Gives individual input paths from
a tuple of input paths
"""
for path in paths:
yield path
| 29.605405
| 77
| 0.532956
|
225a7421b8b4bb6c8437a1bcf38f9af98951086d
| 2,742
|
py
|
Python
|
wx_bot.py
|
wolfg1969/my-wechat-app
|
521720de1e91dcce2f37cb034c9557bc7ca7bdf7
|
[
"MIT"
] | 1
|
2016-03-18T14:37:12.000Z
|
2016-03-18T14:37:12.000Z
|
wx_bot.py
|
wolfg1969/my-wechat-app
|
521720de1e91dcce2f37cb034c9557bc7ca7bdf7
|
[
"MIT"
] | null | null | null |
wx_bot.py
|
wolfg1969/my-wechat-app
|
521720de1e91dcce2f37cb034c9557bc7ca7bdf7
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import hashlib
import io
import pytz
import requests
from PIL import Image
from datetime import datetime, timedelta
from wx_app import app, redis_store
__author__ = 'guoyong'
NASA_OPEN_API_KEY = app.config['NASA_OPEN_API_KEY']
BASE_URL = app.config['BASE_URL']
APOD_CACHE_KEY = app.config['APOD_CACHE_KEY']
COMMANDS = {
'h': u'打印此帮助信息',
'apod': u'欣赏每日天文美图',
}
def h(message, wechat):
"""帮助命令"""
help_text = u'命令列表:\n%s\n更多命令, 敬请期待' % ''.join(
['%s - %s\n' % (command, COMMANDS[command]) for command in COMMANDS.keys()])
return wechat.response_text(content=help_text)
def apod(message, wechat):
"""
欣赏每日天文美图
:param message 微信消息
:param wechat 微信接口
:return 包含每日天文美图的微信消息
"""
now = datetime.now(tz=pytz.timezone('Asia/Shanghai'))
yesterday = (now - timedelta(days=1)).strftime('%Y-%m-%d')
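    # Key the cache on yesterday's date so the NASA API is queried at most once per day.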
cache_key = '%s:%s' % (APOD_CACHE_KEY, yesterday)
apod_image_message = redis_store.hgetall(cache_key)
app.logger.debug(apod_image_message)
if not apod_image_message:
app.logger.info('get new apod')
r = requests.get('https://api.nasa.gov/planetary/apod?api_key=%s&date=%s' % (NASA_OPEN_API_KEY, yesterday))
if r.status_code != 200:
return wechat.response_text(content=u'图片获取失败, 请稍后再试')
data = r.json()
# download APOD
image_url = data.get('url')
r = requests.get(image_url, stream=True)
if r.status_code != 200:
return wechat.response_text(content=u'图片获取失败, 请稍后再试')
r.raw.decode_content = True
image_file = io.BytesIO(r.raw.read())
im = Image.open(image_file)
image_w, image_h = im.size
aspect_ratio = image_w / float(image_h)
        new_width = 360
        new_height = int(new_width / aspect_ratio)
        imaged = im.resize((new_width, new_height), Image.ANTIALIAS)
        output = io.BytesIO()
        imaged.save(output, quality=90, format='JPEG')
m = hashlib.md5()
m.update(output.getvalue())
digest = m.hexdigest()
image_cache_key = '%s:%s' % (APOD_CACHE_KEY, digest)
redis_store.set(image_cache_key, output.getvalue())
output.close()
apod_image_message = {
'title': data.get('title'),
'description': u'日期: %s \n图片版权: %s \n数据提供: <open>api.NASA.gov</data>' % (
data.get('date'), data.get('copyright', 'Public')),
'url': 'http://apod.nasa.gov/apod/',
'picurl': '%s/apod-%s.jpg' % (BASE_URL, digest)
}
redis_store.hmset(cache_key, apod_image_message)
redis_store.expire(cache_key, 86400)
return wechat.response_news([apod_image_message])
| 27.148515
| 115
| 0.624362
|
50ef16212820e9f5a7b582abd8942256b2b3176e
| 626
|
py
|
Python
|
packt-social-media-mining/Chap04/facebook_get_page_info.py
|
bitwhys/mining-social-web
|
8d84c85a415d63bd53b8eb441a4258dc914f4d9f
|
[
"BSD-2-Clause"
] | 1
|
2018-04-28T12:38:55.000Z
|
2018-04-28T12:38:55.000Z
|
twitterpackage/Chap04/facebook_get_page_info.py
|
albre116/SyscoSocialMedia
|
a5c3746258cf3804bf8316fe5ae8968e3bb1182a
|
[
"MIT"
] | 7
|
2020-03-24T18:01:12.000Z
|
2021-06-08T20:47:00.000Z
|
packt-social-media-mining/Chap04/facebook_get_page_info.py
|
bitwhys/mining-social-web
|
8d84c85a415d63bd53b8eb441a4258dc914f4d9f
|
[
"BSD-2-Clause"
] | null | null | null |
# Chap04/facebook_get_page_info.py
import os
import json
import facebook
from argparse import ArgumentParser
def get_parser():
parser = ArgumentParser()
parser.add_argument('--page')
return parser
if __name__ == '__main__':
parser = get_parser()
args = parser.parse_args()
token = os.environ.get('FACEBOOK_TEMP_TOKEN')
fields = [
'id',
'name',
'about',
'likes',
'website',
'link'
]
fields = ','.join(fields)
graph = facebook.GraphAPI(token)
page = graph.get_object(args.page, fields=fields)
print(json.dumps(page, indent=4))
| 20.193548
| 53
| 0.623003
|
916c8e5d187c2a2b4c64748e8ba36ddc37a78e4a
| 49,535
|
py
|
Python
|
src/bootstrap/bootstrap.py
|
rakshith-ravi/rust
|
acb5c16fa8acf7fd3b48fc218881f006577bab1a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1
|
2022-01-03T22:01:07.000Z
|
2022-01-03T22:01:07.000Z
|
src/bootstrap/bootstrap.py
|
rakshith-ravi/rust
|
acb5c16fa8acf7fd3b48fc218881f006577bab1a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
src/bootstrap/bootstrap.py
|
rakshith-ravi/rust
|
acb5c16fa8acf7fd3b48fc218881f006577bab1a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import argparse
import contextlib
import datetime
import distutils.version
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
from time import time, sleep
# Acquire a lock on the build directory to make sure that
# we don't cause a race condition while building
# Lock is created in `build_dir/lock.db`
def acquire_lock(build_dir):
try:
import sqlite3
path = os.path.join(build_dir, "lock.db")
try:
con = sqlite3.Connection(path, timeout=0)
curs = con.cursor()
curs.execute("BEGIN EXCLUSIVE")
# The lock is released when the cursor is dropped
return curs
# If the database is busy then lock has already been acquired
# so we wait for the lock.
# We retry every quarter second so that execution is passed back to python
# so that it can handle signals
except sqlite3.OperationalError:
del con
del curs
print("Waiting for lock on build directory")
con = sqlite3.Connection(path, timeout=0.25)
curs = con.cursor()
while True:
try:
curs.execute("BEGIN EXCLUSIVE")
break
except sqlite3.OperationalError:
pass
sleep(0.25)
return curs
except ImportError:
print("warning: sqlite3 not available in python, skipping build directory lock")
print("please file an issue on rust-lang/rust")
print("this is not a problem for non-concurrent x.py invocations")
return None
def support_xz():
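    """Return True if this Python's tarfile module supports xz compression."""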
try:
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_path = temp_file.name
with tarfile.open(temp_path, "w:xz"):
pass
return True
except tarfile.CompressionError:
return False
def get(base, url, path, checksums, verbose=False, do_verify=True, help_on_error=None):
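    """Download base/url to path, verifying it against the stage0 checksums unless do_verify is False."""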
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_path = temp_file.name
try:
if do_verify:
if url not in checksums:
raise RuntimeError(("src/stage0.json doesn't contain a checksum for {}. "
"Pre-built artifacts might not available for this "
"target at this time, see https://doc.rust-lang.org/nightly"
"/rustc/platform-support.html for more information.")
.format(url))
sha256 = checksums[url]
if os.path.exists(path):
if verify(path, sha256, False):
if verbose:
print("using already-download file", path)
return
else:
if verbose:
print("ignoring already-download file",
path, "due to failed verification")
os.unlink(path)
download(temp_path, "{}/{}".format(base, url), True, verbose, help_on_error=help_on_error)
if do_verify and not verify(temp_path, sha256, verbose):
raise RuntimeError("failed verification")
if verbose:
print("moving {} to {}".format(temp_path, path))
shutil.move(temp_path, path)
finally:
if os.path.isfile(temp_path):
if verbose:
print("removing", temp_path)
os.unlink(temp_path)
def download(path, url, probably_big, verbose, help_on_error=None):
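    """Download url to path, retrying a few times on spurious failures."""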
for _ in range(0, 4):
try:
_download(path, url, probably_big, verbose, True, help_on_error=help_on_error)
return
except RuntimeError:
print("\nspurious failure, trying again")
_download(path, url, probably_big, verbose, False, help_on_error=help_on_error)
def _download(path, url, probably_big, verbose, exception, help_on_error=None):
# Try to use curl (potentially available on win32
# https://devblogs.microsoft.com/commandline/tar-and-curl-come-to-windows/)
# If an error occurs:
# - If we are on win32 fallback to powershell
# - Otherwise raise the error if appropriate
if probably_big or verbose:
print("downloading {}".format(url))
platform_is_win32 = sys.platform == 'win32'
try:
if probably_big or verbose:
option = "-#"
else:
option = "-s"
        # If curl is not present on Win32, we should not sys.exit
# but raise `CalledProcessError` or `OSError` instead
require(["curl", "--version"], exception=platform_is_win32)
run(["curl", option,
"-L", # Follow redirect.
"-y", "30", "-Y", "10", # timeout if speed is < 10 bytes/sec for > 30 seconds
"--connect-timeout", "30", # timeout if cannot connect within 30 seconds
"--retry", "3", "-Sf", "-o", path, url],
verbose=verbose,
exception=True, # Will raise RuntimeError on failure
help_on_error=help_on_error)
except (subprocess.CalledProcessError, OSError, RuntimeError):
# see http://serverfault.com/questions/301128/how-to-download
if platform_is_win32:
run(["PowerShell.exe", "/nologo", "-Command",
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
"(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
verbose=verbose,
exception=exception)
# Check if the RuntimeError raised by run(curl) should be silenced
elif verbose or exception:
raise
def verify(path, expected, verbose):
"""Check if the sha256 sum of the given path is valid"""
if verbose:
print("verifying", path)
with open(path, "rb") as source:
found = hashlib.sha256(source.read()).hexdigest()
verified = found == expected
if not verified:
print("invalid checksum:\n"
" found: {}\n"
" expected: {}".format(found, expected))
return verified
def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
"""Unpack the given tarball file"""
print("extracting", tarball)
fname = os.path.basename(tarball).replace(tarball_suffix, "")
with contextlib.closing(tarfile.open(tarball)) as tar:
for member in tar.getnames():
if "/" not in member:
continue
name = member.replace(fname + "/", "", 1)
            if match is not None:
                if not name.startswith(match):
                    continue
                name = name[len(match) + 1:]
dst_path = os.path.join(dst, name)
if verbose:
print(" extracting", member)
tar.extract(member, dst)
src_path = os.path.join(dst, member)
if os.path.isdir(src_path) and os.path.exists(dst_path):
continue
shutil.move(src_path, dst_path)
shutil.rmtree(os.path.join(dst, fname))
def run(args, verbose=False, exception=False, is_bootstrap=False, help_on_error=None, **kwargs):
"""Run a child program in a new process"""
if verbose:
print("running: " + ' '.join(args))
sys.stdout.flush()
# Use Popen here instead of call() as it apparently allows powershell on
# Windows to not lock up waiting for input presumably.
ret = subprocess.Popen(args, **kwargs)
code = ret.wait()
if code != 0:
err = "failed to run: " + ' '.join(args)
if help_on_error is not None:
err += "\n" + help_on_error
if verbose or exception:
raise RuntimeError(err)
# For most failures, we definitely do want to print this error, or the user will have no
# idea what went wrong. But when we've successfully built bootstrap and it failed, it will
# have already printed an error above, so there's no need to print the exact command we're
# running.
if is_bootstrap:
sys.exit(1)
else:
sys.exit(err)
def require(cmd, exit=True, exception=False):
'''Run a command, returning its output.
On error,
If `exception` is `True`, raise the error
Otherwise If `exit` is `True`, exit the process
Else return None.'''
try:
return subprocess.check_output(cmd).strip()
except (subprocess.CalledProcessError, OSError) as exc:
if exception:
raise
elif exit:
print("error: unable to run `{}`: {}".format(' '.join(cmd), exc))
print("Please make sure it's installed and in the path.")
sys.exit(1)
return None
def format_build_time(duration):
"""Return a nicer format for build time
>>> format_build_time('300')
'0:05:00'
"""
return str(datetime.timedelta(seconds=int(duration)))
def default_build_triple(verbose):
"""Build triple as in LLVM"""
# If the user already has a host build triple with an existing `rustc`
# install, use their preference. This fixes most issues with Windows builds
# being detected as GNU instead of MSVC.
default_encoding = sys.getdefaultencoding()
try:
version = subprocess.check_output(["rustc", "--version", "--verbose"],
stderr=subprocess.DEVNULL)
version = version.decode(default_encoding)
host = next(x for x in version.split('\n') if x.startswith("host: "))
triple = host.split("host: ")[1]
if verbose:
print("detected default triple {} from pre-installed rustc".format(triple))
return triple
except Exception as e:
if verbose:
print("pre-installed rustc not detected: {}".format(e))
print("falling back to auto-detect")
required = sys.platform != 'win32'
ostype = require(["uname", "-s"], exit=required)
cputype = require(['uname', '-m'], exit=required)
# If we do not have `uname`, assume Windows.
if ostype is None or cputype is None:
return 'x86_64-pc-windows-msvc'
ostype = ostype.decode(default_encoding)
cputype = cputype.decode(default_encoding)
# The goal here is to come up with the same triple as LLVM would,
# at least for the subset of platforms we're willing to target.
ostype_mapper = {
'Darwin': 'apple-darwin',
'DragonFly': 'unknown-dragonfly',
'FreeBSD': 'unknown-freebsd',
'Haiku': 'unknown-haiku',
'NetBSD': 'unknown-netbsd',
'OpenBSD': 'unknown-openbsd'
}
# Consider the direct transformation first and then the special cases
if ostype in ostype_mapper:
ostype = ostype_mapper[ostype]
elif ostype == 'Linux':
os_from_sp = subprocess.check_output(
['uname', '-o']).strip().decode(default_encoding)
if os_from_sp == 'Android':
ostype = 'linux-android'
else:
ostype = 'unknown-linux-gnu'
elif ostype == 'SunOS':
ostype = 'pc-solaris'
# On Solaris, uname -m will return a machine classification instead
# of a cpu type, so uname -p is recommended instead. However, the
# output from that option is too generic for our purposes (it will
# always emit 'i386' on x86/amd64 systems). As such, isainfo -k
# must be used instead.
cputype = require(['isainfo', '-k']).decode(default_encoding)
# sparc cpus have sun as a target vendor
if 'sparc' in cputype:
ostype = 'sun-solaris'
elif ostype.startswith('MINGW'):
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
# msys1 is always i686 and msys2 is always x86_64.
# instead, msys defines $MSYSTEM which is MINGW32 on i686 and
# MINGW64 on x86_64.
ostype = 'pc-windows-gnu'
cputype = 'i686'
if os.environ.get('MSYSTEM') == 'MINGW64':
cputype = 'x86_64'
elif ostype.startswith('MSYS'):
ostype = 'pc-windows-gnu'
elif ostype.startswith('CYGWIN_NT'):
cputype = 'i686'
if ostype.endswith('WOW64'):
cputype = 'x86_64'
ostype = 'pc-windows-gnu'
elif sys.platform == 'win32':
# Some Windows platforms might have a `uname` command that returns a
# non-standard string (e.g. gnuwin32 tools returns `windows32`). In
# these cases, fall back to using sys.platform.
return 'x86_64-pc-windows-msvc'
else:
err = "unknown OS type: {}".format(ostype)
sys.exit(err)
if cputype in ['powerpc', 'riscv'] and ostype == 'unknown-freebsd':
cputype = subprocess.check_output(
['uname', '-p']).strip().decode(default_encoding)
cputype_mapper = {
'BePC': 'i686',
'aarch64': 'aarch64',
'amd64': 'x86_64',
'arm64': 'aarch64',
'i386': 'i686',
'i486': 'i686',
'i686': 'i686',
'i786': 'i686',
'm68k': 'm68k',
'powerpc': 'powerpc',
'powerpc64': 'powerpc64',
'powerpc64le': 'powerpc64le',
'ppc': 'powerpc',
'ppc64': 'powerpc64',
'ppc64le': 'powerpc64le',
'riscv64': 'riscv64gc',
's390x': 's390x',
'x64': 'x86_64',
'x86': 'i686',
'x86-64': 'x86_64',
'x86_64': 'x86_64'
}
# Consider the direct transformation first and then the special cases
if cputype in cputype_mapper:
cputype = cputype_mapper[cputype]
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
if ostype == 'linux-android':
ostype = 'linux-androideabi'
elif ostype == 'unknown-freebsd':
cputype = subprocess.check_output(
['uname', '-p']).strip().decode(default_encoding)
ostype = 'unknown-freebsd'
elif cputype == 'armv6l':
cputype = 'arm'
if ostype == 'linux-android':
ostype = 'linux-androideabi'
else:
ostype += 'eabihf'
elif cputype in {'armv7l', 'armv8l'}:
cputype = 'armv7'
if ostype == 'linux-android':
ostype = 'linux-androideabi'
else:
ostype += 'eabihf'
elif cputype == 'mips':
if sys.byteorder == 'big':
cputype = 'mips'
elif sys.byteorder == 'little':
cputype = 'mipsel'
else:
raise ValueError("unknown byteorder: {}".format(sys.byteorder))
elif cputype == 'mips64':
if sys.byteorder == 'big':
cputype = 'mips64'
elif sys.byteorder == 'little':
cputype = 'mips64el'
else:
raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
# only the n64 ABI is supported, indicate it
ostype += 'abi64'
elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
pass
else:
err = "unknown cpu type: {}".format(cputype)
sys.exit(err)
return "{}-{}".format(cputype, ostype)
@contextlib.contextmanager
def output(filepath):
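    """Write to filepath via a temporary file, renaming into place on success so readers never see a partial file."""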
tmp = filepath + '.tmp'
with open(tmp, 'w') as f:
yield f
try:
if os.path.exists(filepath):
os.remove(filepath) # PermissionError/OSError on Win32 if in use
except OSError:
shutil.copy2(tmp, filepath)
os.remove(tmp)
return
os.rename(tmp, filepath)
class Stage0Toolchain:
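    """The date/version pair describing a stage0 toolchain entry from src/stage0.json."""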
def __init__(self, stage0_payload):
self.date = stage0_payload["date"]
self.version = stage0_payload["version"]
def channel(self):
return self.version + "-" + self.date
class RustBuild(object):
"""Provide all the methods required to build Rust"""
def __init__(self):
self.checksums_sha256 = {}
self.stage0_compiler = None
self.stage0_rustfmt = None
self._download_url = ''
self.build = ''
self.build_dir = ''
self.clean = False
self.config_toml = ''
self.rust_root = ''
self.use_locked_deps = ''
self.use_vendored_sources = ''
self.verbose = False
self.git_version = None
self.nix_deps_dir = None
self.rustc_commit = None
def download_toolchain(self, stage0=True, rustc_channel=None):
"""Fetch the build system for Rust, written in Rust
This method will build a cache directory, then it will fetch the
tarball which has the stage0 compiler used to then bootstrap the Rust
compiler itself.
Each downloaded tarball is extracted, after that, the script
will move all the content to the right place.
"""
if rustc_channel is None:
rustc_channel = self.stage0_compiler.version
bin_root = self.bin_root(stage0)
key = self.stage0_compiler.date
if not stage0:
key += str(self.rustc_commit)
if self.rustc(stage0).startswith(bin_root) and \
(not os.path.exists(self.rustc(stage0)) or
self.program_out_of_date(self.rustc_stamp(stage0), key)):
if os.path.exists(bin_root):
shutil.rmtree(bin_root)
tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz'
filename = "rust-std-{}-{}{}".format(
rustc_channel, self.build, tarball_suffix)
pattern = "rust-std-{}".format(self.build)
self._download_component_helper(filename, pattern, tarball_suffix, stage0)
filename = "rustc-{}-{}{}".format(rustc_channel, self.build,
tarball_suffix)
self._download_component_helper(filename, "rustc", tarball_suffix, stage0)
# download-rustc doesn't need its own cargo, it can just use beta's.
if stage0:
filename = "cargo-{}-{}{}".format(rustc_channel, self.build,
tarball_suffix)
self._download_component_helper(filename, "cargo", tarball_suffix)
self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root))
else:
filename = "rustc-dev-{}-{}{}".format(rustc_channel, self.build, tarball_suffix)
self._download_component_helper(
filename, "rustc-dev", tarball_suffix, stage0
)
self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root))
self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root))
lib_dir = "{}/lib".format(bin_root)
for lib in os.listdir(lib_dir):
if lib.endswith(".so"):
self.fix_bin_or_dylib(os.path.join(lib_dir, lib))
with output(self.rustc_stamp(stage0)) as rust_stamp:
rust_stamp.write(key)
if self.rustfmt() and self.rustfmt().startswith(bin_root) and (
not os.path.exists(self.rustfmt())
or self.program_out_of_date(
self.rustfmt_stamp(),
"" if self.stage0_rustfmt is None else self.stage0_rustfmt.channel()
)
):
if self.stage0_rustfmt is not None:
tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz'
filename = "rustfmt-{}-{}{}".format(
self.stage0_rustfmt.version, self.build, tarball_suffix,
)
self._download_component_helper(
filename, "rustfmt-preview", tarball_suffix, key=self.stage0_rustfmt.date
)
self.fix_bin_or_dylib("{}/bin/rustfmt".format(bin_root))
self.fix_bin_or_dylib("{}/bin/cargo-fmt".format(bin_root))
with output(self.rustfmt_stamp()) as rustfmt_stamp:
rustfmt_stamp.write(self.stage0_rustfmt.channel())
def _download_component_helper(
self, filename, pattern, tarball_suffix, stage0=True, key=None
):
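        """Download and unpack one dist component into the appropriate bin root."""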
if key is None:
if stage0:
key = self.stage0_compiler.date
else:
key = self.rustc_commit
cache_dst = os.path.join(self.build_dir, "cache")
rustc_cache = os.path.join(cache_dst, key)
if not os.path.exists(rustc_cache):
os.makedirs(rustc_cache)
if stage0:
base = self._download_url
url = "dist/{}".format(key)
else:
base = "https://ci-artifacts.rust-lang.org"
url = "rustc-builds/{}".format(self.rustc_commit)
tarball = os.path.join(rustc_cache, filename)
if not os.path.exists(tarball):
get(
base,
"{}/{}".format(url, filename),
tarball,
self.checksums_sha256,
verbose=self.verbose,
do_verify=stage0,
)
unpack(tarball, tarball_suffix, self.bin_root(stage0), match=pattern, verbose=self.verbose)
def fix_bin_or_dylib(self, fname):
"""Modifies the interpreter section of 'fname' to fix the dynamic linker,
or the RPATH section, to fix the dynamic library search path
This method is only required on NixOS and uses the PatchELF utility to
change the interpreter/RPATH of ELF executables.
Please see https://nixos.org/patchelf.html for more information
"""
default_encoding = sys.getdefaultencoding()
try:
ostype = subprocess.check_output(
['uname', '-s']).strip().decode(default_encoding)
except subprocess.CalledProcessError:
return
except OSError as reason:
if getattr(reason, 'winerror', None) is not None:
return
raise reason
if ostype != "Linux":
return
# If the user has asked binaries to be patched for Nix, then
# don't check for NixOS or `/lib`, just continue to the patching.
if self.get_toml('patch-binaries-for-nix', 'build') != 'true':
# Use `/etc/os-release` instead of `/etc/NIXOS`.
# The latter one does not exist on NixOS when using tmpfs as root.
try:
with open("/etc/os-release", "r") as f:
if not any(l.strip() in ["ID=nixos", "ID='nixos'", 'ID="nixos"'] for l in f):
return
except FileNotFoundError:
return
if os.path.exists("/lib"):
return
# At this point we're pretty sure the user is running NixOS or
# using Nix
nix_os_msg = "info: you seem to be using Nix. Attempting to patch"
print(nix_os_msg, fname)
# Only build `.nix-deps` once.
nix_deps_dir = self.nix_deps_dir
if not nix_deps_dir:
# Run `nix-build` to "build" each dependency (which will likely reuse
# the existing `/nix/store` copy, or at most download a pre-built copy).
#
# Importantly, we create a gc-root called `.nix-deps` in the `build/`
# directory, but still reference the actual `/nix/store` path in the rpath
# as it makes it significantly more robust against changes to the location of
# the `.nix-deps` location.
#
# bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
# zlib: Needed as a system dependency of `libLLVM-*.so`.
# patchelf: Needed for patching ELF binaries (see doc comment above).
nix_deps_dir = "{}/{}".format(self.build_dir, ".nix-deps")
nix_expr = '''
with (import <nixpkgs> {});
symlinkJoin {
name = "rust-stage0-dependencies";
paths = [
zlib
patchelf
stdenv.cc.bintools
];
}
'''
try:
subprocess.check_output([
"nix-build", "-E", nix_expr, "-o", nix_deps_dir,
])
except subprocess.CalledProcessError as reason:
print("warning: failed to call nix-build:", reason)
return
self.nix_deps_dir = nix_deps_dir
patchelf = "{}/bin/patchelf".format(nix_deps_dir)
rpath_entries = [
# Relative default, all binary and dynamic libraries we ship
# appear to have this (even when `../lib` is redundant).
"$ORIGIN/../lib",
os.path.join(os.path.realpath(nix_deps_dir), "lib")
]
patchelf_args = ["--set-rpath", ":".join(rpath_entries)]
if not fname.endswith(".so"):
            # Finally, set the correct .interp for binaries
with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker:
patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()]
try:
subprocess.check_output([patchelf] + patchelf_args + [fname])
except subprocess.CalledProcessError as reason:
print("warning: failed to call patchelf:", reason)
return
# If `download-rustc` is set, download the most recent commit with CI artifacts
def maybe_download_ci_toolchain(self):
# If `download-rustc` is not set, default to rebuilding.
download_rustc = self.get_toml("download-rustc", section="rust")
if download_rustc is None or download_rustc == "false":
return None
assert download_rustc == "true" or download_rustc == "if-unchanged", download_rustc
# Handle running from a directory other than the top level
rev_parse = ["git", "rev-parse", "--show-toplevel"]
top_level = subprocess.check_output(rev_parse, universal_newlines=True).strip()
compiler = "{}/compiler/".format(top_level)
library = "{}/library/".format(top_level)
# Look for a version to compare to based on the current commit.
# Only commits merged by bors will have CI artifacts.
merge_base = [
"git", "rev-list", "--author=bors@rust-lang.org", "-n1",
"--first-parent", "HEAD"
]
commit = subprocess.check_output(merge_base, universal_newlines=True).strip()
if not commit:
print("error: could not find commit hash for downloading rustc")
print("help: maybe your repository history is too shallow?")
print("help: consider disabling `download-rustc`")
print("help: or fetch enough history to include one upstream commit")
            sys.exit(1)
# Warn if there were changes to the compiler or standard library since the ancestor commit.
status = subprocess.call(["git", "diff-index", "--quiet", commit, "--", compiler, library])
if status != 0:
if download_rustc == "if-unchanged":
if self.verbose:
print("warning: saw changes to compiler/ or library/ since {}; " \
"ignoring `download-rustc`".format(commit))
return None
print("warning: `download-rustc` is enabled, but there are changes to " \
"compiler/ or library/")
if self.verbose:
print("using downloaded stage2 artifacts from CI (commit {})".format(commit))
self.rustc_commit = commit
# FIXME: support downloading artifacts from the beta channel
self.download_toolchain(False, "nightly")
def rustc_stamp(self, stage0):
"""Return the path for .rustc-stamp at the given stage
>>> rb = RustBuild()
>>> rb.build_dir = "build"
>>> rb.rustc_stamp(True) == os.path.join("build", "stage0", ".rustc-stamp")
True
>>> rb.rustc_stamp(False) == os.path.join("build", "ci-rustc", ".rustc-stamp")
True
"""
return os.path.join(self.bin_root(stage0), '.rustc-stamp')
def rustfmt_stamp(self):
"""Return the path for .rustfmt-stamp
>>> rb = RustBuild()
>>> rb.build_dir = "build"
>>> rb.rustfmt_stamp() == os.path.join("build", "stage0", ".rustfmt-stamp")
True
"""
return os.path.join(self.bin_root(True), '.rustfmt-stamp')
def program_out_of_date(self, stamp_path, key):
"""Check if the given program stamp is out of date"""
if not os.path.exists(stamp_path) or self.clean:
return True
with open(stamp_path, 'r') as stamp:
return key != stamp.read()
def bin_root(self, stage0):
"""Return the binary root directory for the given stage
>>> rb = RustBuild()
>>> rb.build_dir = "build"
>>> rb.bin_root(True) == os.path.join("build", "stage0")
True
>>> rb.bin_root(False) == os.path.join("build", "ci-rustc")
True
        When the 'build' property is given, the result is a nested directory:
>>> rb.build = "devel"
>>> rb.bin_root(True) == os.path.join("build", "devel", "stage0")
True
"""
if stage0:
subdir = "stage0"
else:
subdir = "ci-rustc"
return os.path.join(self.build_dir, self.build, subdir)
def get_toml(self, key, section=None):
"""Returns the value of the given key in config.toml, otherwise returns None
>>> rb = RustBuild()
>>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"'
>>> rb.get_toml("key2")
'value2'
If the key does not exist, the result is None:
>>> rb.get_toml("key3") is None
True
Optionally also matches the section the key appears in
>>> rb.config_toml = '[a]\\nkey = "value1"\\n[b]\\nkey = "value2"'
>>> rb.get_toml('key', 'a')
'value1'
>>> rb.get_toml('key', 'b')
'value2'
>>> rb.get_toml('key', 'c') is None
True
>>> rb.config_toml = 'key1 = true'
>>> rb.get_toml("key1")
'true'
"""
cur_section = None
for line in self.config_toml.splitlines():
section_match = re.match(r'^\s*\[(.*)\]\s*$', line)
if section_match is not None:
cur_section = section_match.group(1)
match = re.match(r'^{}\s*=(.*)$'.format(key), line)
if match is not None:
value = match.group(1)
if section is None or section == cur_section:
return self.get_string(value) or value.strip()
return None
def cargo(self):
"""Return config path for cargo"""
return self.program_config('cargo')
def rustc(self, stage0):
"""Return config path for rustc"""
return self.program_config('rustc', stage0)
def rustfmt(self):
"""Return config path for rustfmt"""
if self.stage0_rustfmt is None:
return None
return self.program_config('rustfmt')
def program_config(self, program, stage0=True):
"""Return config path for the given program at the given stage
>>> rb = RustBuild()
>>> rb.config_toml = 'rustc = "rustc"\\n'
>>> rb.program_config('rustc')
'rustc'
>>> rb.config_toml = ''
>>> cargo_path = rb.program_config('cargo', True)
>>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(True),
... "bin", "cargo")
True
>>> cargo_path = rb.program_config('cargo', False)
>>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(False),
... "bin", "cargo")
True
"""
config = self.get_toml(program)
if config:
return os.path.expanduser(config)
return os.path.join(self.bin_root(stage0), "bin", "{}{}".format(
program, self.exe_suffix()))
@staticmethod
def get_string(line):
"""Return the value between double quotes
>>> RustBuild.get_string(' "devel" ')
'devel'
>>> RustBuild.get_string(" 'devel' ")
'devel'
>>> RustBuild.get_string('devel') is None
True
>>> RustBuild.get_string(' "devel ')
''
"""
start = line.find('"')
if start != -1:
end = start + 1 + line[start + 1:].find('"')
return line[start + 1:end]
start = line.find('\'')
if start != -1:
end = start + 1 + line[start + 1:].find('\'')
return line[start + 1:end]
return None
@staticmethod
def exe_suffix():
"""Return a suffix for executables"""
if sys.platform == 'win32':
return '.exe'
return ''
def bootstrap_binary(self):
"""Return the path of the bootstrap binary
>>> rb = RustBuild()
>>> rb.build_dir = "build"
>>> rb.bootstrap_binary() == os.path.join("build", "bootstrap",
... "debug", "bootstrap")
True
"""
return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap")
def build_bootstrap(self):
"""Build bootstrap"""
print("Building rustbuild")
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)
env = os.environ.copy()
# `CARGO_BUILD_TARGET` breaks bootstrap build.
# See also: <https://github.com/rust-lang/rust/issues/70208>.
if "CARGO_BUILD_TARGET" in env:
del env["CARGO_BUILD_TARGET"]
env["CARGO_TARGET_DIR"] = build_dir
env["RUSTC"] = self.rustc(True)
env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
(os.pathsep + env["LD_LIBRARY_PATH"]) \
if "LD_LIBRARY_PATH" in env else ""
env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
(os.pathsep + env["DYLD_LIBRARY_PATH"]) \
if "DYLD_LIBRARY_PATH" in env else ""
env["LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
# preserve existing RUSTFLAGS
env.setdefault("RUSTFLAGS", "")
build_section = "target.{}".format(self.build)
target_features = []
if self.get_toml("crt-static", build_section) == "true":
target_features += ["+crt-static"]
elif self.get_toml("crt-static", build_section) == "false":
target_features += ["-crt-static"]
if target_features:
env["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features))
target_linker = self.get_toml("linker", build_section)
if target_linker is not None:
env["RUSTFLAGS"] += " -C linker=" + target_linker
env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
env["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros"
if self.get_toml("deny-warnings", "rust") != "false":
env["RUSTFLAGS"] += " -Dwarnings"
env["PATH"] = os.path.join(self.bin_root(True), "bin") + \
os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
raise Exception("no cargo executable found at `{}`".format(
self.cargo()))
args = [self.cargo(), "build", "--manifest-path",
os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
for _ in range(0, self.verbose):
args.append("--verbose")
if self.use_locked_deps:
args.append("--locked")
if self.use_vendored_sources:
args.append("--frozen")
run(args, env=env, verbose=self.verbose)
def build_triple(self):
"""Build triple as in LLVM
Note that `default_build_triple` is moderately expensive,
so use `self.build` where possible.
"""
config = self.get_toml('build')
if config:
return config
return default_build_triple(self.verbose)
def check_submodule(self, module):
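        """Start `git rev-parse HEAD` in the submodule's directory and return the Popen handle."""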
checked_out = subprocess.Popen(["git", "rev-parse", "HEAD"],
cwd=os.path.join(self.rust_root, module),
stdout=subprocess.PIPE)
return checked_out
def update_submodule(self, module, checked_out, recorded_submodules):
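        """Sync and update one submodule when its checked-out commit differs from the one recorded in HEAD."""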
module_path = os.path.join(self.rust_root, module)
default_encoding = sys.getdefaultencoding()
checked_out = checked_out.communicate()[0].decode(default_encoding).strip()
if recorded_submodules[module] == checked_out:
return
print("Updating submodule", module)
run(["git", "submodule", "-q", "sync", module],
cwd=self.rust_root, verbose=self.verbose)
update_args = ["git", "submodule", "update", "--init", "--recursive", "--depth=1"]
if self.git_version >= distutils.version.LooseVersion("2.11.0"):
update_args.append("--progress")
update_args.append(module)
try:
run(update_args, cwd=self.rust_root, verbose=self.verbose, exception=True)
except RuntimeError:
print("Failed updating submodule. This is probably due to uncommitted local changes.")
print('Either stash the changes by running "git stash" within the submodule\'s')
print('directory, reset them by running "git reset --hard", or commit them.')
print("To reset all submodules' changes run", end=" ")
print('"git submodule foreach --recursive git reset --hard".')
raise SystemExit(1)
run(["git", "reset", "-q", "--hard"],
cwd=module_path, verbose=self.verbose)
run(["git", "clean", "-qdfx"],
cwd=module_path, verbose=self.verbose)
def update_submodules(self):
"""Update submodules"""
has_git = os.path.exists(os.path.join(self.rust_root, ".git"))
# This just arbitrarily checks for cargo, but any workspace member in
# a submodule would work.
has_submodules = os.path.exists(os.path.join(self.rust_root, "src/tools/cargo/Cargo.toml"))
if not has_git and not has_submodules:
print("This is not a git repository, and the requisite git submodules were not found.")
print("If you downloaded the source from https://github.com/rust-lang/rust/releases,")
print("those sources will not work. Instead, consider downloading from the source")
print("releases linked at")
print("https://forge.rust-lang.org/infra/other-installation-methods.html#source-code")
print("or clone the repository at https://github.com/rust-lang/rust/.")
raise SystemExit(1)
if not has_git or self.get_toml('submodules') == "false":
return
default_encoding = sys.getdefaultencoding()
# check the existence and version of 'git' command
git_version_str = require(['git', '--version']).split()[2].decode(default_encoding)
self.git_version = distutils.version.LooseVersion(git_version_str)
start_time = time()
print('Updating only changed submodules')
default_encoding = sys.getdefaultencoding()
# Only update submodules that are needed to build bootstrap. These are needed because Cargo
# currently requires everything in a workspace to be "locally present" when starting a
# build, and will give a hard error if any Cargo.toml files are missing.
# FIXME: Is there a way to avoid cloning these eagerly? Bootstrap itself doesn't need to
# share a workspace with any tools - maybe it could be excluded from the workspace?
# That will still require cloning the submodules the second you check the standard
# library, though...
# FIXME: Is there a way to avoid hard-coding the submodules required?
# WARNING: keep this in sync with the submodules hard-coded in bootstrap/lib.rs
submodules = [
"src/tools/rust-installer",
"src/tools/cargo",
"src/tools/rls",
"src/tools/miri",
"library/backtrace",
"library/stdarch"
]
# If build.vendor is set in config.toml, we must update rust-analyzer also.
# Otherwise, the bootstrap will fail (#96456).
if self.use_vendored_sources:
submodules.append("src/tools/rust-analyzer")
filtered_submodules = []
submodules_names = []
for module in submodules:
check = self.check_submodule(module)
filtered_submodules.append((module, check))
submodules_names.append(module)
recorded = subprocess.Popen(["git", "ls-tree", "HEAD"] + submodules_names,
cwd=self.rust_root, stdout=subprocess.PIPE)
recorded = recorded.communicate()[0].decode(default_encoding).strip().splitlines()
# { filename: hash }
recorded_submodules = {}
for data in recorded:
# [mode, kind, hash, filename]
data = data.split()
recorded_submodules[data[3]] = data[2]
for module in filtered_submodules:
self.update_submodule(module[0], module[1], recorded_submodules)
print(" Submodules updated in %.2f seconds" % (time() - start_time))
def set_dist_environment(self, url):
"""Set download URL for normal environment"""
if 'RUSTUP_DIST_SERVER' in os.environ:
self._download_url = os.environ['RUSTUP_DIST_SERVER']
else:
self._download_url = url
def check_vendored_status(self):
"""Check that vendoring is configured properly"""
vendor_dir = os.path.join(self.rust_root, 'vendor')
if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
if os.getuid() == 0:
self.use_vendored_sources = True
print('info: looks like you\'re trying to run this command as root')
print(' and so in order to preserve your $HOME this will now')
print(' use vendored sources by default.')
if not os.path.exists(vendor_dir):
print('error: vendoring required, but vendor directory does not exist.')
print(' Run `cargo vendor` without sudo to initialize the '
'vendor directory.')
raise Exception("{} not found".format(vendor_dir))
if self.use_vendored_sources:
config = ("[source.crates-io]\n"
"replace-with = 'vendored-sources'\n"
"registry = 'https://example.com'\n"
"\n"
"[source.vendored-sources]\n"
"directory = '{}/vendor'\n"
.format(self.rust_root))
if not os.path.exists('.cargo'):
os.makedirs('.cargo')
with output('.cargo/config') as cargo_config:
cargo_config.write(config)
else:
print('info: using vendored source, but .cargo/config is already present.')
print(' Reusing the current configuration file. But you may want to '
'configure vendoring like this:')
print(config)
else:
if os.path.exists('.cargo'):
shutil.rmtree('.cargo')
def ensure_vendored(self):
"""Ensure that the vendored sources are available if needed"""
vendor_dir = os.path.join(self.rust_root, 'vendor')
# Note that this does not handle updating the vendored dependencies if
# the rust git repository is updated. Normal development usually does
# not use vendoring, so hopefully this isn't too much of a problem.
if self.use_vendored_sources and not os.path.exists(vendor_dir):
run([
self.cargo(),
"vendor",
"--sync=./src/tools/rust-analyzer/Cargo.toml",
"--sync=./compiler/rustc_codegen_cranelift/Cargo.toml",
], verbose=self.verbose, cwd=self.rust_root)
def bootstrap(help_triggered):
"""Configure, fetch, build and run the initial bootstrap"""
# If the user is asking for help, let them know that the whole download-and-build
# process has to happen before anything is printed out.
if help_triggered:
print("info: Downloading and building bootstrap before processing --help")
print(" command. See src/bootstrap/README.md for help with common")
print(" commands.")
parser = argparse.ArgumentParser(description='Build rust')
parser.add_argument('--config')
parser.add_argument('--build')
parser.add_argument('--clean', action='store_true')
parser.add_argument('-v', '--verbose', action='count', default=0)
args = [a for a in sys.argv if a != '-h' and a != '--help']
args, _ = parser.parse_known_args(args)
# Configure initial bootstrap
build = RustBuild()
build.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
build.verbose = args.verbose
build.clean = args.clean
# Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`,
# then `config.toml` in the root directory.
toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG')
using_default_path = toml_path is None
if using_default_path:
toml_path = 'config.toml'
if not os.path.exists(toml_path):
toml_path = os.path.join(build.rust_root, toml_path)
# Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path,
# but not if `config.toml` hasn't been created.
if not using_default_path or os.path.exists(toml_path):
with open(toml_path) as config:
build.config_toml = config.read()
profile = build.get_toml('profile')
if profile is not None:
include_file = 'config.{}.toml'.format(profile)
include_dir = os.path.join(build.rust_root, 'src', 'bootstrap', 'defaults')
include_path = os.path.join(include_dir, include_file)
# HACK: This works because `build.get_toml()` returns the first match it finds for a
# specific key, so appending our defaults at the end allows the user to override them
with open(include_path) as included_toml:
build.config_toml += os.linesep + included_toml.read()
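        # (added note) e.g. `profile = "library"` in config.toml appends
        # src/bootstrap/defaults/config.library.toml after the user's settings;
        # since get_toml() returns the first match, user-set keys still win.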
config_verbose = build.get_toml('verbose', 'build')
if config_verbose is not None:
build.verbose = max(build.verbose, int(config_verbose))
build.use_vendored_sources = build.get_toml('vendor', 'build') == 'true'
build.use_locked_deps = build.get_toml('locked-deps', 'build') == 'true'
build.check_vendored_status()
build_dir = build.get_toml('build-dir', 'build') or 'build'
build.build_dir = os.path.abspath(build_dir)
with open(os.path.join(build.rust_root, "src", "stage0.json")) as f:
data = json.load(f)
build.checksums_sha256 = data["checksums_sha256"]
build.stage0_compiler = Stage0Toolchain(data["compiler"])
if data.get("rustfmt") is not None:
build.stage0_rustfmt = Stage0Toolchain(data["rustfmt"])
build.set_dist_environment(data["dist_server"])
build.build = args.build or build.build_triple()
# Acquire the lock before doing any build actions
# The lock is released when `lock` is dropped
if not os.path.exists(build.build_dir):
os.makedirs(build.build_dir)
lock = acquire_lock(build.build_dir)
build.update_submodules()
# Fetch/build the bootstrap
build.download_toolchain()
# Download the master compiler if `download-rustc` is set
build.maybe_download_ci_toolchain()
sys.stdout.flush()
build.ensure_vendored()
build.build_bootstrap()
sys.stdout.flush()
# Run the bootstrap
args = [build.bootstrap_binary()]
args.extend(sys.argv[1:])
env = os.environ.copy()
env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
env["BOOTSTRAP_PYTHON"] = sys.executable
if build.rustc_commit is not None:
env["BOOTSTRAP_DOWNLOAD_RUSTC"] = '1'
run(args, env=env, verbose=build.verbose, is_bootstrap=True)
def main():
"""Entry point for the bootstrap process"""
start_time = time()
# x.py help <cmd> ...
if len(sys.argv) > 1 and sys.argv[1] == 'help':
sys.argv = [sys.argv[0], '-h'] + sys.argv[2:]
help_triggered = (
'-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1)
try:
bootstrap(help_triggered)
if not help_triggered:
print("Build completed successfully in {}".format(
format_build_time(time() - start_time)))
except (SystemExit, KeyboardInterrupt) as error:
if hasattr(error, 'code') and isinstance(error.code, int):
exit_code = error.code
else:
exit_code = 1
print(error)
if not help_triggered:
print("Build completed unsuccessfully in {}".format(
format_build_time(time() - start_time)))
sys.exit(exit_code)
if __name__ == '__main__':
main()
| 40.569206
| 100
| 0.586293
|
f74e5f8f74616921ddfa3c986e30955bccb0887a
| 5,457
|
py
|
Python
|
code/lstm_mixed_loso.py
|
bagustris/deep_mlp_ser
|
079bc6414287dbfb23a52e1e1869b91584eb037e
|
[
"BSD-3-Clause"
] | 7
|
2020-04-21T07:27:31.000Z
|
2022-01-18T10:24:37.000Z
|
code/lstm_mixed_loso.py
|
bagustris/deep_mlp_ser
|
079bc6414287dbfb23a52e1e1869b91584eb037e
|
[
"BSD-3-Clause"
] | 2
|
2020-10-09T03:04:35.000Z
|
2022-01-18T11:37:52.000Z
|
code/lstm_mixed_loso.py
|
bagustris/deep_mlp_ser
|
079bc6414287dbfb23a52e1e1869b91584eb037e
|
[
"BSD-3-Clause"
] | 2
|
2020-06-19T23:37:00.000Z
|
2021-12-19T22:07:44.000Z
|
# CSL Paper: Dimensional speech emotion recognition from acoustic and text
# Changelog:
# 2019-09-01: initial version
# 2019-10-06: optimize MTL parameters with linear search (in progress)
# 2019-12-25: modified for ser_iemocap_loso_hfs.py
# feature is either std+mean or std+mean+silence (uncomment line 44)
import numpy as np
import pickle
import pandas as pd
import keras.backend as K
from keras.models import Model
from keras.layers import Input, Dense, CuDNNLSTM, Flatten, \
Embedding, Dropout, BatchNormalization, \
RNN, concatenate, Activation
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from keras.preprocessing.text import Tokenizer
from keras.preprocessing import sequence
import random as rn
import tensorflow as tf
rn.seed(123)
np.random.seed(99)
tf.set_random_seed(1234)
# load feature and labels
feat_iemocap = np.load('/home/s1820002/spro2020/data/feat_ws_3.npy')
vad_iemocap = np.load('/home/s1820002/IEMOCAP-Emotion-Detection/y_egemaps.npy')
feat_improv_train = np.load('/home/s1820002/deepMLP/data/feat_hfs_gemaps_msp_train.npy')
feat_improv_test = np.load('/home/s1820002/deepMLP/data/feat_hfs_gemaps_msp_test.npy')
feat_improv = np.vstack([feat_improv_train, feat_improv_test])
list_path = '/home/s1820002/msp-improv/helper/improv_data.csv'
list_file = pd.read_csv(list_path, index_col=None)
list_sorted = list_file.sort_values(by=['wavfile'])
vad_list = [list_sorted['v'], list_sorted['a'], list_sorted['d']]
vad_improv = np.array(vad_list).T
# for LSTM input shape (batch, steps, features/channel)
feat = np.vstack([feat_iemocap, feat_improv])
vad = np.vstack([vad_iemocap, vad_improv])
feat = feat.reshape(feat.shape[0], 1, feat.shape[1])
# clamp outliers so labels stay within [1, 5] (5.5 -> 5.0, 0.5 -> 1.0)
vad = np.where(vad==5.5, 5.0, vad)
vad = np.where(vad==0.5, 1.0, vad)
# standardization
scaled_feature = True
# set Dropout
do = 0.3
if scaled_feature:
scaler = StandardScaler()
scaler = scaler.fit(feat.reshape(feat.shape[0]*feat.shape[1], feat.shape[2]))
scaled_feat = scaler.transform(feat.reshape(feat.shape[0]*feat.shape[1], feat.shape[2]))
scaled_feat = scaled_feat.reshape(feat.shape[0], feat.shape[1], feat.shape[2])
feat = scaled_feat
else:
feat = feat
scaled_vad = True
# min-max scaling to [-1, 1]
if scaled_vad:
scaler = MinMaxScaler(feature_range=(-1, 1))
scaler = scaler.fit(vad) #.reshape(vad.shape[0]*vad.shape[1], vad.shape[2]))
scaled_vad = scaler.transform(vad) #.reshape(vad.shape[0]*vad.shape[1], vad.shape[2]))
vad = scaled_vad
else:
vad = vad
# Concordance correlation coefficient (CCC)-based loss function - using non-inductive statistics
def ccc(gold, pred):
gold = K.squeeze(gold, axis=-1)
pred = K.squeeze(pred, axis=-1)
gold_mean = K.mean(gold, axis=-1, keepdims=True)
pred_mean = K.mean(pred, axis=-1, keepdims=True)
covariance = (gold-gold_mean)*(pred-pred_mean)
gold_var = K.mean(K.square(gold-gold_mean), axis=-1, keepdims=True)
pred_var = K.mean(K.square(pred-pred_mean), axis=-1, keepdims=True)
    ccc = K.constant(2.) * covariance / (gold_var + pred_var + K.square(gold_mean - pred_mean) + K.epsilon())
return ccc
def ccc_loss(gold, pred):
# input (num_batches, seq_len, 1)
ccc_loss = K.constant(1.) - ccc(gold, pred)
return ccc_loss
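# --- Illustrative sketch (added; not part of the original training script) ---
# A plain-NumPy version of the same CCC formula, handy for sanity-checking the
# Keras implementation on toy arrays. `ccc_numpy` is a hypothetical helper name.
def ccc_numpy(gold, pred):
    gold = np.asarray(gold, dtype=np.float64)
    pred = np.asarray(pred, dtype=np.float64)
    covariance = np.mean((gold - gold.mean()) * (pred - pred.mean()))
    return 2. * covariance / (
        gold.var() + pred.var() + (gold.mean() - pred.mean()) ** 2 + 1e-7)
# e.g. ccc_numpy([1, 2, 3], [1, 2, 3]) is ~1.0 (perfect concordance), while
# ccc_numpy([1, 2, 3], [3, 2, 1]) is ~-1.0 (anti-concordance).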
# Functional API model; stacked RNN layers that feed another RNN layer must set return_sequences=True
def api_model(alpha, beta, gamma):
# speech network
input_speech = Input(shape=(feat.shape[1], feat.shape[2]), name='speech_input')
net_speech = BatchNormalization()(input_speech)
net_speech = CuDNNLSTM(256, return_sequences=True)(net_speech)
net_speech = CuDNNLSTM(128, return_sequences=True)(net_speech)
net_speech = CuDNNLSTM(64, return_sequences=True)(net_speech)
net_speech = CuDNNLSTM(32, return_sequences=True)(net_speech)
net_speech = CuDNNLSTM(16, return_sequences=True)(net_speech)
model_speech = Flatten()(net_speech)
#model_speech = Dropout(0.1)(net_speech)
target_names = ('v', 'a', 'd')
model_combined = [Dense(1, name=name)(model_speech) for name in target_names]
model = Model(input_speech, model_combined)
#model.compile(loss=ccc_loss, optimizer='rmsprop', metrics=[ccc])
model.compile(loss=ccc_loss,
loss_weights={'v': alpha, 'a': beta, 'd': gamma},
optimizer='adam', metrics=[ccc])
return model
#def main(alpha, beta, gamma):
model = api_model(0.1, 0.5, 0.4)
model.summary()
idx_train = np.hstack([np.arange(0, 7869), np.arange(10039, len(feat_improv_train))])
idx_test = np.hstack([np.arange(7869,10039), np.arange(10039 +
len(feat_improv_train), 18387)])
# index 7869 is the first sample of session 5 (for LOSO); 8000 for SD
earlystop = EarlyStopping(monitor='val_loss', mode='min', patience=10,
restore_best_weights=True)
hist = model.fit(feat[idx_train], vad[idx_train].T.tolist(), batch_size=200, #best:8
validation_split=0.2, epochs=180, verbose=1, shuffle=True,
callbacks=[earlystop])
metrik = model.evaluate(feat[idx_test], vad[idx_test].T.tolist())
print(metrik)
# save prediction, comment to avoid overwriting
#predict = model.predict(feat[6296:], batch_size=200)
#np.save('../data/predict_lstm_iemocap_sd',
# np.array(predict).reshape(3, 3743).T)
| 36.871622
| 123
| 0.702767
|
357348c79d3dce2ba09a37244eb969583cf29bbb
| 9,848
|
py
|
Python
|
safe_transaction_service/history/tests/test_tx_processor.py
|
rsksmart/safe-transaction-service
|
7697ed54d5a68e8fd81b1738e9b5ee4b5265ef0e
|
[
"MIT"
] | null | null | null |
safe_transaction_service/history/tests/test_tx_processor.py
|
rsksmart/safe-transaction-service
|
7697ed54d5a68e8fd81b1738e9b5ee4b5265ef0e
|
[
"MIT"
] | 2
|
2021-06-09T17:58:44.000Z
|
2021-06-10T19:42:32.000Z
|
safe_transaction_service/history/tests/test_tx_processor.py
|
rsksmart/safe-transaction-service
|
7697ed54d5a68e8fd81b1738e9b5ee4b5265ef0e
|
[
"MIT"
] | 1
|
2021-07-05T21:55:50.000Z
|
2021-07-05T21:55:50.000Z
|
import logging
from django.test import TestCase
from eth_account import Account
from eth_utils import keccak
from web3 import Web3
from gnosis.safe.safe_signature import SafeSignatureType
from ..indexers.tx_processor import SafeTxProcessor
from ..models import (InternalTxDecoded, ModuleTransaction,
MultisigConfirmation, MultisigTransaction, SafeContract,
SafeStatus)
from .factories import EthereumTxFactory, InternalTxDecodedFactory
logger = logging.getLogger(__name__)
class TestSafeTxProcessor(TestCase):
def test_tx_processor_with_factory(self):
tx_processor = SafeTxProcessor()
owner = Account.create().address
safe_address = Account.create().address
fallback_handler = Account.create().address
master_copy = Account.create().address
threshold = 1
tx_processor.process_decoded_transaction(
InternalTxDecodedFactory(function_name='setup', owner=owner, threshold=threshold,
fallback_handler=fallback_handler,
internal_tx__to=master_copy,
internal_tx___from=safe_address)
)
self.assertTrue(SafeContract.objects.get(address=safe_address))
safe_status = SafeStatus.objects.get(address=safe_address)
self.assertEqual(safe_status.enabled_modules, [])
self.assertEqual(safe_status.fallback_handler, fallback_handler)
self.assertEqual(safe_status.master_copy, master_copy)
self.assertEqual(safe_status.owners, [owner])
self.assertEqual(safe_status.threshold, threshold)
# execTransaction should be calling addOwnerWithThreshold, so we process it together
threshold = 2
new_owner = Account.create().address
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='addOwnerWithThreshold', owner=new_owner, threshold=threshold,
internal_tx___from=safe_address)
])
self.assertEqual(SafeStatus.objects.count(), 3)
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertCountEqual(safe_status.owners, [owner, new_owner])
self.assertEqual(safe_status.nonce, 1)
self.assertEqual(safe_status.threshold, threshold)
another_owner = Account.create().address
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='swapOwner', old_owner=owner, owner=another_owner,
internal_tx___from=safe_address)
])
self.assertEqual(SafeStatus.objects.count(), 5)
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertCountEqual(safe_status.owners, [another_owner, new_owner])
self.assertEqual(safe_status.nonce, 2)
self.assertEqual(safe_status.threshold, threshold)
threshold = 1
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='removeOwner', old_owner=another_owner, threshold=threshold,
internal_tx___from=safe_address)
])
self.assertEqual(SafeStatus.objects.count(), 7)
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.owners, [new_owner])
self.assertEqual(safe_status.nonce, 3)
self.assertEqual(safe_status.threshold, threshold)
fallback_handler = Account.create().address
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='setFallbackHandler', fallback_handler=fallback_handler,
internal_tx___from=safe_address)
])
self.assertEqual(SafeStatus.objects.count(), 9)
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.fallback_handler, fallback_handler)
self.assertEqual(safe_status.nonce, 4)
master_copy = Account.create().address
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='changeMasterCopy', master_copy=master_copy,
internal_tx___from=safe_address)
])
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.master_copy, master_copy)
self.assertEqual(safe_status.nonce, 5)
self.assertEqual(safe_status.enabled_modules, [])
module = Account.create().address
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='enableModule', module=module,
internal_tx___from=safe_address)
])
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.enabled_modules, [module])
self.assertEqual(safe_status.nonce, 6)
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
InternalTxDecodedFactory(function_name='disableModule', module=module,
internal_tx___from=safe_address)
])
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.enabled_modules, [])
self.assertEqual(safe_status.nonce, 7)
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransactionFromModule',
internal_tx___from=safe_address),
])
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.nonce, 7) # Nonce not incrementing
self.assertEqual(ModuleTransaction.objects.count(), 1)
self.assertEqual(MultisigTransaction.objects.count(),
InternalTxDecoded.objects.filter(function_name='execTransaction').count())
# Test ApproveHash. For that we need the `previous_trace` to get the owner
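        # (added note) trace_address '0,1,0' below is a child call of '0,1', so
        # the processor can resolve the approving owner from the parent trace.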
hash_to_approve = keccak(text='HariSeldon').hex()
owner_approving = Account.create().address
approve_hash_decoded_tx = InternalTxDecodedFactory(function_name='approveHash',
hash_to_approve=hash_to_approve,
internal_tx___from=safe_address,
internal_tx__trace_address='0,1,0')
previous_approve_hash_internal_tx = InternalTxDecodedFactory(
internal_tx__ethereum_tx=approve_hash_decoded_tx.internal_tx.ethereum_tx,
internal_tx__trace_address='0,1',
internal_tx___from=owner_approving,
)
tx_processor.process_decoded_transactions(
[
InternalTxDecodedFactory(function_name='execTransaction',
internal_tx___from=safe_address),
approve_hash_decoded_tx,
])
safe_status = SafeStatus.objects.last_for_address(safe_address)
self.assertEqual(safe_status.nonce, 8)
multisig_confirmation = MultisigConfirmation.objects.get(multisig_transaction_hash=hash_to_approve)
self.assertEqual(multisig_confirmation.signature_type, SafeSignatureType.APPROVED_HASH.value)
def test_tx_processor_failed(self):
tx_processor = SafeTxProcessor()
# Event for Safes < 1.1.1
logs = [{'data': '0x0034bff0dedc4c75f43df64a179ff26d56b99fa742fcfaeeee51e2da4e279b67',
'topics': ['0xabfd711ecdd15ae3a6b3ad16ff2e9d81aec026a39d16725ee164be4fbf857a7c']}]
ethereum_tx = EthereumTxFactory(logs=logs)
self.assertTrue(tx_processor.is_failed(ethereum_tx, logs[0]['data']))
self.assertFalse(tx_processor.is_failed(ethereum_tx, Web3.keccak(text='hola').hex()))
# Event for Safes >= 1.1.1
safe_tx_hash = '0x4c15b21b9c3b57aebba3c274bf0a437950bd0eea46bc7a7b2df892f91f720311'
logs = [{'data': '0x4c15b21b9c3b57aebba3c274bf0a437950bd0eea46bc7a7b2df892f91f720311'
'0000000000000000000000000000000000000000000000000000000000000000',
'topics': ['0x23428b18acfb3ea64b08dc0c1d296ea9c09702c09083ca5272e64d115b687d23']}]
ethereum_tx = EthereumTxFactory(logs=logs)
self.assertTrue(tx_processor.is_failed(ethereum_tx, safe_tx_hash))
self.assertFalse(tx_processor.is_failed(ethereum_tx, Web3.keccak(text='hola').hex()))
| 52.10582
| 117
| 0.653229
|
3dc300b3ce1ca5a5934d2ffd0aeba59b71eef2d1
| 1,456
|
py
|
Python
|
data_augmentation.py
|
wantero/desafio-1-2020
|
a0d042d63e1cf122a26e3b5f7fe1a855f24a99d2
|
[
"Apache-2.0"
] | null | null | null |
data_augmentation.py
|
wantero/desafio-1-2020
|
a0d042d63e1cf122a26e3b5f7fe1a855f24a99d2
|
[
"Apache-2.0"
] | null | null | null |
data_augmentation.py
|
wantero/desafio-1-2020
|
a0d042d63e1cf122a26e3b5f7fe1a855f24a99d2
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.preprocessing.image import ImageDataGenerator
from os import listdir
from os.path import isfile, join
INPUT_PATH = "./doc/source/dataset/v3/negative/"
OUTPUT_PATH = "./doc/source/dataset/v3/negative/output/"
onlyfiles = [f for f in listdir(INPUT_PATH) if isfile(join(INPUT_PATH, f))]
print(len(onlyfiles))
for file in onlyfiles:
print(INPUT_PATH + file)
    # define paths for the original image and the output directory
IMAGE_PATH = INPUT_PATH + file
    # load the original image and convert it to an array
image = load_img(IMAGE_PATH)
image = img_to_array(image)
    # add an extra dimension to the array (batch axis)
image = np.expand_dims(image, axis=0)
    # create a generator with the data-augmented images
imgAug = ImageDataGenerator(rotation_range=45, width_shift_range=0.1,
height_shift_range=0.1, zoom_range=0.25,
fill_mode='nearest', horizontal_flip=True)
imgGen = imgAug.flow(image, save_to_dir=OUTPUT_PATH,
save_format='jpg', save_prefix='img_')
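    # (added note) flow() yields augmented batches indefinitely and saves each
    # generated image to OUTPUT_PATH, so the counter below bounds the output.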
    # generate augmented images (the counter below stops after 3)
counter = 0
for (i, newImage) in enumerate(imgGen):
counter += 1
        # stop the loop after 3 generated images
if counter == 3:
break
| 33.860465
| 75
| 0.669643
|
21667ac72fce358380217a24efae164316a4a63e
| 331
|
py
|
Python
|
day1/part2/rocket.py
|
gmelodie/adventofcode2019
|
4f18945b8b671590d6998fa7df8c373fbc641dbf
|
[
"MIT"
] | null | null | null |
day1/part2/rocket.py
|
gmelodie/adventofcode2019
|
4f18945b8b671590d6998fa7df8c373fbc641dbf
|
[
"MIT"
] | null | null | null |
day1/part2/rocket.py
|
gmelodie/adventofcode2019
|
4f18945b8b671590d6998fa7df8c373fbc641dbf
|
[
"MIT"
] | null | null | null |
import fileinput
total_fuel = 0
for line in fileinput.input():
modmass = int(line)
modfuel = int(modmass/3) - 2
fuelfuel = int(modfuel/3) - 2
while fuelfuel > 0:
modfuel += fuelfuel
fuelfuel = int(fuelfuel/3) - 2
print(modmass, " => ", modfuel)
total_fuel += modfuel
print(total_fuel)
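# Worked example (the classic AoC 2019 day 1 part 2 cases, added for clarity):
#   mass 14     -> fuel 2      (2 needs no extra fuel)
#   mass 1969   -> fuel 966    (654 + 216 + 70 + 21 + 5)
#   mass 100756 -> fuel 50346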
| 17.421053
| 38
| 0.613293
|
9d0cff883ba16d069f685272fccbf13341bc6379
| 2,591
|
py
|
Python
|
models/utils.py
|
tttzof351/lrpd-paper-code
|
e61af61a9fe6a30e2111aacd80fe43e629ed45cc
|
[
"MIT"
] | null | null | null |
models/utils.py
|
tttzof351/lrpd-paper-code
|
e61af61a9fe6a30e2111aacd80fe43e629ed45cc
|
[
"MIT"
] | null | null | null |
models/utils.py
|
tttzof351/lrpd-paper-code
|
e61af61a9fe6a30e2111aacd80fe43e629ed45cc
|
[
"MIT"
] | 1
|
2021-10-05T20:50:45.000Z
|
2021-10-05T20:50:45.000Z
|
import json
from pprint import pprint
import torch
import torch.nn as nn
from .model_builders import AudioClassificationModel
def set_convert_mode_on(model, flag=True):
for name, child in model.named_children():
if hasattr(child, "convert_mode_on"):
print(f"{child} convert_mode_on")
child.convert_mode_on = flag
set_convert_mode_on(child, flag=flag)
def load_weights_from_pl_pipeline(net,
weights_path: str,
key_replace_dict={"nnet.": ""},
remove_unessacary: bool = True,
strict: bool = True,
map_loc: str = None):
    # Change the keys of state_dict so they can be used with the bare torch
    # model from the builder rather than the pytorch-lightning module.
    # 0. Rename keys in state_dict according to key_replace_dict
if map_loc is not None:
state_dict = torch.load(weights_path, map_location=torch.device(map_loc))['state_dict']
else:
state_dict = torch.load(weights_path)['state_dict']
for k in list(state_dict.keys()):
old_k = str(k)
for replace_what, replace_by in key_replace_dict.items():
k = k.replace(replace_what, replace_by)
if old_k != k:
state_dict[k] = state_dict[old_k]
del state_dict[old_k]
pretrained_dict = state_dict
if remove_unessacary:
model_dict = net.state_dict()
# 1. filter out unnecessary keys
pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
# 2. load the new state dict
missing_keys, unexpected_keys = net.load_state_dict(pretrained_dict, strict=strict)
print(f"unexpected_keys : {unexpected_keys}")
print(f"missing_keys : {missing_keys}")
def set_batchnorms_momentum(model: nn.Module, momentum: float = 0.99):
    # iterate over all submodules (torch.flatten() only applies to tensors)
    for l in model.modules():
        if isinstance(l, (nn.BatchNorm1d, nn.BatchNorm2d)):
            l.momentum = momentum
def load_classification_model_from_experiment(experiment_dir, epoch):
model_config = json.loads((experiment_dir / "model_config.json").read_text())
model = AudioClassificationModel(**model_config)
model.cls_head.return_embeddings = True
model = model.eval()
weights_path = list((experiment_dir / "checkpoints").glob(f"epoch={epoch}-step=*"))[0]
print(weights_path)
load_weights_from_pl_pipeline(model, str(weights_path), remove_unessacary=False, strict=False)
return model
| 38.102941
| 98
| 0.658819
|
44ba71fb1f8aa9c38b8aff833fc579d04925246e
| 197
|
py
|
Python
|
main.py
|
sadid07/AI-Prolog-Recipe-Suggestion
|
1a2e303e0603f68d4f11381703c70d048c595e2f
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
sadid07/AI-Prolog-Recipe-Suggestion
|
1a2e303e0603f68d4f11381703c70d048c595e2f
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
sadid07/AI-Prolog-Recipe-Suggestion
|
1a2e303e0603f68d4f11381703c70d048c595e2f
|
[
"Apache-2.0"
] | null | null | null |
import wx
from main_frame import MainFrame
if __name__ == "__main__":
    # Initialize the app, create the chat window, and display it
app = wx.App()
frame = MainFrame()
frame.Show()
app.MainLoop()
| 21.888889
| 47
| 0.670051
|
cf06112c75d4b5e015ee531c794187df5e30b97f
| 9,702
|
py
|
Python
|
file_oper/iCloud/crowl-icloud.py
|
urahito/python_proj
|
08e7bdb0b85d5bdaff7dc12123a6cf529d773b51
|
[
"MIT"
] | null | null | null |
file_oper/iCloud/crowl-icloud.py
|
urahito/python_proj
|
08e7bdb0b85d5bdaff7dc12123a6cf529d773b51
|
[
"MIT"
] | null | null | null |
file_oper/iCloud/crowl-icloud.py
|
urahito/python_proj
|
08e7bdb0b85d5bdaff7dc12123a6cf529d773b51
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# File and OS operations
import configparser
import tqdm
import shutil
from operator import attrgetter
from pathlib import Path
from PIL import Image
import zipfile
# Logging
import logging
logger = None
# Standard libraries
import sys, os, csv, time
from datetime import datetime as ddt
from datetime import timedelta as dlt
# External classes
sys.path.append(os.getcwd())
from file_attr import file_attr
from logger_setting import logger_setting as logger_s
# Output the information accumulated in info_dic
def output_logger(logger, info_dic, time_dic):
for key in info_dic.keys():
logger.info('{}: {}'.format(key, info_dic[key]))
prev_time = 0
for key in time_dic.keys():
time_dlt = dlt(seconds=time_dic[key] - prev_time)
logger.info('{}: {:.1f}(s)'.format(key, time_dlt.total_seconds()))
prev_time = time_dic[key]
# Write a result CSV that serves as a log
def output_csv_log(files, output_dir, csv_sub):
    # log file name
now_str = file_attr.get_datetime_str(ddt.now(), '%Y%m%d-%H%M%S')
csv_name = 'result-{}.csv'.format(now_str)
    # log directory
log_dir = file_attr.make_parent_dir(output_dir, csv_sub)
out_csv = log_dir / csv_name
print('log file: ' + str(out_csv))
with out_csv.open('w', encoding=file_attr.get_enc('w'), newline='') as out_file_obj:
csv_obj = csv.writer(out_file_obj, dialect='excel')
csv_obj.writerow(['ファイル名', '作成日時', '(KB)', '転送先フォルダ', '転送対象'])
for fi in tqdm.tqdm(files):
csv_obj.writerow(fi.output_line())
# Compress the temporary folder into a zip archive
def comp_to_zip(png_files, out_gifs, dest_dir, now_str):
out_path = str(Path(dest_dir) / 'PNGs-{}.zip'.format(now_str))
print('zipファイルへの圧縮...')
with zipfile.ZipFile(out_path, mode="w", compression=zipfile.ZIP_DEFLATED) as zip_obj:
for png_file in tqdm.tqdm(png_files):
png_name = str(png_file.name).replace('(編集済み)', '_edit')
png_name = file_attr.ignore_str(png_name, file_attr.get_enc('w'))
zip_obj.write(str(png_file), png_name, zipfile.ZIP_DEFLATED)
print('圧縮済みのPNG/GIFファイルを削除する')
    try:
        # delete every file in both lists (the original zip() call would pair
        # them into tuples, which os.remove() cannot handle)
        for fi in tqdm.tqdm(list(png_files) + list(out_gifs)):
            if Path(fi).exists():
                os.remove(fi)
    except:
        raise
return out_path
# Create the GIF animation
def make_gif_animation(out_gifs, dest_dir, now_str, thumb_max, dul_ms=100):
out_path = Path(dest_dir) / 'thumb-{}.gif'.format(now_str)
print('サムネイルGIFアニメの保存')
try:
out = Image.new('RGB', (thumb_max, thumb_max), (255, 255, 255))
img_flms = []
for fi in tqdm.tqdm(out_gifs):
try:
im = Image.open(fi)
while True:
new_frame = Image.new('RGBA', im.size)
new_frame.paste(im, (0, 0), im.convert('RGBA'))
img_flms.append(new_frame)
im.seek(im.tell() + 1)
del new_frame
del im
except EOFError:
pass
try:
os.remove(fi)
except Exception:
pass
print('サムネイルGIFアニメの保存-saving...')
out.save(out_path, save_all=True, append_images=img_flms[1:], optimize=False, duration=dul_ms, loop=0)
except:
raise
# Make GIF copies (for the GIF animation)
def save_to_gif(backup_dir, gif_dir, thumb_max):
png_files = list(Path(backup_dir).glob('*.PNG'))
org_gifs = list(Path(backup_dir).glob('*.gif'))
out_gifs = []
wide_size = 0
print('既にあったgifファイルは先に専用フォルダへ')
for fi in tqdm.tqdm(org_gifs):
try:
shutil.move(fi, str(gif_dir / Path(fi).name))
except:
raise
print('PNGファイルをリサイズしてgifファイルへ保存')
for fi in tqdm.tqdm(png_files):
gif_path = fi.with_suffix('.gif')
try:
img = Image.open(fi)
            # resize the image
wide_size = max([img.width, img.height])
wide_rate = max([wide_size / thumb_max, 1])
img_resize = img.resize((int(img.width / wide_rate), int(img.height/wide_rate)))
            # save
img_resize.save(gif_path, 'gif')
del img
del img_resize
except:
raise
        # collect it for the GIF animation
out_gifs.append(gif_path)
return out_gifs, png_files
# Move the identified files to the temporary folder
def move_files(files, dest_dir, logger):
print('一時ファイルへの移動...')
for fi in tqdm.tqdm(files):
        # warn about and skip files that do not exist
        if not Path(fi.org_path).exists():
            logger.warn("file does not exist! {}".format(fi))
continue
        # copy to the specified destination
dest_path = Path(dest_dir) / fi.org_path.name
try:
shutil.copy2(fi.org_path, dest_path)
except:
raise
# Identify the old files (the original note said "older than one year, up to 500MB";
# the code passes a 90-day threshold and size_max from the ini)
def get_old_pictures(files, size_max):
print('ファイルの特定の開始...')
size_sum = 0
past_ng = False
for fi in tqdm.tqdm(files):
file_size = fi.allow_file_copy(90, past_ng)
if size_max < size_sum:
past_ng = True
else:
size_sum = size_sum + file_size
# Filter files from the input folder by the given file patterns
def append_to_list(input_dir, pattern_list):
    all_list = []
    for pattern in pattern_list:
        # extend the list (the original zip() call would pair the two lists
        # instead of concatenating them, leaving the result effectively empty)
        all_list.extend(input_dir.glob(pattern))
    return all_list
# Get file information
def get_files(input_dir, output_dir):
file_list = append_to_list(input_dir, ['*.gif', '*.PNG'])
print('ファイル情報の取得')
files = [file_attr(file_path, output_dir) for file_path in file_list]
return sorted(files, key=attrgetter("create_time"))
# Get directories
def get_dirs(ini_data):
input_dir = Path(ini_data['settings']['input'])
output_dir = Path(ini_data['settings']['output'])
backup_dir = file_attr.make_parent_dir(output_dir / ini_data['picture']['backup'])
gif_dir = file_attr.make_parent_dir(output_dir / ini_data['picture']['gif'])
return input_dir, output_dir, backup_dir, gif_dir
# Record elapsed time
def rec_time(dic, key, start, logger):
dic[key] = time.time() - start
logger.info('{} 完了'.format(key))
# Read the various settings from the ini file
def get_ini_data(path_obj, ini_name):
    ini_data = configparser.ConfigParser()  # SafeConfigParser is a deprecated alias
if not path_obj.exists():
        print('ファイル名を{}にしてください'.format(ini_name))
return None
with path_obj.open('r', encoding='utf-8') as ini_file_obj:
ini_data.read_file(ini_file_obj)
return ini_data
def main():
    # dicts for CSV/log information
info_dic = {}
time_dic = {}
time_dic['処理開始'] = time.time()
time_start = time_dic['処理開始']
now_str = file_attr.get_datetime_str(ddt.now(), '%Y%m%d-%H%M%S')
    # prepare the ini file
ini_name = 'for-iCloud.ini'
ini_path = Path(__file__).parent / ini_name
ini_data = get_ini_data(ini_path, ini_name)
    if ini_data is None:
return
    # get the logger
logger = logger_s(ini_data, __name__, now_str)
logger.info('初期設定 開始')
    # get the directories
input_dir, output_dir, backup_dir, gif_dir = get_dirs(ini_data)
info_dic['対象フォルダ'] = input_dir
info_dic['転送先フォルダ'] = output_dir
    # decide the maximum total size of files to import
size_mb = int(ini_data['settings']['size_mb'])
size_max = file_attr.get_big_size(size_mb, 'MB')
info_dic['取り込み最大値'] = '{}(MB)'.format(size_mb)
    # decide the thumbnail width
thumb_max = int(ini_data['picture']['thumb_px'])
rec_time(time_dic, '初期設定', time_start, logger)
    # get the file list
files = get_files(input_dir, output_dir)
rec_time(time_dic, 'ファイルリストの取得', time_start, logger)
    # triage the files
get_old_pictures(files, size_max)
rec_time(time_dic, 'ファイルの仕分け', time_start, logger)
    # for the file-move steps, write the log file even on error
zip_path = ''
try:
        # narrow down the files
        info_dic['全ファイル数'] = '{}(files)'.format(len(files))
        # files = list(filter(lambda x: x.allow_copy, files))
        files = [file_obj for file_obj in files if file_obj.allow_copy]
info_dic['対象ファイル数'] = '{}(files)'.format(len(files))
rec_time(time_dic, '転送準備', time_start, logger)
        # transfer the identified files
move_files(files, backup_dir, logger)
rec_time(time_dic, 'ファイル転送', time_start, logger)
        # make GIF copies (for the GIF animation)
out_gifs, png_files = save_to_gif(backup_dir, gif_dir, thumb_max)
rec_time(time_dic, 'GIFコピー', time_start, logger)
        # create the GIF animation
dul_ms = int(ini_data['picture']['dulation_ms'])
make_gif_animation(out_gifs, output_dir, now_str, thumb_max, dul_ms)
rec_time(time_dic, 'GIFアニメーションの作成', time_start, logger)
        # zip the PNG files
zip_path = comp_to_zip(png_files, out_gifs, output_dir, now_str)
rec_time(time_dic, 'PNGファイルをzip化', time_start, logger)
except Exception as ex:
logger.logger.error(ex)
print(ex)
finally:
time_dic['処理開始'] = 0
        # input file information
file_latest = max([ti.create_time for ti in files])
file_sum = int(sum([si.file_size for si in files]))
info_dic['対象ファイルの最終日時'] = file_attr.get_datetime_str(file_latest, '%Y/%m/%d %H:%M:%S')
info_dic['対象ファイルの総サイズ'] = '{:.1f}(MB)'.format(file_attr.get_size_str(file_sum, 'MB'))
        # output file information
info_dic['zipファイルのパス'] = '{}'.format(zip_path)
zip_size = file_attr.get_size_str(Path(zip_path).stat().st_size, 'MB')
info_dic['zipファイルのファイルサイズ'] = '{:.1f}(MB)'.format(zip_size)
try:
        # write the log files
csv_sub = ini_data['log']['csv']
output_csv_log(files, output_dir, csv_sub)
output_logger(logger, info_dic, time_dic)
    except Exception as ex:
        logger.error('!!ログ出力エラー: {}'.format(ex))
print('finished')
if __name__ == '__main__':
main()
| 32.019802
| 110
| 0.623892
|
c17beac2eab61cd0cb1942ee365b108d38bea6b5
| 27,598
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20201101/get_application_gateway.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20201101/get_application_gateway.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20201101/get_application_gateway.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetApplicationGatewayResult',
'AwaitableGetApplicationGatewayResult',
'get_application_gateway',
]
@pulumi.output_type
class GetApplicationGatewayResult:
"""
Application gateway resource.
"""
def __init__(__self__, authentication_certificates=None, autoscale_configuration=None, backend_address_pools=None, backend_http_settings_collection=None, custom_error_configurations=None, enable_fips=None, enable_http2=None, etag=None, firewall_policy=None, force_firewall_policy_association=None, frontend_ip_configurations=None, frontend_ports=None, gateway_ip_configurations=None, http_listeners=None, id=None, identity=None, location=None, name=None, operational_state=None, private_endpoint_connections=None, private_link_configurations=None, probes=None, provisioning_state=None, redirect_configurations=None, request_routing_rules=None, resource_guid=None, rewrite_rule_sets=None, sku=None, ssl_certificates=None, ssl_policy=None, ssl_profiles=None, tags=None, trusted_client_certificates=None, trusted_root_certificates=None, type=None, url_path_maps=None, web_application_firewall_configuration=None, zones=None):
if authentication_certificates and not isinstance(authentication_certificates, list):
raise TypeError("Expected argument 'authentication_certificates' to be a list")
pulumi.set(__self__, "authentication_certificates", authentication_certificates)
if autoscale_configuration and not isinstance(autoscale_configuration, dict):
raise TypeError("Expected argument 'autoscale_configuration' to be a dict")
pulumi.set(__self__, "autoscale_configuration", autoscale_configuration)
if backend_address_pools and not isinstance(backend_address_pools, list):
raise TypeError("Expected argument 'backend_address_pools' to be a list")
pulumi.set(__self__, "backend_address_pools", backend_address_pools)
if backend_http_settings_collection and not isinstance(backend_http_settings_collection, list):
raise TypeError("Expected argument 'backend_http_settings_collection' to be a list")
pulumi.set(__self__, "backend_http_settings_collection", backend_http_settings_collection)
if custom_error_configurations and not isinstance(custom_error_configurations, list):
raise TypeError("Expected argument 'custom_error_configurations' to be a list")
pulumi.set(__self__, "custom_error_configurations", custom_error_configurations)
if enable_fips and not isinstance(enable_fips, bool):
raise TypeError("Expected argument 'enable_fips' to be a bool")
pulumi.set(__self__, "enable_fips", enable_fips)
if enable_http2 and not isinstance(enable_http2, bool):
raise TypeError("Expected argument 'enable_http2' to be a bool")
pulumi.set(__self__, "enable_http2", enable_http2)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if firewall_policy and not isinstance(firewall_policy, dict):
raise TypeError("Expected argument 'firewall_policy' to be a dict")
pulumi.set(__self__, "firewall_policy", firewall_policy)
if force_firewall_policy_association and not isinstance(force_firewall_policy_association, bool):
raise TypeError("Expected argument 'force_firewall_policy_association' to be a bool")
pulumi.set(__self__, "force_firewall_policy_association", force_firewall_policy_association)
if frontend_ip_configurations and not isinstance(frontend_ip_configurations, list):
raise TypeError("Expected argument 'frontend_ip_configurations' to be a list")
pulumi.set(__self__, "frontend_ip_configurations", frontend_ip_configurations)
if frontend_ports and not isinstance(frontend_ports, list):
raise TypeError("Expected argument 'frontend_ports' to be a list")
pulumi.set(__self__, "frontend_ports", frontend_ports)
if gateway_ip_configurations and not isinstance(gateway_ip_configurations, list):
raise TypeError("Expected argument 'gateway_ip_configurations' to be a list")
pulumi.set(__self__, "gateway_ip_configurations", gateway_ip_configurations)
if http_listeners and not isinstance(http_listeners, list):
raise TypeError("Expected argument 'http_listeners' to be a list")
pulumi.set(__self__, "http_listeners", http_listeners)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if operational_state and not isinstance(operational_state, str):
raise TypeError("Expected argument 'operational_state' to be a str")
pulumi.set(__self__, "operational_state", operational_state)
if private_endpoint_connections and not isinstance(private_endpoint_connections, list):
raise TypeError("Expected argument 'private_endpoint_connections' to be a list")
pulumi.set(__self__, "private_endpoint_connections", private_endpoint_connections)
if private_link_configurations and not isinstance(private_link_configurations, list):
raise TypeError("Expected argument 'private_link_configurations' to be a list")
pulumi.set(__self__, "private_link_configurations", private_link_configurations)
if probes and not isinstance(probes, list):
raise TypeError("Expected argument 'probes' to be a list")
pulumi.set(__self__, "probes", probes)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if redirect_configurations and not isinstance(redirect_configurations, list):
raise TypeError("Expected argument 'redirect_configurations' to be a list")
pulumi.set(__self__, "redirect_configurations", redirect_configurations)
if request_routing_rules and not isinstance(request_routing_rules, list):
raise TypeError("Expected argument 'request_routing_rules' to be a list")
pulumi.set(__self__, "request_routing_rules", request_routing_rules)
if resource_guid and not isinstance(resource_guid, str):
raise TypeError("Expected argument 'resource_guid' to be a str")
pulumi.set(__self__, "resource_guid", resource_guid)
if rewrite_rule_sets and not isinstance(rewrite_rule_sets, list):
raise TypeError("Expected argument 'rewrite_rule_sets' to be a list")
pulumi.set(__self__, "rewrite_rule_sets", rewrite_rule_sets)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if ssl_certificates and not isinstance(ssl_certificates, list):
raise TypeError("Expected argument 'ssl_certificates' to be a list")
pulumi.set(__self__, "ssl_certificates", ssl_certificates)
if ssl_policy and not isinstance(ssl_policy, dict):
raise TypeError("Expected argument 'ssl_policy' to be a dict")
pulumi.set(__self__, "ssl_policy", ssl_policy)
if ssl_profiles and not isinstance(ssl_profiles, list):
raise TypeError("Expected argument 'ssl_profiles' to be a list")
pulumi.set(__self__, "ssl_profiles", ssl_profiles)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if trusted_client_certificates and not isinstance(trusted_client_certificates, list):
raise TypeError("Expected argument 'trusted_client_certificates' to be a list")
pulumi.set(__self__, "trusted_client_certificates", trusted_client_certificates)
if trusted_root_certificates and not isinstance(trusted_root_certificates, list):
raise TypeError("Expected argument 'trusted_root_certificates' to be a list")
pulumi.set(__self__, "trusted_root_certificates", trusted_root_certificates)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if url_path_maps and not isinstance(url_path_maps, list):
raise TypeError("Expected argument 'url_path_maps' to be a list")
pulumi.set(__self__, "url_path_maps", url_path_maps)
if web_application_firewall_configuration and not isinstance(web_application_firewall_configuration, dict):
raise TypeError("Expected argument 'web_application_firewall_configuration' to be a dict")
pulumi.set(__self__, "web_application_firewall_configuration", web_application_firewall_configuration)
if zones and not isinstance(zones, list):
raise TypeError("Expected argument 'zones' to be a list")
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter(name="authenticationCertificates")
def authentication_certificates(self) -> Optional[Sequence['outputs.ApplicationGatewayAuthenticationCertificateResponse']]:
"""
Authentication certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "authentication_certificates")
@property
@pulumi.getter(name="autoscaleConfiguration")
def autoscale_configuration(self) -> Optional['outputs.ApplicationGatewayAutoscaleConfigurationResponse']:
"""
Autoscale Configuration.
"""
return pulumi.get(self, "autoscale_configuration")
@property
@pulumi.getter(name="backendAddressPools")
def backend_address_pools(self) -> Optional[Sequence['outputs.ApplicationGatewayBackendAddressPoolResponse']]:
"""
Backend address pool of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "backend_address_pools")
@property
@pulumi.getter(name="backendHttpSettingsCollection")
def backend_http_settings_collection(self) -> Optional[Sequence['outputs.ApplicationGatewayBackendHttpSettingsResponse']]:
"""
Backend http settings of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "backend_http_settings_collection")
@property
@pulumi.getter(name="customErrorConfigurations")
def custom_error_configurations(self) -> Optional[Sequence['outputs.ApplicationGatewayCustomErrorResponse']]:
"""
Custom error configurations of the application gateway resource.
"""
return pulumi.get(self, "custom_error_configurations")
@property
@pulumi.getter(name="enableFips")
def enable_fips(self) -> Optional[bool]:
"""
Whether FIPS is enabled on the application gateway resource.
"""
return pulumi.get(self, "enable_fips")
@property
@pulumi.getter(name="enableHttp2")
def enable_http2(self) -> Optional[bool]:
"""
Whether HTTP2 is enabled on the application gateway resource.
"""
return pulumi.get(self, "enable_http2")
@property
@pulumi.getter
def etag(self) -> str:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="firewallPolicy")
def firewall_policy(self) -> Optional['outputs.SubResourceResponse']:
"""
Reference to the FirewallPolicy resource.
"""
return pulumi.get(self, "firewall_policy")
@property
@pulumi.getter(name="forceFirewallPolicyAssociation")
def force_firewall_policy_association(self) -> Optional[bool]:
"""
If true, associates a firewall policy with an application gateway regardless whether the policy differs from the WAF Config.
"""
return pulumi.get(self, "force_firewall_policy_association")
@property
@pulumi.getter(name="frontendIPConfigurations")
def frontend_ip_configurations(self) -> Optional[Sequence['outputs.ApplicationGatewayFrontendIPConfigurationResponse']]:
"""
Frontend IP addresses of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "frontend_ip_configurations")
@property
@pulumi.getter(name="frontendPorts")
def frontend_ports(self) -> Optional[Sequence['outputs.ApplicationGatewayFrontendPortResponse']]:
"""
Frontend ports of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "frontend_ports")
@property
@pulumi.getter(name="gatewayIPConfigurations")
def gateway_ip_configurations(self) -> Optional[Sequence['outputs.ApplicationGatewayIPConfigurationResponse']]:
"""
Subnets of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "gateway_ip_configurations")
@property
@pulumi.getter(name="httpListeners")
def http_listeners(self) -> Optional[Sequence['outputs.ApplicationGatewayHttpListenerResponse']]:
"""
Http listeners of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "http_listeners")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.ManagedServiceIdentityResponse']:
"""
The identity of the application gateway, if configured.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="operationalState")
def operational_state(self) -> str:
"""
Operational state of the application gateway resource.
"""
return pulumi.get(self, "operational_state")
@property
@pulumi.getter(name="privateEndpointConnections")
def private_endpoint_connections(self) -> Sequence['outputs.ApplicationGatewayPrivateEndpointConnectionResponse']:
"""
Private Endpoint connections on application gateway.
"""
return pulumi.get(self, "private_endpoint_connections")
@property
@pulumi.getter(name="privateLinkConfigurations")
def private_link_configurations(self) -> Optional[Sequence['outputs.ApplicationGatewayPrivateLinkConfigurationResponse']]:
"""
PrivateLink configurations on application gateway.
"""
return pulumi.get(self, "private_link_configurations")
@property
@pulumi.getter
def probes(self) -> Optional[Sequence['outputs.ApplicationGatewayProbeResponse']]:
"""
Probes of the application gateway resource.
"""
return pulumi.get(self, "probes")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the application gateway resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="redirectConfigurations")
def redirect_configurations(self) -> Optional[Sequence['outputs.ApplicationGatewayRedirectConfigurationResponse']]:
"""
Redirect configurations of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "redirect_configurations")
@property
@pulumi.getter(name="requestRoutingRules")
def request_routing_rules(self) -> Optional[Sequence['outputs.ApplicationGatewayRequestRoutingRuleResponse']]:
"""
Request routing rules of the application gateway resource.
"""
return pulumi.get(self, "request_routing_rules")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> str:
"""
The resource GUID property of the application gateway resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter(name="rewriteRuleSets")
def rewrite_rule_sets(self) -> Optional[Sequence['outputs.ApplicationGatewayRewriteRuleSetResponse']]:
"""
Rewrite rules for the application gateway resource.
"""
return pulumi.get(self, "rewrite_rule_sets")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.ApplicationGatewaySkuResponse']:
"""
SKU of the application gateway resource.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter(name="sslCertificates")
def ssl_certificates(self) -> Optional[Sequence['outputs.ApplicationGatewaySslCertificateResponse']]:
"""
SSL certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "ssl_certificates")
@property
@pulumi.getter(name="sslPolicy")
def ssl_policy(self) -> Optional['outputs.ApplicationGatewaySslPolicyResponse']:
"""
SSL policy of the application gateway resource.
"""
return pulumi.get(self, "ssl_policy")
@property
@pulumi.getter(name="sslProfiles")
def ssl_profiles(self) -> Optional[Sequence['outputs.ApplicationGatewaySslProfileResponse']]:
"""
SSL profiles of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "ssl_profiles")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trustedClientCertificates")
def trusted_client_certificates(self) -> Optional[Sequence['outputs.ApplicationGatewayTrustedClientCertificateResponse']]:
"""
Trusted client certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "trusted_client_certificates")
@property
@pulumi.getter(name="trustedRootCertificates")
def trusted_root_certificates(self) -> Optional[Sequence['outputs.ApplicationGatewayTrustedRootCertificateResponse']]:
"""
Trusted Root certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "trusted_root_certificates")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="urlPathMaps")
def url_path_maps(self) -> Optional[Sequence['outputs.ApplicationGatewayUrlPathMapResponse']]:
"""
URL path map of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits).
"""
return pulumi.get(self, "url_path_maps")
@property
@pulumi.getter(name="webApplicationFirewallConfiguration")
def web_application_firewall_configuration(self) -> Optional['outputs.ApplicationGatewayWebApplicationFirewallConfigurationResponse']:
"""
Web application firewall configuration.
"""
return pulumi.get(self, "web_application_firewall_configuration")
@property
@pulumi.getter
def zones(self) -> Optional[Sequence[str]]:
"""
A list of availability zones denoting where the resource needs to come from.
"""
return pulumi.get(self, "zones")
class AwaitableGetApplicationGatewayResult(GetApplicationGatewayResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetApplicationGatewayResult(
authentication_certificates=self.authentication_certificates,
autoscale_configuration=self.autoscale_configuration,
backend_address_pools=self.backend_address_pools,
backend_http_settings_collection=self.backend_http_settings_collection,
custom_error_configurations=self.custom_error_configurations,
enable_fips=self.enable_fips,
enable_http2=self.enable_http2,
etag=self.etag,
firewall_policy=self.firewall_policy,
force_firewall_policy_association=self.force_firewall_policy_association,
frontend_ip_configurations=self.frontend_ip_configurations,
frontend_ports=self.frontend_ports,
gateway_ip_configurations=self.gateway_ip_configurations,
http_listeners=self.http_listeners,
id=self.id,
identity=self.identity,
location=self.location,
name=self.name,
operational_state=self.operational_state,
private_endpoint_connections=self.private_endpoint_connections,
private_link_configurations=self.private_link_configurations,
probes=self.probes,
provisioning_state=self.provisioning_state,
redirect_configurations=self.redirect_configurations,
request_routing_rules=self.request_routing_rules,
resource_guid=self.resource_guid,
rewrite_rule_sets=self.rewrite_rule_sets,
sku=self.sku,
ssl_certificates=self.ssl_certificates,
ssl_policy=self.ssl_policy,
ssl_profiles=self.ssl_profiles,
tags=self.tags,
trusted_client_certificates=self.trusted_client_certificates,
trusted_root_certificates=self.trusted_root_certificates,
type=self.type,
url_path_maps=self.url_path_maps,
web_application_firewall_configuration=self.web_application_firewall_configuration,
zones=self.zones)
def get_application_gateway(application_gateway_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetApplicationGatewayResult:
"""
Application gateway resource.
:param str application_gateway_name: The name of the application gateway.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['applicationGatewayName'] = application_gateway_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20201101:getApplicationGateway', __args__, opts=opts, typ=GetApplicationGatewayResult).value
return AwaitableGetApplicationGatewayResult(
authentication_certificates=__ret__.authentication_certificates,
autoscale_configuration=__ret__.autoscale_configuration,
backend_address_pools=__ret__.backend_address_pools,
backend_http_settings_collection=__ret__.backend_http_settings_collection,
custom_error_configurations=__ret__.custom_error_configurations,
enable_fips=__ret__.enable_fips,
enable_http2=__ret__.enable_http2,
etag=__ret__.etag,
firewall_policy=__ret__.firewall_policy,
force_firewall_policy_association=__ret__.force_firewall_policy_association,
frontend_ip_configurations=__ret__.frontend_ip_configurations,
frontend_ports=__ret__.frontend_ports,
gateway_ip_configurations=__ret__.gateway_ip_configurations,
http_listeners=__ret__.http_listeners,
id=__ret__.id,
identity=__ret__.identity,
location=__ret__.location,
name=__ret__.name,
operational_state=__ret__.operational_state,
private_endpoint_connections=__ret__.private_endpoint_connections,
private_link_configurations=__ret__.private_link_configurations,
probes=__ret__.probes,
provisioning_state=__ret__.provisioning_state,
redirect_configurations=__ret__.redirect_configurations,
request_routing_rules=__ret__.request_routing_rules,
resource_guid=__ret__.resource_guid,
rewrite_rule_sets=__ret__.rewrite_rule_sets,
sku=__ret__.sku,
ssl_certificates=__ret__.ssl_certificates,
ssl_policy=__ret__.ssl_policy,
ssl_profiles=__ret__.ssl_profiles,
tags=__ret__.tags,
trusted_client_certificates=__ret__.trusted_client_certificates,
trusted_root_certificates=__ret__.trusted_root_certificates,
type=__ret__.type,
url_path_maps=__ret__.url_path_maps,
web_application_firewall_configuration=__ret__.web_application_firewall_configuration,
zones=__ret__.zones)
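# A minimal usage sketch (hedged: the gateway and resource-group names below
# are hypothetical, and running this requires a configured Azure account):
#
#     result = get_application_gateway(
#         application_gateway_name="example-gateway",
#         resource_group_name="example-rg")
#     pulumi.export("operationalState", result.operational_state)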
| 50.361314
| 926
| 0.714545
|
06a5c5ed29d55eb2798f654bff42655a157fdacf
| 525
|
py
|
Python
|
vn_telex/utils/TelexRule.py
|
miketvo/vn_kmn_compiler
|
b0d34c6a8e1ad1e5350ab982fa8c3dde695edd33
|
[
"MIT"
] | null | null | null |
vn_telex/utils/TelexRule.py
|
miketvo/vn_kmn_compiler
|
b0d34c6a8e1ad1e5350ab982fa8c3dde695edd33
|
[
"MIT"
] | null | null | null |
vn_telex/utils/TelexRule.py
|
miketvo/vn_kmn_compiler
|
b0d34c6a8e1ad1e5350ab982fa8c3dde695edd33
|
[
"MIT"
] | null | null | null |
class TelexRule:
def __init__(self, base, modifier, result, kmn_clogic=None, kmn_ologic=None):
self.base = base
self.modifier = modifier
self.result = result
self.kmn_clogic = kmn_clogic # Context logic
self.kmn_ologic = kmn_ologic # Output logic
    def to_string(self):
        return 'TelexRule: {\n\tbase: ' + self.base + '\n\tmodifier: ' + self.modifier + '\n\tresult: ' + self.result + '\n\tkmn_clogic: ' + str(self.kmn_clogic) + '\n\tkmn_ologic: ' + str(self.kmn_ologic) + '\n}'
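# A minimal usage sketch (hedged: the rule below is illustrative -- in Telex,
# typing "a" twice commonly yields "â"; the kmn_* fields keep their defaults):
if __name__ == '__main__':
    demo_rule = TelexRule(base='a', modifier='a', result=u'\u00e2')  # u'\u00e2' is "â"
    print(demo_rule.to_string())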
| 47.727273
| 205
| 0.641905
|
df59186648f93f4b85ae9b3583a4b4eed1e7d28f
| 3,845
|
py
|
Python
|
jobs/SCH/JB_BOOKINGS_FISCAL_QTR_SO.py
|
bibinvasudev/EBI_Project
|
df2560139e463d68a37e67e0bb683c06fa9ef91b
|
[
"CNRI-Python"
] | null | null | null |
jobs/SCH/JB_BOOKINGS_FISCAL_QTR_SO.py
|
bibinvasudev/EBI_Project
|
df2560139e463d68a37e67e0bb683c06fa9ef91b
|
[
"CNRI-Python"
] | null | null | null |
jobs/SCH/JB_BOOKINGS_FISCAL_QTR_SO.py
|
bibinvasudev/EBI_Project
|
df2560139e463d68a37e67e0bb683c06fa9ef91b
|
[
"CNRI-Python"
] | null | null | null |
# SCH1150.SH --> JB_BOOKINGS_FISCAL_QTR_SO.py
#**************************************************************************************************************
#
# Created by : Vinay Kumbakonam
# Modified by : bibin
# Version : 1.2
#
# Description :
# 1. This script will load the data into 'BOOKINGS_FISCAL_QTR_SO' table based on stream lookups.
#
#
# Initial Creation:
#
# Date (YYYY-MM-DD) Change Description
# ----------------- ------------------
# 2018-10-28 Initial creation
# 2018-10-30 bibin : Getting DB schema, db_prop_key_load from Config file
# 2018-11-02 bibin : Using job_debugger_print() for print any string in Job
#
#**************************************************************************************************************
# Importing required Lib
from dependencies.spark import start_spark
from dependencies.EbiReadWrite import EbiReadWrite
import logging
import sys
from time import gmtime, strftime
import cx_Oracle
import py4j
import pyspark
# Spark logging
logger = logging.getLogger(__name__)
# Date Formats
start_date = "'"+strftime("%Y-%m-%d %H:%M:%S", gmtime())+"'"
log_date =strftime("%Y%m%d", gmtime())
# Job Naming Details
script_name = "SCH1150.SH"
app_name = "JB_BOOKINGS_FISCAL_QTR_SO"
log_filename = app_name + '_' + log_date + '.log'
# Query for loading invoice table
def query_data(db_schema):
query = """
INSERT INTO """+db_schema+""".BOOKINGS_FISCAL_QTR_SO
SELECT
DISTINCT
--a.BOOKING_FISCAL_QTR
--, a.SO_NUMBER
a.BOOKING_FISCAL_QTR as BOOKING_FISCAL_QTR_KEY
, a.SO_NUMBER as SO_NUMBER_KEY
FROM
"""+ db_schema +""".BOOKINGS a
"""
return query
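# For illustration, query_data("EBI_DEMO") (schema name hypothetical) renders:
#   INSERT INTO EBI_DEMO.BOOKINGS_FISCAL_QTR_SO
#   SELECT DISTINCT a.BOOKING_FISCAL_QTR ..., a.SO_NUMBER ...
#   FROM EBI_DEMO.BOOKINGS a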
# Main method
def main():
try:
src_count = '0'
dest_count = '0'
"""Main ETL script definition.
:return: None
"""
# start Spark application and get Spark session, logger and config
spark, config = start_spark(
app_name=app_name)
# Create class Object
Ebi_read_write_obj = EbiReadWrite(app_name,spark,config,logger)
# DB prop Key of Source DB
db_prop_key_load = config['DB_PROP_KEY_LOAD']
db_schema = config['DB_SCHEMA']
log_file = config['LOG_DIR_NAME'] + "/" + log_filename
#SQL Query
query = query_data(db_schema)
# Calling Job Class method --> get_target_data_update()
Ebi_read_write_obj.get_target_data_update(query,db_prop_key_load)
end_date="'"+strftime("%Y-%m-%d %H:%M:%S", gmtime())+"'"
data_format = "JOB START DT : "+start_date+" | SCRIPT NAME : "+script_name+" | JOB : "+app_name+" | SRC COUNT : "+src_count+" | TGT COUNT : "+dest_count+" | JOB END DT : "+end_date+" | STATUS : %(message)s"
Ebi_read_write_obj.create_log(data_format,log_file,logger)
logger.info("Success")
print(" \n __main__ " + app_name +" --> Job "+app_name+" Succeed \n")
except Exception as err:
        # Write exception in spark log or console
end_date="'"+strftime("%Y-%m-%d %H:%M:%S", gmtime())+"'"
data_format = "JOB START DT : "+start_date+" | SCRIPT NAME : "+script_name+" | JOB : "+app_name+" | SRC COUNT : "+src_count+" | TGT COUNT : "+dest_count+" | JOB END DT : "+end_date+" | STATUS : %(message)s"
Ebi_read_write_obj.create_log(data_format,log_file,logger)
logger.info("[Error] Failed")
print(" \n Job "+app_name+" Failed\n")
logger.error("\n __main__ "+ app_name +" --> Exception-Traceback :: " + str(err))
raise
# Entry point for script
if __name__ == "__main__":
# Calling main() method
main()
| 32.584746
| 218
| 0.56619
|
436b0b8716d6dffe95acd55ab50f64b3c6ac3d93
| 241
|
py
|
Python
|
ex-mundo2/ex046.py
|
PedroPegado/ex-cursoemvideo
|
46751a7238e6a142b639c4cc3acf1759411732d7
|
[
"MIT"
] | null | null | null |
ex-mundo2/ex046.py
|
PedroPegado/ex-cursoemvideo
|
46751a7238e6a142b639c4cc3acf1759411732d7
|
[
"MIT"
] | null | null | null |
ex-mundo2/ex046.py
|
PedroPegado/ex-cursoemvideo
|
46751a7238e6a142b639c4cc3acf1759411732d7
|
[
"MIT"
] | null | null | null |
import time
import emoji
print('COUNTDOWN')
time.sleep(0.5)
for c in range(10, 0, -1):
print(c)
time.sleep(1)
print(emoji.emojize(':fireworks::fireworks::fireworks: == HAPPY NEW YEAR == :fireworks::fireworks::fireworks:'))
| 26.777778
| 112
| 0.701245
|
c2190321fdf8378b63bc4f954d993f8c7b30310f
| 1,004
|
py
|
Python
|
homeassistant/components/deconz/const.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 6
|
2020-07-18T16:33:25.000Z
|
2021-09-26T09:52:04.000Z
|
homeassistant/components/deconz/const.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:13:11.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/deconz/const.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 5
|
2020-03-29T00:29:13.000Z
|
2021-09-06T20:58:40.000Z
|
"""Constants for the deCONZ component."""
import logging
LOGGER = logging.getLogger(__package__)
DOMAIN = "deconz"
CONF_BRIDGE_ID = "bridgeid"
CONF_GROUP_ID_BASE = "group_id_base"
DEFAULT_PORT = 80
DEFAULT_ALLOW_CLIP_SENSOR = False
DEFAULT_ALLOW_DECONZ_GROUPS = True
CONF_ALLOW_CLIP_SENSOR = "allow_clip_sensor"
CONF_ALLOW_DECONZ_GROUPS = "allow_deconz_groups"
CONF_MASTER_GATEWAY = "master"
SUPPORTED_PLATFORMS = [
"binary_sensor",
"climate",
"cover",
"light",
"scene",
"sensor",
"switch",
]
NEW_GROUP = "groups"
NEW_LIGHT = "lights"
NEW_SCENE = "scenes"
NEW_SENSOR = "sensors"
ATTR_DARK = "dark"
ATTR_OFFSET = "offset"
ATTR_ON = "on"
ATTR_VALVE = "valve"
DAMPERS = ["Level controllable output"]
WINDOW_COVERS = ["Window covering device", "Window covering controller"]
COVER_TYPES = DAMPERS + WINDOW_COVERS
POWER_PLUGS = ["On/Off light", "On/Off plug-in unit", "Smart plug"]
SIRENS = ["Warning device"]
SWITCH_TYPES = POWER_PLUGS + SIRENS
CONF_GESTURE = "gesture"
| 20.916667
| 72
| 0.732072
|
6c35ceabb89fb401fc7093354b96878f055bfbb9
| 1,756
|
py
|
Python
|
examples/CARLA_SAC/carla_agent.py
|
lp2333/PARL
|
e4bde1f5b7e69c5f8d3ee3a90a647dfe12204bd3
|
[
"ECL-2.0",
"Apache-2.0"
] | 3,172
|
2018-05-22T02:02:29.000Z
|
2022-03-31T09:14:56.000Z
|
examples/CARLA_SAC/carla_agent.py
|
BKBK00/PARL
|
f508bc6085420431b504441c7ff129e64826603e
|
[
"Apache-2.0"
] | 422
|
2018-05-17T16:58:45.000Z
|
2022-03-31T02:03:25.000Z
|
examples/CARLA_SAC/carla_agent.py
|
BKBK00/PARL
|
f508bc6085420431b504441c7ff129e64826603e
|
[
"Apache-2.0"
] | 794
|
2018-05-21T18:33:19.000Z
|
2022-03-30T13:38:09.000Z
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import parl
import paddle
import numpy as np
class CarlaAgent(parl.Agent):
def __init__(self, algorithm):
super(CarlaAgent, self).__init__(algorithm)
self.alg.sync_target(decay=0)
def predict(self, obs):
obs = paddle.to_tensor(obs.reshape(1, -1), dtype='float32')
action = self.alg.predict(obs)
action_numpy = action.cpu().numpy()[0]
return action_numpy
def sample(self, obs):
obs = paddle.to_tensor(obs.reshape(1, -1), dtype='float32')
action, _ = self.alg.sample(obs)
action_numpy = action.cpu().numpy()[0]
return action_numpy
def learn(self, obs, action, reward, next_obs, terminal):
terminal = np.expand_dims(terminal, -1)
reward = np.expand_dims(reward, -1)
obs = paddle.to_tensor(obs, dtype='float32')
action = paddle.to_tensor(action, dtype='float32')
reward = paddle.to_tensor(reward, dtype='float32')
next_obs = paddle.to_tensor(next_obs, dtype='float32')
terminal = paddle.to_tensor(terminal, dtype='float32')
self.alg.learn(obs, action, reward, next_obs, terminal)
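# A minimal wiring sketch (hedged: the SAC constructor arguments shown are
# assumptions about PARL's API, and `model` is a user-defined actor-critic):
#
#     from parl.algorithms import SAC
#     algorithm = SAC(model, gamma=0.99, tau=0.005, alpha=0.2,
#                     actor_lr=3e-4, critic_lr=3e-4)
#     agent = CarlaAgent(algorithm)
#     action = agent.sample(obs)  # stochastic action for exploration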
| 36.583333
| 74
| 0.68508
|
83e9b477bf8d2b37e5d8764f9070dd5034522d57
| 48,779
|
py
|
Python
|
libs/pdfminer/pdfminer/rijndael.py
|
diverted247/SigningService
|
a630357a2bf5bea4e5d55106f092e4a2a31cab15
|
[
"MIT"
] | null | null | null |
libs/pdfminer/pdfminer/rijndael.py
|
diverted247/SigningService
|
a630357a2bf5bea4e5d55106f092e4a2a31cab15
|
[
"MIT"
] | null | null | null |
libs/pdfminer/pdfminer/rijndael.py
|
diverted247/SigningService
|
a630357a2bf5bea4e5d55106f092e4a2a31cab15
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
""" Python implementation of Rijndael encryption algorithm.
This code is in the public domain.
This code is based on a public domain C implementation
by Philip J. Erdelsky:
http://www.efgh.com/software/rijndael.htm
"""
import struct
def KEYLENGTH(keybits):
    return keybits // 8
def RKLENGTH(keybits):
    return keybits // 8 + 28
def NROUNDS(keybits):
    return keybits // 32 + 6
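# For example, for a 256-bit key: KEYLENGTH(256) == 32 bytes,
# RKLENGTH(256) == 60 round-key words, and NROUNDS(256) == 14 rounds.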
Te0 = [
0xc66363a5L, 0xf87c7c84L, 0xee777799L, 0xf67b7b8dL,
0xfff2f20dL, 0xd66b6bbdL, 0xde6f6fb1L, 0x91c5c554L,
0x60303050L, 0x02010103L, 0xce6767a9L, 0x562b2b7dL,
0xe7fefe19L, 0xb5d7d762L, 0x4dababe6L, 0xec76769aL,
0x8fcaca45L, 0x1f82829dL, 0x89c9c940L, 0xfa7d7d87L,
0xeffafa15L, 0xb25959ebL, 0x8e4747c9L, 0xfbf0f00bL,
0x41adadecL, 0xb3d4d467L, 0x5fa2a2fdL, 0x45afafeaL,
0x239c9cbfL, 0x53a4a4f7L, 0xe4727296L, 0x9bc0c05bL,
0x75b7b7c2L, 0xe1fdfd1cL, 0x3d9393aeL, 0x4c26266aL,
0x6c36365aL, 0x7e3f3f41L, 0xf5f7f702L, 0x83cccc4fL,
0x6834345cL, 0x51a5a5f4L, 0xd1e5e534L, 0xf9f1f108L,
0xe2717193L, 0xabd8d873L, 0x62313153L, 0x2a15153fL,
0x0804040cL, 0x95c7c752L, 0x46232365L, 0x9dc3c35eL,
0x30181828L, 0x379696a1L, 0x0a05050fL, 0x2f9a9ab5L,
0x0e070709L, 0x24121236L, 0x1b80809bL, 0xdfe2e23dL,
0xcdebeb26L, 0x4e272769L, 0x7fb2b2cdL, 0xea75759fL,
0x1209091bL, 0x1d83839eL, 0x582c2c74L, 0x341a1a2eL,
0x361b1b2dL, 0xdc6e6eb2L, 0xb45a5aeeL, 0x5ba0a0fbL,
0xa45252f6L, 0x763b3b4dL, 0xb7d6d661L, 0x7db3b3ceL,
0x5229297bL, 0xdde3e33eL, 0x5e2f2f71L, 0x13848497L,
0xa65353f5L, 0xb9d1d168L, 0x00000000L, 0xc1eded2cL,
0x40202060L, 0xe3fcfc1fL, 0x79b1b1c8L, 0xb65b5bedL,
0xd46a6abeL, 0x8dcbcb46L, 0x67bebed9L, 0x7239394bL,
0x944a4adeL, 0x984c4cd4L, 0xb05858e8L, 0x85cfcf4aL,
0xbbd0d06bL, 0xc5efef2aL, 0x4faaaae5L, 0xedfbfb16L,
0x864343c5L, 0x9a4d4dd7L, 0x66333355L, 0x11858594L,
0x8a4545cfL, 0xe9f9f910L, 0x04020206L, 0xfe7f7f81L,
0xa05050f0L, 0x783c3c44L, 0x259f9fbaL, 0x4ba8a8e3L,
0xa25151f3L, 0x5da3a3feL, 0x804040c0L, 0x058f8f8aL,
0x3f9292adL, 0x219d9dbcL, 0x70383848L, 0xf1f5f504L,
0x63bcbcdfL, 0x77b6b6c1L, 0xafdada75L, 0x42212163L,
0x20101030L, 0xe5ffff1aL, 0xfdf3f30eL, 0xbfd2d26dL,
0x81cdcd4cL, 0x180c0c14L, 0x26131335L, 0xc3ecec2fL,
0xbe5f5fe1L, 0x359797a2L, 0x884444ccL, 0x2e171739L,
0x93c4c457L, 0x55a7a7f2L, 0xfc7e7e82L, 0x7a3d3d47L,
0xc86464acL, 0xba5d5de7L, 0x3219192bL, 0xe6737395L,
0xc06060a0L, 0x19818198L, 0x9e4f4fd1L, 0xa3dcdc7fL,
0x44222266L, 0x542a2a7eL, 0x3b9090abL, 0x0b888883L,
0x8c4646caL, 0xc7eeee29L, 0x6bb8b8d3L, 0x2814143cL,
0xa7dede79L, 0xbc5e5ee2L, 0x160b0b1dL, 0xaddbdb76L,
0xdbe0e03bL, 0x64323256L, 0x743a3a4eL, 0x140a0a1eL,
0x924949dbL, 0x0c06060aL, 0x4824246cL, 0xb85c5ce4L,
0x9fc2c25dL, 0xbdd3d36eL, 0x43acacefL, 0xc46262a6L,
0x399191a8L, 0x319595a4L, 0xd3e4e437L, 0xf279798bL,
0xd5e7e732L, 0x8bc8c843L, 0x6e373759L, 0xda6d6db7L,
0x018d8d8cL, 0xb1d5d564L, 0x9c4e4ed2L, 0x49a9a9e0L,
0xd86c6cb4L, 0xac5656faL, 0xf3f4f407L, 0xcfeaea25L,
0xca6565afL, 0xf47a7a8eL, 0x47aeaee9L, 0x10080818L,
0x6fbabad5L, 0xf0787888L, 0x4a25256fL, 0x5c2e2e72L,
0x381c1c24L, 0x57a6a6f1L, 0x73b4b4c7L, 0x97c6c651L,
0xcbe8e823L, 0xa1dddd7cL, 0xe874749cL, 0x3e1f1f21L,
0x964b4bddL, 0x61bdbddcL, 0x0d8b8b86L, 0x0f8a8a85L,
0xe0707090L, 0x7c3e3e42L, 0x71b5b5c4L, 0xcc6666aaL,
0x904848d8L, 0x06030305L, 0xf7f6f601L, 0x1c0e0e12L,
0xc26161a3L, 0x6a35355fL, 0xae5757f9L, 0x69b9b9d0L,
0x17868691L, 0x99c1c158L, 0x3a1d1d27L, 0x279e9eb9L,
0xd9e1e138L, 0xebf8f813L, 0x2b9898b3L, 0x22111133L,
0xd26969bbL, 0xa9d9d970L, 0x078e8e89L, 0x339494a7L,
0x2d9b9bb6L, 0x3c1e1e22L, 0x15878792L, 0xc9e9e920L,
0x87cece49L, 0xaa5555ffL, 0x50282878L, 0xa5dfdf7aL,
0x038c8c8fL, 0x59a1a1f8L, 0x09898980L, 0x1a0d0d17L,
0x65bfbfdaL, 0xd7e6e631L, 0x844242c6L, 0xd06868b8L,
0x824141c3L, 0x299999b0L, 0x5a2d2d77L, 0x1e0f0f11L,
0x7bb0b0cbL, 0xa85454fcL, 0x6dbbbbd6L, 0x2c16163aL,
]
Te1 = [
0xa5c66363L, 0x84f87c7cL, 0x99ee7777L, 0x8df67b7bL,
0x0dfff2f2L, 0xbdd66b6bL, 0xb1de6f6fL, 0x5491c5c5L,
0x50603030L, 0x03020101L, 0xa9ce6767L, 0x7d562b2bL,
0x19e7fefeL, 0x62b5d7d7L, 0xe64dababL, 0x9aec7676L,
0x458fcacaL, 0x9d1f8282L, 0x4089c9c9L, 0x87fa7d7dL,
0x15effafaL, 0xebb25959L, 0xc98e4747L, 0x0bfbf0f0L,
0xec41adadL, 0x67b3d4d4L, 0xfd5fa2a2L, 0xea45afafL,
0xbf239c9cL, 0xf753a4a4L, 0x96e47272L, 0x5b9bc0c0L,
0xc275b7b7L, 0x1ce1fdfdL, 0xae3d9393L, 0x6a4c2626L,
0x5a6c3636L, 0x417e3f3fL, 0x02f5f7f7L, 0x4f83ccccL,
0x5c683434L, 0xf451a5a5L, 0x34d1e5e5L, 0x08f9f1f1L,
0x93e27171L, 0x73abd8d8L, 0x53623131L, 0x3f2a1515L,
0x0c080404L, 0x5295c7c7L, 0x65462323L, 0x5e9dc3c3L,
0x28301818L, 0xa1379696L, 0x0f0a0505L, 0xb52f9a9aL,
0x090e0707L, 0x36241212L, 0x9b1b8080L, 0x3ddfe2e2L,
0x26cdebebL, 0x694e2727L, 0xcd7fb2b2L, 0x9fea7575L,
0x1b120909L, 0x9e1d8383L, 0x74582c2cL, 0x2e341a1aL,
0x2d361b1bL, 0xb2dc6e6eL, 0xeeb45a5aL, 0xfb5ba0a0L,
0xf6a45252L, 0x4d763b3bL, 0x61b7d6d6L, 0xce7db3b3L,
0x7b522929L, 0x3edde3e3L, 0x715e2f2fL, 0x97138484L,
0xf5a65353L, 0x68b9d1d1L, 0x00000000L, 0x2cc1ededL,
0x60402020L, 0x1fe3fcfcL, 0xc879b1b1L, 0xedb65b5bL,
0xbed46a6aL, 0x468dcbcbL, 0xd967bebeL, 0x4b723939L,
0xde944a4aL, 0xd4984c4cL, 0xe8b05858L, 0x4a85cfcfL,
0x6bbbd0d0L, 0x2ac5efefL, 0xe54faaaaL, 0x16edfbfbL,
0xc5864343L, 0xd79a4d4dL, 0x55663333L, 0x94118585L,
0xcf8a4545L, 0x10e9f9f9L, 0x06040202L, 0x81fe7f7fL,
0xf0a05050L, 0x44783c3cL, 0xba259f9fL, 0xe34ba8a8L,
0xf3a25151L, 0xfe5da3a3L, 0xc0804040L, 0x8a058f8fL,
0xad3f9292L, 0xbc219d9dL, 0x48703838L, 0x04f1f5f5L,
0xdf63bcbcL, 0xc177b6b6L, 0x75afdadaL, 0x63422121L,
0x30201010L, 0x1ae5ffffL, 0x0efdf3f3L, 0x6dbfd2d2L,
0x4c81cdcdL, 0x14180c0cL, 0x35261313L, 0x2fc3ececL,
0xe1be5f5fL, 0xa2359797L, 0xcc884444L, 0x392e1717L,
0x5793c4c4L, 0xf255a7a7L, 0x82fc7e7eL, 0x477a3d3dL,
0xacc86464L, 0xe7ba5d5dL, 0x2b321919L, 0x95e67373L,
0xa0c06060L, 0x98198181L, 0xd19e4f4fL, 0x7fa3dcdcL,
0x66442222L, 0x7e542a2aL, 0xab3b9090L, 0x830b8888L,
0xca8c4646L, 0x29c7eeeeL, 0xd36bb8b8L, 0x3c281414L,
0x79a7dedeL, 0xe2bc5e5eL, 0x1d160b0bL, 0x76addbdbL,
0x3bdbe0e0L, 0x56643232L, 0x4e743a3aL, 0x1e140a0aL,
0xdb924949L, 0x0a0c0606L, 0x6c482424L, 0xe4b85c5cL,
0x5d9fc2c2L, 0x6ebdd3d3L, 0xef43acacL, 0xa6c46262L,
0xa8399191L, 0xa4319595L, 0x37d3e4e4L, 0x8bf27979L,
0x32d5e7e7L, 0x438bc8c8L, 0x596e3737L, 0xb7da6d6dL,
0x8c018d8dL, 0x64b1d5d5L, 0xd29c4e4eL, 0xe049a9a9L,
0xb4d86c6cL, 0xfaac5656L, 0x07f3f4f4L, 0x25cfeaeaL,
0xafca6565L, 0x8ef47a7aL, 0xe947aeaeL, 0x18100808L,
0xd56fbabaL, 0x88f07878L, 0x6f4a2525L, 0x725c2e2eL,
0x24381c1cL, 0xf157a6a6L, 0xc773b4b4L, 0x5197c6c6L,
0x23cbe8e8L, 0x7ca1ddddL, 0x9ce87474L, 0x213e1f1fL,
0xdd964b4bL, 0xdc61bdbdL, 0x860d8b8bL, 0x850f8a8aL,
0x90e07070L, 0x427c3e3eL, 0xc471b5b5L, 0xaacc6666L,
0xd8904848L, 0x05060303L, 0x01f7f6f6L, 0x121c0e0eL,
0xa3c26161L, 0x5f6a3535L, 0xf9ae5757L, 0xd069b9b9L,
0x91178686L, 0x5899c1c1L, 0x273a1d1dL, 0xb9279e9eL,
0x38d9e1e1L, 0x13ebf8f8L, 0xb32b9898L, 0x33221111L,
0xbbd26969L, 0x70a9d9d9L, 0x89078e8eL, 0xa7339494L,
0xb62d9b9bL, 0x223c1e1eL, 0x92158787L, 0x20c9e9e9L,
0x4987ceceL, 0xffaa5555L, 0x78502828L, 0x7aa5dfdfL,
0x8f038c8cL, 0xf859a1a1L, 0x80098989L, 0x171a0d0dL,
0xda65bfbfL, 0x31d7e6e6L, 0xc6844242L, 0xb8d06868L,
0xc3824141L, 0xb0299999L, 0x775a2d2dL, 0x111e0f0fL,
0xcb7bb0b0L, 0xfca85454L, 0xd66dbbbbL, 0x3a2c1616L,
]
Te2 = [
0x63a5c663L, 0x7c84f87cL, 0x7799ee77L, 0x7b8df67bL,
0xf20dfff2L, 0x6bbdd66bL, 0x6fb1de6fL, 0xc55491c5L,
0x30506030L, 0x01030201L, 0x67a9ce67L, 0x2b7d562bL,
0xfe19e7feL, 0xd762b5d7L, 0xabe64dabL, 0x769aec76L,
0xca458fcaL, 0x829d1f82L, 0xc94089c9L, 0x7d87fa7dL,
0xfa15effaL, 0x59ebb259L, 0x47c98e47L, 0xf00bfbf0L,
0xadec41adL, 0xd467b3d4L, 0xa2fd5fa2L, 0xafea45afL,
0x9cbf239cL, 0xa4f753a4L, 0x7296e472L, 0xc05b9bc0L,
0xb7c275b7L, 0xfd1ce1fdL, 0x93ae3d93L, 0x266a4c26L,
0x365a6c36L, 0x3f417e3fL, 0xf702f5f7L, 0xcc4f83ccL,
0x345c6834L, 0xa5f451a5L, 0xe534d1e5L, 0xf108f9f1L,
0x7193e271L, 0xd873abd8L, 0x31536231L, 0x153f2a15L,
0x040c0804L, 0xc75295c7L, 0x23654623L, 0xc35e9dc3L,
0x18283018L, 0x96a13796L, 0x050f0a05L, 0x9ab52f9aL,
0x07090e07L, 0x12362412L, 0x809b1b80L, 0xe23ddfe2L,
0xeb26cdebL, 0x27694e27L, 0xb2cd7fb2L, 0x759fea75L,
0x091b1209L, 0x839e1d83L, 0x2c74582cL, 0x1a2e341aL,
0x1b2d361bL, 0x6eb2dc6eL, 0x5aeeb45aL, 0xa0fb5ba0L,
0x52f6a452L, 0x3b4d763bL, 0xd661b7d6L, 0xb3ce7db3L,
0x297b5229L, 0xe33edde3L, 0x2f715e2fL, 0x84971384L,
0x53f5a653L, 0xd168b9d1L, 0x00000000L, 0xed2cc1edL,
0x20604020L, 0xfc1fe3fcL, 0xb1c879b1L, 0x5bedb65bL,
0x6abed46aL, 0xcb468dcbL, 0xbed967beL, 0x394b7239L,
0x4ade944aL, 0x4cd4984cL, 0x58e8b058L, 0xcf4a85cfL,
0xd06bbbd0L, 0xef2ac5efL, 0xaae54faaL, 0xfb16edfbL,
0x43c58643L, 0x4dd79a4dL, 0x33556633L, 0x85941185L,
0x45cf8a45L, 0xf910e9f9L, 0x02060402L, 0x7f81fe7fL,
0x50f0a050L, 0x3c44783cL, 0x9fba259fL, 0xa8e34ba8L,
0x51f3a251L, 0xa3fe5da3L, 0x40c08040L, 0x8f8a058fL,
0x92ad3f92L, 0x9dbc219dL, 0x38487038L, 0xf504f1f5L,
0xbcdf63bcL, 0xb6c177b6L, 0xda75afdaL, 0x21634221L,
0x10302010L, 0xff1ae5ffL, 0xf30efdf3L, 0xd26dbfd2L,
0xcd4c81cdL, 0x0c14180cL, 0x13352613L, 0xec2fc3ecL,
0x5fe1be5fL, 0x97a23597L, 0x44cc8844L, 0x17392e17L,
0xc45793c4L, 0xa7f255a7L, 0x7e82fc7eL, 0x3d477a3dL,
0x64acc864L, 0x5de7ba5dL, 0x192b3219L, 0x7395e673L,
0x60a0c060L, 0x81981981L, 0x4fd19e4fL, 0xdc7fa3dcL,
0x22664422L, 0x2a7e542aL, 0x90ab3b90L, 0x88830b88L,
0x46ca8c46L, 0xee29c7eeL, 0xb8d36bb8L, 0x143c2814L,
0xde79a7deL, 0x5ee2bc5eL, 0x0b1d160bL, 0xdb76addbL,
0xe03bdbe0L, 0x32566432L, 0x3a4e743aL, 0x0a1e140aL,
0x49db9249L, 0x060a0c06L, 0x246c4824L, 0x5ce4b85cL,
0xc25d9fc2L, 0xd36ebdd3L, 0xacef43acL, 0x62a6c462L,
0x91a83991L, 0x95a43195L, 0xe437d3e4L, 0x798bf279L,
0xe732d5e7L, 0xc8438bc8L, 0x37596e37L, 0x6db7da6dL,
0x8d8c018dL, 0xd564b1d5L, 0x4ed29c4eL, 0xa9e049a9L,
0x6cb4d86cL, 0x56faac56L, 0xf407f3f4L, 0xea25cfeaL,
0x65afca65L, 0x7a8ef47aL, 0xaee947aeL, 0x08181008L,
0xbad56fbaL, 0x7888f078L, 0x256f4a25L, 0x2e725c2eL,
0x1c24381cL, 0xa6f157a6L, 0xb4c773b4L, 0xc65197c6L,
0xe823cbe8L, 0xdd7ca1ddL, 0x749ce874L, 0x1f213e1fL,
0x4bdd964bL, 0xbddc61bdL, 0x8b860d8bL, 0x8a850f8aL,
0x7090e070L, 0x3e427c3eL, 0xb5c471b5L, 0x66aacc66L,
0x48d89048L, 0x03050603L, 0xf601f7f6L, 0x0e121c0eL,
0x61a3c261L, 0x355f6a35L, 0x57f9ae57L, 0xb9d069b9L,
0x86911786L, 0xc15899c1L, 0x1d273a1dL, 0x9eb9279eL,
0xe138d9e1L, 0xf813ebf8L, 0x98b32b98L, 0x11332211L,
0x69bbd269L, 0xd970a9d9L, 0x8e89078eL, 0x94a73394L,
0x9bb62d9bL, 0x1e223c1eL, 0x87921587L, 0xe920c9e9L,
0xce4987ceL, 0x55ffaa55L, 0x28785028L, 0xdf7aa5dfL,
0x8c8f038cL, 0xa1f859a1L, 0x89800989L, 0x0d171a0dL,
0xbfda65bfL, 0xe631d7e6L, 0x42c68442L, 0x68b8d068L,
0x41c38241L, 0x99b02999L, 0x2d775a2dL, 0x0f111e0fL,
0xb0cb7bb0L, 0x54fca854L, 0xbbd66dbbL, 0x163a2c16L,
]
Te3 = [
0x6363a5c6L, 0x7c7c84f8L, 0x777799eeL, 0x7b7b8df6L,
0xf2f20dffL, 0x6b6bbdd6L, 0x6f6fb1deL, 0xc5c55491L,
0x30305060L, 0x01010302L, 0x6767a9ceL, 0x2b2b7d56L,
0xfefe19e7L, 0xd7d762b5L, 0xababe64dL, 0x76769aecL,
0xcaca458fL, 0x82829d1fL, 0xc9c94089L, 0x7d7d87faL,
0xfafa15efL, 0x5959ebb2L, 0x4747c98eL, 0xf0f00bfbL,
0xadadec41L, 0xd4d467b3L, 0xa2a2fd5fL, 0xafafea45L,
0x9c9cbf23L, 0xa4a4f753L, 0x727296e4L, 0xc0c05b9bL,
0xb7b7c275L, 0xfdfd1ce1L, 0x9393ae3dL, 0x26266a4cL,
0x36365a6cL, 0x3f3f417eL, 0xf7f702f5L, 0xcccc4f83L,
0x34345c68L, 0xa5a5f451L, 0xe5e534d1L, 0xf1f108f9L,
0x717193e2L, 0xd8d873abL, 0x31315362L, 0x15153f2aL,
0x04040c08L, 0xc7c75295L, 0x23236546L, 0xc3c35e9dL,
0x18182830L, 0x9696a137L, 0x05050f0aL, 0x9a9ab52fL,
0x0707090eL, 0x12123624L, 0x80809b1bL, 0xe2e23ddfL,
0xebeb26cdL, 0x2727694eL, 0xb2b2cd7fL, 0x75759feaL,
0x09091b12L, 0x83839e1dL, 0x2c2c7458L, 0x1a1a2e34L,
0x1b1b2d36L, 0x6e6eb2dcL, 0x5a5aeeb4L, 0xa0a0fb5bL,
0x5252f6a4L, 0x3b3b4d76L, 0xd6d661b7L, 0xb3b3ce7dL,
0x29297b52L, 0xe3e33eddL, 0x2f2f715eL, 0x84849713L,
0x5353f5a6L, 0xd1d168b9L, 0x00000000L, 0xeded2cc1L,
0x20206040L, 0xfcfc1fe3L, 0xb1b1c879L, 0x5b5bedb6L,
0x6a6abed4L, 0xcbcb468dL, 0xbebed967L, 0x39394b72L,
0x4a4ade94L, 0x4c4cd498L, 0x5858e8b0L, 0xcfcf4a85L,
0xd0d06bbbL, 0xefef2ac5L, 0xaaaae54fL, 0xfbfb16edL,
0x4343c586L, 0x4d4dd79aL, 0x33335566L, 0x85859411L,
0x4545cf8aL, 0xf9f910e9L, 0x02020604L, 0x7f7f81feL,
0x5050f0a0L, 0x3c3c4478L, 0x9f9fba25L, 0xa8a8e34bL,
0x5151f3a2L, 0xa3a3fe5dL, 0x4040c080L, 0x8f8f8a05L,
0x9292ad3fL, 0x9d9dbc21L, 0x38384870L, 0xf5f504f1L,
0xbcbcdf63L, 0xb6b6c177L, 0xdada75afL, 0x21216342L,
0x10103020L, 0xffff1ae5L, 0xf3f30efdL, 0xd2d26dbfL,
0xcdcd4c81L, 0x0c0c1418L, 0x13133526L, 0xecec2fc3L,
0x5f5fe1beL, 0x9797a235L, 0x4444cc88L, 0x1717392eL,
0xc4c45793L, 0xa7a7f255L, 0x7e7e82fcL, 0x3d3d477aL,
0x6464acc8L, 0x5d5de7baL, 0x19192b32L, 0x737395e6L,
0x6060a0c0L, 0x81819819L, 0x4f4fd19eL, 0xdcdc7fa3L,
0x22226644L, 0x2a2a7e54L, 0x9090ab3bL, 0x8888830bL,
0x4646ca8cL, 0xeeee29c7L, 0xb8b8d36bL, 0x14143c28L,
0xdede79a7L, 0x5e5ee2bcL, 0x0b0b1d16L, 0xdbdb76adL,
0xe0e03bdbL, 0x32325664L, 0x3a3a4e74L, 0x0a0a1e14L,
0x4949db92L, 0x06060a0cL, 0x24246c48L, 0x5c5ce4b8L,
0xc2c25d9fL, 0xd3d36ebdL, 0xacacef43L, 0x6262a6c4L,
0x9191a839L, 0x9595a431L, 0xe4e437d3L, 0x79798bf2L,
0xe7e732d5L, 0xc8c8438bL, 0x3737596eL, 0x6d6db7daL,
0x8d8d8c01L, 0xd5d564b1L, 0x4e4ed29cL, 0xa9a9e049L,
0x6c6cb4d8L, 0x5656faacL, 0xf4f407f3L, 0xeaea25cfL,
0x6565afcaL, 0x7a7a8ef4L, 0xaeaee947L, 0x08081810L,
0xbabad56fL, 0x787888f0L, 0x25256f4aL, 0x2e2e725cL,
0x1c1c2438L, 0xa6a6f157L, 0xb4b4c773L, 0xc6c65197L,
0xe8e823cbL, 0xdddd7ca1L, 0x74749ce8L, 0x1f1f213eL,
0x4b4bdd96L, 0xbdbddc61L, 0x8b8b860dL, 0x8a8a850fL,
0x707090e0L, 0x3e3e427cL, 0xb5b5c471L, 0x6666aaccL,
0x4848d890L, 0x03030506L, 0xf6f601f7L, 0x0e0e121cL,
0x6161a3c2L, 0x35355f6aL, 0x5757f9aeL, 0xb9b9d069L,
0x86869117L, 0xc1c15899L, 0x1d1d273aL, 0x9e9eb927L,
0xe1e138d9L, 0xf8f813ebL, 0x9898b32bL, 0x11113322L,
0x6969bbd2L, 0xd9d970a9L, 0x8e8e8907L, 0x9494a733L,
0x9b9bb62dL, 0x1e1e223cL, 0x87879215L, 0xe9e920c9L,
0xcece4987L, 0x5555ffaaL, 0x28287850L, 0xdfdf7aa5L,
0x8c8c8f03L, 0xa1a1f859L, 0x89898009L, 0x0d0d171aL,
0xbfbfda65L, 0xe6e631d7L, 0x4242c684L, 0x6868b8d0L,
0x4141c382L, 0x9999b029L, 0x2d2d775aL, 0x0f0f111eL,
0xb0b0cb7bL, 0x5454fca8L, 0xbbbbd66dL, 0x16163a2cL,
]
Te4 = [
0x63636363L, 0x7c7c7c7cL, 0x77777777L, 0x7b7b7b7bL,
0xf2f2f2f2L, 0x6b6b6b6bL, 0x6f6f6f6fL, 0xc5c5c5c5L,
0x30303030L, 0x01010101L, 0x67676767L, 0x2b2b2b2bL,
0xfefefefeL, 0xd7d7d7d7L, 0xababababL, 0x76767676L,
0xcacacacaL, 0x82828282L, 0xc9c9c9c9L, 0x7d7d7d7dL,
0xfafafafaL, 0x59595959L, 0x47474747L, 0xf0f0f0f0L,
0xadadadadL, 0xd4d4d4d4L, 0xa2a2a2a2L, 0xafafafafL,
0x9c9c9c9cL, 0xa4a4a4a4L, 0x72727272L, 0xc0c0c0c0L,
0xb7b7b7b7L, 0xfdfdfdfdL, 0x93939393L, 0x26262626L,
0x36363636L, 0x3f3f3f3fL, 0xf7f7f7f7L, 0xccccccccL,
0x34343434L, 0xa5a5a5a5L, 0xe5e5e5e5L, 0xf1f1f1f1L,
0x71717171L, 0xd8d8d8d8L, 0x31313131L, 0x15151515L,
0x04040404L, 0xc7c7c7c7L, 0x23232323L, 0xc3c3c3c3L,
0x18181818L, 0x96969696L, 0x05050505L, 0x9a9a9a9aL,
0x07070707L, 0x12121212L, 0x80808080L, 0xe2e2e2e2L,
0xebebebebL, 0x27272727L, 0xb2b2b2b2L, 0x75757575L,
0x09090909L, 0x83838383L, 0x2c2c2c2cL, 0x1a1a1a1aL,
0x1b1b1b1bL, 0x6e6e6e6eL, 0x5a5a5a5aL, 0xa0a0a0a0L,
0x52525252L, 0x3b3b3b3bL, 0xd6d6d6d6L, 0xb3b3b3b3L,
0x29292929L, 0xe3e3e3e3L, 0x2f2f2f2fL, 0x84848484L,
0x53535353L, 0xd1d1d1d1L, 0x00000000L, 0xededededL,
0x20202020L, 0xfcfcfcfcL, 0xb1b1b1b1L, 0x5b5b5b5bL,
0x6a6a6a6aL, 0xcbcbcbcbL, 0xbebebebeL, 0x39393939L,
0x4a4a4a4aL, 0x4c4c4c4cL, 0x58585858L, 0xcfcfcfcfL,
0xd0d0d0d0L, 0xefefefefL, 0xaaaaaaaaL, 0xfbfbfbfbL,
0x43434343L, 0x4d4d4d4dL, 0x33333333L, 0x85858585L,
0x45454545L, 0xf9f9f9f9L, 0x02020202L, 0x7f7f7f7fL,
0x50505050L, 0x3c3c3c3cL, 0x9f9f9f9fL, 0xa8a8a8a8L,
0x51515151L, 0xa3a3a3a3L, 0x40404040L, 0x8f8f8f8fL,
0x92929292L, 0x9d9d9d9dL, 0x38383838L, 0xf5f5f5f5L,
0xbcbcbcbcL, 0xb6b6b6b6L, 0xdadadadaL, 0x21212121L,
0x10101010L, 0xffffffffL, 0xf3f3f3f3L, 0xd2d2d2d2L,
0xcdcdcdcdL, 0x0c0c0c0cL, 0x13131313L, 0xececececL,
0x5f5f5f5fL, 0x97979797L, 0x44444444L, 0x17171717L,
0xc4c4c4c4L, 0xa7a7a7a7L, 0x7e7e7e7eL, 0x3d3d3d3dL,
0x64646464L, 0x5d5d5d5dL, 0x19191919L, 0x73737373L,
0x60606060L, 0x81818181L, 0x4f4f4f4fL, 0xdcdcdcdcL,
0x22222222L, 0x2a2a2a2aL, 0x90909090L, 0x88888888L,
0x46464646L, 0xeeeeeeeeL, 0xb8b8b8b8L, 0x14141414L,
0xdedededeL, 0x5e5e5e5eL, 0x0b0b0b0bL, 0xdbdbdbdbL,
0xe0e0e0e0L, 0x32323232L, 0x3a3a3a3aL, 0x0a0a0a0aL,
0x49494949L, 0x06060606L, 0x24242424L, 0x5c5c5c5cL,
0xc2c2c2c2L, 0xd3d3d3d3L, 0xacacacacL, 0x62626262L,
0x91919191L, 0x95959595L, 0xe4e4e4e4L, 0x79797979L,
0xe7e7e7e7L, 0xc8c8c8c8L, 0x37373737L, 0x6d6d6d6dL,
0x8d8d8d8dL, 0xd5d5d5d5L, 0x4e4e4e4eL, 0xa9a9a9a9L,
0x6c6c6c6cL, 0x56565656L, 0xf4f4f4f4L, 0xeaeaeaeaL,
0x65656565L, 0x7a7a7a7aL, 0xaeaeaeaeL, 0x08080808L,
0xbabababaL, 0x78787878L, 0x25252525L, 0x2e2e2e2eL,
0x1c1c1c1cL, 0xa6a6a6a6L, 0xb4b4b4b4L, 0xc6c6c6c6L,
0xe8e8e8e8L, 0xddddddddL, 0x74747474L, 0x1f1f1f1fL,
0x4b4b4b4bL, 0xbdbdbdbdL, 0x8b8b8b8bL, 0x8a8a8a8aL,
0x70707070L, 0x3e3e3e3eL, 0xb5b5b5b5L, 0x66666666L,
0x48484848L, 0x03030303L, 0xf6f6f6f6L, 0x0e0e0e0eL,
0x61616161L, 0x35353535L, 0x57575757L, 0xb9b9b9b9L,
0x86868686L, 0xc1c1c1c1L, 0x1d1d1d1dL, 0x9e9e9e9eL,
0xe1e1e1e1L, 0xf8f8f8f8L, 0x98989898L, 0x11111111L,
0x69696969L, 0xd9d9d9d9L, 0x8e8e8e8eL, 0x94949494L,
0x9b9b9b9bL, 0x1e1e1e1eL, 0x87878787L, 0xe9e9e9e9L,
0xcecececeL, 0x55555555L, 0x28282828L, 0xdfdfdfdfL,
0x8c8c8c8cL, 0xa1a1a1a1L, 0x89898989L, 0x0d0d0d0dL,
0xbfbfbfbfL, 0xe6e6e6e6L, 0x42424242L, 0x68686868L,
0x41414141L, 0x99999999L, 0x2d2d2d2dL, 0x0f0f0f0fL,
0xb0b0b0b0L, 0x54545454L, 0xbbbbbbbbL, 0x16161616L,
]
Td0 = [
0x51f4a750L, 0x7e416553L, 0x1a17a4c3L, 0x3a275e96L,
0x3bab6bcbL, 0x1f9d45f1L, 0xacfa58abL, 0x4be30393L,
0x2030fa55L, 0xad766df6L, 0x88cc7691L, 0xf5024c25L,
0x4fe5d7fcL, 0xc52acbd7L, 0x26354480L, 0xb562a38fL,
0xdeb15a49L, 0x25ba1b67L, 0x45ea0e98L, 0x5dfec0e1L,
0xc32f7502L, 0x814cf012L, 0x8d4697a3L, 0x6bd3f9c6L,
0x038f5fe7L, 0x15929c95L, 0xbf6d7aebL, 0x955259daL,
0xd4be832dL, 0x587421d3L, 0x49e06929L, 0x8ec9c844L,
0x75c2896aL, 0xf48e7978L, 0x99583e6bL, 0x27b971ddL,
0xbee14fb6L, 0xf088ad17L, 0xc920ac66L, 0x7dce3ab4L,
0x63df4a18L, 0xe51a3182L, 0x97513360L, 0x62537f45L,
0xb16477e0L, 0xbb6bae84L, 0xfe81a01cL, 0xf9082b94L,
0x70486858L, 0x8f45fd19L, 0x94de6c87L, 0x527bf8b7L,
0xab73d323L, 0x724b02e2L, 0xe31f8f57L, 0x6655ab2aL,
0xb2eb2807L, 0x2fb5c203L, 0x86c57b9aL, 0xd33708a5L,
0x302887f2L, 0x23bfa5b2L, 0x02036abaL, 0xed16825cL,
0x8acf1c2bL, 0xa779b492L, 0xf307f2f0L, 0x4e69e2a1L,
0x65daf4cdL, 0x0605bed5L, 0xd134621fL, 0xc4a6fe8aL,
0x342e539dL, 0xa2f355a0L, 0x058ae132L, 0xa4f6eb75L,
0x0b83ec39L, 0x4060efaaL, 0x5e719f06L, 0xbd6e1051L,
0x3e218af9L, 0x96dd063dL, 0xdd3e05aeL, 0x4de6bd46L,
0x91548db5L, 0x71c45d05L, 0x0406d46fL, 0x605015ffL,
0x1998fb24L, 0xd6bde997L, 0x894043ccL, 0x67d99e77L,
0xb0e842bdL, 0x07898b88L, 0xe7195b38L, 0x79c8eedbL,
0xa17c0a47L, 0x7c420fe9L, 0xf8841ec9L, 0x00000000L,
0x09808683L, 0x322bed48L, 0x1e1170acL, 0x6c5a724eL,
0xfd0efffbL, 0x0f853856L, 0x3daed51eL, 0x362d3927L,
0x0a0fd964L, 0x685ca621L, 0x9b5b54d1L, 0x24362e3aL,
0x0c0a67b1L, 0x9357e70fL, 0xb4ee96d2L, 0x1b9b919eL,
0x80c0c54fL, 0x61dc20a2L, 0x5a774b69L, 0x1c121a16L,
0xe293ba0aL, 0xc0a02ae5L, 0x3c22e043L, 0x121b171dL,
0x0e090d0bL, 0xf28bc7adL, 0x2db6a8b9L, 0x141ea9c8L,
0x57f11985L, 0xaf75074cL, 0xee99ddbbL, 0xa37f60fdL,
0xf701269fL, 0x5c72f5bcL, 0x44663bc5L, 0x5bfb7e34L,
0x8b432976L, 0xcb23c6dcL, 0xb6edfc68L, 0xb8e4f163L,
0xd731dccaL, 0x42638510L, 0x13972240L, 0x84c61120L,
0x854a247dL, 0xd2bb3df8L, 0xaef93211L, 0xc729a16dL,
0x1d9e2f4bL, 0xdcb230f3L, 0x0d8652ecL, 0x77c1e3d0L,
0x2bb3166cL, 0xa970b999L, 0x119448faL, 0x47e96422L,
0xa8fc8cc4L, 0xa0f03f1aL, 0x567d2cd8L, 0x223390efL,
0x87494ec7L, 0xd938d1c1L, 0x8ccaa2feL, 0x98d40b36L,
0xa6f581cfL, 0xa57ade28L, 0xdab78e26L, 0x3fadbfa4L,
0x2c3a9de4L, 0x5078920dL, 0x6a5fcc9bL, 0x547e4662L,
0xf68d13c2L, 0x90d8b8e8L, 0x2e39f75eL, 0x82c3aff5L,
0x9f5d80beL, 0x69d0937cL, 0x6fd52da9L, 0xcf2512b3L,
0xc8ac993bL, 0x10187da7L, 0xe89c636eL, 0xdb3bbb7bL,
0xcd267809L, 0x6e5918f4L, 0xec9ab701L, 0x834f9aa8L,
0xe6956e65L, 0xaaffe67eL, 0x21bccf08L, 0xef15e8e6L,
0xbae79bd9L, 0x4a6f36ceL, 0xea9f09d4L, 0x29b07cd6L,
0x31a4b2afL, 0x2a3f2331L, 0xc6a59430L, 0x35a266c0L,
0x744ebc37L, 0xfc82caa6L, 0xe090d0b0L, 0x33a7d815L,
0xf104984aL, 0x41ecdaf7L, 0x7fcd500eL, 0x1791f62fL,
0x764dd68dL, 0x43efb04dL, 0xccaa4d54L, 0xe49604dfL,
0x9ed1b5e3L, 0x4c6a881bL, 0xc12c1fb8L, 0x4665517fL,
0x9d5eea04L, 0x018c355dL, 0xfa877473L, 0xfb0b412eL,
0xb3671d5aL, 0x92dbd252L, 0xe9105633L, 0x6dd64713L,
0x9ad7618cL, 0x37a10c7aL, 0x59f8148eL, 0xeb133c89L,
0xcea927eeL, 0xb761c935L, 0xe11ce5edL, 0x7a47b13cL,
0x9cd2df59L, 0x55f2733fL, 0x1814ce79L, 0x73c737bfL,
0x53f7cdeaL, 0x5ffdaa5bL, 0xdf3d6f14L, 0x7844db86L,
0xcaaff381L, 0xb968c43eL, 0x3824342cL, 0xc2a3405fL,
0x161dc372L, 0xbce2250cL, 0x283c498bL, 0xff0d9541L,
0x39a80171L, 0x080cb3deL, 0xd8b4e49cL, 0x6456c190L,
0x7bcb8461L, 0xd532b670L, 0x486c5c74L, 0xd0b85742L,
]
Td1 = [
0x5051f4a7L, 0x537e4165L, 0xc31a17a4L, 0x963a275eL,
0xcb3bab6bL, 0xf11f9d45L, 0xabacfa58L, 0x934be303L,
0x552030faL, 0xf6ad766dL, 0x9188cc76L, 0x25f5024cL,
0xfc4fe5d7L, 0xd7c52acbL, 0x80263544L, 0x8fb562a3L,
0x49deb15aL, 0x6725ba1bL, 0x9845ea0eL, 0xe15dfec0L,
0x02c32f75L, 0x12814cf0L, 0xa38d4697L, 0xc66bd3f9L,
0xe7038f5fL, 0x9515929cL, 0xebbf6d7aL, 0xda955259L,
0x2dd4be83L, 0xd3587421L, 0x2949e069L, 0x448ec9c8L,
0x6a75c289L, 0x78f48e79L, 0x6b99583eL, 0xdd27b971L,
0xb6bee14fL, 0x17f088adL, 0x66c920acL, 0xb47dce3aL,
0x1863df4aL, 0x82e51a31L, 0x60975133L, 0x4562537fL,
0xe0b16477L, 0x84bb6baeL, 0x1cfe81a0L, 0x94f9082bL,
0x58704868L, 0x198f45fdL, 0x8794de6cL, 0xb7527bf8L,
0x23ab73d3L, 0xe2724b02L, 0x57e31f8fL, 0x2a6655abL,
0x07b2eb28L, 0x032fb5c2L, 0x9a86c57bL, 0xa5d33708L,
0xf2302887L, 0xb223bfa5L, 0xba02036aL, 0x5ced1682L,
0x2b8acf1cL, 0x92a779b4L, 0xf0f307f2L, 0xa14e69e2L,
0xcd65daf4L, 0xd50605beL, 0x1fd13462L, 0x8ac4a6feL,
0x9d342e53L, 0xa0a2f355L, 0x32058ae1L, 0x75a4f6ebL,
0x390b83ecL, 0xaa4060efL, 0x065e719fL, 0x51bd6e10L,
0xf93e218aL, 0x3d96dd06L, 0xaedd3e05L, 0x464de6bdL,
0xb591548dL, 0x0571c45dL, 0x6f0406d4L, 0xff605015L,
0x241998fbL, 0x97d6bde9L, 0xcc894043L, 0x7767d99eL,
0xbdb0e842L, 0x8807898bL, 0x38e7195bL, 0xdb79c8eeL,
0x47a17c0aL, 0xe97c420fL, 0xc9f8841eL, 0x00000000L,
0x83098086L, 0x48322bedL, 0xac1e1170L, 0x4e6c5a72L,
0xfbfd0effL, 0x560f8538L, 0x1e3daed5L, 0x27362d39L,
0x640a0fd9L, 0x21685ca6L, 0xd19b5b54L, 0x3a24362eL,
0xb10c0a67L, 0x0f9357e7L, 0xd2b4ee96L, 0x9e1b9b91L,
0x4f80c0c5L, 0xa261dc20L, 0x695a774bL, 0x161c121aL,
0x0ae293baL, 0xe5c0a02aL, 0x433c22e0L, 0x1d121b17L,
0x0b0e090dL, 0xadf28bc7L, 0xb92db6a8L, 0xc8141ea9L,
0x8557f119L, 0x4caf7507L, 0xbbee99ddL, 0xfda37f60L,
0x9ff70126L, 0xbc5c72f5L, 0xc544663bL, 0x345bfb7eL,
0x768b4329L, 0xdccb23c6L, 0x68b6edfcL, 0x63b8e4f1L,
0xcad731dcL, 0x10426385L, 0x40139722L, 0x2084c611L,
0x7d854a24L, 0xf8d2bb3dL, 0x11aef932L, 0x6dc729a1L,
0x4b1d9e2fL, 0xf3dcb230L, 0xec0d8652L, 0xd077c1e3L,
0x6c2bb316L, 0x99a970b9L, 0xfa119448L, 0x2247e964L,
0xc4a8fc8cL, 0x1aa0f03fL, 0xd8567d2cL, 0xef223390L,
0xc787494eL, 0xc1d938d1L, 0xfe8ccaa2L, 0x3698d40bL,
0xcfa6f581L, 0x28a57adeL, 0x26dab78eL, 0xa43fadbfL,
0xe42c3a9dL, 0x0d507892L, 0x9b6a5fccL, 0x62547e46L,
0xc2f68d13L, 0xe890d8b8L, 0x5e2e39f7L, 0xf582c3afL,
0xbe9f5d80L, 0x7c69d093L, 0xa96fd52dL, 0xb3cf2512L,
0x3bc8ac99L, 0xa710187dL, 0x6ee89c63L, 0x7bdb3bbbL,
0x09cd2678L, 0xf46e5918L, 0x01ec9ab7L, 0xa8834f9aL,
0x65e6956eL, 0x7eaaffe6L, 0x0821bccfL, 0xe6ef15e8L,
0xd9bae79bL, 0xce4a6f36L, 0xd4ea9f09L, 0xd629b07cL,
0xaf31a4b2L, 0x312a3f23L, 0x30c6a594L, 0xc035a266L,
0x37744ebcL, 0xa6fc82caL, 0xb0e090d0L, 0x1533a7d8L,
0x4af10498L, 0xf741ecdaL, 0x0e7fcd50L, 0x2f1791f6L,
0x8d764dd6L, 0x4d43efb0L, 0x54ccaa4dL, 0xdfe49604L,
0xe39ed1b5L, 0x1b4c6a88L, 0xb8c12c1fL, 0x7f466551L,
0x049d5eeaL, 0x5d018c35L, 0x73fa8774L, 0x2efb0b41L,
0x5ab3671dL, 0x5292dbd2L, 0x33e91056L, 0x136dd647L,
0x8c9ad761L, 0x7a37a10cL, 0x8e59f814L, 0x89eb133cL,
0xeecea927L, 0x35b761c9L, 0xede11ce5L, 0x3c7a47b1L,
0x599cd2dfL, 0x3f55f273L, 0x791814ceL, 0xbf73c737L,
0xea53f7cdL, 0x5b5ffdaaL, 0x14df3d6fL, 0x867844dbL,
0x81caaff3L, 0x3eb968c4L, 0x2c382434L, 0x5fc2a340L,
0x72161dc3L, 0x0cbce225L, 0x8b283c49L, 0x41ff0d95L,
0x7139a801L, 0xde080cb3L, 0x9cd8b4e4L, 0x906456c1L,
0x617bcb84L, 0x70d532b6L, 0x74486c5cL, 0x42d0b857L,
]
Td2 = [
0xa75051f4L, 0x65537e41L, 0xa4c31a17L, 0x5e963a27L,
0x6bcb3babL, 0x45f11f9dL, 0x58abacfaL, 0x03934be3L,
0xfa552030L, 0x6df6ad76L, 0x769188ccL, 0x4c25f502L,
0xd7fc4fe5L, 0xcbd7c52aL, 0x44802635L, 0xa38fb562L,
0x5a49deb1L, 0x1b6725baL, 0x0e9845eaL, 0xc0e15dfeL,
0x7502c32fL, 0xf012814cL, 0x97a38d46L, 0xf9c66bd3L,
0x5fe7038fL, 0x9c951592L, 0x7aebbf6dL, 0x59da9552L,
0x832dd4beL, 0x21d35874L, 0x692949e0L, 0xc8448ec9L,
0x896a75c2L, 0x7978f48eL, 0x3e6b9958L, 0x71dd27b9L,
0x4fb6bee1L, 0xad17f088L, 0xac66c920L, 0x3ab47dceL,
0x4a1863dfL, 0x3182e51aL, 0x33609751L, 0x7f456253L,
0x77e0b164L, 0xae84bb6bL, 0xa01cfe81L, 0x2b94f908L,
0x68587048L, 0xfd198f45L, 0x6c8794deL, 0xf8b7527bL,
0xd323ab73L, 0x02e2724bL, 0x8f57e31fL, 0xab2a6655L,
0x2807b2ebL, 0xc2032fb5L, 0x7b9a86c5L, 0x08a5d337L,
0x87f23028L, 0xa5b223bfL, 0x6aba0203L, 0x825ced16L,
0x1c2b8acfL, 0xb492a779L, 0xf2f0f307L, 0xe2a14e69L,
0xf4cd65daL, 0xbed50605L, 0x621fd134L, 0xfe8ac4a6L,
0x539d342eL, 0x55a0a2f3L, 0xe132058aL, 0xeb75a4f6L,
0xec390b83L, 0xefaa4060L, 0x9f065e71L, 0x1051bd6eL,
0x8af93e21L, 0x063d96ddL, 0x05aedd3eL, 0xbd464de6L,
0x8db59154L, 0x5d0571c4L, 0xd46f0406L, 0x15ff6050L,
0xfb241998L, 0xe997d6bdL, 0x43cc8940L, 0x9e7767d9L,
0x42bdb0e8L, 0x8b880789L, 0x5b38e719L, 0xeedb79c8L,
0x0a47a17cL, 0x0fe97c42L, 0x1ec9f884L, 0x00000000L,
0x86830980L, 0xed48322bL, 0x70ac1e11L, 0x724e6c5aL,
0xfffbfd0eL, 0x38560f85L, 0xd51e3daeL, 0x3927362dL,
0xd9640a0fL, 0xa621685cL, 0x54d19b5bL, 0x2e3a2436L,
0x67b10c0aL, 0xe70f9357L, 0x96d2b4eeL, 0x919e1b9bL,
0xc54f80c0L, 0x20a261dcL, 0x4b695a77L, 0x1a161c12L,
0xba0ae293L, 0x2ae5c0a0L, 0xe0433c22L, 0x171d121bL,
0x0d0b0e09L, 0xc7adf28bL, 0xa8b92db6L, 0xa9c8141eL,
0x198557f1L, 0x074caf75L, 0xddbbee99L, 0x60fda37fL,
0x269ff701L, 0xf5bc5c72L, 0x3bc54466L, 0x7e345bfbL,
0x29768b43L, 0xc6dccb23L, 0xfc68b6edL, 0xf163b8e4L,
0xdccad731L, 0x85104263L, 0x22401397L, 0x112084c6L,
0x247d854aL, 0x3df8d2bbL, 0x3211aef9L, 0xa16dc729L,
0x2f4b1d9eL, 0x30f3dcb2L, 0x52ec0d86L, 0xe3d077c1L,
0x166c2bb3L, 0xb999a970L, 0x48fa1194L, 0x642247e9L,
0x8cc4a8fcL, 0x3f1aa0f0L, 0x2cd8567dL, 0x90ef2233L,
0x4ec78749L, 0xd1c1d938L, 0xa2fe8ccaL, 0x0b3698d4L,
0x81cfa6f5L, 0xde28a57aL, 0x8e26dab7L, 0xbfa43fadL,
0x9de42c3aL, 0x920d5078L, 0xcc9b6a5fL, 0x4662547eL,
0x13c2f68dL, 0xb8e890d8L, 0xf75e2e39L, 0xaff582c3L,
0x80be9f5dL, 0x937c69d0L, 0x2da96fd5L, 0x12b3cf25L,
0x993bc8acL, 0x7da71018L, 0x636ee89cL, 0xbb7bdb3bL,
0x7809cd26L, 0x18f46e59L, 0xb701ec9aL, 0x9aa8834fL,
0x6e65e695L, 0xe67eaaffL, 0xcf0821bcL, 0xe8e6ef15L,
0x9bd9bae7L, 0x36ce4a6fL, 0x09d4ea9fL, 0x7cd629b0L,
0xb2af31a4L, 0x23312a3fL, 0x9430c6a5L, 0x66c035a2L,
0xbc37744eL, 0xcaa6fc82L, 0xd0b0e090L, 0xd81533a7L,
0x984af104L, 0xdaf741ecL, 0x500e7fcdL, 0xf62f1791L,
0xd68d764dL, 0xb04d43efL, 0x4d54ccaaL, 0x04dfe496L,
0xb5e39ed1L, 0x881b4c6aL, 0x1fb8c12cL, 0x517f4665L,
0xea049d5eL, 0x355d018cL, 0x7473fa87L, 0x412efb0bL,
0x1d5ab367L, 0xd25292dbL, 0x5633e910L, 0x47136dd6L,
0x618c9ad7L, 0x0c7a37a1L, 0x148e59f8L, 0x3c89eb13L,
0x27eecea9L, 0xc935b761L, 0xe5ede11cL, 0xb13c7a47L,
0xdf599cd2L, 0x733f55f2L, 0xce791814L, 0x37bf73c7L,
0xcdea53f7L, 0xaa5b5ffdL, 0x6f14df3dL, 0xdb867844L,
0xf381caafL, 0xc43eb968L, 0x342c3824L, 0x405fc2a3L,
0xc372161dL, 0x250cbce2L, 0x498b283cL, 0x9541ff0dL,
0x017139a8L, 0xb3de080cL, 0xe49cd8b4L, 0xc1906456L,
0x84617bcbL, 0xb670d532L, 0x5c74486cL, 0x5742d0b8L,
]
Td3 = [
0xf4a75051L, 0x4165537eL, 0x17a4c31aL, 0x275e963aL,
0xab6bcb3bL, 0x9d45f11fL, 0xfa58abacL, 0xe303934bL,
0x30fa5520L, 0x766df6adL, 0xcc769188L, 0x024c25f5L,
0xe5d7fc4fL, 0x2acbd7c5L, 0x35448026L, 0x62a38fb5L,
0xb15a49deL, 0xba1b6725L, 0xea0e9845L, 0xfec0e15dL,
0x2f7502c3L, 0x4cf01281L, 0x4697a38dL, 0xd3f9c66bL,
0x8f5fe703L, 0x929c9515L, 0x6d7aebbfL, 0x5259da95L,
0xbe832dd4L, 0x7421d358L, 0xe0692949L, 0xc9c8448eL,
0xc2896a75L, 0x8e7978f4L, 0x583e6b99L, 0xb971dd27L,
0xe14fb6beL, 0x88ad17f0L, 0x20ac66c9L, 0xce3ab47dL,
0xdf4a1863L, 0x1a3182e5L, 0x51336097L, 0x537f4562L,
0x6477e0b1L, 0x6bae84bbL, 0x81a01cfeL, 0x082b94f9L,
0x48685870L, 0x45fd198fL, 0xde6c8794L, 0x7bf8b752L,
0x73d323abL, 0x4b02e272L, 0x1f8f57e3L, 0x55ab2a66L,
0xeb2807b2L, 0xb5c2032fL, 0xc57b9a86L, 0x3708a5d3L,
0x2887f230L, 0xbfa5b223L, 0x036aba02L, 0x16825cedL,
0xcf1c2b8aL, 0x79b492a7L, 0x07f2f0f3L, 0x69e2a14eL,
0xdaf4cd65L, 0x05bed506L, 0x34621fd1L, 0xa6fe8ac4L,
0x2e539d34L, 0xf355a0a2L, 0x8ae13205L, 0xf6eb75a4L,
0x83ec390bL, 0x60efaa40L, 0x719f065eL, 0x6e1051bdL,
0x218af93eL, 0xdd063d96L, 0x3e05aeddL, 0xe6bd464dL,
0x548db591L, 0xc45d0571L, 0x06d46f04L, 0x5015ff60L,
0x98fb2419L, 0xbde997d6L, 0x4043cc89L, 0xd99e7767L,
0xe842bdb0L, 0x898b8807L, 0x195b38e7L, 0xc8eedb79L,
0x7c0a47a1L, 0x420fe97cL, 0x841ec9f8L, 0x00000000L,
0x80868309L, 0x2bed4832L, 0x1170ac1eL, 0x5a724e6cL,
0x0efffbfdL, 0x8538560fL, 0xaed51e3dL, 0x2d392736L,
0x0fd9640aL, 0x5ca62168L, 0x5b54d19bL, 0x362e3a24L,
0x0a67b10cL, 0x57e70f93L, 0xee96d2b4L, 0x9b919e1bL,
0xc0c54f80L, 0xdc20a261L, 0x774b695aL, 0x121a161cL,
0x93ba0ae2L, 0xa02ae5c0L, 0x22e0433cL, 0x1b171d12L,
0x090d0b0eL, 0x8bc7adf2L, 0xb6a8b92dL, 0x1ea9c814L,
0xf1198557L, 0x75074cafL, 0x99ddbbeeL, 0x7f60fda3L,
0x01269ff7L, 0x72f5bc5cL, 0x663bc544L, 0xfb7e345bL,
0x4329768bL, 0x23c6dccbL, 0xedfc68b6L, 0xe4f163b8L,
0x31dccad7L, 0x63851042L, 0x97224013L, 0xc6112084L,
0x4a247d85L, 0xbb3df8d2L, 0xf93211aeL, 0x29a16dc7L,
0x9e2f4b1dL, 0xb230f3dcL, 0x8652ec0dL, 0xc1e3d077L,
0xb3166c2bL, 0x70b999a9L, 0x9448fa11L, 0xe9642247L,
0xfc8cc4a8L, 0xf03f1aa0L, 0x7d2cd856L, 0x3390ef22L,
0x494ec787L, 0x38d1c1d9L, 0xcaa2fe8cL, 0xd40b3698L,
0xf581cfa6L, 0x7ade28a5L, 0xb78e26daL, 0xadbfa43fL,
0x3a9de42cL, 0x78920d50L, 0x5fcc9b6aL, 0x7e466254L,
0x8d13c2f6L, 0xd8b8e890L, 0x39f75e2eL, 0xc3aff582L,
0x5d80be9fL, 0xd0937c69L, 0xd52da96fL, 0x2512b3cfL,
0xac993bc8L, 0x187da710L, 0x9c636ee8L, 0x3bbb7bdbL,
0x267809cdL, 0x5918f46eL, 0x9ab701ecL, 0x4f9aa883L,
0x956e65e6L, 0xffe67eaaL, 0xbccf0821L, 0x15e8e6efL,
0xe79bd9baL, 0x6f36ce4aL, 0x9f09d4eaL, 0xb07cd629L,
0xa4b2af31L, 0x3f23312aL, 0xa59430c6L, 0xa266c035L,
0x4ebc3774L, 0x82caa6fcL, 0x90d0b0e0L, 0xa7d81533L,
0x04984af1L, 0xecdaf741L, 0xcd500e7fL, 0x91f62f17L,
0x4dd68d76L, 0xefb04d43L, 0xaa4d54ccL, 0x9604dfe4L,
0xd1b5e39eL, 0x6a881b4cL, 0x2c1fb8c1L, 0x65517f46L,
0x5eea049dL, 0x8c355d01L, 0x877473faL, 0x0b412efbL,
0x671d5ab3L, 0xdbd25292L, 0x105633e9L, 0xd647136dL,
0xd7618c9aL, 0xa10c7a37L, 0xf8148e59L, 0x133c89ebL,
0xa927eeceL, 0x61c935b7L, 0x1ce5ede1L, 0x47b13c7aL,
0xd2df599cL, 0xf2733f55L, 0x14ce7918L, 0xc737bf73L,
0xf7cdea53L, 0xfdaa5b5fL, 0x3d6f14dfL, 0x44db8678L,
0xaff381caL, 0x68c43eb9L, 0x24342c38L, 0xa3405fc2L,
0x1dc37216L, 0xe2250cbcL, 0x3c498b28L, 0x0d9541ffL,
0xa8017139L, 0x0cb3de08L, 0xb4e49cd8L, 0x56c19064L,
0xcb84617bL, 0x32b670d5L, 0x6c5c7448L, 0xb85742d0L,
]
Td4 = [
0x52525252L, 0x09090909L, 0x6a6a6a6aL, 0xd5d5d5d5L,
0x30303030L, 0x36363636L, 0xa5a5a5a5L, 0x38383838L,
0xbfbfbfbfL, 0x40404040L, 0xa3a3a3a3L, 0x9e9e9e9eL,
0x81818181L, 0xf3f3f3f3L, 0xd7d7d7d7L, 0xfbfbfbfbL,
0x7c7c7c7cL, 0xe3e3e3e3L, 0x39393939L, 0x82828282L,
0x9b9b9b9bL, 0x2f2f2f2fL, 0xffffffffL, 0x87878787L,
0x34343434L, 0x8e8e8e8eL, 0x43434343L, 0x44444444L,
0xc4c4c4c4L, 0xdedededeL, 0xe9e9e9e9L, 0xcbcbcbcbL,
0x54545454L, 0x7b7b7b7bL, 0x94949494L, 0x32323232L,
0xa6a6a6a6L, 0xc2c2c2c2L, 0x23232323L, 0x3d3d3d3dL,
0xeeeeeeeeL, 0x4c4c4c4cL, 0x95959595L, 0x0b0b0b0bL,
0x42424242L, 0xfafafafaL, 0xc3c3c3c3L, 0x4e4e4e4eL,
0x08080808L, 0x2e2e2e2eL, 0xa1a1a1a1L, 0x66666666L,
0x28282828L, 0xd9d9d9d9L, 0x24242424L, 0xb2b2b2b2L,
0x76767676L, 0x5b5b5b5bL, 0xa2a2a2a2L, 0x49494949L,
0x6d6d6d6dL, 0x8b8b8b8bL, 0xd1d1d1d1L, 0x25252525L,
0x72727272L, 0xf8f8f8f8L, 0xf6f6f6f6L, 0x64646464L,
0x86868686L, 0x68686868L, 0x98989898L, 0x16161616L,
0xd4d4d4d4L, 0xa4a4a4a4L, 0x5c5c5c5cL, 0xccccccccL,
0x5d5d5d5dL, 0x65656565L, 0xb6b6b6b6L, 0x92929292L,
0x6c6c6c6cL, 0x70707070L, 0x48484848L, 0x50505050L,
0xfdfdfdfdL, 0xededededL, 0xb9b9b9b9L, 0xdadadadaL,
0x5e5e5e5eL, 0x15151515L, 0x46464646L, 0x57575757L,
0xa7a7a7a7L, 0x8d8d8d8dL, 0x9d9d9d9dL, 0x84848484L,
0x90909090L, 0xd8d8d8d8L, 0xababababL, 0x00000000L,
0x8c8c8c8cL, 0xbcbcbcbcL, 0xd3d3d3d3L, 0x0a0a0a0aL,
0xf7f7f7f7L, 0xe4e4e4e4L, 0x58585858L, 0x05050505L,
0xb8b8b8b8L, 0xb3b3b3b3L, 0x45454545L, 0x06060606L,
0xd0d0d0d0L, 0x2c2c2c2cL, 0x1e1e1e1eL, 0x8f8f8f8fL,
0xcacacacaL, 0x3f3f3f3fL, 0x0f0f0f0fL, 0x02020202L,
0xc1c1c1c1L, 0xafafafafL, 0xbdbdbdbdL, 0x03030303L,
0x01010101L, 0x13131313L, 0x8a8a8a8aL, 0x6b6b6b6bL,
0x3a3a3a3aL, 0x91919191L, 0x11111111L, 0x41414141L,
0x4f4f4f4fL, 0x67676767L, 0xdcdcdcdcL, 0xeaeaeaeaL,
0x97979797L, 0xf2f2f2f2L, 0xcfcfcfcfL, 0xcecececeL,
0xf0f0f0f0L, 0xb4b4b4b4L, 0xe6e6e6e6L, 0x73737373L,
0x96969696L, 0xacacacacL, 0x74747474L, 0x22222222L,
0xe7e7e7e7L, 0xadadadadL, 0x35353535L, 0x85858585L,
0xe2e2e2e2L, 0xf9f9f9f9L, 0x37373737L, 0xe8e8e8e8L,
0x1c1c1c1cL, 0x75757575L, 0xdfdfdfdfL, 0x6e6e6e6eL,
0x47474747L, 0xf1f1f1f1L, 0x1a1a1a1aL, 0x71717171L,
0x1d1d1d1dL, 0x29292929L, 0xc5c5c5c5L, 0x89898989L,
0x6f6f6f6fL, 0xb7b7b7b7L, 0x62626262L, 0x0e0e0e0eL,
0xaaaaaaaaL, 0x18181818L, 0xbebebebeL, 0x1b1b1b1bL,
0xfcfcfcfcL, 0x56565656L, 0x3e3e3e3eL, 0x4b4b4b4bL,
0xc6c6c6c6L, 0xd2d2d2d2L, 0x79797979L, 0x20202020L,
0x9a9a9a9aL, 0xdbdbdbdbL, 0xc0c0c0c0L, 0xfefefefeL,
0x78787878L, 0xcdcdcdcdL, 0x5a5a5a5aL, 0xf4f4f4f4L,
0x1f1f1f1fL, 0xddddddddL, 0xa8a8a8a8L, 0x33333333L,
0x88888888L, 0x07070707L, 0xc7c7c7c7L, 0x31313131L,
0xb1b1b1b1L, 0x12121212L, 0x10101010L, 0x59595959L,
0x27272727L, 0x80808080L, 0xececececL, 0x5f5f5f5fL,
0x60606060L, 0x51515151L, 0x7f7f7f7fL, 0xa9a9a9a9L,
0x19191919L, 0xb5b5b5b5L, 0x4a4a4a4aL, 0x0d0d0d0dL,
0x2d2d2d2dL, 0xe5e5e5e5L, 0x7a7a7a7aL, 0x9f9f9f9fL,
0x93939393L, 0xc9c9c9c9L, 0x9c9c9c9cL, 0xefefefefL,
0xa0a0a0a0L, 0xe0e0e0e0L, 0x3b3b3b3bL, 0x4d4d4d4dL,
0xaeaeaeaeL, 0x2a2a2a2aL, 0xf5f5f5f5L, 0xb0b0b0b0L,
0xc8c8c8c8L, 0xebebebebL, 0xbbbbbbbbL, 0x3c3c3c3cL,
0x83838383L, 0x53535353L, 0x99999999L, 0x61616161L,
0x17171717L, 0x2b2b2b2bL, 0x04040404L, 0x7e7e7e7eL,
0xbabababaL, 0x77777777L, 0xd6d6d6d6L, 0x26262626L,
0xe1e1e1e1L, 0x69696969L, 0x14141414L, 0x63636363L,
0x55555555L, 0x21212121L, 0x0c0c0c0cL, 0x7d7d7d7dL,
]
rcon = [
0x01000000, 0x02000000, 0x04000000, 0x08000000,
0x10000000, 0x20000000, 0x40000000, 0x80000000,
0x1B000000, 0x36000000,
# 128-bit blocks, Rijndael never uses more than 10 rcon values
]
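# Both branches below are equivalent: with the '>' (big-endian) prefix struct
# uses standard sizes, so '>L' and '>I' are each 4-byte unsigned ints
# (e.g. GETU32('\x00\x00\x00\x2a') == 42). The platform check is retained
# from the original C port.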
if len(struct.pack('L',0)) == 4:
# 32bit
def GETU32(x): return struct.unpack('>L', x)[0]
def PUTU32(x): return struct.pack('>L', x)
else:
# 64bit
def GETU32(x): return struct.unpack('>I', x)[0]
def PUTU32(x): return struct.pack('>I', x)
# Expand the cipher key into the encryption key schedule.
#
# @return the number of rounds for the given cipher key size.
def rijndaelSetupEncrypt(key, keybits):
i = p = 0
rk = [0]*RKLENGTH(keybits)
rk[0] = GETU32(key[0:4])
rk[1] = GETU32(key[4:8])
rk[2] = GETU32(key[8:12])
rk[3] = GETU32(key[12:16])
if keybits == 128:
while 1:
temp = rk[p+3]
rk[p+4] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+5] = rk[p+1] ^ rk[p+4]
rk[p+6] = rk[p+2] ^ rk[p+5]
rk[p+7] = rk[p+3] ^ rk[p+6]
i += 1
if i == 10: return (rk, 10)
p += 4
rk[4] = GETU32(key[16:20])
rk[5] = GETU32(key[20:24])
if keybits == 192:
while 1:
temp = rk[p+5]
rk[p+6] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+7] = rk[p+1] ^ rk[p+6]
rk[p+8] = rk[p+2] ^ rk[p+7]
rk[p+9] = rk[p+3] ^ rk[p+8]
i += 1
if i == 8: return (rk, 12)
rk[p+10] = rk[p+4] ^ rk[p+9]
rk[p+11] = rk[p+5] ^ rk[p+10]
p += 6
rk[6] = GETU32(key[24:28])
rk[7] = GETU32(key[28:32])
if keybits == 256:
while 1:
temp = rk[p+7]
rk[p+8] = (rk[p+0] ^
(Te4[(temp >> 16) & 0xff] & 0xff000000) ^
(Te4[(temp >> 8) & 0xff] & 0x00ff0000) ^
(Te4[(temp ) & 0xff] & 0x0000ff00) ^
(Te4[(temp >> 24) ] & 0x000000ff) ^
rcon[i])
rk[p+9] = rk[p+1] ^ rk[p+8]
rk[p+10] = rk[p+2] ^ rk[p+9]
rk[p+11] = rk[p+3] ^ rk[p+10]
i += 1
if i == 7: return (rk, 14)
temp = rk[p+11]
rk[p+12] = (rk[p+4] ^
(Te4[(temp >> 24) ] & 0xff000000) ^
(Te4[(temp >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(temp >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(temp ) & 0xff] & 0x000000ff))
rk[p+13] = rk[p+5] ^ rk[p+12]
rk[p+14] = rk[p+6] ^ rk[p+13]
rk[p+15] = rk[p+7] ^ rk[p+14]
p += 8
raise ValueError(keybits)
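# For illustration (Python 2, like the rest of this module): expanding a
# 16-byte all-zero demo key yields the 10-round schedule used below:
#
#     rk, nrounds = rijndaelSetupEncrypt('\x00' * 16, 128)
#     assert nrounds == 10 and len(rk) == RKLENGTH(128)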
# Expand the cipher key into the decryption key schedule.
#
# @return the number of rounds for the given cipher key size.
def rijndaelSetupDecrypt(key, keybits):
# expand the cipher key:
(rk, nrounds) = rijndaelSetupEncrypt(key, keybits)
# invert the order of the round keys:
i = 0
j = 4*nrounds
while i < j:
        rk[i], rk[j] = rk[j], rk[i]
        rk[i + 1], rk[j + 1] = rk[j + 1], rk[i + 1]
        rk[i + 2], rk[j + 2] = rk[j + 2], rk[i + 2]
        rk[i + 3], rk[j + 3] = rk[j + 3], rk[i + 3]
i += 4
j -= 4
# apply the inverse MixColumn transform to all round keys but the first and the last:
p = 0
for i in xrange(1, nrounds):
p += 4
rk[p+0] = (
Td0[Te4[(rk[p+0] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+0] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+0] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+0] ) & 0xff] & 0xff])
rk[p+1] = (
Td0[Te4[(rk[p+1] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+1] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+1] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+1] ) & 0xff] & 0xff])
rk[p+2] = (
Td0[Te4[(rk[p+2] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+2] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+2] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+2] ) & 0xff] & 0xff])
rk[p+3] = (
Td0[Te4[(rk[p+3] >> 24) ] & 0xff] ^
Td1[Te4[(rk[p+3] >> 16) & 0xff] & 0xff] ^
Td2[Te4[(rk[p+3] >> 8) & 0xff] & 0xff] ^
Td3[Te4[(rk[p+3] ) & 0xff] & 0xff])
return (rk, nrounds)
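# Note: applying the inverse MixColumns to the round keys above is the
# standard "equivalent inverse cipher" trick -- it lets rijndaelDecrypt below
# reuse the same table-lookup round structure as encryption.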
def rijndaelEncrypt(rk, nrounds, plaintext):
assert len(plaintext) == 16
# map byte array block to cipher state
# and add initial round key:
s0 = GETU32(plaintext[0:4]) ^ rk[0]
s1 = GETU32(plaintext[4:8]) ^ rk[1]
s2 = GETU32(plaintext[8:12]) ^ rk[2]
s3 = GETU32(plaintext[12:16]) ^ rk[3]
# nrounds - 1 full rounds:
r = nrounds >> 1
p = 0
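    # Each iteration of the loop below applies two rounds (s -> t, then
    # t -> s); the final t values feed the last round computed after it.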
while 1:
t0 = (
Te0[(s0 >> 24) ] ^
Te1[(s1 >> 16) & 0xff] ^
Te2[(s2 >> 8) & 0xff] ^
Te3[(s3 ) & 0xff] ^
rk[p+4])
t1 = (
Te0[(s1 >> 24) ] ^
Te1[(s2 >> 16) & 0xff] ^
Te2[(s3 >> 8) & 0xff] ^
Te3[(s0 ) & 0xff] ^
rk[p+5])
t2 = (
Te0[(s2 >> 24) ] ^
Te1[(s3 >> 16) & 0xff] ^
Te2[(s0 >> 8) & 0xff] ^
Te3[(s1 ) & 0xff] ^
rk[p+6])
t3 = (
Te0[(s3 >> 24) ] ^
Te1[(s0 >> 16) & 0xff] ^
Te2[(s1 >> 8) & 0xff] ^
Te3[(s2 ) & 0xff] ^
rk[p+7])
p += 8
r -= 1
if r == 0: break
s0 = (
Te0[(t0 >> 24) ] ^
Te1[(t1 >> 16) & 0xff] ^
Te2[(t2 >> 8) & 0xff] ^
Te3[(t3 ) & 0xff] ^
rk[p+0])
s1 = (
Te0[(t1 >> 24) ] ^
Te1[(t2 >> 16) & 0xff] ^
Te2[(t3 >> 8) & 0xff] ^
Te3[(t0 ) & 0xff] ^
rk[p+1])
s2 = (
Te0[(t2 >> 24) ] ^
Te1[(t3 >> 16) & 0xff] ^
Te2[(t0 >> 8) & 0xff] ^
Te3[(t1 ) & 0xff] ^
rk[p+2])
s3 = (
Te0[(t3 >> 24) ] ^
Te1[(t0 >> 16) & 0xff] ^
Te2[(t1 >> 8) & 0xff] ^
Te3[(t2 ) & 0xff] ^
rk[p+3])
ciphertext = ''
# apply last round and
# map cipher state to byte array block:
s0 = (
(Te4[(t0 >> 24) ] & 0xff000000) ^
(Te4[(t1 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t2 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t3 ) & 0xff] & 0x000000ff) ^
rk[p+0])
ciphertext += PUTU32(s0)
s1 = (
(Te4[(t1 >> 24) ] & 0xff000000) ^
(Te4[(t2 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t3 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t0 ) & 0xff] & 0x000000ff) ^
rk[p+1])
ciphertext += PUTU32(s1)
s2 = (
(Te4[(t2 >> 24) ] & 0xff000000) ^
(Te4[(t3 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t0 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t1 ) & 0xff] & 0x000000ff) ^
rk[p+2])
ciphertext += PUTU32(s2)
s3 = (
(Te4[(t3 >> 24) ] & 0xff000000) ^
(Te4[(t0 >> 16) & 0xff] & 0x00ff0000) ^
(Te4[(t1 >> 8) & 0xff] & 0x0000ff00) ^
(Te4[(t2 ) & 0xff] & 0x000000ff) ^
rk[p+3])
ciphertext += PUTU32(s3)
assert len(ciphertext) == 16
return ciphertext
def rijndaelDecrypt(rk, nrounds, ciphertext):
assert len(ciphertext) == 16
# map byte array block to cipher state
# and add initial round key:
s0 = GETU32(ciphertext[0:4]) ^ rk[0]
s1 = GETU32(ciphertext[4:8]) ^ rk[1]
s2 = GETU32(ciphertext[8:12]) ^ rk[2]
s3 = GETU32(ciphertext[12:16]) ^ rk[3]
# nrounds - 1 full rounds:
r = nrounds >> 1
p = 0
while 1:
t0 = (
Td0[(s0 >> 24) ] ^
Td1[(s3 >> 16) & 0xff] ^
Td2[(s2 >> 8) & 0xff] ^
Td3[(s1 ) & 0xff] ^
rk[p+4])
t1 = (
Td0[(s1 >> 24) ] ^
Td1[(s0 >> 16) & 0xff] ^
Td2[(s3 >> 8) & 0xff] ^
Td3[(s2 ) & 0xff] ^
rk[p+5])
t2 = (
Td0[(s2 >> 24) ] ^
Td1[(s1 >> 16) & 0xff] ^
Td2[(s0 >> 8) & 0xff] ^
Td3[(s3 ) & 0xff] ^
rk[p+6])
t3 = (
Td0[(s3 >> 24) ] ^
Td1[(s2 >> 16) & 0xff] ^
Td2[(s1 >> 8) & 0xff] ^
Td3[(s0 ) & 0xff] ^
rk[p+7])
p += 8
r -= 1
if r == 0: break
s0 = (
Td0[(t0 >> 24) ] ^
Td1[(t3 >> 16) & 0xff] ^
Td2[(t2 >> 8) & 0xff] ^
Td3[(t1 ) & 0xff] ^
rk[p+0])
s1 = (
Td0[(t1 >> 24) ] ^
Td1[(t0 >> 16) & 0xff] ^
Td2[(t3 >> 8) & 0xff] ^
Td3[(t2 ) & 0xff] ^
rk[p+1])
s2 = (
Td0[(t2 >> 24) ] ^
Td1[(t1 >> 16) & 0xff] ^
Td2[(t0 >> 8) & 0xff] ^
Td3[(t3 ) & 0xff] ^
rk[p+2])
s3 = (
Td0[(t3 >> 24) ] ^
Td1[(t2 >> 16) & 0xff] ^
Td2[(t1 >> 8) & 0xff] ^
Td3[(t0 ) & 0xff] ^
rk[p+3])
plaintext = ''
# apply last round and
# map cipher state to byte array block:
s0 = (
(Td4[(t0 >> 24) ] & 0xff000000) ^
(Td4[(t3 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t2 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t1 ) & 0xff] & 0x000000ff) ^
rk[p+0])
plaintext += PUTU32(s0)
s1 = (
(Td4[(t1 >> 24) ] & 0xff000000) ^
(Td4[(t0 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t3 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t2 ) & 0xff] & 0x000000ff) ^
rk[p+1])
plaintext += PUTU32(s1)
s2 = (
(Td4[(t2 >> 24) ] & 0xff000000) ^
(Td4[(t1 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t0 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t3 ) & 0xff] & 0x000000ff) ^
rk[p+2])
plaintext += PUTU32(s2)
s3 = (
(Td4[(t3 >> 24) ] & 0xff000000) ^
(Td4[(t2 >> 16) & 0xff] & 0x00ff0000) ^
(Td4[(t1 >> 8) & 0xff] & 0x0000ff00) ^
(Td4[(t0 ) & 0xff] & 0x000000ff) ^
rk[p+3])
plaintext += PUTU32(s3)
assert len(plaintext) == 16
return plaintext
# One-block (16-byte) decryption helper built on the key schedule above.
class RijndaelDecryptor(object):
"""
>>> key = '00010203050607080a0b0c0d0f101112'.decode('hex')
>>> ciphertext = 'd8f532538289ef7d06b506a4fd5be9c9'.decode('hex')
>>> RijndaelDecryptor(key, 128).decrypt(ciphertext).encode('hex')
'506812a45f08c889b97f5980038b8359'
"""
def __init__(self, key, keybits=256):
assert len(key) == KEYLENGTH(keybits)
(self.rk, self.nrounds) = rijndaelSetupDecrypt(key, keybits)
assert len(self.rk) == RKLENGTH(keybits)
assert self.nrounds == NROUNDS(keybits)
return
def decrypt(self, ciphertext):
assert len(ciphertext) == 16
return rijndaelDecrypt(self.rk, self.nrounds, ciphertext)
# One-block (16-byte) encryption helper built on the key schedule above.
class RijndaelEncryptor(object):
"""
>>> key = '00010203050607080a0b0c0d0f101112'.decode('hex')
>>> plaintext = '506812a45f08c889b97f5980038b8359'.decode('hex')
>>> RijndaelEncryptor(key, 128).encrypt(plaintext).encode('hex')
'd8f532538289ef7d06b506a4fd5be9c9'
"""
def __init__(self, key, keybits=256):
assert len(key) == KEYLENGTH(keybits)
(self.rk, self.nrounds) = rijndaelSetupEncrypt(key, keybits)
assert len(self.rk) == RKLENGTH(keybits)
assert self.nrounds == NROUNDS(keybits)
return
def encrypt(self, plaintext):
assert len(plaintext) == 16
return rijndaelEncrypt(self.rk, self.nrounds, plaintext)
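# A minimal round-trip sketch (Python 2, matching the doctests above; the key
# and block below are arbitrary demo values):
def _roundtrip_demo():
    key = '000102030405060708090a0b0c0d0e0f'.decode('hex')
    block = '00112233445566778899aabbccddeeff'.decode('hex')
    assert RijndaelDecryptor(key, 128).decrypt(
        RijndaelEncryptor(key, 128).encrypt(block)) == block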
if __name__ == '__main__':
import doctest
doctest.testmod()
| 44.83364
| 89
| 0.691609
|
40e0106fd6a7fde9ccb079368b9e377152f07e30
| 890
|
py
|
Python
|
ArraysAndSorting/SherlockAndArray.py
|
tejasnikumbh/Algorithms
|
2a2983a522be295ce95bd970a0ee8a617866992f
|
[
"BSD-2-Clause"
] | 8
|
2015-04-16T03:43:49.000Z
|
2018-08-14T22:47:03.000Z
|
ArraysAndSorting/SherlockAndArray.py
|
tejasnikumbh/Algorithms
|
2a2983a522be295ce95bd970a0ee8a617866992f
|
[
"BSD-2-Clause"
] | null | null | null |
ArraysAndSorting/SherlockAndArray.py
|
tejasnikumbh/Algorithms
|
2a2983a522be295ce95bd970a0ee8a617866992f
|
[
"BSD-2-Clause"
] | 7
|
2016-03-22T20:29:27.000Z
|
2018-09-29T18:55:47.000Z
|
# Importing library functions
import sys
'''
Determines whether an index exists such that the sum of elements to its
left equals the sum of elements to its right. Runs in O(N) time by
maintaining running prefix and suffix sums.
'''
def doesIndexExist(a):
    if len(a) == 1:
        return True
    leftSum = 0
    index = 0
    rightSum = sum(a[1:])
    while index != len(a) - 1:
        if leftSum == rightSum:
            return True
        leftSum += a[index]
        index += 1
        rightSum -= a[index]
    # Also check the final index; the original fell through here and
    # implicitly returned None for inputs such as [0, 1].
    return leftSum == rightSum
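# Worked example: for a = [1, 2, 3, 3] the split at index 2 gives
# 1 + 2 == 3 on both sides, so doesIndexExist returns True; for
# a = [1, 2, 3] no such index exists and it returns False.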
'''
Main function of the program
'''
if __name__ == "__main__":
t = int(sys.stdin.readline().rstrip())
for i in range(t):
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().rstrip().split()]
        if doesIndexExist(a):
            print("YES")
        else:
            print("NO")
| 25.428571
| 80
| 0.502247
|
f1161e8361383c4e35a2021718a182e118bdefa3
| 7,911
|
py
|
Python
|
regression-tests.py
|
tp7/Sushi
|
908c0ff228734059aebb914a8d10f8e4ce2e868c
|
[
"MIT"
] | 470
|
2015-01-02T21:38:56.000Z
|
2022-03-19T13:10:37.000Z
|
regression-tests.py
|
Kadantte/Sushi
|
908c0ff228734059aebb914a8d10f8e4ce2e868c
|
[
"MIT"
] | 36
|
2015-01-03T05:41:56.000Z
|
2021-04-01T02:11:26.000Z
|
regression-tests.py
|
Kadantte/Sushi
|
908c0ff228734059aebb914a8d10f8e4ce2e868c
|
[
"MIT"
] | 45
|
2015-04-10T12:31:37.000Z
|
2022-01-23T21:16:38.000Z
|
from contextlib import contextmanager
import json
import logging
import os
import gc
import sys
import resource
import re
import subprocess
import argparse
from common import format_time
from demux import Timecodes
from subs import AssScript
from wav import WavStream
root_logger = logging.getLogger('')
def strip_tags(text):
return re.sub(r'{.*?}', " ", text)
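# e.g. strip_tags(u'{\\an8}Sign text') == u' Sign text'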
@contextmanager
def set_file_logger(path):
handler = logging.FileHandler(path, mode='a')
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter('%(message)s'))
root_logger.addHandler(handler)
try:
yield
finally:
root_logger.removeHandler(handler)
def compare_scripts(ideal_path, test_path, timecodes, test_name, expected_errors):
ideal_script = AssScript.from_file(ideal_path)
test_script = AssScript.from_file(test_path)
if len(test_script.events) != len(ideal_script.events):
logging.critical("Script length didn't match: {0} in ideal vs {1} in test. Test {2}".format(
len(ideal_script.events), len(test_script.events), test_name)
)
return False
ideal_script.sort_by_time()
test_script.sort_by_time()
failed = 0
ft = format_time
for idx, (ideal, test) in enumerate(zip(ideal_script.events, test_script.events)):
ideal_start_frame = timecodes.get_frame_number(ideal.start)
ideal_end_frame = timecodes.get_frame_number(ideal.end)
test_start_frame = timecodes.get_frame_number(test.start)
test_end_frame = timecodes.get_frame_number(test.end)
if ideal_start_frame != test_start_frame and ideal_end_frame != test_end_frame:
logging.debug(u'{0}: start and end time failed at "{1}". {2}-{3} vs {4}-{5}'.format(
idx, strip_tags(ideal.text), ft(ideal.start), ft(ideal.end), ft(test.start), ft(test.end))
)
failed += 1
elif ideal_end_frame != test_end_frame:
logging.debug(
u'{0}: end time failed at "{1}". {2} vs {3}'.format(
idx, strip_tags(ideal.text), ft(ideal.end), ft(test.end))
)
failed += 1
elif ideal_start_frame != test_start_frame:
logging.debug(
u'{0}: start time failed at "{1}". {2} vs {3}'.format(
idx, strip_tags(ideal.text), ft(ideal.start), ft(test.start))
)
failed += 1
logging.info('Total lines: {0}, good: {1}, failed: {2}'.format(len(ideal_script.events), len(ideal_script.events)-failed, failed))
if failed > expected_errors:
logging.critical('Got more failed lines than expected ({0} actual vs {1} expected)'.format(failed, expected_errors))
return False
elif failed < expected_errors:
        logging.critical('Got fewer failed lines than expected ({0} actual vs {1} expected)'.format(failed, expected_errors))
return False
else:
logging.critical('Met expectations')
return True
def run_test(base_path, plots_path, test_name, params):
def safe_add_key(args, key, name):
if name in params:
args.extend((key, str(params[name])))
def safe_add_path(args, folder, key, name):
if name in params:
args.extend((key, os.path.join(folder, params[name])))
logging.info('Testing "{0}"'.format(test_name))
folder = os.path.join(base_path, params['folder'])
cmd = ["sushi"]
safe_add_path(cmd, folder, '--src', 'src')
safe_add_path(cmd, folder, '--dst', 'dst')
safe_add_path(cmd, folder, '--src-keyframes', 'src-keyframes')
safe_add_path(cmd, folder, '--dst-keyframes', 'dst-keyframes')
safe_add_path(cmd, folder, '--src-timecodes', 'src-timecodes')
safe_add_path(cmd, folder, '--dst-timecodes', 'dst-timecodes')
safe_add_path(cmd, folder, '--script', 'script')
safe_add_path(cmd, folder, '--chapters', 'chapters')
safe_add_path(cmd, folder, '--src-script', 'src-script')
safe_add_path(cmd, folder, '--dst-script', 'dst-script')
safe_add_key(cmd, '--max-kf-distance', 'max-kf-distance')
safe_add_key(cmd, '--max-ts-distance', 'max-ts-distance')
safe_add_key(cmd, '--max-ts-duration', 'max-ts-duration')
output_path = os.path.join(folder, params['dst']) + '.sushi.test.ass'
cmd.extend(('-o', output_path))
if plots_path:
cmd.extend(('--test-shift-plot', os.path.join(plots_path, '{0}.png'.format(test_name))))
log_path = os.path.join(folder, 'sushi_test.log')
with open(log_path, "w") as log_file:
try:
subprocess.call(cmd, stderr=log_file, stdout=log_file)
except Exception as e:
logging.critical('Sushi failed on test "{0}": {1}'.format(test_name, e.message))
return False
with set_file_logger(log_path):
ideal_path = os.path.join(folder, params['ideal'])
try:
timecodes = Timecodes.from_file(os.path.join(folder, params['dst-timecodes']))
except KeyError:
timecodes = Timecodes.cfr(params['fps'])
return compare_scripts(ideal_path, output_path, timecodes, test_name, params['expected_errors'])
def run_wav_test(test_name, file_path, params):
gc.collect(2)
before = resource.getrusage(resource.RUSAGE_SELF)
loaded = WavStream(file_path, params.get('sample_rate', 12000), params.get('sample_type', 'uint8'))
after = resource.getrusage(resource.RUSAGE_SELF)
total_time = (after.ru_stime - before.ru_stime) + (after.ru_utime - before.ru_utime)
ram_difference = (after.ru_maxrss - before.ru_maxrss) / 1024.0 / 1024.0
if 'max_time' in params and total_time > params['max_time']:
logging.critical('Loading "{0}" took too much time: {1} vs {2} seconds'
.format(test_name, total_time, params['max_time']))
return False
if 'max_memory' in params and ram_difference > params['max_memory']:
logging.critical('Loading "{0}" consumed too much RAM: {1} vs {2}'
.format(test_name, ram_difference, params['max_memory']))
return False
return True
def create_arg_parser():
parser = argparse.ArgumentParser(description='Sushi regression testing util')
parser.add_argument('--only', dest="run_only", nargs="*", metavar='<test names>',
help='Test names to run')
parser.add_argument('-c', '--conf', default="tests.json", dest='conf_path', metavar='<filename>',
help='Config file path')
return parser
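# For reference, a minimal config file this script consumes might look like the
# following sketch (keys inferred from run_test()/run() above; all file names and
# values are hypothetical):
#
#   {
#       "basepath": "/path/to/tests",
#       "plots": "/path/to/plots",
#       "tests": {
#           "my-show-ep01": {
#               "folder": "my-show-ep01",
#               "src": "src.wav", "dst": "dst.wav",
#               "script": "src.ass", "ideal": "ideal.ass",
#               "dst-timecodes": "dst.timecodes.txt",
#               "expected_errors": 0
#           }
#       },
#       "wavs": {
#           "large-wav": {"file": "large.wav", "max_time": 30, "max_memory": 500}
#       }
#   }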
def run():
root_logger.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(logging.Formatter('%(message)s'))
root_logger.addHandler(console_handler)
args = create_arg_parser().parse_args()
try:
with open(args.conf_path) as file:
config = json.load(file)
except IOError as e:
logging.critical(e)
sys.exit(2)
def should_run(name):
return not args.run_only or name in args.run_only
failed = ran = 0
for test_name, params in config.get('tests', {}).iteritems():
if not should_run(test_name):
continue
if not params.get('disabled', False):
ran += 1
if not run_test(config['basepath'], config['plots'], test_name, params):
failed += 1
logging.info('')
else:
logging.warn('Test "{0}" disabled'.format(test_name))
if should_run("wavs"):
for test_name, params in config.get('wavs', {}).iteritems():
ran += 1
if not run_wav_test(test_name, os.path.join(config['basepath'], params['file']), params):
failed += 1
logging.info('')
logging.info('Ran {0} tests, {1} failed'.format(ran, failed))
if __name__ == '__main__':
run()
| 36.795349
| 134
| 0.638984
|
f6f2ab5494f91d3e590a143979a9085d4663f3fe
| 609
|
py
|
Python
|
stubs.min/Revit/GeometryObjects.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/Revit/GeometryObjects.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/Revit/GeometryObjects.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module Revit.GeometryObjects calls itself GeometryObjects
# from RevitNodes,Version=1.2.1.3083,Culture=neutral,PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class GeometryObjectSelector(object):
# no doc
@staticmethod
def ByReferenceStableRepresentation(referenceString):
"""
ByReferenceStableRepresentation(referenceString: str) -> object
Return an AbstractGeometryObject given a string representation of the
geometry's reference.
"""
pass
__all__=[
'ByReferenceStableRepresentation',
]
| 22.555556
| 75
| 0.732348
|
d23a1149adeb50ed91f2d4f0739c37bfd94a625d
| 628
|
py
|
Python
|
nba/apiclient.py
|
rozzac90/new_nba
|
87feec5fc8ff6654fdb65229a047e0ff3023a9ff
|
[
"MIT"
] | 1
|
2017-12-29T05:01:17.000Z
|
2017-12-29T05:01:17.000Z
|
nba/apiclient.py
|
rozzac90/new_nba
|
87feec5fc8ff6654fdb65229a047e0ff3023a9ff
|
[
"MIT"
] | 2
|
2017-10-26T07:47:15.000Z
|
2020-04-18T12:24:36.000Z
|
nba/apiclient.py
|
rozzac90/nba
|
87feec5fc8ff6654fdb65229a047e0ff3023a9ff
|
[
"MIT"
] | null | null | null |
from nba.baseclient import BaseClient
from nba import endpoints
class APIClient(BaseClient):
def __init__(self):
super(APIClient, self).__init__()
self.boxscores = endpoints.Boxscores(self)
self.common = endpoints.Common(self)
self.draft = endpoints.Draft(self)
self.events = endpoints.Events(self)
self.homepage = endpoints.Homepage(self)
self.misc = endpoints.Misc(self)
self.playbyplay = endpoints.PlayByPlay(self)
self.player = endpoints.Player(self)
self.scoreboard = endpoints.Scoreboard(self)
self.team = endpoints.Team(self)
| 33.052632
| 52
| 0.678344
|
ef2135caa31480dfe5044689e8ceb326f078dbc5
| 1,724
|
py
|
Python
|
raster/migrations/0001_initial.py
|
bpneumann/django-raster
|
74daf9d396f2332a2cd83723b7330e6b10d73b1c
|
[
"BSD-3-Clause"
] | null | null | null |
raster/migrations/0001_initial.py
|
bpneumann/django-raster
|
74daf9d396f2332a2cd83723b7330e6b10d73b1c
|
[
"BSD-3-Clause"
] | null | null | null |
raster/migrations/0001_initial.py
|
bpneumann/django-raster
|
74daf9d396f2332a2cd83723b7330e6b10d73b1c
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.gis.db import models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='RasterLayer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, null=True, blank=True)),
('description', models.TextField(null=True, blank=True)),
('datatype', models.CharField(default=b'co', max_length=2, choices=[(b'co', b'Continuous'), (b'ca', b'Categorical'), (b'ma', b'Mask'), (b'ro', b'Rank Ordered')])),
('rasterfile', models.FileField(upload_to=b'rasters')),
('srid', models.CharField(default=b'3086', max_length=10)),
('nodata', models.CharField(default=b'-9999', max_length=100)),
('parse_log', models.TextField(default=b'', null=True, editable=False, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RasterTile',
fields=[
('rid', models.AutoField(serialize=False, primary_key=True)),
('rast', models.RasterField(null=True, blank=True)),
('filename', models.TextField(null=True, blank=True)),
('rasterlayer', models.ForeignKey(blank=True, to='raster.RasterLayer', null=True, on_delete=models.CASCADE)),
],
options={
},
bases=(models.Model,),
),
]
| 40.093023
| 179
| 0.560325
|
f97eedd21a4e3b388f6958192979ec69daac9f09
| 175
|
py
|
Python
|
test.py
|
leexinhao/Metallographic-detection-platform-based-on-DL
|
071565559883b7add0015074aff72ee1609a2b06
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
leexinhao/Metallographic-detection-platform-based-on-DL
|
071565559883b7add0015074aff72ee1609a2b06
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
leexinhao/Metallographic-detection-platform-based-on-DL
|
071565559883b7add0015074aff72ee1609a2b06
|
[
"Apache-2.0"
] | null | null | null |
r"""运行入口
"""
import sys
from PyQt5 import QtWidgets
import main_widget
app = QtWidgets.QApplication(sys.argv)
mw = main_widget.MainWidget()
mw.show()
sys.exit(app.exec_())
| 13.461538
| 38
| 0.742857
|
fbddeb377e130ff65041d237e0a8ab85a10ca9b6
| 1,450
|
py
|
Python
|
ProgrammerAlgorithmInterview/Chapter01/01_01_reverse_linked_list_02.py
|
YorkFish/git_study
|
6e023244daaa22e12b24e632e76a13e5066f2947
|
[
"MIT"
] | null | null | null |
ProgrammerAlgorithmInterview/Chapter01/01_01_reverse_linked_list_02.py
|
YorkFish/git_study
|
6e023244daaa22e12b24e632e76a13e5066f2947
|
[
"MIT"
] | null | null | null |
ProgrammerAlgorithmInterview/Chapter01/01_01_reverse_linked_list_02.py
|
YorkFish/git_study
|
6e023244daaa22e12b24e632e76a13e5066f2947
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#coding:utf-8
class LNode(object):
def __init__(self, x):
self.val = x
self.next = None
def recursiveReverse(head):
"""
    Reverses a singly linked list that has no dummy head node.
    Args: head: first node of the list
"""
    if head == None or head.next == None:  # empty list or single node
return head
else:
        new_head = recursiveReverse(head.next)  # reverse the nodes after the current one
        head.next.next = head  # append the current node to the tail of the reversed list headed by new_head
head.next = None
return new_head
def reverse(head):
"""
    Reverses a singly linked list that has a dummy head node.
    Args: head: dummy head node of the list
"""
if head is None:
return
    first_node = head.next  # get the first real node of the list
    new_head = recursiveReverse(first_node)  # reverse the list
    head.next = new_head  # point the dummy head at the first node of the reversed list
return new_head
def printLinkedList(head):
"""
    Prints the linked list.
    Args: head: dummy head node of the list
"""
cur = head.next
while cur != None:
        print(cur.val, end=' ')
cur = cur.next
print()
return None
def constructLinkedList(n):
"""
    Builds a singly linked list.
    Args: n: length of the list (excluding the dummy head node)
"""
head = LNode(None)
head.next = None
cur = head
    tmp = None
    for i in range(n):
        tmp = LNode(i)  # LNode stores the value in val and sets next to None
        cur.next = tmp
        cur = tmp
return head
if __name__ == "__main__":
head = constructLinkedList(8)
print("before:")
printLinkedList(head)
reverse(head)
print("\nafter:")
printLinkedList(head)
| 19.863014
| 65
| 0.586207
|
f5b4ef7cc05a8e64be5041e83d37672448ed83fb
| 306
|
py
|
Python
|
Desafio Python/Aula 14 des63.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
Desafio Python/Aula 14 des63.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
Desafio Python/Aula 14 des63.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
print('Fibonacci Sequence')
print('=' * 20)
num = int(input('How many terms do you want to show? '))
t1 = 0
t2 = 1
print('=-' * 30)
print('{} -> {}'.format(t1, t2), end='')
cont = 3
while cont <= num:
    t3 = t1 + t2
    print(' -> {}'.format(t3), end='')
    t1 = t2
    t2 = t3
    cont += 1
print(' -> END')
| 19.125
| 49
| 0.506536
|
7acd960792807134daa773aaf8705bf693a1e244
| 443
|
py
|
Python
|
mpf/devices/shot_profile.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/devices/shot_profile.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/devices/shot_profile.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
"""Shot profiles."""
from mpf.core.mode import Mode
from mpf.core.system_wide_device import SystemWideDevice
from mpf.core.mode_device import ModeDevice
class ShotProfile(ModeDevice, SystemWideDevice):
"""A shot profile."""
config_section = 'shot_profiles'
collection = 'shot_profiles'
class_label = 'shot_profile'
def device_removed_from_mode(self, mode: Mode) -> None:
"""Remove from mode."""
pass
| 22.15
| 59
| 0.708804
|
c367ec1606045938610f67b6b855cc89d7084a73
| 953
|
py
|
Python
|
atom/proton/python/test/test_goal_withdrawal_config.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2019-04-16T02:11:17.000Z
|
2021-12-16T22:51:40.000Z
|
atom/proton/python/test/test_goal_withdrawal_config.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 81
|
2019-11-19T23:24:28.000Z
|
2022-03-28T11:35:47.000Z
|
atom/proton/python/test/test_goal_withdrawal_config.py
|
AbhiGupta03/SDK
|
f3a61aae7a847f07f0c22a154ca88dc378e9d25e
|
[
"Apache-2.0"
] | 11
|
2020-07-08T02:29:56.000Z
|
2022-03-28T10:05:33.000Z
|
# coding: utf-8
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.9.2
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import proton_api
from proton_api.models.goal_withdrawal_config import GoalWithdrawalConfig # noqa: E501
from proton_api.rest import ApiException
class TestGoalWithdrawalConfig(unittest.TestCase):
"""GoalWithdrawalConfig unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGoalWithdrawalConfig(self):
"""Test GoalWithdrawalConfig"""
# FIXME: construct object with mandatory attributes with example values
# model = proton_api.models.goal_withdrawal_config.GoalWithdrawalConfig() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 23.243902
| 95
| 0.721931
|
bd343f9e011b77ddc08f567688c9f448b7d0f452
| 956
|
py
|
Python
|
model.py
|
raptorz/userga
|
2dc86db097076045c2e3bbfae58537c8c10fe0d5
|
[
"MIT"
] | 1
|
2017-01-21T13:32:56.000Z
|
2017-01-21T13:32:56.000Z
|
model.py
|
raptorz/userga
|
2dc86db097076045c2e3bbfae58537c8c10fe0d5
|
[
"MIT"
] | null | null | null |
model.py
|
raptorz/userga
|
2dc86db097076045c2e3bbfae58537c8c10fe0d5
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
'''
data model
~~~~~~~~~~~~~~~~
sqlalchemy data model.
:copyright: 20160204 by raptor.zh@gmail.com.
'''
from config import config
from sqlalchemy import create_engine
engine = create_engine(config["db_url"])
from sqlalchemy import Table, ForeignKey, Column, String, Unicode, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
Base = declarative_base()
class User(Base):
__tablename__ = "gauser"
email = Column(Unicode(100), primary_key=True)
secret = Column(String(10), nullable=False) # if none, password is not required
expires = Column(Float)
key = Column(String(33), nullable=False, unique=True)
resetpw = Column(String(10))
inv_setpw = Column(Float)
inv_login = Column(Float)
inv_reset = Column(Float)
metadata = Base.metadata
if __name__ == "__main__":
metadata.create_all(engine)
| 23.9
| 87
| 0.689331
|
03dc6df2c79a2678a5b23a849f73324946602279
| 4,410
|
py
|
Python
|
Gds/src/fprime_gds/common/models/common/channel_telemetry.py
|
hunterpaulson/fprime
|
70560897b56dc3037dc966c99751b708b1cc8a05
|
[
"Apache-2.0"
] | null | null | null |
Gds/src/fprime_gds/common/models/common/channel_telemetry.py
|
hunterpaulson/fprime
|
70560897b56dc3037dc966c99751b708b1cc8a05
|
[
"Apache-2.0"
] | null | null | null |
Gds/src/fprime_gds/common/models/common/channel_telemetry.py
|
hunterpaulson/fprime
|
70560897b56dc3037dc966c99751b708b1cc8a05
|
[
"Apache-2.0"
] | null | null | null |
"""
Created on Apr. 27, 2015
@author: reder
"""
from __future__ import print_function
# Import the types this way so they do not need prefixing for execution.
from fprime.common.models.serialize.type_exceptions import TypeException
from fprime.common.models.serialize.type_exceptions import TypeMismatchException
from fprime.common.models.serialize.type_base import BaseType
class Channel(object):
"""
    Channel class is for deserializing a channel telemetry value.
    Really this is a container, since the type has its own deserialize
    implementation.
"""
def __init__(
self,
name,
channel_id,
comp_name,
ch_description,
ch_type,
ch_format_string,
low_red,
low_orange,
low_yellow,
high_yellow,
high_orange,
high_red,
):
"""
Constructor
"""
#
## Make sure correct types are passed
#
if not type(name) == type(str()):
raise TypeMismatchException(type(str()), type(name))
if not type(channel_id) == type(int()):
raise TypeMismatchException(type(int()), type(channel_id))
if not type(ch_description) == type(str()):
raise TypeMismatchException(type(str()), type(ch_description))
if not issubclass(type(ch_type), type(BaseType())):
raise TypeMismatchException(type(BaseType()), type(ch_type))
# Initialize event internal variables
self.__name = name
self.__comp_name = comp_name
self.__id = channel_id
self.__ch_desc = ch_description
self.__ch_type = ch_type
self.__format_string = ch_format_string
self.__low_red = low_red
self.__low_orange = low_orange
self.__low_yellow = low_yellow
self.__high_yellow = high_yellow
self.__high_orange = high_orange
self.__high_red = high_red
#
self.__time_base = None
self.__time_context = None
self.__time_sec = None
self.__time_usec = None
#
self.__changed = False
def deserialize(self, ser_data, offset):
"""
Deserialize event arguments
@param ser_data: Binary input of the channel value.
"""
# type_base.showBytes(ser_data[offset:])
#
try:
self.__ch_type.deserialize(ser_data, offset)
val = self.__ch_type.val
except TypeException as e:
print("Channel deserialize exception %s" % (e.getMsg()))
val = "ERR"
#
return val
def setTime(self, time_base, time_context, time_sec, time_usec):
"""
Channel telemetry time updater.
"""
self.__time_base = time_base
self.__time_context = time_context
self.__time_sec = time_sec
self.__time_usec = time_usec
def getTime(self):
"""
Return time tuple for UI updater use.
"""
return (
self.__time_base,
self.__time_context,
self.__time_sec,
self.__time_usec,
)
def getName(self):
return self.__name
def getCompName(self):
return self.__comp_name
def getId(self):
return self.__id
def getChDesc(self):
return self.__ch_desc
def getType(self):
return self.__ch_type
def getTimeBase(self):
return self.__time_base
def getTimeContext(self):
return self.__time_context
def getTimeSec(self):
return self.__time_sec
def getTimeUsec(self):
return self.__time_usec
def getFormatString(self):
return self.__format_string
def getLowRed(self):
return self.__low_red
def getLowOrange(self):
return self.__low_orange
def getLowYellow(self):
return self.__low_yellow
def getHighYellow(self):
return self.__high_yellow
def getHighOrange(self):
return self.__high_orange
def getHighRed(self):
return self.__high_red
@property
def changed(self):
"""
        changed is True if the value was recently updated.
        changed is False otherwise.
"""
return self.__changed
@changed.setter
def changed(self, ch):
        if ch not in (True, False):  # coerce non-boolean values to True
            ch = True
self.__changed = ch
| 25.491329
| 80
| 0.607256
|
51cdb4146310aefc31e75a0f1a7fd593e36155fc
| 13,788
|
py
|
Python
|
pyrobotics/BB.py
|
BioRoboticsUNAM/pyRobotics
|
623f75b66c84b80d03251bb1472b43502a172c22
|
[
"MIT"
] | 1
|
2021-04-18T06:11:48.000Z
|
2021-04-18T06:11:48.000Z
|
pyrobotics/BB.py
|
AldoVidales/pyRobotics
|
623f75b66c84b80d03251bb1472b43502a172c22
|
[
"MIT"
] | null | null | null |
pyrobotics/BB.py
|
AldoVidales/pyRobotics
|
623f75b66c84b80d03251bb1472b43502a172c22
|
[
"MIT"
] | 2
|
2019-09-05T04:09:53.000Z
|
2021-04-18T06:11:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This module contains the main interface to interact with BlackBoard.
Author: Adrián Revuelta Cuauhtli <adrianrc.89@gmail.com>
Workplace: Bio-Robotics Lab., UNAM <http://bio-robotics.fi-p-unam.mx>
'''
# STANDARD IMPORTS
import threading
import time
import types
import Queue
import sys
# PACKAGE IMPORTS
import shared_variables, parallel_senders
from messages import Message, Command, Response
from connection_manager import ConnectionManager
from command_parser import CommandParser
__version__ = '1.9.2'
ParallelSender = parallel_senders.ParallelSender
SharedVarTypes = shared_variables.SharedVarTypes
SubscriptionTypes = shared_variables.SubscriptionTypes
ReportTypes = shared_variables.ReportTypes
_initialized = False
_started = False
_startedLock = threading.Lock()
_ready = False
_readyLock = threading.Lock()
_subscriptionHandlersLock = threading.Lock()
_subscriptionHandlers = {}
_incomingMessages = Queue.Queue(20)
_receivedCommands = Queue.Queue(20)
_receivedResponses = {}
_responsesLock = threading.Lock()
_sentCommands = set([])
_commandsLock = threading.Lock()
def Initialize(port, functionMap={}, asyncHandler = None):
'''
Initializes BlackBoard with the corresponding parameters.
:param int port: The port through which BlackBoard will communicate with this module.
:param dictionary functionMap: A dictionary containing **key:value** pairs, where the *key* is the name of a command received (a string),
and the *value* is either a tuple containing a function as a first element and a boolean as a second element, or a function.
The function in both cases is the function that is going to execute the specified command and receives on object of type :class:`Command` (See :ref:`Creating a command handler <creating_a_command_handler>`).
The boolean value indicates whether the execution of that command should be synchronous (on the same thread) or asynchronous,
usually synchronous execution is preferred for fast commands that can answer almost immediately and asynchronous for commands that might take a little time.
When the value is only a function, by default the execution is synchronous. *functionMap* can also contain an entry with a string containing only an asterisk,
meaning that would be the handler in case no other handler is found for a specific command.
.. note::
Notice that although functionMap can include a wildcard handler and this might seem like the module could answer
anything, BlackBoard will only send commands that are registered under this module's configuration.
:param function asyncHandler: A function that would handle the response of commands when sent with the method :func:`Send`
instead of using :func:`SendAndWait`. This means the execution of a program that sends a command could continue
and an asynchronous handler would handle the response when one is received.
.. note::
Notice that the asyncHandler functionality could also be achieved using a :class:`ParallelSender` object,
but it has other implications.
'''
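    # Example (an illustrative sketch; the command names, the handlers and the
    # import alias BB are assumptions, not part of the original docs):
    #
    #   def handle_say(command):
    #       return Response.FromCommandObject(command, True, command.params)
    #
    #   BB.Initialize(2000, {'say': handle_say,                # synchronous handler
    #                        'navigate': (handle_nav, True)})  # asynchronous handler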
global _executors, _connMan, _parser, _p, _initialized, _ready
_executors = { 'busy' : (lambda x: Response('busy'), False),
'ready' : (_isReady, False),
'alive' : (lambda x: Response('alive', True), False) }
for m in functionMap:
if isinstance(functionMap[m], types.FunctionType):
_executors[m] = (functionMap[m], False)
elif isinstance(functionMap[m], tuple):
_executors[m] = functionMap[m]
else:
print 'Element in function map is not a function nor a correct tuple: ' + repr(functionMap[m])
_connMan = ConnectionManager(port)
_parser = CommandParser(asyncHandler)
_p = threading.Thread(target=_MainThread)
_p.daemon = True
_initialized = True
def Start():
'''
Once pyRobotics is :func:`initialized <Initialize>`, you can start the communication with BlackBoard.
    This will start the threads of the internal *ConnectionManager* and *CommandParser* classes, which listen for
    a connection and receive and parse incoming messages.
If pyRobotics is not initialized it will only print a message saying "pyRobotics needs to be initialized before starting".
A similar message will show when trying to use some of this module's functions before calling this function.
.. todo::
Fix bug: sometimes when connection is established successfully a message saying pyRobotics has not been started is printed.
'''
global _p, _connMann, _parser, _initialized, _started, _startedLock
if not _initialized:
print 'pyRobotics needs to be initialized before starting.'
return
_parser.Start()
_connMan.Start()
_p.start()
_startedLock.acquire()
_started = True
_startedLock.release()
def SetReady(val=True):
'''
Once pyRobotics is :func:`initialized <Initialize>` and :func:`started <Start>`, this flag should be set to true to
let BlackBoard know that the module is functioning correctly and ready to receive commands.
Even if this module does not receive any commands, this should be set to true.
'''
global _ready, _readyLock, _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
_readyLock.acquire()
_ready = val
_readyLock.release()
def _isReady(c):
global _ready, _readyLock
_readyLock.acquire()
ready = _ready
_readyLock.release()
return Response('ready', ready)
def Wait():
'''
    In case this module is only used to receive and respond to commands, but does nothing while no command is received,
    this will prevent the main thread (and therefore the BlackBoard connection and command execution) from terminating.
'''
global _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
try:
while True:
time.sleep(300)
except (KeyboardInterrupt, SystemExit):
sys.exit()
def _MainThread():
global _receivedCommands, _executors
while True:
command = _receivedCommands.get()
key = command.name
if key not in _executors:
if '*' in _executors:
key = '*'
else:
print 'Executor not found for command: ' + command.name
                continue  # skip this command instead of killing the dispatcher thread
func, async = _executors[key]
if async:
p = threading.Thread(target=_Execute, args=(func, command))
p.daemon = True
p.start()
else:
_Execute(func, command)
def _Execute(func, command):
try:
response = func(command)
except Exception as e:
print "Function '" + str(func) + "' crashed."
print 'ERROR: ' + str(e)
response = Response.FromCommandObject(command, False, command.params)
if not isinstance(response, Response):
print "Function '" + str(func) + "' did not return a Response object."
response = Response.FromCommandObject(command, False, command.params)
resp = Response.FromCommandObject(command, response.successful, response.params)
Send(resp)
def Send(message):
'''
Sends a command WITHOUT waiting for an answer.
    :param Message message: Message to be sent; must be an instance of the Message class (e.g. a Command or a Response).
:return: ``True`` if the message was sent successfully, ``False`` otherwise.
'''
global _connMan, _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
if not isinstance(message, Message):
print "Message to be sent should be a derived class of pyrobotics.messages.Message Class. Message was not sent."
return False
for _ in range(3):
if _connMan.Send(message):
return True
return False
def SendAndWait(command, timeout=300000, attempts = 1):
    '''
    Sends a command and waits for the answer. This blocks the execution of the calling thread.
    :param Command command: Message to be sent, must be an instance of the Command class.
    :param int timeout: (Default 300000) How much time (in milliseconds) to wait for a response before trying again or aborting.
    :param int attempts: (Default 1) How many attempts to send the command if no response is received after timeout.
        If attempts is 0, it will keep trying indefinitely. (Not recommended)
    :return: A :class:`Response` object if the message was sent successfully and a response was received before the timeout occurred, ``None`` otherwise.
    '''
    global _commandsLock, _sentCommands, _responsesLock, _receivedResponses, _started, _startedLock
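    # Example (an illustrative sketch; the command name, its parameters and the
    # import alias BB are assumptions):
    #
    #   response = BB.SendAndWait(Command('follow_person', 'John'), timeout=10000, attempts=3)
    #   if response and response.successful:
    #       pass  # command was executed successfully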
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return None
if not isinstance(command, Command):
print "Message should be an instance of class Command. Message not sent."
return None
_commandsLock.acquire()
_sentCommands.add(command)
_commandsLock.release()
currentAttempt = 0
timeout = timeout/1000.0
response = None
while not response and currentAttempt < attempts:
Send(command)
newTimeout = time.time() + timeout
currentAttempt += 1
while time.time() < newTimeout:
_responsesLock.acquire()
if command in _receivedResponses:
response = _receivedResponses.pop(command)
_responsesLock.release()
if response:
break
time.sleep(0.1)
_commandsLock.acquire()
_sentCommands.remove(command)
_commandsLock.release()
return response
def ReadSharedVar(name):
'''
Reads the value of a Shared Variable from the BlackBoard.
:param string name: The name of the Shared Variable.
:return: A :class:`SharedVar` object if the request was successful, ``False`` if pyRobotics has not been started, ``None`` otherwise.
'''
global _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
return shared_variables._ReadSharedVar(name)
def CreateSharedVar(sharedVarType, name):
'''
Creates a Shared Variable in BlackBoard.
:param enum sharedVarType: The type of the shared variable, it is one of the constants in :class:`SharedVarTypes` pseudo-enum.
:param string name: The name of the shared variable to be created.
:return: ``True`` if creation was successful, ``False`` otherwise.
'''
global _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
return shared_variables._CreateSharedVar(sharedVarType, name)
def WriteSharedVar(sharedVarType, name, data):
'''
Writes content to a Shared Variable in BlackBoard.
:param enum sharedVarType: The type of the shared variable, it is one of the constants in :class:`SharedVarTypes` pseudo-enum.
:param string name: The name of the shared variable to write to.
:param var data: The data to be written, the type must match the shared variable's type.
    :return: ``True`` if shared variable was successfully written to, ``False`` otherwise.
'''
global _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
return shared_variables._WriteSharedVar(sharedVarType, name, data)
def SubscribeToSharedVar(name, handler, subscriptionType=SubscriptionTypes.WRITE_OTHERS, reportType = ReportTypes.CONTENT):
'''
Subscribes to a Shared Variable in BlackBoard.
When a module subscribes to a shared variable, it gets notifications when someone writes to it.
:param string name: The name of the shared variable to subscribe to.
:param function handler: A function that will be the handler for this shared variables notification. (See :ref:`Creating a subscription handler <creating_a_subscription_handler>`)
:param enum subscriptionType: The type of subscription, it is one of the constants in :class:`SubscriptionTypes` pseudo-enum.
:param enum reportType: The type of report to receive when someone writes to it, it is one of the constants in :class:`ReportTypes` pseudo-enum.
:return: ``True`` if subscription was successful, ``False`` otherwise.
'''
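    # Example (an illustrative sketch; the variable name, the handler and the
    # attributes of the object it receives are assumptions):
    #
    #   def on_position_written(shared_var):
    #       print 'robot_position was written: ' + repr(shared_var)
    #
    #   BB.SubscribeToSharedVar('robot_position', on_position_written)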
global _subscriptionHandlersLock, _subscriptionHandlers, _started, _startedLock
_startedLock.acquire()
started = _started
_startedLock.release()
if not started:
print 'pyRobotics has not been started.'
return False
if not shared_variables._SubscribeToSharedVar(name, subscriptionType, reportType):
return False
_subscriptionHandlersLock.acquire()
_subscriptionHandlers[name] = handler
_subscriptionHandlersLock.release()
return True
| 35.812987
| 215
| 0.686612
|
4f3033b43704610619918a5198d6992e1516fae9
| 3,415
|
py
|
Python
|
Modules/Group.py
|
Bamgm14/GarbageBot-The-Original-
|
6d796a0e343491377c0195efff98aea903041b1a
|
[
"MIT"
] | 1
|
2019-06-18T19:15:56.000Z
|
2019-06-18T19:15:56.000Z
|
Modules/Group.py
|
Bamgm14/GarbageBot-The-Original-
|
6d796a0e343491377c0195efff98aea903041b1a
|
[
"MIT"
] | null | null | null |
Modules/Group.py
|
Bamgm14/GarbageBot-The-Original-
|
6d796a0e343491377c0195efff98aea903041b1a
|
[
"MIT"
] | null | null | null |
import time as t
import Modules.Constant as c
def makeadmin(message,driver,textbox,name):
botadmin=open('Special.txt','r+').read().split('\n')
if message.split('!')[0].replace('\n','') in botadmin and message.split('!')[0].replace('\n','')!='':
message=message.split('!')[1].split('\n')[0][9:].split(' ')[1]
if message not in HB(message,driver,textbox,name):
textbox.send_keys("Person Not Here\n")
else:
click_menu = driver.find_element_by_xpath(c.Menu)
click_menu.click()
prt=driver.find_element_by_xpath(c.Parti)
driver.execute_script(c.Scroll, prt)
prt.click()
t.sleep(3)
send=driver.find_element_by_xpath(c.Search_parti)
send.send_keys(message+'\n')
t.sleep(3)
if driver.find_element_by_class_name(c.Adm).is_displayed():
textbox.send_keys("You Are Admin Already\n")
else:
driver.find_element_by_class_name(c.Madm).click()
driver.find_element_by_xpath(c.BMadm).click()
t.sleep(2)
driver.find_element_by_class_name(c.Qt).click()
t.sleep(3)
driver.find_element_by_class_name(c.Qt).click()
else:
textbox.send_keys("You Aren't Special\n")
def special(message,driver,textbox,name):
try:
a=open('Special.txt','a')
except:
a=open('Special.txt','w+')
a.write(name.lower()+'\n')
a.close()
textbox.send_keys('You Are Special\n')
def Addmeback(message,driver,textbox,name):
driver.find_element_by_xpath(c.GrpSrch).send_keys(message.split('!')[1][4:]+'\n')
t.sleep(3)
click_menu = driver.find_element_by_xpath(c.Menu)
click_menu.click()
prt=driver.find_element_by_xpath(c.AddPrt)
driver.execute_script(c.Scroll, prt)
prt.click()
t.sleep(2)
send=driver.find_element_by_xpath(c.Search_add)
send.send_keys(name+'\n')
t.sleep(2)
driver.find_element_by_xpath(c.Add).click()
t.sleep(2)
driver.find_element_by_xpath(c.Tick).click()
t.sleep(2)
driver.find_element_by_xpath(c.AddPt).click()
def linkmeback(message,driver,textbox,name):
driver.find_element_by_xpath(c.GrpSrch).send_keys(message.split('!')[1][4:]+'\n')
t.sleep(3)
click_menu = driver.find_element_by_xpath(c.Menu)
click_menu.click()
prt=driver.find_element_by_xpath(c.Invite)
driver.execute_script(c.Scroll, prt)
prt.click()
driver.find_element_by_xpath(c.Snd_invite1).click()
t.sleep(1)
send=driver.find_element_by_xpath(c.Search_add)
send.send_keys(name+'\n')
driver.find_element_by_xpath(c.Snd_invite2).click()
driver.find_element_by_xpath(c.Back).click()
def HB(message,browser,textbox,name):
try:
click_menu = browser.find_element_by_xpath(c.Menu).text.lower()
        assert 'last' not in click_menu and 'online' not in click_menu, 'Second Path'
assert 'typing' not in click_menu,'Please Stop Typing\n'
participate=click_menu.replace(' ','').split(',')
return participate
except Exception as e:
if str(e)=='Please Stop Typing\n':
textbox.send_keys(e)
return HB(message,browser,textbox,name)
else:
lst=[]
lst.append(name.lower())
return lst
| 40.654762
| 106
| 0.619327
|
f598c4d9452a3921ebecd09842e3f2e8066735f1
| 15,565
|
py
|
Python
|
hw0_release/.env/bin/pildriver.py
|
Renhy/CS131_release
|
23b92d04c4cbb122da18dc929199d3d06fb0251f
|
[
"MIT"
] | null | null | null |
hw0_release/.env/bin/pildriver.py
|
Renhy/CS131_release
|
23b92d04c4cbb122da18dc929199d3d06fb0251f
|
[
"MIT"
] | null | null | null |
hw0_release/.env/bin/pildriver.py
|
Renhy/CS131_release
|
23b92d04c4cbb122da18dc929199d3d06fb0251f
|
[
"MIT"
] | null | null | null |
#!/Users/renhy/Desktop/cs131/CS131_release/hw0_release/.env/bin/python3.7
"""PILdriver, an image-processing calculator using PIL.
An instance of class PILDriver is essentially a software stack machine
(Polish-notation interpreter) for sequencing PIL image
transformations. The state of the instance is the interpreter stack.
The only method one will normally invoke after initialization is the
`execute' method. This takes an argument list of tokens, pushes them
onto the instance's stack, and then tries to clear the stack by
successive evaluation of PILdriver operators. Any part of the stack
not cleaned off persists and is part of the evaluation context for
the next call of the execute method.
PILDriver doesn't catch any exceptions, on the theory that these
are actually diagnostic information that should be interpreted by
the calling code.
When called as a script, the command-line arguments are passed to
a PILDriver instance. If there are no command-line arguments, the
module runs an interactive interpreter, each line of which is split into
space-separated tokens and passed to the execute method.
In the method descriptions below, a first line beginning with the string
`usage:' means this method can be invoked with the token that follows
it. Following <>-enclosed arguments describe how the method interprets
the entries on the stack. Each argument specification begins with a
type specification: either `int', `float', `string', or `image'.
All operations consume their arguments off the stack (use `dup' to
keep copies around). Use `verbose 1' to see the stack state displayed
before each operation.
Usage examples:
`show crop 0 0 200 300 open test.png' loads test.png, crops out a portion
of its upper-left-hand corner and displays the cropped portion.
`save rotated.png rotate 30 open test.tiff' loads test.tiff, rotates it
30 degrees, and saves the result as rotated.png (in PNG format).
"""
# by Eric S. Raymond <esr@thyrsus.com>
# $Id$
# TO DO:
# 1. Add PILFont capabilities, once that's documented.
# 2. Add PILDraw operations.
# 3. Add support for composing and decomposing multiple-image files.
#
from __future__ import print_function
from PIL import Image
class PILDriver(object):
verbose = 0
def do_verbose(self):
"""usage: verbose <int:num>
Set verbosity flag from top of stack.
"""
self.verbose = int(self.do_pop())
# The evaluation stack (internal only)
stack = [] # Stack of pending operations
def push(self, item):
"Push an argument onto the evaluation stack."
self.stack.insert(0, item)
def top(self):
"Return the top-of-stack element."
return self.stack[0]
# Stack manipulation (callable)
def do_clear(self):
"""usage: clear
Clear the stack.
"""
self.stack = []
def do_pop(self):
"""usage: pop
Discard the top element on the stack.
"""
return self.stack.pop(0)
def do_dup(self):
"""usage: dup
Duplicate the top-of-stack item.
"""
        if hasattr(self.stack[0], 'format'):  # If it's an image, do a real copy
dup = self.stack[0].copy()
else:
dup = self.stack[0]
self.push(dup)
def do_swap(self):
"""usage: swap
Swap the top-of-stack item with the next one down.
"""
self.stack = [self.stack[1], self.stack[0]] + self.stack[2:]
# Image module functions (callable)
def do_new(self):
"""usage: new <int:xsize> <int:ysize> <int:color>:
Create and push a greyscale image of given size and color.
"""
xsize = int(self.do_pop())
ysize = int(self.do_pop())
color = int(self.do_pop())
self.push(Image.new("L", (xsize, ysize), color))
def do_open(self):
"""usage: open <string:filename>
Open the indicated image, read it, push the image on the stack.
"""
self.push(Image.open(self.do_pop()))
def do_blend(self):
"""usage: blend <image:pic1> <image:pic2> <float:alpha>
Replace two images and an alpha with the blended image.
"""
image1 = self.do_pop()
image2 = self.do_pop()
alpha = float(self.do_pop())
self.push(Image.blend(image1, image2, alpha))
def do_composite(self):
"""usage: composite <image:pic1> <image:pic2> <image:mask>
Replace two images and a mask with their composite.
"""
image1 = self.do_pop()
image2 = self.do_pop()
mask = self.do_pop()
self.push(Image.composite(image1, image2, mask))
def do_merge(self):
"""usage: merge <string:mode> <image:pic1>
[<image:pic2> [<image:pic3> [<image:pic4>]]]
Merge top-of stack images in a way described by the mode.
"""
mode = self.do_pop()
bandlist = []
for band in mode:
bandlist.append(self.do_pop())
self.push(Image.merge(mode, bandlist))
# Image class methods
def do_convert(self):
"""usage: convert <string:mode> <image:pic1>
Convert the top image to the given mode.
"""
mode = self.do_pop()
image = self.do_pop()
self.push(image.convert(mode))
def do_copy(self):
"""usage: copy <image:pic1>
Make and push a true copy of the top image.
"""
        self.do_dup()
def do_crop(self):
"""usage: crop <int:left> <int:upper> <int:right> <int:lower>
<image:pic1>
Crop and push a rectangular region from the current image.
"""
left = int(self.do_pop())
upper = int(self.do_pop())
right = int(self.do_pop())
lower = int(self.do_pop())
image = self.do_pop()
self.push(image.crop((left, upper, right, lower)))
def do_draft(self):
"""usage: draft <string:mode> <int:xsize> <int:ysize>
Configure the loader for a given mode and size.
"""
mode = self.do_pop()
xsize = int(self.do_pop())
ysize = int(self.do_pop())
        image = self.do_pop()
        image.draft(mode, (xsize, ysize))  # draft() configures the image loader in place
        self.push(image)
def do_filter(self):
"""usage: filter <string:filtername> <image:pic1>
Process the top image with the given filter.
"""
from PIL import ImageFilter
imageFilter = getattr(ImageFilter, self.do_pop().upper())
image = self.do_pop()
self.push(image.filter(imageFilter))
def do_getbbox(self):
"""usage: getbbox
Push left, upper, right, and lower pixel coordinates of the top image.
"""
bounding_box = self.do_pop().getbbox()
self.push(bounding_box[3])
self.push(bounding_box[2])
self.push(bounding_box[1])
self.push(bounding_box[0])
def do_getextrema(self):
"""usage: extrema
Push minimum and maximum pixel values of the top image.
"""
        extrema = self.do_pop().getextrema()
self.push(extrema[1])
self.push(extrema[0])
def do_offset(self):
"""usage: offset <int:xoffset> <int:yoffset> <image:pic1>
Offset the pixels in the top image.
"""
xoff = int(self.do_pop())
yoff = int(self.do_pop())
image = self.do_pop()
        from PIL import ImageChops
        self.push(ImageChops.offset(image, xoff, yoff))  # Image.offset was removed; ImageChops.offset is the replacement
def do_paste(self):
"""usage: paste <image:figure> <int:xoffset> <int:yoffset>
<image:ground>
Paste figure image into ground with upper left at given offsets.
"""
figure = self.do_pop()
xoff = int(self.do_pop())
yoff = int(self.do_pop())
ground = self.do_pop()
if figure.mode == "RGBA":
ground.paste(figure, (xoff, yoff), figure)
else:
ground.paste(figure, (xoff, yoff))
self.push(ground)
def do_resize(self):
"""usage: resize <int:xsize> <int:ysize> <image:pic1>
Resize the top image.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
image = self.do_pop()
self.push(image.resize((xsize, ysize)))
def do_rotate(self):
"""usage: rotate <int:angle> <image:pic1>
Rotate image through a given angle
"""
angle = int(self.do_pop())
image = self.do_pop()
self.push(image.rotate(angle))
def do_save(self):
"""usage: save <string:filename> <image:pic1>
Save image with default options.
"""
filename = self.do_pop()
image = self.do_pop()
image.save(filename)
def do_save2(self):
"""usage: save2 <string:filename> <string:options> <image:pic1>
Save image with specified options.
"""
filename = self.do_pop()
options = self.do_pop()
image = self.do_pop()
image.save(filename, None, options)
def do_show(self):
"""usage: show <image:pic1>
Display and pop the top image.
"""
self.do_pop().show()
def do_thumbnail(self):
"""usage: thumbnail <int:xsize> <int:ysize> <image:pic1>
Modify the top image in the stack to contain a thumbnail of itself.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
self.top().thumbnail((xsize, ysize))
def do_transpose(self):
"""usage: transpose <string:operator> <image:pic1>
Transpose the top image.
"""
        transpose = self.do_pop().upper()
        image = self.do_pop()
        self.push(image.transpose(getattr(Image, transpose)))  # map the name to the PIL transpose constant
# Image attributes
def do_format(self):
"""usage: format <image:pic1>
Push the format of the top image onto the stack.
"""
self.push(self.do_pop().format)
def do_mode(self):
"""usage: mode <image:pic1>
Push the mode of the top image onto the stack.
"""
self.push(self.do_pop().mode)
def do_size(self):
"""usage: size <image:pic1>
Push the image size on the stack as (y, x).
"""
size = self.do_pop().size
self.push(size[0])
self.push(size[1])
# ImageChops operations
def do_invert(self):
"""usage: invert <image:pic1>
Invert the top image.
"""
from PIL import ImageChops
self.push(ImageChops.invert(self.do_pop()))
def do_lighter(self):
"""usage: lighter <image:pic1> <image:pic2>
Pop the two top images, push an image of the lighter pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.lighter(image1, image2))
def do_darker(self):
"""usage: darker <image:pic1> <image:pic2>
Pop the two top images, push an image of the darker pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.darker(image1, image2))
def do_difference(self):
"""usage: difference <image:pic1> <image:pic2>
Pop the two top images, push the difference image
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.difference(image1, image2))
def do_multiply(self):
"""usage: multiply <image:pic1> <image:pic2>
Pop the two top images, push the multiplication image.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.multiply(image1, image2))
def do_screen(self):
"""usage: screen <image:pic1> <image:pic2>
Pop the two top images, superimpose their inverted versions.
"""
from PIL import ImageChops
image2 = self.do_pop()
image1 = self.do_pop()
self.push(ImageChops.screen(image1, image2))
def do_add(self):
"""usage: add <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled sum with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.add(image1, image2, scale, offset))
def do_subtract(self):
"""usage: subtract <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled difference with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.subtract(image1, image2, scale, offset))
# ImageEnhance classes
def do_color(self):
"""usage: color <image:pic1>
Enhance color in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Color(image)
self.push(enhancer.enhance(factor))
def do_contrast(self):
"""usage: contrast <image:pic1>
Enhance contrast in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Contrast(image)
self.push(enhancer.enhance(factor))
def do_brightness(self):
"""usage: brightness <image:pic1>
Enhance brightness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Brightness(image)
self.push(enhancer.enhance(factor))
def do_sharpness(self):
"""usage: sharpness <image:pic1>
Enhance sharpness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Sharpness(image)
self.push(enhancer.enhance(factor))
# The interpreter loop
def execute(self, list):
"Interpret a list of PILDriver commands."
list.reverse()
while len(list) > 0:
self.push(list[0])
list = list[1:]
if self.verbose:
print("Stack: " + repr(self.stack))
top = self.top()
if not isinstance(top, str):
continue
funcname = "do_" + top
if not hasattr(self, funcname):
continue
else:
self.do_pop()
func = getattr(self, funcname)
func()
if __name__ == '__main__':
import sys
# If we see command-line arguments, interpret them as a stack state
# and execute. Otherwise go interactive.
driver = PILDriver()
if len(sys.argv[1:]) > 0:
driver.execute(sys.argv[1:])
else:
print("PILDriver says hello.")
while True:
try:
if sys.version_info[0] >= 3:
line = input('pildriver> ')
else:
line = raw_input('pildriver> ')
except EOFError:
print("\nPILDriver says goodbye.")
break
driver.execute(line.split())
print(driver.stack)
# The following sets edit modes for GNU EMACS
# Local Variables:
# mode:python
# End:
| 29.535104
| 79
| 0.591969
|
c114e8ac9ec08f59352cccd15e9b070252324360
| 18,710
|
py
|
Python
|
mmf/utils/configuration.py
|
xuefeicao/mmf
|
2ff00626ec2bc6b56a543da0dc2869c548e81fec
|
[
"BSD-3-Clause"
] | null | null | null |
mmf/utils/configuration.py
|
xuefeicao/mmf
|
2ff00626ec2bc6b56a543da0dc2869c548e81fec
|
[
"BSD-3-Clause"
] | null | null | null |
mmf/utils/configuration.py
|
xuefeicao/mmf
|
2ff00626ec2bc6b56a543da0dc2869c548e81fec
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
import collections
import json
import logging
import os
import warnings
from ast import literal_eval
import demjson
import torch
from mmf.common.registry import registry
from mmf.utils.env import import_user_module
from mmf.utils.file_io import PathManager
from mmf.utils.general import get_absolute_path, get_mmf_root
from omegaconf import OmegaConf
logger = logging.getLogger(__name__)
def load_yaml(f):
# Convert to absolute path for loading includes
abs_f = get_absolute_path(f)
try:
mapping = OmegaConf.load(abs_f)
f = abs_f
except FileNotFoundError as e:
# Check if this file might be relative to root?
# TODO: Later test if this can be removed
relative = os.path.abspath(os.path.join(get_mmf_root(), f))
if not PathManager.isfile(relative):
raise e
else:
f = relative
mapping = OmegaConf.load(f)
if mapping is None:
mapping = OmegaConf.create()
includes = mapping.get("includes", [])
if not isinstance(includes, collections.abc.Sequence):
raise AttributeError(
"Includes must be a list, {} provided".format(type(includes))
)
include_mapping = OmegaConf.create()
mmf_root_dir = get_mmf_root()
for include in includes:
original_include_path = include
include = os.path.join(mmf_root_dir, include)
# If path doesn't exist relative to MMF root, try relative to current file
if not PathManager.exists(include):
include = os.path.join(os.path.dirname(f), original_include_path)
current_include_mapping = load_yaml(include)
include_mapping = OmegaConf.merge(include_mapping, current_include_mapping)
mapping.pop("includes", None)
mapping = OmegaConf.merge(include_mapping, mapping)
return mapping
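# For illustration, a config using the "includes" mechanism handled above might
# look like this (paths and values are hypothetical); each include is loaded
# recursively and merged, with the including file taking precedence:
#
#   includes:
#   - configs/models/visual_bert/defaults.yaml
#   model_config:
#     visual_bert:
#       hidden_size: 768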
def get_default_config_path():
directory = os.path.dirname(os.path.abspath(__file__))
configs_dir = os.path.join(directory, "..", "configs")
# Check for fb defaults
fb_defaults = os.path.join(configs_dir, "fb_defaults.yaml")
if PathManager.exists(fb_defaults):
return fb_defaults
else:
return os.path.join(configs_dir, "defaults.yaml")
def load_yaml_with_defaults(f):
default_config = get_default_config_path()
return OmegaConf.merge(load_yaml(default_config), load_yaml(f))
def get_zoo_config(
key, variation="defaults", zoo_config_path=None, zoo_type="datasets"
):
version = None
resources = None
if zoo_config_path is None:
zoo_config_path = os.path.join("configs", "zoo", f"{zoo_type}.yaml")
zoo = load_yaml(zoo_config_path)
# Set struct on zoo so that unidentified access is not allowed
OmegaConf.set_struct(zoo, True)
try:
item = OmegaConf.select(zoo, key)
except Exception:
# Key wasn't present or something else happened, return None, None
return version, resources
if not item:
return version, resources
if variation not in item:
# If variation is not present, then key value should
# be directly returned if "defaults" was selected as the variation
assert (
variation == "defaults"
), f"'{variation}' variation not present in zoo config"
return _get_version_and_resources(item)
elif "resources" in item:
# Case where full key is directly passed
return _get_version_and_resources(item)
else:
return _get_version_and_resources(item[variation])
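# For reference, a zoo entry compatible with the lookup above has roughly this
# shape (a sketch; the key, version and resource values are hypothetical):
#
#   my_dataset:
#     defaults:
#       version: 1.0
#       resources:
#       - url: http://example.com/my_dataset.tar.gz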
def _get_version_and_resources(item):
assert "version" in item, "'version' key should be present in zoo config {}".format(
item._get_full_key("")
)
assert (
"resources" in item
), "'resources' key should be present in zoo config {}".format(
item._get_full_key("")
)
return item.version, item.resources
def get_global_config(key=None):
config = registry.get("config")
if config is None:
configuration = Configuration()
config = configuration.get_config()
registry.register("config", config)
if key:
config = OmegaConf.select(config, key)
return config
def get_mmf_cache_dir():
config = get_global_config()
cache_dir = config.env.cache_dir
# If cache_dir path exists do not join to mmf root
if not os.path.exists(cache_dir):
cache_dir = os.path.join(get_mmf_root(), cache_dir)
return cache_dir
def get_mmf_env(key=None):
config = get_global_config()
if key:
return OmegaConf.select(config.env, key)
else:
return config.env
def resolve_cache_dir(env_variable="MMF_CACHE_DIR", default="mmf"):
    # Some of this follows what "transformers" does for its cache resolving
try:
from torch.hub import _get_torch_home
torch_cache_home = _get_torch_home()
except ImportError:
torch_cache_home = os.path.expanduser(
os.getenv(
"TORCH_HOME",
os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "torch"),
)
)
default_cache_path = os.path.join(torch_cache_home, default)
cache_path = os.getenv(env_variable, default_cache_path)
if not PathManager.exists(cache_path):
try:
PathManager.mkdirs(cache_path)
except PermissionError:
cache_path = os.path.join(get_mmf_root(), ".mmf_cache")
PathManager.mkdirs(cache_path)
return cache_path
def resolve_dir(env_variable, default="data"):
default_dir = os.path.join(resolve_cache_dir(), default)
dir_path = os.getenv(env_variable, default_dir)
if not PathManager.exists(dir_path):
PathManager.mkdirs(dir_path)
return dir_path
class Configuration:
def __init__(self, args=None, default_only=False):
self.config = {}
if not args:
import argparse
args = argparse.Namespace(opts=[])
default_only = True
self.args = args
self._register_resolvers()
self._default_config = self._build_default_config()
if default_only:
other_configs = {}
else:
other_configs = self._build_other_configs()
self.config = OmegaConf.merge(self._default_config, other_configs)
self.config = self._merge_with_dotlist(self.config, args.opts)
self._update_specific(self.config)
self.upgrade(self.config)
        # Resolve the config right after full creation so that spawned workers
        # don't face any issues
self.config = OmegaConf.create(
OmegaConf.to_container(self.config, resolve=True)
)
registry.register("config", self.config)
def _build_default_config(self):
self.default_config_path = get_default_config_path()
default_config = load_yaml(self.default_config_path)
return default_config
def _build_other_configs(self):
opts_config = self._build_opt_list(self.args.opts)
user_config = self._build_user_config(opts_config)
self._opts_config = opts_config
self._user_config = user_config
self.import_user_dir()
model_config = self._build_model_config(opts_config)
dataset_config = self._build_dataset_config(opts_config)
args_overrides = self._build_demjson_config(self.args.config_override)
other_configs = OmegaConf.merge(
model_config, dataset_config, user_config, args_overrides
)
return other_configs
def _build_opt_list(self, opts):
opts_dot_list = self._convert_to_dot_list(opts)
return OmegaConf.from_dotlist(opts_dot_list)
def _build_user_config(self, opts):
user_config = {}
# Update user_config with opts if passed
self.config_path = opts.config
if self.config_path is not None:
user_config = load_yaml(self.config_path)
return user_config
def import_user_dir(self):
# Try user_dir options in order of MMF configuration hierarchy
# First try the default one, which can be set via environment as well
user_dir = self._default_config.env.user_dir
# Now, check user's config
user_config_user_dir = self._user_config.get("env", {}).get("user_dir", None)
if user_config_user_dir:
user_dir = user_config_user_dir
# Finally, check opts
opts_user_dir = self._opts_config.get("env", {}).get("user_dir", None)
if opts_user_dir:
user_dir = opts_user_dir
if user_dir:
import_user_module(user_dir)
def _build_model_config(self, config):
model = config.model
if model is None:
raise KeyError("Required argument 'model' not passed")
model_cls = registry.get_model_class(model)
if model_cls is None:
warning = f"No model named '{model}' has been registered"
warnings.warn(warning)
return OmegaConf.create()
default_model_config_path = model_cls.config_path()
if default_model_config_path is None:
warning = "Model {}'s class has no default configuration provided".format(
model
)
warnings.warn(warning)
return OmegaConf.create()
return load_yaml(default_model_config_path)
def _build_dataset_config(self, config):
dataset = config.dataset
datasets = config.datasets
if dataset is None and datasets is None:
raise KeyError("Required argument 'dataset|datasets' not passed")
if datasets is None:
config.datasets = dataset
datasets = dataset.split(",")
else:
datasets = datasets.split(",")
dataset_config = OmegaConf.create()
for dataset in datasets:
builder_cls = registry.get_builder_class(dataset)
if builder_cls is None:
warning = f"No dataset named '{dataset}' has been registered"
warnings.warn(warning)
continue
default_dataset_config_path = builder_cls.config_path()
if default_dataset_config_path is None:
warning = (
f"Dataset {dataset}'s builder class has no default configuration "
+ "provided"
)
warnings.warn(warning)
continue
dataset_config = OmegaConf.merge(
dataset_config, load_yaml(default_dataset_config_path)
)
return dataset_config
def get_config(self):
self._register_resolvers()
return self.config
def _build_demjson_config(self, demjson_string):
if demjson_string is None:
return OmegaConf.create()
demjson_dict = demjson.decode(demjson_string)
return OmegaConf.create(demjson_dict)
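    # demjson accepts relaxed JSON, so an override string such as
    # "{training: {batch_size: 32}}" (unquoted keys) decodes to a plain
    # dict before being wrapped in an OmegaConf node.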
def _get_args_config(self, args):
args_dict = vars(args)
return OmegaConf.create(args_dict)
def _register_resolvers(self):
OmegaConf.clear_resolvers()
# Device count resolver
device_count = max(1, torch.cuda.device_count())
OmegaConf.register_resolver("device_count", lambda: device_count)
OmegaConf.register_resolver("resolve_cache_dir", resolve_cache_dir)
OmegaConf.register_resolver("resolve_dir", resolve_dir)
def _merge_with_dotlist(self, config, opts):
# TODO: To remove technical debt, a possible solution is to use
# struct mode to update with dotlist OmegaConf node. Look into this
# in next iteration
if opts is None:
opts = []
if len(opts) == 0:
return config
# Support equal e.g. model=visual_bert for better future hydra support
has_equal = opts[0].find("=") != -1
if has_equal:
opt_values = [opt.split("=") for opt in opts]
else:
assert len(opts) % 2 == 0, "Number of opts should be multiple of 2"
opt_values = zip(opts[0::2], opts[1::2])
for opt, value in opt_values:
if opt == "dataset":
opt = "datasets"
splits = opt.split(".")
current = config
for idx, field in enumerate(splits):
array_index = -1
if field.find("[") != -1 and field.find("]") != -1:
stripped_field = field[: field.find("[")]
array_index = int(field[field.find("[") + 1 : field.find("]")])
else:
stripped_field = field
if stripped_field not in current:
raise AttributeError(
"While updating configuration"
" option {} is missing from"
" configuration at field {}".format(opt, stripped_field)
)
if isinstance(current[stripped_field], collections.abc.Mapping):
current = current[stripped_field]
elif (
isinstance(current[stripped_field], collections.abc.Sequence)
and array_index != -1
):
current_value = current[stripped_field][array_index]
# Case where array element to be updated is last element
if (
not isinstance(
current_value,
(collections.abc.Mapping, collections.abc.Sequence),
)
or idx == len(splits) - 1
):
logger.info(f"Overriding option {opt} to {value}")
current[stripped_field][array_index] = self._decode_value(value)
else:
# Otherwise move on down the chain
current = current_value
else:
if idx == len(splits) - 1:
logger.info(f"Overriding option {opt} to {value}")
current[stripped_field] = self._decode_value(value)
else:
raise AttributeError(
"While updating configuration",
"option {} is not present "
"after field {}".format(opt, stripped_field),
)
return config
def _decode_value(self, value):
# https://github.com/rbgirshick/yacs/blob/master/yacs/config.py#L400
if not isinstance(value, str):
return value
if value == "None":
value = None
try:
value = literal_eval(value)
except ValueError:
pass
except SyntaxError:
pass
return value
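    # Decoding examples: "None" -> None, "1e-4" -> 0.0001, "[1, 2]" -> [1, 2];
    # "true" stays the string "true" (literal_eval only accepts Python
    # literals such as True), and any unparseable string is returned as-is.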
def freeze(self):
OmegaConf.set_struct(self.config, True)
def defrost(self):
OmegaConf.set_struct(self.config, False)
def _convert_to_dot_list(self, opts):
if opts is None:
opts = []
if len(opts) == 0:
return opts
# Support equal e.g. model=visual_bert for better future hydra support
has_equal = opts[0].find("=") != -1
if has_equal:
return opts
return [(opt + "=" + value) for opt, value in zip(opts[0::2], opts[1::2])]
def pretty_print(self):
if not self.config.training.log_detailed_config:
return
logger.info("===== Training Parameters =====")
logger.info(self._convert_node_to_json(self.config.training))
logger.info("====== Dataset Attributes ======")
datasets = self.config.datasets.split(",")
for dataset in datasets:
if dataset in self.config.dataset_config:
logger.info(f"======== {dataset} =======")
dataset_config = self.config.dataset_config[dataset]
logger.info(self._convert_node_to_json(dataset_config))
else:
logger.warning(f"No dataset named '{dataset}' in config. Skipping")
logger.info("====== Optimizer Attributes ======")
logger.info(self._convert_node_to_json(self.config.optimizer))
if self.config.model not in self.config.model_config:
raise ValueError(f"{self.config.model} not present in model attributes")
logger.info(f"====== Model ({self.config.model}) Attributes ======")
logger.info(
self._convert_node_to_json(self.config.model_config[self.config.model])
)
def _convert_node_to_json(self, node):
container = OmegaConf.to_container(node, resolve=True)
return json.dumps(container, indent=4, sort_keys=True)
def _update_specific(self, config):
# tp = self.config.training
# if args["seed"] is not None or tp['seed'] is not None:
# print(
# "You have chosen to seed the training. This will turn on CUDNN "
# "deterministic setting which can slow down your training "
# "considerably! You may see unexpected behavior when restarting "
# "from checkpoints."
# )
# if args["seed"] == -1:
# self.config["training"]["seed"] = random.randint(1, 1000000)
if config.learning_rate:
if "optimizer" in config and "params" in config.optimizer:
lr = config.learning_rate
config.optimizer.params.lr = lr
if not torch.cuda.is_available() and "cuda" in config.training.device:
warnings.warn(
"Device specified is 'cuda' but cuda is not present. "
+ "Switching to CPU version."
)
config.training.device = "cpu"
return config
def upgrade(self, config):
mapping = {
"training.resume_file": "checkpoint.resume_file",
"training.resume": "checkpoint.resume",
"training.resume_best": "checkpoint.resume_best",
"training.load_pretrained": "checkpoint.resume_pretrained",
"training.pretrained_state_mapping": "checkpoint.pretrained_state_mapping",
"training.run_type": "run_type",
}
for old, new in mapping.items():
value = OmegaConf.select(config, old)
if value:
OmegaConf.update(config, new, value)
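        # e.g. a legacy config carrying training.resume_file has its value
        # copied to checkpoint.resume_file so older checkpoints keep working.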
# This is still here due to legacy reasons around
# older checkpoint loading from v0.3
class ConfigNode(collections.OrderedDict):
pass
| 33.232682
| 88
| 0.608872
|
bb675b6fe5dcf7303590ad8770bafbc6e5eb9640
| 890
|
py
|
Python
|
jivago/serialization/deserialization/optional_attribute_deserialization_strategy.py
|
keotl/jivago
|
892dfb0cae773e36245083c3e56f0f8523145523
|
[
"MIT"
] | 12
|
2018-03-19T20:57:44.000Z
|
2020-01-27T14:11:24.000Z
|
jivago/serialization/deserialization/optional_attribute_deserialization_strategy.py
|
keotl/jivago
|
892dfb0cae773e36245083c3e56f0f8523145523
|
[
"MIT"
] | 73
|
2018-04-20T22:26:00.000Z
|
2021-12-01T14:17:37.000Z
|
jivago/serialization/deserialization/optional_attribute_deserialization_strategy.py
|
keotl/jivago
|
892dfb0cae773e36245083c3e56f0f8523145523
|
[
"MIT"
] | 1
|
2019-02-28T13:33:45.000Z
|
2019-02-28T13:33:45.000Z
|
from typing import Optional
from jivago.inject import typing_meta_helper
from jivago.lang.annotations import Override
from jivago.lang.stream import Stream
from jivago.serialization.deserialization_strategy import DeserializationStrategy, T
class OptionalAttributeDeserializationStrategy(DeserializationStrategy):
def __init__(self, deserializer: "Deserializer"):
self.deserializer = deserializer
@Override
def can_handle_deserialization(self, declared_type: type) -> bool:
return typing_meta_helper.is_optional_typing_meta(declared_type)
@Override
def deserialize(self, obj, declared_type: Optional[T]) -> Optional[T]:
if obj is None:
return obj
non_nil_declared_type = Stream(declared_type.__args__).firstMatch(lambda x: x != type(None)).get()
return self.deserializer.deserialize(obj, non_nil_declared_type)
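# Behavior sketch (illustrative): for declared_type Optional[int], i.e.
# Union[int, None], deserialize(None, ...) returns None, while
# deserialize(5, ...) picks the non-None member int out of __args__ and
# delegates back to the deserializer.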
| 35.6
| 106
| 0.767416
|
5ffc6e6970226588921bcc9f381a6b15e6c0fa25
| 6,007
|
py
|
Python
|
sapphiregram/users/views.py
|
Sapphire317/Sapphiregram
|
dcc4de6161e78cb5c4830f4221db3f9ac2d04898
|
[
"MIT"
] | null | null | null |
sapphiregram/users/views.py
|
Sapphire317/Sapphiregram
|
dcc4de6161e78cb5c4830f4221db3f9ac2d04898
|
[
"MIT"
] | null | null | null |
sapphiregram/users/views.py
|
Sapphire317/Sapphiregram
|
dcc4de6161e78cb5c4830f4221db3f9ac2d04898
|
[
"MIT"
] | null | null | null |
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from . import models, serializers
from sapphiregram.notifications import views as notification_views
from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
from rest_auth.registration.views import SocialLoginView
class ExploreUsers(APIView):
def get(self, request, format=None):
last_five = models.User.objects.all().order_by('-date_joined')[:5]
serializer = serializers.ListUserSerializer(last_five, many=True)
return Response(data=serializer.data, status = status.HTTP_200_OK)
class FollowUser(APIView):
def post(self, request, user_id, format=None):
        # create Notifications for follow
user = request.user
try:
user_to_follow = models.User.objects.get(id=user_id)
except models.User.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
user.following.add(user_to_follow)
user.save()
notification_views.create_notification(user, user_to_follow,'follow')
return Response(status = status.HTTP_200_OK)
class UnFollowUser(APIView):
def post(self, request, user_id, format=None):
user = request.user
try:
user_to_follow = models.User.objects.get(id=user_id)
except models.User.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
user.following.remove(user_to_follow)
user.save()
return Response(status = status.HTTP_200_OK)
class UserProfile(APIView):
def get_user(self, username):
try:
found_user = models.User.objects.get(username=username)
return found_user
except models.User.DoesNotExist:
return None
def get(self, request, username, format=None):
found_user = self.get_user(username)
if found_user is None:
return Response(status=status.HTTP_404_NOT_FOUND)
serializer = serializers.UserProfileSerializer(found_user)
return Response(data = serializer.data, status = status.HTTP_200_OK)
def put(self, request, username, format=None):
user = request.user
found_user = self.get_user(username)
if found_user is None:
return Response(status=status.HTTP_404_NOT_FOUND)
elif found_user.username!=user.username:
return Response(status = status.HTTP_401_UNAUTHORIZED)
else:
serializer = serializers.UserProfileSerializer(found_user, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(data=serializer.data, status = status.HTTP_200_OK)
else:
return Response(data=serializer.data, status=status.HTTP_400_BAD_REQUEST)
class UserFollowers(APIView):
def get(self, request, username, format=None):
try:
found_user = models.User.objects.get(username = username)
except models.User.DoesNotExist:
return Response(status = status.HTTP_404_NOT_FOUND)
user_followers = found_user.followers.all()
serializer = serializers.ListUserSerializer(user_followers, many=True)
return Response(data=serializer.data, status = status.HTTP_200_OK)
class UserFollowing(APIView):
def get(self, request, username, format=None):
try:
found_user = models.User.objects.get(username = username)
except models.User.DoesNotExist:
return Response(status = status.HTTP_404_NOT_FOUND)
user_following = found_user.following.all()
serializer = serializers.ListUserSerializer(user_following, many=True)
return Response(data=serializer.data, status = status.HTTP_200_OK)
class Search(APIView):
def get(self, request, format=None):
username = request.query_params.get('username', None)
if username is not None:
user = models.User.objects.filter(username__icontains=username)
serializer = serializers.ListUserSerializer(user, many=True)
return Response(data = serializer.data, status=status.HTTP_200_OK)
else:
return Response(status = status.HTTP_400_BAD_REQUEST)
def UserFollowingFBV(request, username):
if request.method == 'GET':
try:
found_user = models.User.objects.get(username = username)
except models.User.DoesNotExist:
return Response(status = status.HTTP_404_NOT_FOUND)
user_following = found_user.following.all()
serializer = serializers.ListUserSerializer(user_following, many=True)
return Response(data=serializer.data, status=status.HTTP_200_OK)
class ChangePassword(APIView):
def put(self, request, username, format= None):
user = request.user
current_password = request.data.get('current_password', None)
if user.username == username:
if current_password is not None:
password_match = user.check_password(current_password)
if password_match :
new_password = request.data.get('new_password', None)
if new_password is not None:
user.set_password(new_password)
user.save()
return Response(status = status.HTTP_200_OK)
else:
return Response(status = status.HTTP_400_BAD_REQUEST)
else:
return Response(status = status.HTTP_400_BAD_REQUEST)
else:
return Response(status = status.HTTP_400_BAD_REQUEST)
else:
return Response(status = status.HTTP_401_UNAUTHORIZED)
class FacebookLogin(SocialLoginView):
adapter_class = FacebookOAuth2Adapter
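# A possible URL wiring for the views above (illustrative; route names and
# patterns are assumptions, not taken from this repo):
# urlpatterns = [
#     path("explore/", views.ExploreUsers.as_view()),
#     path("<int:user_id>/follow/", views.FollowUser.as_view()),
#     path("<int:user_id>/unfollow/", views.UnFollowUser.as_view()),
#     path("search/", views.Search.as_view()),
#     path("<username>/", views.UserProfile.as_view()),
#     path("<username>/followers/", views.UserFollowers.as_view()),
#     path("<username>/following/", views.UserFollowing.as_view()),
#     path("<username>/password/", views.ChangePassword.as_view()),
# ]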
| 28.070093
| 89
| 0.659897
|
cb20a4cd4d68ac67cce45932e54440311acc70f8
| 2,769
|
py
|
Python
|
gaphor/core/modeling/tests/test_presentation.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
gaphor/core/modeling/tests/test_presentation.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
gaphor/core/modeling/tests/test_presentation.py
|
MartinIIOT/gaphor
|
b08bf6ddb8c92ec87fccabc2ddee697609f73e67
|
[
"Apache-2.0"
] | null | null | null |
from gaphas.item import Item
from gaphor.core.eventmanager import event_handler
from gaphor.core.modeling.diagram import Diagram
from gaphor.core.modeling.event import ElementDeleted
from gaphor.core.modeling.presentation import Presentation
from gaphor.tests.raises import raises_exception_group
class Example(Presentation, Item):
pass
def test_presentation_implements_item_protocol(diagram):
presentation = diagram.create(Example)
assert isinstance(presentation, Item)
def test_presentation_should_have_a_diagram(diagram):
presentation = diagram.create(Example)
assert presentation.diagram is diagram
def test_should_emit_event_when_unlinked(diagram, event_manager):
presentation = diagram.create(Example)
events = []
@event_handler(ElementDeleted)
def handler(event):
events.append(event)
event_manager.subscribe(handler)
presentation.unlink()
assert events
assert events[0].diagram is diagram
assert events[0].element is presentation
def test_presentation_can_not_set_new_diagram(diagram, element_factory):
presentation = diagram.create(Example)
new_diagram = element_factory.create(Diagram)
with raises_exception_group(ValueError):
presentation.diagram = new_diagram
assert presentation.diagram is diagram
def test_matrix_i2c_updates_when_matrix_changes(diagram):
presentation = diagram.create(Example)
presentation.matrix.translate(1, 1)
assert tuple(presentation.matrix_i2c) == (1, 0, 0, 1, 1, 1)
def test_parent_matrix_updates(diagram):
parent = diagram.create(Example)
presentation = diagram.create(Example)
presentation.change_parent(parent)
parent.matrix.scale(2, 2)
assert tuple(presentation.matrix_i2c) == (2, 0, 0, 2, 0, 0)
def test_set_parent_updates_matrix_i2c(diagram):
parent = diagram.create(Example)
presentation = diagram.create(Example)
parent.matrix.scale(2, 2)
presentation.change_parent(parent)
assert tuple(presentation.matrix_i2c) == (1, 0, 0, 1, 0, 0)
def test_unset_parent_updates_matrix_i2c(diagram):
parent = diagram.create(Example)
presentation = diagram.create(Example)
parent.matrix.scale(2, 2)
presentation.change_parent(parent)
presentation.change_parent(None)
assert tuple(presentation.matrix_i2c) == (1, 0, 0, 1, 0, 0)
def test_change_parent_updates_matrix_i2c_and_keeps_coordinates(diagram):
parent = diagram.create(Example)
new_parent = diagram.create(Example)
presentation = diagram.create(Example)
parent.matrix.scale(2, 2)
new_parent.matrix.translate(2, 2)
presentation.change_parent(parent)
presentation.change_parent(new_parent)
assert tuple(presentation.matrix_i2c) == (1, 0, 0, 1, 0, 0)
| 26.883495
| 73
| 0.75623
|
ec6c8ac0abc2af034aec052aa3852abf902f6730
| 313
|
py
|
Python
|
Zadanie_1/Program.py
|
tamaranesterenko/Python_LR_1-2
|
af18beac35150041f57e8839e4728db35fd7ee1e
|
[
"MIT"
] | null | null | null |
Zadanie_1/Program.py
|
tamaranesterenko/Python_LR_1-2
|
af18beac35150041f57e8839e4728db35fd7ee1e
|
[
"MIT"
] | null | null | null |
Zadanie_1/Program.py
|
tamaranesterenko/Python_LR_1-2
|
af18beac35150041f57e8839e4728db35fd7ee1e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if __name__ == "__main__":
    x = input("Enter a string: ")
    c = input("Enter the character to replace: ")
    h = input("Enter the character to replace it with: ")
import fun1 as f
rep = f.fun1(h, c, x)
print(rep)
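# Note: fun1 is not shown here; from the call fun1(h, c, x) it presumably
# returns x with every occurrence of c replaced by h, i.e. roughly
# x.replace(c, h) (an assumption, not verified against Zadanie_1/fun1.py).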
| 20.866667
| 58
| 0.571885
|
8069f90d4443287096f3263fdb214f2d378a7597
| 43,696
|
py
|
Python
|
src/sage/schemes/plane_conics/con_field.py
|
drvinceknight/sage
|
00199fb220aa173d8585b9e90654dafd3247d82d
|
[
"BSL-1.0"
] | 2
|
2015-08-11T05:05:47.000Z
|
2019-05-15T17:27:25.000Z
|
src/sage/schemes/plane_conics/con_field.py
|
kaushik94/sage
|
00199fb220aa173d8585b9e90654dafd3247d82d
|
[
"BSL-1.0"
] | null | null | null |
src/sage/schemes/plane_conics/con_field.py
|
kaushik94/sage
|
00199fb220aa173d8585b9e90654dafd3247d82d
|
[
"BSL-1.0"
] | 1
|
2020-07-24T11:56:55.000Z
|
2020-07-24T11:56:55.000Z
|
r"""
Projective plane conics over a field
AUTHORS:
- Marco Streng (2010-07-20)
- Nick Alexander (2008-01-08)
"""
#*****************************************************************************
# Copyright (C) 2008 Nick Alexander <ncalexander@gmail.com>
# Copyright (C) 2009/2010 Marco Streng <marco.streng@gmail.com>
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.rings.all import PolynomialRing
from sage.rings.complex_field import is_ComplexField
from sage.rings.real_mpfr import is_RealField
from sage.modules.free_module_element import vector
from sage.structure.sequence import Sequence
from sage.structure.element import is_Vector
from sage.schemes.projective.projective_space import ProjectiveSpace
from sage.matrix.constructor import Matrix
from sage.matrix.matrix import is_Matrix
from sage.schemes.plane_curves.projective_curve import ProjectiveCurve_generic
from sage.categories.fields import Fields
_Fields = Fields()
class ProjectiveConic_field(ProjectiveCurve_generic):
r"""
Create a projective plane conic curve over a field.
See ``Conic`` for full documentation.
EXAMPLES::
sage: K = FractionField(PolynomialRing(QQ, 't'))
sage: P.<X, Y, Z> = K[]
sage: Conic(X^2 + Y^2 - Z^2)
Projective Conic Curve over Fraction Field of Univariate Polynomial Ring in t over Rational Field defined by X^2 + Y^2 - Z^2
TESTS::
sage: K = FractionField(PolynomialRing(QQ, 't'))
sage: Conic([K(1), 1, -1])._test_pickling()
"""
def __init__(self, A, f):
r"""
See ``Conic`` for full documentation.
EXAMPLES:
::
sage: c = Conic([1, 1, 1]); c
Projective Conic Curve over Rational Field defined by x^2 + y^2 + z^2
"""
ProjectiveCurve_generic.__init__(self, A, f)
self._coefficients = [f[(2,0,0)], f[(1,1,0)], f[(1,0,1)],
f[(0,2,0)], f[(0,1,1)], f[(0,0,2)]]
self._parametrization = None
self._diagonal_matrix = None
self._rational_point = None
def _repr_type(self):
r"""
Returns ``'Projective Conic'``, which is the first part of the
plain text representation of this object as output by
the function ``_repr_`` of the class ``Curve_generic``.
EXAMPLES::
sage: c = Conic([1, 1, 1]); c
Projective Conic Curve over Rational Field defined by x^2 + y^2 + z^2
sage: c._repr_()
'Projective Conic Curve over Rational Field defined by x^2 + y^2 + z^2'
sage: c._repr_type()
'Projective Conic'
"""
return "Projective Conic"
def base_extend(self, S):
r"""
Returns the conic over ``S`` given by the same equation as ``self``.
EXAMPLES::
sage: c = Conic([1, 1, 1]); c
Projective Conic Curve over Rational Field defined by x^2 + y^2 + z^2
sage: c.has_rational_point()
False
sage: d = c.base_extend(QuadraticField(-1, 'i')); d
Projective Conic Curve over Number Field in i with defining polynomial x^2 + 1 defined by x^2 + y^2 + z^2
sage: d.rational_point(algorithm = 'rnfisnorm')
(i : 1 : 0)
"""
if S in _Fields:
B = self.base_ring()
if B == S:
return self
if not S.has_coerce_map_from(B):
raise ValueError("No natural map from the base ring of self " \
"(= %s) to S (= %s)" % (self, S))
from constructor import Conic
con = Conic([S(c) for c in self.coefficients()], \
self.variable_names())
if self._rational_point is not None:
pt = [S(c) for c in Sequence(self._rational_point)]
if not pt == [0,0,0]:
# The following line stores the point in the cache
# if (and only if) there is no point in the cache.
pt = con.point(pt)
return con
return ProjectiveCurve_generic.base_extend(self, S)
def cache_point(self, p):
r"""
Replace the point in the cache of ``self`` by ``p`` for use
by ``self.rational_point()`` and ``self.parametrization()``.
EXAMPLES::
sage: c = Conic([1, -1, 1])
sage: c.point([15, 17, 8])
(15/8 : 17/8 : 1)
sage: c.rational_point()
(15/8 : 17/8 : 1)
sage: c.cache_point(c.rational_point(read_cache = False))
sage: c.rational_point()
(-1 : 1 : 0)
"""
if isinstance(p, (tuple, list)):
p = self.point(p)
self._rational_point = p
def coefficients(self):
r"""
        Gives the `6` coefficients of the conic ``self``
        in lexicographic order.
EXAMPLES::
sage: Conic(QQ, [1,2,3,4,5,6]).coefficients()
[1, 2, 3, 4, 5, 6]
sage: P.<x,y,z> = GF(13)[]
sage: a = Conic(x^2+5*x*y+y^2+z^2).coefficients(); a
[1, 5, 0, 1, 0, 1]
sage: Conic(a)
Projective Conic Curve over Finite Field of size 13 defined by x^2 + 5*x*y + y^2 + z^2
"""
return self._coefficients
def derivative_matrix(self):
r"""
Gives the derivative of the defining polynomial of
the conic ``self``, which is a linear map,
as a `3 \times 3` matrix.
EXAMPLES:
In characteristic different from `2`, the
derivative matrix is twice the symmetric matrix:
::
sage: c = Conic(QQ, [1,1,1,1,1,0])
sage: c.symmetric_matrix()
[ 1 1/2 1/2]
[1/2 1 1/2]
[1/2 1/2 0]
sage: c.derivative_matrix()
[2 1 1]
[1 2 1]
[1 1 0]
An example in characteristic `2`:
::
sage: P.<t> = GF(2)[]
sage: c = Conic([t, 1, t^2, 1, 1, 0]); c
Projective Conic Curve over Fraction Field of Univariate Polynomial Ring in t over Finite Field of size 2 (using NTL) defined by t*x^2 + x*y + y^2 + t^2*x*z + y*z
sage: c.is_smooth()
True
sage: c.derivative_matrix()
[ 0 1 t^2]
[ 1 0 1]
[t^2 1 0]
"""
from sage.matrix.constructor import matrix
[a,b,c,d,e,f] = self.coefficients()
return matrix([[ 2*a , b , c ],
[ b , 2*d , e ],
[ c , e , 2*f ]])
def determinant(self):
r"""
Returns the determinant of the symmetric matrix that defines
the conic ``self``.
This is defined only if the base field has characteristic
different from `2`.
EXAMPLES:
::
sage: C = Conic([1,2,3,4,5,6])
sage: C.determinant()
41/4
sage: C.symmetric_matrix().determinant()
41/4
Determinants are only defined in characteristic different from `2`::
sage: C = Conic(GF(2), [1, 1, 1, 1, 1, 0])
sage: C.is_smooth()
True
sage: C.determinant()
Traceback (most recent call last):
...
ValueError: The conic self (= Projective Conic Curve over Finite Field of size 2 defined by x^2 + x*y + y^2 + x*z + y*z) has no symmetric matrix because the base field has characteristic 2
"""
return self.symmetric_matrix().determinant()
def diagonal_matrix(self):
r"""
Returns a diagonal matrix `D` and a matrix `T` such that `T^t A T = D`
holds, where `(x, y, z) A (x, y, z)^t` is the defining polynomial
of the conic ``self``.
EXAMPLES:
::
sage: c = Conic(QQ, [1,2,3,4,5,6])
sage: d, t = c.diagonal_matrix(); d, t
(
[ 1 0 0] [ 1 -1 -7/6]
[ 0 3 0] [ 0 1 -1/3]
[ 0 0 41/12], [ 0 0 1]
)
sage: t.transpose()*c.symmetric_matrix()*t
[ 1 0 0]
[ 0 3 0]
[ 0 0 41/12]
Diagonal matrices are only defined in characteristic different
from `2`:
::
sage: c = Conic(GF(4, 'a'), [0, 1, 1, 1, 1, 1])
sage: c.is_smooth()
True
sage: c.diagonal_matrix()
Traceback (most recent call last):
...
ValueError: The conic self (= Projective Conic Curve over Finite Field in a of size 2^2 defined by x*y + y^2 + x*z + y*z + z^2) has no symmetric matrix because the base field has characteristic 2
"""
A = self.symmetric_matrix()
B = self.base_ring()
basis = [vector(B,{2:0,i:1}) for i in range(3)]
for i in range(3):
            zerovalue = (basis[i]*A*basis[i].column() == 0)
if zerovalue:
for j in range(i+1,3):
if basis[j]*A*basis[j].column() != 0:
b = basis[i]
basis[i] = basis[j]
basis[j] = b
zerovalue = False
if zerovalue:
for j in range(i+1,3):
if basis[i]*A*basis[j].column() != 0:
basis[i] = basis[i]+basis[j]
zerovalue = False
if not zerovalue:
l = (basis[i]*A*basis[i].column())
for j in range(i+1,3):
basis[j] = basis[j] - \
(basis[i]*A*basis[j].column())/l * basis[i]
T = Matrix(basis).transpose()
return T.transpose()*A*T, T
def diagonalization(self,names = None):
r"""
Returns a diagonal conic `C`, an isomorphism of schemes `M: C` -> ``self``
and the inverse `N` of `M`.
EXAMPLES::
sage: Conic(GF(5), [1,0,1,1,0,1]).diagonalization()
(Projective Conic Curve over Finite Field of size 5 defined by x^2 + y^2 + 2*z^2,
Scheme morphism:
From: Projective Conic Curve over Finite Field of size 5 defined by x^2 + y^2 + 2*z^2
To: Projective Conic Curve over Finite Field of size 5 defined by x^2 + y^2 + x*z + z^2
Defn: Defined on coordinates by sending (x : y : z) to
(x + 2*z : y : z),
Scheme morphism:
From: Projective Conic Curve over Finite Field of size 5 defined by x^2 + y^2 + x*z + z^2
To: Projective Conic Curve over Finite Field of size 5 defined by x^2 + y^2 + 2*z^2
Defn: Defined on coordinates by sending (x : y : z) to
(x - 2*z : y : z))
The diagonalization is only defined in characteristic different
from 2:
::
sage: Conic(GF(2), [1,1,1,1,1,0]).diagonalization()
Traceback (most recent call last):
...
ValueError: The conic self (= Projective Conic Curve over Finite Field of size 2 defined by x^2 + x*y + y^2 + x*z + y*z) has no symmetric matrix because the base field has characteristic 2
"""
if names is None:
names = self.defining_polynomial().parent().variable_names()
from constructor import Conic
D, T = self.diagonal_matrix()
con = Conic(D, names = names)
return con, con.hom(T, self), self.hom(T.inverse(), con)
def gens(self):
r"""
Returns the generators of the coordinate ring of ``self``.
EXAMPLES:
::
sage: P.<x,y,z> = QQ[]
sage: c = Conic(x^2+y^2+z^2)
sage: c.gens()
(xbar, ybar, zbar)
sage: c.defining_polynomial()(c.gens())
0
The function ``gens()`` is required for the following construction:
::
sage: C.<a,b,c> = Conic(GF(3), [1, 1, 1])
sage: C
Projective Conic Curve over Finite Field of size 3 defined by a^2 + b^2 + c^2
"""
return self.coordinate_ring().gens()
def has_rational_point(self, point = False,
algorithm = 'default', read_cache = True):
r"""
Returns True if and only if the conic ``self``
has a point over its base field `B`.
If ``point`` is True, then returns a second output, which is
a rational point if one exists.
Points are cached whenever they are found. Cached information
is used if and only if ``read_cache`` is True.
ALGORITHM:
The parameter ``algorithm`` specifies the algorithm
to be used:
- ``'default'`` -- If the base field is real or complex,
use an elementary native Sage implementation.
- ``'magma'`` (requires Magma to be installed) --
delegates the task to the Magma computer algebra
system.
EXAMPLES:
sage: Conic(RR, [1, 1, 1]).has_rational_point()
False
sage: Conic(CC, [1, 1, 1]).has_rational_point()
True
sage: Conic(RR, [1, 2, -3]).has_rational_point(point = True)
(True, (1.73205080756888 : 0.000000000000000 : 1.00000000000000))
        Conics over polynomial rings cannot be solved yet without Magma::
sage: R.<t> = QQ[]
sage: C = Conic([-2,t^2+1,t^2-1])
sage: C.has_rational_point()
Traceback (most recent call last):
...
NotImplementedError: has_rational_point not implemented for conics over base field Fraction Field of Univariate Polynomial Ring in t over Rational Field
But they can be solved with Magma::
sage: C.has_rational_point(algorithm='magma') # optional - magma
True
sage: C.has_rational_point(algorithm='magma', point=True) # optional - magma
(True, (t : 1 : 1))
sage: D = Conic([t,1,t^2])
sage: D.has_rational_point(algorithm='magma') # optional - magma
False
TESTS:
One of the following fields comes with an embedding into the complex
numbers, one does not. Check that they are both handled correctly by
the Magma interface. ::
sage: K.<i> = QuadraticField(-1)
sage: K.coerce_embedding()
Generic morphism:
From: Number Field in i with defining polynomial x^2 + 1
To: Complex Lazy Field
Defn: i -> 1*I
sage: Conic(K, [1,1,1]).rational_point(algorithm='magma') # optional - magma
(-i : 1 : 0)
sage: x = QQ['x'].gen()
sage: L.<i> = NumberField(x^2+1, embedding=None)
sage: Conic(L, [1,1,1]).rational_point(algorithm='magma') # optional - magma
(-i : 1 : 0)
sage: L == K
False
"""
if read_cache:
if self._rational_point is not None:
if point:
return True, self._rational_point
else:
return True
B = self.base_ring()
if algorithm == 'magma':
from sage.interfaces.magma import magma
M = magma(self)
b = M.HasRationalPoint().sage()
if not point:
return b
if not b:
return False, None
M_pt = M.HasRationalPoint(nvals=2)[1]
# Various attempts will be made to convert `pt` to
# a Sage object. The end result will always be checked
# by self.point().
pt = [M_pt[1], M_pt[2], M_pt[3]]
# The first attempt is to use sequences. This is efficient and
# succeeds in cases where the Magma interface fails to convert
# number field elements, because embeddings between number fields
# may be lost on conversion to and from Magma.
# This should deal with all absolute number fields.
try:
return True, self.point([B(c.Eltseq().sage()) for c in pt])
except TypeError:
pass
# The second attempt tries to split Magma elements into
            # numerators and denominators first. This is necessary
# for the field of rational functions, because (at the moment of
# writing) fraction field elements are not converted automatically
# from Magma to Sage.
try:
return True, self.point( \
[B(c.Numerator().sage()/c.Denominator().sage()) for c in pt])
except (TypeError, NameError):
pass
# Finally, let the Magma interface handle conversion.
try:
return True, self.point([B(c.sage()) for c in pt])
except (TypeError, NameError):
pass
raise NotImplementedError("No correct conversion implemented for converting the Magma point %s on %s to a correct Sage point on self (=%s)" % (M_pt, M, self))
if algorithm != 'default':
raise ValueError("Unknown algorithm: %s" % algorithm)
if is_ComplexField(B):
if point:
[_,_,_,d,e,f] = self._coefficients
if d == 0:
return True, self.point([0,1,0])
return True, self.point([0, ((e**2-4*d*f).sqrt()-e)/(2*d), 1],
check = False)
return True
if is_RealField(B):
D, T = self.diagonal_matrix()
[a, b, c] = [D[0,0], D[1,1], D[2,2]]
if a == 0:
ret = True, self.point(T*vector([1,0,0]), check = False)
elif a*c <= 0:
ret = True, self.point(T*vector([(-c/a).sqrt(),0,1]),
check = False)
elif b == 0:
ret = True, self.point(T*vector([0,1,0]), check = False)
elif b*c <= 0:
                ret = True, self.point(T*vector([0,(-c/b).sqrt(),1]),
check = False)
else:
ret = False, None
if point:
return ret
return ret[0]
raise NotImplementedError("has_rational_point not implemented for " \
"conics over base field %s" % B)
def has_singular_point(self, point = False):
r"""
Return True if and only if the conic ``self`` has a rational
singular point.
If ``point`` is True, then also return a rational singular
point (or ``None`` if no such point exists).
EXAMPLES:
::
sage: c = Conic(QQ, [1,0,1]); c
Projective Conic Curve over Rational Field defined by x^2 + z^2
sage: c.has_singular_point(point = True)
(True, (0 : 1 : 0))
sage: P.<x,y,z> = GF(7)[]
sage: e = Conic((x+y+z)*(x-y+2*z)); e
Projective Conic Curve over Finite Field of size 7 defined by x^2 - y^2 + 3*x*z + y*z + 2*z^2
sage: e.has_singular_point(point = True)
(True, (2 : 4 : 1))
sage: Conic([1, 1, -1]).has_singular_point()
False
sage: Conic([1, 1, -1]).has_singular_point(point = True)
(False, None)
``has_singular_point`` is not implemented over all fields
of characteristic `2`. It is implemented over finite fields.
::
sage: F.<a> = FiniteField(8)
sage: Conic([a, a+1, 1]).has_singular_point(point = True)
(True, (a + 1 : 0 : 1))
sage: P.<t> = GF(2)[]
sage: C = Conic(P, [t,t,1]); C
Projective Conic Curve over Fraction Field of Univariate Polynomial Ring in t over Finite Field of size 2 (using NTL) defined by t*x^2 + t*y^2 + z^2
sage: C.has_singular_point(point = False)
Traceback (most recent call last):
...
NotImplementedError: Sorry, find singular point on conics not implemented over all fields of characteristic 2.
"""
if not point:
ret = self.has_singular_point(point = True)
return ret[0]
B = self.base_ring()
if B.characteristic() == 2:
[a,b,c,d,e,f] = self.coefficients()
if b == 0 and c == 0 and e == 0:
for i in range(3):
if [a, d, f][i] == 0:
return True, self.point(vector(B, {2:0, i:1}))
if hasattr(a/f, 'is_square') and hasattr(a/f, 'sqrt'):
if (a/f).is_square():
return True, self.point([1,0,(a/f).sqrt()])
if (d/f).is_square():
return True, self.point([0,1,(d/f).sqrt()])
raise NotImplementedError("Sorry, find singular point on conics not implemented over all fields of characteristic 2.")
pt = [e, c, b]
if self.defining_polynomial()(pt) == 0:
return True, self.point(pt)
return False, None
D = self.symmetric_matrix()
if D.determinant() == 0:
return True, self.point(Sequence(D.right_kernel().gen()))
return False, None
def hom(self, x, Y=None):
r"""
Return the scheme morphism from ``self`` to ``Y`` defined by ``x``.
Here ``x`` can be a matrix or a sequence of polynomials.
If ``Y`` is omitted, then a natural image is found if possible.
EXAMPLES:
Here are a few Morphisms given by matrices. In the first
example, ``Y`` is omitted, in the second example, ``Y`` is specified.
::
sage: c = Conic([-1, 1, 1])
sage: h = c.hom(Matrix([[1,1,0],[0,1,0],[0,0,1]])); h
Scheme morphism:
From: Projective Conic Curve over Rational Field defined by -x^2 + y^2 + z^2
To: Projective Conic Curve over Rational Field defined by -x^2 + 2*x*y + z^2
Defn: Defined on coordinates by sending (x : y : z) to
(x + y : y : z)
sage: h([-1, 1, 0])
(0 : 1 : 0)
sage: c = Conic([-1, 1, 1])
sage: d = Conic([4, 1, -1])
sage: c.hom(Matrix([[0, 0, 1/2], [0, 1, 0], [1, 0, 0]]), d)
Scheme morphism:
From: Projective Conic Curve over Rational Field defined by -x^2 + y^2 + z^2
To: Projective Conic Curve over Rational Field defined by 4*x^2 + y^2 - z^2
Defn: Defined on coordinates by sending (x : y : z) to
(1/2*z : y : x)
``ValueError`` is raised if the wrong codomain ``Y`` is specified:
::
sage: c = Conic([-1, 1, 1])
sage: c.hom(Matrix([[0, 0, 1/2], [0, 1, 0], [1, 0, 0]]), c)
Traceback (most recent call last):
...
ValueError: The matrix x (= [ 0 0 1/2]
[ 0 1 0]
[ 1 0 0]) does not define a map from self (= Projective Conic Curve over Rational Field defined by -x^2 + y^2 + z^2) to Y (= Projective Conic Curve over Rational Field defined by -x^2 + y^2 + z^2)
"""
if is_Matrix(x):
from constructor import Conic
y = x.inverse()
A = y.transpose()*self.matrix()*y
im = Conic(A)
if Y is None:
Y = im
else:
q = Y.defining_polynomial()/im.defining_polynomial()
if not (q.numerator().is_constant()
and q.denominator().is_constant()):
raise ValueError("The matrix x (= %s) does not define a " \
"map from self (= %s) to Y (= %s)" % \
(x, self, Y))
x = Sequence(x*vector(self.ambient_space().gens()))
return self.Hom(Y)(x, check = False)
return ProjectiveCurve_generic.hom(self, x, Y)
def is_diagonal(self):
r"""
Return True if and only if the conic has the form
`a*x^2 + b*y^2 + c*z^2`.
EXAMPLES:
::
sage: c=Conic([1,1,0,1,0,1]); c
Projective Conic Curve over Rational Field defined by x^2 + x*y + y^2 + z^2
sage: d,t = c.diagonal_matrix()
sage: c.is_diagonal()
False
sage: c.diagonalization()[0].is_diagonal()
True
"""
        return all(self.coefficients()[i] == 0 for i in [1, 2, 4])
def is_smooth(self):
r"""
Returns True if and only if ``self`` is smooth.
EXAMPLES:
::
sage: Conic([1,-1,0]).is_smooth()
False
sage: Conic(GF(2),[1,1,1,1,1,0]).is_smooth()
True
"""
if self.base_ring().characteristic() == 2:
[a,b,c,d,e,f] = self.coefficients()
if b == 0 and c == 0 and e == 0:
return False
return self.defining_polynomial()([e, c, b]) != 0
return self.determinant() != 0
def _magma_init_(self, magma):
"""
Internal function. Returns a string to initialize this
conic in the Magma subsystem.
EXAMPLES::
sage: C = Conic(QQ, [1,2,3])
sage: C._magma_init_(magma) # optional - magma
'Conic([_sage_ref...|1/1,2/1,3/1,0/1,0/1,0/1])'
sage: C = Conic(GF(41), [-1,2,5]) # optional - magma
sage: C._magma_init_(magma) # optional - magma
'Conic([_sage_ref...|GF(41)!40,GF(41)!2,GF(41)!5,GF(41)!0,GF(41)!0,GF(41)!0])'
sage: F.<a> = GF(25)
sage: C = Conic([3,0,1,4,a,2])
sage: C
Projective Conic Curve over Finite Field in a of size 5^2 defined by -2*x^2 - y^2 + x*z + (a)*y*z + 2*z^2
sage: magma(C) # optional - magma
Conic over GF(5^2) defined by
3*X^2 + 4*Y^2 + X*Z + a*Y*Z + 2*Z^2
sage: magma(Conic([1/2,2/3,-4/5,6/7,8/9,-10/11])) # optional - magma
Conic over Rational Field defined by
1/2*X^2 + 2/3*X*Y + 6/7*Y^2 - 4/5*X*Z + 8/9*Y*Z - 10/11*Z^2
sage: R.<x> = Frac(QQ['x'])
sage: magma(Conic([x,1+x,1-x])) # optional - magma
Conic over Univariate rational function field over Rational Field defined by
x*X^2 + (x + 1)*Y^2 + (-x + 1)*Z^2
sage: P.<x> = QQ[]
sage: K.<b> = NumberField(x^3+x+1)
sage: magma(Conic([b,1,2])) # optional - magma
Conic over Number Field with defining polynomial x^3 + x + 1 over the Rational Field defined by
b*X^2 + Y^2 + 2*Z^2
"""
kmn = magma(self.base_ring())._ref()
coeffs = self.coefficients()
magma_coeffs = [coeffs[i]._magma_init_(magma) for i in [0, 3, 5, 1, 4, 2]]
return 'Conic([%s|%s])' % (kmn,','.join(magma_coeffs))
def matrix(self):
r"""
Returns a matrix `M` such that `(x, y, z) M (x, y, z)^t`
is the defining equation of ``self``.
The matrix `M` is upper triangular if the base field has
characteristic `2` and symmetric otherwise.
EXAMPLES::
sage: R.<x, y, z> = QQ[]
sage: C = Conic(x^2 + x*y + y^2 + z^2)
sage: C.matrix()
[ 1 1/2 0]
[1/2 1 0]
[ 0 0 1]
sage: R.<x, y, z> = GF(2)[]
sage: C = Conic(x^2 + x*y + y^2 + x*z + z^2)
sage: C.matrix()
[1 1 1]
[0 1 0]
[0 0 1]
"""
if self.base_ring().characteristic() == 2:
return self.upper_triangular_matrix()
return self.symmetric_matrix()
_matrix_ = matrix
def parametrization(self, point=None, morphism=True):
r"""
Return a parametrization `f` of ``self`` together with the
inverse of `f`.
If ``point`` is specified, then that point is used
for the parametrization. Otherwise, use ``self.rational_point()``
to find a point.
If ``morphism`` is True, then `f` is returned in the form
of a Scheme morphism. Otherwise, it is a tuple of polynomials
that gives the parametrization.
EXAMPLES:
An example over a finite field ::
sage: c = Conic(GF(2), [1,1,1,1,1,0])
sage: c.parametrization()
(Scheme morphism:
From: Projective Space of dimension 1 over Finite Field of size 2
To: Projective Conic Curve over Finite Field of size 2 defined by x^2 + x*y
+ y^2 + x*z + y*z
Defn: Defined on coordinates by sending (x : y) to
(x*y + y^2 : x^2 + x*y : x^2 + x*y + y^2),
Scheme morphism:
From: Projective Conic Curve over Finite Field of size 2 defined by x^2 + x*y
+ y^2 + x*z + y*z
To: Projective Space of dimension 1 over Finite Field of size 2
Defn: Defined on coordinates by sending (x : y : z) to
(y : x))
An example with ``morphism = False`` ::
sage: R.<x,y,z> = QQ[]
sage: C = Curve(7*x^2 + 2*y*z + z^2)
sage: (p, i) = C.parametrization(morphism = False); (p, i)
([-2*x*y, 7*x^2 + y^2, -2*y^2], [-1/2*x, -1/2*z])
sage: C.defining_polynomial()(p)
0
sage: i[0](p) / i[1](p)
x/y
A ``ValueError`` is raised if ``self`` has no rational point ::
sage: C = Conic(x^2 + y^2 + 7*z^2)
sage: C.parametrization()
Traceback (most recent call last):
...
ValueError: Conic Projective Conic Curve over Rational Field defined by x^2 + y^2 + 7*z^2 has no rational points over Rational Field!
A ``ValueError`` is raised if ``self`` is not smooth ::
sage: C = Conic(x^2 + y^2)
sage: C.parametrization()
Traceback (most recent call last):
...
ValueError: The conic self (=Projective Conic Curve over Rational Field defined by x^2 + y^2) is not smooth, hence does not have a parametrization.
"""
        if self._parametrization is not None and not point:
par = self._parametrization
else:
if not self.is_smooth():
raise ValueError("The conic self (=%s) is not smooth, hence does not have a parametrization." % self)
if point is None:
point = self.rational_point()
point = Sequence(point)
B = self.base_ring()
Q = PolynomialRing(B, 'x,y')
[x, y] = Q.gens()
gens = self.ambient_space().gens()
P = PolynomialRing(B, 4, ['X', 'Y', 'T0', 'T1'])
[X, Y, T0, T1] = P.gens()
c3 = [j for j in range(2,-1,-1) if point[j] != 0][0]
c1 = [j for j in range(3) if j != c3][0]
c2 = [j for j in range(3) if j != c3 and j != c1][0]
L = [0,0,0]
L[c1] = Y*T1*point[c1] + Y*T0
L[c2] = Y*T1*point[c2] + X*T0
L[c3] = Y*T1*point[c3]
bezout = P(self.defining_polynomial()(L) / T0)
t = [bezout([x,y,0,-1]),bezout([x,y,1,0])]
par = (tuple([Q(p([x,y,t[0],t[1]])/y) for p in L]),
tuple([gens[m]*point[c3]-gens[c3]*point[m]
for m in [c2,c1]]))
if self._parametrization is None:
self._parametrization = par
if not morphism:
return par
P1 = ProjectiveSpace(self.base_ring(), 1, 'x,y')
return P1.hom(par[0],self), self.Hom(P1)(par[1], check = False)
def point(self, v, check=True):
r"""
Constructs a point on ``self`` corresponding to the input ``v``.
If ``check`` is True, then checks if ``v`` defines a valid
point on ``self``.
If no rational point on ``self`` is known yet, then also caches the point
for use by ``self.rational_point()`` and ``self.parametrization()``.
EXAMPLES ::
sage: c = Conic([1, -1, 1])
sage: c.point([15, 17, 8])
(15/8 : 17/8 : 1)
sage: c.rational_point()
(15/8 : 17/8 : 1)
sage: d = Conic([1, -1, 1])
sage: d.rational_point()
(-1 : 1 : 0)
"""
if is_Vector(v):
v = Sequence(v)
p = ProjectiveCurve_generic.point(self, v, check=check)
if self._rational_point is None:
self._rational_point = p
return p
def random_rational_point(self, *args1, **args2):
r"""
Return a random rational point of the conic ``self``.
ALGORITHM:
1. Compute a parametrization `f` of ``self`` using
``self.parametrization()``.
2. Computes a random point `(x:y)` on the projective
line.
3. Output `f(x:y)`.
The coordinates x and y are computed using
``B.random_element``, where ``B`` is the base field of
``self`` and additional arguments to ``random_rational_point``
are passed to ``random_element``.
If the base field is a finite field, then the
output is uniformly distributed over the points of self.
EXAMPLES ::
sage: c = Conic(GF(2), [1,1,1,1,1,0])
sage: [c.random_rational_point() for i in range(10)] # output is random
[(1 : 0 : 1), (1 : 0 : 1), (1 : 0 : 1), (0 : 1 : 1), (1 : 0 : 1), (0 : 0 : 1), (1 : 0 : 1), (1 : 0 : 1), (0 : 0 : 1), (1 : 0 : 1)]
sage: d = Conic(QQ, [1, 1, -1])
sage: d.random_rational_point(den_bound = 1, num_bound = 5) # output is random
(-24/25 : 7/25 : 1)
sage: Conic(QQ, [1, 1, 1]).random_rational_point()
Traceback (most recent call last):
...
ValueError: Conic Projective Conic Curve over Rational Field defined by x^2 + y^2 + z^2 has no rational points over Rational Field!
"""
if not self.is_smooth():
raise NotImplementedError("Sorry, random points not implemented " \
"for non-smooth conics")
par = self.parametrization()
x = 0
y = 0
B = self.base_ring()
while x == 0 and y == 0:
x = B.random_element(*args1, **args2)
y = B.random_element(*args1, **args2)
return par[0]([x,y])
def rational_point(self, algorithm = 'default', read_cache = True):
r"""
Return a point on ``self`` defined over the base field.
Raises ``ValueError`` if no rational point exists.
See ``self.has_rational_point`` for the algorithm used
and for the use of the parameters ``algorithm`` and ``read_cache``.
EXAMPLES:
Examples over `\QQ` ::
sage: R.<x,y,z> = QQ[]
sage: C = Conic(7*x^2 + 2*y*z + z^2)
sage: C.rational_point()
(0 : 1 : 0)
sage: C = Conic(x^2 + 2*y^2 + z^2)
sage: C.rational_point()
Traceback (most recent call last):
...
ValueError: Conic Projective Conic Curve over Rational Field defined by x^2 + 2*y^2 + z^2 has no rational points over Rational Field!
sage: C = Conic(x^2 + y^2 + 7*z^2)
sage: C.rational_point(algorithm = 'rnfisnorm')
Traceback (most recent call last):
...
ValueError: Conic Projective Conic Curve over Rational Field defined by x^2 + y^2 + 7*z^2 has no rational points over Rational Field!
Examples over number fields ::
sage: P.<x> = QQ[]
sage: L.<b> = NumberField(x^3-5)
sage: C = Conic(L, [3, 2, -5])
sage: p = C.rational_point(algorithm = 'rnfisnorm')
sage: p # output is random
(60*b^2 - 196*b + 161 : -120*b^2 - 6*b + 361 : 1)
sage: C.defining_polynomial()(list(p))
0
sage: K.<i> = QuadraticField(-1)
sage: D = Conic(K, [3, 2, 5])
sage: D.rational_point(algorithm = 'rnfisnorm') # output is random
(-3 : 4*i : 1)
sage: L.<s> = QuadraticField(2)
sage: Conic(QQ, [1, 1, -3]).has_rational_point()
False
sage: E = Conic(L, [1, 1, -3])
sage: E.rational_point() # output is random
(-1 : -s : 1)
Currently Magma is better at solving conics over number fields than
Sage, so it helps to use the algorithm 'magma' if Magma is installed::
sage: q = C.rational_point(algorithm = 'magma', read_cache=False) # optional - magma
sage: q # output is random, optional - magma
(-1 : -1 : 1)
sage: C.defining_polynomial()(list(p)) # optional - magma
0
sage: len(str(p)) / len(str(q)) > 2 # optional - magma
True
sage: D.rational_point(algorithm = 'magma', read_cache=False) # random, optional - magma
(1 : 2*i : 1)
sage: E.rational_point(algorithm='magma', read_cache=False) # random, optional - magma
(-s : 1 : 1)
sage: F = Conic([L.gen(), 30, -20])
sage: q = F.rational_point(algorithm='magma') # optional - magma
sage: q # output is random, optional - magma
(-10/7*s + 40/7 : 5/7*s - 6/7 : 1)
sage: p = F.rational_point(read_cache=False)
sage: p # output is random
(788210*s - 1114700 : -171135*s + 242022 : 1)
sage: len(str(p)) > len(str(q)) # optional - magma
True
sage: Conic([L.gen(), 30, -21]).has_rational_point(algorithm='magma') # optional - magma
False
Examples over finite fields ::
sage: F.<a> = FiniteField(7^20)
sage: C = Conic([1, a, -5]); C
Projective Conic Curve over Finite Field in a of size 7^20 defined by x^2 + (a)*y^2 + 2*z^2
sage: C.rational_point() # output is random
(4*a^19 + 5*a^18 + 4*a^17 + a^16 + 6*a^15 + 3*a^13 + 6*a^11 + a^9 + 3*a^8 + 2*a^7 + 4*a^6 + 3*a^5 + 3*a^4 + a^3 + a + 6 : 5*a^18 + a^17 + a^16 + 6*a^15 + 4*a^14 + a^13 + 5*a^12 + 5*a^10 + 2*a^9 + 6*a^8 + 6*a^7 + 6*a^6 + 2*a^4 + 3 : 1)
Examples over `\RR` and `\CC` ::
sage: Conic(CC, [1, 2, 3]).rational_point()
(0 : 1.22474487139159*I : 1)
sage: Conic(RR, [1, 1, 1]).rational_point()
Traceback (most recent call last):
...
ValueError: Conic Projective Conic Curve over Real Field with 53 bits of precision defined by x^2 + y^2 + z^2 has no rational points over Real Field with 53 bits of precision!
"""
bl,pt = self.has_rational_point(point = True, algorithm = algorithm,
read_cache = read_cache)
if bl:
return pt
raise ValueError("Conic %s has no rational points over %s!" % \
(self, self.ambient_space().base_ring()))
def singular_point(self):
r"""
Returns a singular rational point of ``self``
EXAMPLES:
::
sage: Conic(GF(2), [1,1,1,1,1,1]).singular_point()
(1 : 1 : 1)
``ValueError`` is raised if the conic has no rational singular point
::
sage: Conic(QQ, [1,1,1,1,1,1]).singular_point()
Traceback (most recent call last):
...
ValueError: The conic self (= Projective Conic Curve over Rational Field defined by x^2 + x*y + y^2 + x*z + y*z + z^2) has no rational singular point
"""
b = self.has_singular_point(point = True)
if not b[0]:
raise ValueError("The conic self (= %s) has no rational " \
"singular point" % self)
return b[1]
def symmetric_matrix(self):
r"""
The symmetric matrix `M` such that `(x y z) M (x y z)^t`
is the defining equation of ``self``.
EXAMPLES ::
sage: R.<x, y, z> = QQ[]
sage: C = Conic(x^2 + x*y/2 + y^2 + z^2)
sage: C.symmetric_matrix()
[ 1 1/4 0]
[1/4 1 0]
[ 0 0 1]
sage: C = Conic(x^2 + 2*x*y + y^2 + 3*x*z + z^2)
sage: v = vector([x, y, z])
sage: v * C.symmetric_matrix() * v
x^2 + 2*x*y + y^2 + 3*x*z + z^2
"""
        from sage.matrix.constructor import matrix
        [a,b,c,d,e,f] = self.coefficients()
        if self.base_ring().characteristic() == 2:
            if b == 0 and c == 0 and e == 0:
                return matrix([[a,0,0],[0,d,0],[0,0,f]])
            raise ValueError("The conic self (= %s) has no symmetric matrix " \
                             "because the base field has characteristic 2" % \
                             self)
        return matrix([[ a , b/2, c/2 ],
                       [ b/2, d , e/2 ],
                       [ c/2, e/2, f ]])
def upper_triangular_matrix(self):
r"""
The upper-triangular matrix `M` such that `(x y z) M (x y z)^t`
is the defining equation of ``self``.
EXAMPLES::
sage: R.<x, y, z> = QQ[]
sage: C = Conic(x^2 + x*y + y^2 + z^2)
sage: C.upper_triangular_matrix()
[1 1 0]
[0 1 0]
[0 0 1]
sage: C = Conic(x^2 + 2*x*y + y^2 + 3*x*z + z^2)
sage: v = vector([x, y, z])
sage: v * C.upper_triangular_matrix() * v
x^2 + 2*x*y + y^2 + 3*x*z + z^2
"""
from sage.matrix.constructor import matrix
[a,b,c,d,e,f] = self.coefficients()
return matrix([[ a, b, c ],
[ 0, d, e ],
[ 0, 0, f ]])
def variable_names(self):
r"""
Returns the variable names of the defining polynomial
of ``self``.
EXAMPLES:
::
sage: c=Conic([1,1,0,1,0,1], 'x,y,z')
sage: c.variable_names()
('x', 'y', 'z')
sage: c.variable_name()
'x'
The function ``variable_names()`` is required
for the following construction:
::
sage: C.<p,q,r> = Conic(QQ, [1, 1, 1])
sage: C
Projective Conic Curve over Rational Field defined by p^2 + q^2 + r^2
"""
return self.defining_polynomial().parent().variable_names()
| 37.539519
| 246
| 0.500114
|