code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
from flask.blueprints import Blueprint
import logging
from flask_login import login_required, current_user
from waitlist.ts3.connection import send_poke
from flask import jsonify
bp = Blueprint('api_ts3', __name__)
logger = logging.getLogger(__name__)
@bp.route("/test_poke")
@login_required
def test_poke():
    """Send a TS3 test poke to the logged-in user and confirm with HTTP 201."""
    send_poke(current_user.get_eve_name(), "Test Poke")
    # fix: grammatically correct confirmation message ("was send" -> "was sent")
    resp = jsonify(status_code=201, message="Poke was sent!")
    resp.status_code = 201
    return resp
| SpeedProg/eve-inc-waitlist | waitlist/blueprints/api/teamspeak.py | Python | mit | 473 |
from util import hash256, hash_pks
from copy import deepcopy
class AggregationInfo:
    """
    AggregationInfo represents information of how a tree of aggregate
    signatures was created. Different trees will result in different
    signatures, due to exponentiations required for security.
    An AggregationInfo is represented as a map from (message_hash, pk)
    to exponents. When verifying, a verifier will take the signature,
    along with this map, and raise each public key to the correct
    exponent, and multiply the pks together, for identical messages.
    """
    def __init__(self, tree, message_hashes, public_keys):
        # tree: dict mapping (message_hash, public_key) -> integer exponent
        # message_hashes / public_keys: parallel lists kept in sorted
        # (message_hash, public_key) order by the merge constructors below
        self.tree = tree
        self.message_hashes = message_hashes
        self.public_keys = public_keys
    def empty(self):
        # An info with no tree entries carries no aggregation information.
        return not self.tree
    def __eq__(self, other):
        # Equality derived from the total order of __lt__:
        # a == b iff neither a < b nor b < a.
        return not self.__lt__(other) and not other.__lt__(self)
    def __lt__(self, other):
        """
        Compares two AggregationInfo objects, this is necessary for sorting
        them. Comparison is done by comparing (message hash, pk, exponent)
        """
        # Build comparable (message_hash, pk, exponent) triples for both
        # sides, in the order of the stored parallel lists.
        combined = [(self.message_hashes[i], self.public_keys[i],
                     self.tree[(self.message_hashes[i], self.public_keys[i])])
                    for i in range(len(self.public_keys))]
        combined_other = [(other.message_hashes[i], other.public_keys[i],
                           other.tree[(other.message_hashes[i],
                                       other.public_keys[i])])
                          for i in range(len(other.public_keys))]
        # Lexicographic comparison; a strict prefix compares as smaller.
        for i in range(max(len(combined), len(combined_other))):
            if i == len(combined):
                return True
            if i == len(combined_other):
                return False
            if combined[i] < combined_other[i]:
                return True
            if combined_other[i] < combined[i]:
                return False
        return False
    def __str__(self):
        # Render each (message_hash, pk) -> exponent entry, one per pair
        # of lines: "(hash,serialized_pk):" then the hex exponent.
        ret = ""
        for key, value in self.tree.items():
            ret += ("(" + key[0].hex() + "," + key[1].serialize().hex()
                    + "):\n" + hex(value) + "\n")
        return ret
    def __deepcopy__(self, memo):
        # Deep-copy all three components so mutations of the copy never
        # affect the original tree or its parallel lists.
        new_tree = deepcopy(self.tree, memo)
        new_mh = deepcopy(self.message_hashes, memo)
        new_pubkeys = deepcopy(self.public_keys, memo)
        return AggregationInfo(new_tree, new_mh, new_pubkeys)
    @staticmethod
    def from_msg_hash(public_key, message_hash):
        # Base case: a single signature over one message hash gets
        # exponent 1.
        tree = {}
        tree[(message_hash, public_key)] = 1
        return AggregationInfo(tree, [message_hash], [public_key])
    @staticmethod
    def from_msg(pk, message):
        # Convenience wrapper: hash the raw message first.
        return AggregationInfo.from_msg_hash(pk, hash256(message))
    @staticmethod
    def simple_merge_infos(aggregation_infos):
        """
        Infos are just merged together with no addition of exponents,
        since they are disjoint
        """
        new_tree = {}
        for info in aggregation_infos:
            new_tree.update(info.tree)
        # Rebuild the parallel lists in sorted (message_hash, pk) order.
        mh_pubkeys = [k for k, v in new_tree.items()]
        mh_pubkeys.sort()
        message_hashes = [message_hash for (message_hash, public_key)
                          in mh_pubkeys]
        public_keys = [public_key for (message_hash, public_key)
                       in mh_pubkeys]
        return AggregationInfo(new_tree, message_hashes, public_keys)
    @staticmethod
    def secure_merge_infos(colliding_infos):
        """
        Infos are merged together with combination of exponents
        """
        # Groups are sorted by message then pk then exponent
        # Each info object (and all of its exponents) will be
        # exponentiated by one of the Ts
        # NOTE: sorts the caller's list in place.
        colliding_infos.sort()
        sorted_keys = []
        for info in colliding_infos:
            for key, value in info.tree.items():
                sorted_keys.append(key)
        sorted_keys.sort()
        sorted_pks = [public_key for (message_hash, public_key)
                      in sorted_keys]
        computed_Ts = hash_pks(len(colliding_infos), sorted_pks)
        # Group order, exponents can be reduced mod the order
        order = sorted_pks[0].value.ec.n
        new_tree = {}
        for i in range(len(colliding_infos)):
            for key, value in colliding_infos[i].tree.items():
                if key not in new_tree:
                    # This message & pk have not been included yet
                    new_tree[key] = (value * computed_Ts[i]) % order
                else:
                    # This message and pk are already included, so multiply
                    addend = value * computed_Ts[i]
                    new_tree[key] = (new_tree[key] + addend) % order
        mh_pubkeys = [k for k, v in new_tree.items()]
        mh_pubkeys.sort()
        message_hashes = [message_hash for (message_hash, public_key)
                          in mh_pubkeys]
        public_keys = [public_key for (message_hash, public_key)
                       in mh_pubkeys]
        return AggregationInfo(new_tree, message_hashes, public_keys)
    @staticmethod
    def merge_infos(aggregation_infos):
        # Partition infos: messages appearing in more than one info
        # "collide" and need the secure, exponent-combining merge; the
        # rest are disjoint and can be merged directly.
        messages = set()
        colliding_messages = set()
        for info in aggregation_infos:
            messages_local = set()
            for key, value in info.tree.items():
                if key[0] in messages and key[0] not in messages_local:
                    colliding_messages.add(key[0])
                messages.add(key[0])
                messages_local.add(key[0])
        if len(colliding_messages) == 0:
            return AggregationInfo.simple_merge_infos(aggregation_infos)
        colliding_infos = []
        non_colliding_infos = []
        for info in aggregation_infos:
            info_collides = False
            for key, value in info.tree.items():
                if key[0] in colliding_messages:
                    info_collides = True
                    colliding_infos.append(info)
                    break
            if not info_collides:
                non_colliding_infos.append(info)
        # Securely merge the colliding group, then fold that result in
        # with the disjoint infos.
        combined = AggregationInfo.secure_merge_infos(colliding_infos)
        non_colliding_infos.append(combined)
        return AggregationInfo.simple_merge_infos(non_colliding_infos)
"""
Copyright 2018 Chia Network Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
| zcoinofficial/zcoin | src/bls-signatures/python-impl/aggregation_info.py | Python | mit | 6,831 |
import csv
import logging
import ntpath
import re
import sys
from uuid import UUID
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.core.exceptions import ValidationError
from django.core.management.base import CommandError
from django.utils import translation
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy
from kolibri.core.auth.constants import role_kinds
from kolibri.core.auth.constants.collection_kinds import CLASSROOM
from kolibri.core.auth.constants.demographics import choices
from kolibri.core.auth.constants.demographics import DEFERRED
from kolibri.core.auth.models import Classroom
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.models import Membership
from kolibri.core.tasks.management.commands.base import AsyncCommand
from kolibri.core.tasks.utils import get_current_job
from kolibri.core.utils.csv import open_csv_for_reading
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
logger = logging.getLogger(__name__)
# TODO: decide whether these should be internationalized
fieldnames = (
"UUID",
"USERNAME",
"PASSWORD",
"FULL_NAME",
"USER_TYPE",
"IDENTIFIER",
"BIRTH_YEAR",
"GENDER",
"ENROLLED_IN",
"ASSIGNED_TO",
)
# These constants must be entered vertbatim in the CSV
roles_map = {
"LEARNER": None,
"ADMIN": role_kinds.ADMIN,
"FACILITY_COACH": role_kinds.COACH,
"CLASS_COACH": role_kinds.ASSIGNABLE_COACH,
}
# Error messages ###
UNEXPECTED_EXCEPTION = 0
TOO_LONG = 1
INVALID = 2
DUPLICATED_USERNAME = 3
INVALID_USERNAME = 4
REQUIRED_COLUMN = 5
INVALID_HEADER = 6
NO_FACILITY = 7
FILE_READ_ERROR = 8
FILE_WRITE_ERROR = 9
REQUIRED_PASSWORD = 10
NON_EXISTENT_UUID = 11
INVALID_UUID = 12
MESSAGES = {
UNEXPECTED_EXCEPTION: pgettext_lazy(
"Error message that might appear when there's a programming error importing a CSV file",
"Unexpected error [{}]: {}",
),
TOO_LONG: pgettext_lazy(
"Error when the command is executed in the Terminal (command prompt)",
"Value in column '{}' has too many characters",
),
INVALID: _("Invalid value in column '{}'"),
DUPLICATED_USERNAME: _("Username is duplicated"),
INVALID_USERNAME: _(
"Username only can contain characters, numbers and underscores"
),
REQUIRED_COLUMN: pgettext_lazy(
"Error message indicating that the CSV file selected for import is missing a required column",
"The column '{}' is required",
),
INVALID_HEADER: pgettext_lazy(
"Error message indicating that one column header in the CSV file selected for import is missing or incorrect",
"Invalid header label found in the first row",
),
NO_FACILITY: pgettext_lazy(
"Error when the command is executed in the Terminal (command prompt)",
"No default facility exists. Make sure to set up a facility on the device before importing users and classes",
),
FILE_READ_ERROR: _("Error trying to read csv file: {}"),
FILE_WRITE_ERROR: _("Error trying to write csv file: {}"),
REQUIRED_PASSWORD: _(
"The password field is required. To leave the password unchanged in existing users, insert an asterisk (*)"
),
NON_EXISTENT_UUID: _(
"Cannot update user with ID: '{}' because no user with that database ID exists in this facility"
),
INVALID_UUID: _("Database ID is not valid"),
}
# Validators ###
def number_range(min, max, allow_null=False):
    """
    Build a validator which raises ValueError when the supplied value,
    interpreted as an integer, falls outside the closed range [min, max].
    Falsy values are accepted (as None) when allow_null is True; DEFERRED
    values pass through unchanged.
    """
    def checker(v):
        if v == DEFERRED:
            return checker
        if allow_null and not v:
            return None
        if v is None:
            raise ValueError(v)
        number = int(v)
        if number < min or number > max:
            raise ValueError(v)
    return checker
def not_empty():
    """
    Build a validator which raises ValueError when the supplied value is
    None or has zero length.
    """
    def checker(v):
        # None is rejected first so len() is never applied to it.
        if v is None or len(v) == 0:
            raise ValueError(v)
    return checker
def value_length(length, allow_null=False, multiple=False):
    """
    Build a validator which raises ValueError when the supplied value (or,
    with multiple=True, any of its comma-separated parts) is None or longer
    than `length`.  None is accepted when allow_null is True; DEFERRED
    values pass through unchanged.
    """
    def check_single_value(v):
        # Shared length test for both the single- and multi-value cases.
        if v is None or len(v) > length:
            raise ValueError(v)
    def checker(v):
        if v == DEFERRED:
            return checker
        if allow_null and v is None:
            return None
        parts = v.split(",") if multiple else [v]
        for part in parts:
            check_single_value(part)
    return checker
def valid_uuid(allow_null=True):
    """
    Build a validator which raises ValueError when the supplied value is
    not a parseable UUID.  None and the empty string are accepted when
    allow_null is True.
    """
    def checker(v):
        if allow_null and v in (None, ""):
            return None
        try:
            # Accessing .version forces full parsing of the candidate.
            UUID(v).version
        except (ValueError, TypeError):
            raise ValueError(v)
    return checker
def enumeration(*args):
    """
    Build a case-insensitive membership validator.  With several arguments
    the value must match one of them; with a single argument, that argument
    itself defines the membership test (note: for a single string this is
    a substring test via ``in``).  DEFERRED values pass through unchanged.
    """
    # assume a lone argument defines the membership on its own
    members = args[0].lower() if len(args) == 1 else tuple(map(str.lower, args))
    def checker(value):
        if value == DEFERRED:
            return checker
        if value.lower() not in members:
            raise ValueError(value)
    return checker
def valid_name(username=True, allow_null=False):
    """
    Build a validator which raises ValueError when the supplied value
    contains forbidden punctuation.  The username pattern is the strictest;
    with username=False, spaces, hyphens and commas are tolerated.
    None is accepted when allow_null is True.
    """
    username_punc = "[\s`~!@#$%^&*()\-+={}\[\]\|\\\/:;\"'<>,\.\?]"  # noqa
    relaxed_punc = "[`~!@#$%^&*()\+={}\[\]\|\\\/:;\"'<>\.\?]"  # noqa
    def checker(v):
        if allow_null and v is None:
            return checker
        if v is None:
            raise ValueError(v)
        pattern = username_punc if username else relaxed_punc
        if re.search(pattern, v):
            raise ValueError(v)
    return checker
def reverse_dict(original):
    """
    Invert a dictionary: every value of `original` becomes a key in the
    result, mapped to the list of original keys that pointed at it.
    List values are expanded so each element becomes its own key.

    `original` - dict whose values are hashable scalars or lists of them
    Returns a dict mapping value -> list of original keys
    """
    final = {}
    for key, value in original.items():
        # isinstance (not `type(...) ==`) is the idiomatic type test and
        # also covers list subclasses
        values = value if isinstance(value, list) else [value]
        for v in values:
            final.setdefault(v, []).append(key)
    return final
class Validator(object):
    """
    Class to apply different validation checks on a CSV data reader.

    While validating it also accumulates the users, classrooms (enrolled
    and assigned-to-coach) and facility roles found in the rows.
    """
    def __init__(self, header_translation):
        # header_translation maps neutral column names (e.g. "USERNAME")
        # to the actual header labels of the CSV file being read.
        self._checks = list()
        self.classrooms = dict()
        self.coach_classrooms = dict()
        self.users = dict()
        self.header_translation = header_translation
        self.roles = {r: list() for r in roles_map.values() if r is not None}
    def add_check(self, header_name, check, message):
        """
        Add a header check, i.e., check whether the header record is consistent
        with the expected field names.
        `header_name` - name of the header for the column to be checked
        `check`- function to be used as validator of the values in the column
        `message` - problem message to report if a value is not valid
        """
        self._checks.append((header_name, check, message))
    def get_username(self, row):
        # Return the row's username, or None when it was already seen
        # (duplicates are reported as errors by validate()).
        username = row.get(self.header_translation["USERNAME"])
        if username in self.users.keys():
            return None
        return username
    def check_classroom(self, row, username):
        """
        Record the classes this user is enrolled in and, for non-learners,
        the classes they are assigned to coach plus their facility role.
        """
        def append_users(class_list, key):
            # Add username under every class named in the row's `key`
            # column, matching already-seen class names case-insensitively.
            class_list_normalized = {c.lower(): c for c in class_list.keys()}
            try:
                classes_list = [c.strip() for c in row.get(key, None).split(",")]
                for classroom in classes_list:
                    if not classroom:
                        continue
                    if classroom.lower() in class_list_normalized:
                        classroom_real_name = class_list_normalized[classroom.lower()]
                        class_list[classroom_real_name].append(username)
                    else:
                        class_list[classroom] = [username]
                        class_list_normalized[classroom.lower()] = classroom
            except AttributeError:
                # there are not members of 'key'
                pass
        # enrolled learners:
        append_users(self.classrooms, self.header_translation["ENROLLED_IN"])
        # assigned coaches
        user_role = row.get(self.header_translation["USER_TYPE"], "LEARNER").upper()
        if user_role != "LEARNER":
            # a student can't be assigned to coach a classroom
            append_users(self.coach_classrooms, self.header_translation["ASSIGNED_TO"])
            self.roles[roles_map[user_role]].append(username)
    def validate(self, data):
        """
        Validate `data` and return an iterator over errors found.
        """
        for index, row in enumerate(data):
            error_flag = False
            username = self.get_username(row)
            if not username:
                error = {
                    "row": index + 1,
                    "message": MESSAGES[DUPLICATED_USERNAME],
                    "field": "USERNAME",
                    "value": row.get(self.header_translation["USERNAME"]),
                }
                error_flag = True
                yield error
            # Run every registered column check against this row.
            for header_name, check, message in self._checks:
                value = row[self.header_translation[header_name]]
                try:
                    check(value)
                except ValueError:
                    error = {
                        "row": index + 1,
                        "message": message,
                        "field": header_name,
                        "value": value,
                    }
                    error_flag = True
                    yield error
                except Exception as e:
                    # Unexpected checker failure: report it as an error row
                    # rather than aborting the whole validation run.
                    error = {
                        "row": index + 1,
                        "message": MESSAGES[UNEXPECTED_EXCEPTION].format(
                            (e.__class__.__name__, e)
                        ),
                        "field": header_name,
                        "value": value,
                    }
                    error_flag = True
                    yield error
            # if there aren't any errors, let's add the user and classes
            if not error_flag:
                self.check_classroom(row, username)
                row["position"] = index + 1
                self.users[username] = row
class Command(AsyncCommand):
    """
    Async management command that bulk-imports users and classes from a CSV
    file into a facility: validates headers and row values, creates/updates
    users and classrooms, sets memberships and roles, and (with --delete)
    removes users not present in the file.
    """
    def add_arguments(self, parser):
        # Register the CLI options for this command.
        parser.add_argument(
            "filepath", action="store", type=str, help="Path to CSV file."
        )
        parser.add_argument(
            "--facility",
            action="store",
            type=str,
            help="Facility id to import the users into",
        )
        parser.add_argument(
            "--dryrun",
            action="store_true",
            help="Validate data without doing actual database updates",
        )
        parser.add_argument(
            "--delete",
            action="store_true",
            help="Delete all users in the facility not included in this import (excepting actual user)",
        )
        parser.add_argument(
            "--userid",
            action="store",
            type=str,
            default=None,
            help="Id of the user executing the command, it will not be deleted in case deleted is set",
        )
        parser.add_argument(
            "--locale",
            action="store",
            type=str,
            default=None,
            help="Code of the language for the messages to be translated",
        )
        parser.add_argument(
            "--errorlines",
            action="store",
            type=str,
            default=None,
            help="File to store errors output (to be used in internal tests only)",
        )
    def csv_values_validation(self, reader, header_translation):
        """
        Run per-row validation on the CSV `reader` using a Validator with
        all the column checks registered.

        Returns a tuple of:
        - list of per-line error dicts
        - (enrolled_classes, coach_classes) dictionaries
        - validated users dict (username -> row)
        - roles dict (role kind -> usernames)
        """
        per_line_errors = []
        validator = Validator(header_translation)
        validator.add_check("UUID", valid_uuid(), MESSAGES[INVALID_UUID])
        validator.add_check(
            "FULL_NAME", value_length(125), MESSAGES[TOO_LONG].format("FULL_NAME")
        )
        validator.add_check(
            "BIRTH_YEAR",
            number_range(1900, 99999, allow_null=True),
            MESSAGES[INVALID].format("BIRTH_YEAR"),
        )
        validator.add_check(
            "USERNAME", value_length(125), MESSAGES[TOO_LONG].format("USERNAME")
        )
        validator.add_check("USERNAME", valid_name(), MESSAGES[INVALID_USERNAME])
        validator.add_check(
            "USERNAME", not_empty(), MESSAGES[REQUIRED_COLUMN].format("USERNAME")
        )
        validator.add_check(
            "PASSWORD", value_length(128), MESSAGES[TOO_LONG].format("PASSWORD")
        )
        validator.add_check("PASSWORD", not_empty(), MESSAGES[REQUIRED_PASSWORD])
        validator.add_check(
            "USER_TYPE",
            enumeration(*roles_map.keys()),
            MESSAGES[INVALID].format("USER_TYPE"),
        )
        validator.add_check(
            "GENDER",
            enumeration("", *tuple(str(val[0]) for val in choices)),
            MESSAGES[INVALID].format("GENDER"),
        )
        validator.add_check(
            "IDENTIFIER", value_length(64), MESSAGES[TOO_LONG].format("IDENTIFIER")
        )
        validator.add_check(
            "ENROLLED_IN",
            value_length(50, allow_null=True, multiple=True),
            MESSAGES[TOO_LONG].format("Class name"),
        )
        validator.add_check(
            "ASSIGNED_TO",
            value_length(50, allow_null=True, multiple=True),
            MESSAGES[TOO_LONG].format("Class name"),
        )
        row_errors = validator.validate(reader)
        for err in row_errors:
            per_line_errors.append(err)
        # cleaning classes names:
        normalized_learner_classroooms = {c.lower(): c for c in validator.classrooms}
        coach_classrooms = [cl for cl in validator.coach_classrooms]
        for classroom in coach_classrooms:
            normalized_name = classroom.lower()
            if normalized_name in normalized_learner_classroooms:
                real_name = normalized_learner_classroooms[normalized_name]
                if classroom != real_name:
                    validator.coach_classrooms[
                        real_name
                    ] = validator.coach_classrooms.pop(classroom)
        return (
            per_line_errors,
            (validator.classrooms, validator.coach_classrooms),
            validator.users,
            validator.roles,
        )
    def csv_headers_validation(self, filepath):
        """
        Read the first CSV row and decide whether it is a valid header.
        Builds self.header_translation mapping the neutral label (the text
        between parentheses in each header cell) to the actual CSV label.
        Appends to self.overall_error on missing/invalid columns.
        """
        csv_file = open_csv_for_reading(filepath)
        with csv_file as f:
            header = next(csv.reader(f, strict=True))
            has_header = False
            # Neutral label = text between the first "(" and the next ")".
            self.header_translation = {
                lbl.partition("(")[2].partition(")")[0]: lbl for lbl in header
            }
            neutral_header = self.header_translation.keys()
            # If every item in the first row matches an item in the fieldnames, consider it a header row
            if all(col in fieldnames for col in neutral_header):
                has_header = True
                # If any col is missing from the header, it's an error
                for col in fieldnames:
                    if col not in neutral_header:
                        self.overall_error.append(MESSAGES[REQUIRED_COLUMN].format(col))
            elif any(col in fieldnames for col in neutral_header):
                self.overall_error.append(MESSAGES[INVALID_HEADER])
        return has_header
    def get_field_values(self, user_row):
        """
        Normalize one validated CSV row into a dict of FacilityUser field
        values.  An asterisk password means "leave unchanged" (None here);
        DEFERRED demographic values collapse to the empty string.
        """
        password = user_row.get(self.header_translation["PASSWORD"], None)
        if password != "*":
            password = make_password(password)
        else:
            password = None
        gender = user_row.get(self.header_translation["GENDER"], "").strip().upper()
        gender = "" if gender == DEFERRED else gender
        birth_year = (
            user_row.get(self.header_translation["BIRTH_YEAR"], "").strip().upper()
        )
        birth_year = "" if birth_year == DEFERRED else birth_year
        id_number = (
            user_row.get(self.header_translation["IDENTIFIER"], "").strip().upper()
        )
        id_number = "" if id_number == DEFERRED else id_number
        full_name = user_row.get(self.header_translation["FULL_NAME"], None)
        return {
            "uuid": user_row.get(self.header_translation["UUID"], ""),
            "username": user_row.get(self.header_translation["USERNAME"], ""),
            "password": password,
            "gender": gender,
            "birth_year": birth_year,
            "id_number": id_number,
            "full_name": full_name,
        }
    def compare_fields(self, user_obj, values):
        """
        Copy changed field values onto user_obj in place and report whether
        anything changed.  The uuid is never updated and the password only
        when a new one was provided (not None).
        """
        changed = False
        for field in values:
            if field == "uuid":
                continue  # uuid can't be updated
            if field != "password" or values["password"] is not None:
                if getattr(user_obj, field) != values[field]:
                    changed = True
                    setattr(user_obj, field, values[field])
        return changed
    def build_users_objects(self, users):
        """
        Split validated users into FacilityUser objects to create, update
        and keep, collecting per-line errors for rows that reference a
        non-existent UUID, a duplicated username or a missing password.
        """
        new_users = list()
        update_users = list()
        keeping_users = list()
        per_line_errors = list()
        users_uuid = [
            u[self.header_translation["UUID"]]
            for u in users.values()
            if u[self.header_translation["UUID"]] != ""
        ]
        existing_users = (
            FacilityUser.objects.filter(facility=self.default_facility)
            .filter(id__in=users_uuid)
            .values_list("id", flat=True)
        )
        # creating the users takes half of the time
        progress = (100 / self.number_lines) * 0.5
        for user in users:
            self.progress_update(progress)
            user_row = users[user]
            values = self.get_field_values(user_row)
            if values["uuid"] in existing_users:
                user_obj = FacilityUser.objects.get(
                    id=values["uuid"], facility=self.default_facility
                )
                keeping_users.append(user_obj)
                if user_obj.username != user:
                    # check for duplicated username in the facility
                    # NOTE(review): objects.get raises DoesNotExist when the
                    # new username is free; .filter(...).first() looks safer
                    # here — TODO confirm intended behavior
                    existing_user = FacilityUser.objects.get(
                        username=user, facility=self.default_facility
                    )
                    if existing_user:
                        error = {
                            "row": users[user]["position"],
                            "username": user,
                            "message": MESSAGES[DUPLICATED_USERNAME],
                            "field": "USERNAME",
                            "value": user,
                        }
                        per_line_errors.append(error)
                        continue
                if self.compare_fields(user_obj, values):
                    update_users.append(user_obj)
            else:
                if values["uuid"] != "":
                    error = {
                        "row": users[user]["position"],
                        "username": user,
                        "message": MESSAGES[NON_EXISTENT_UUID].format(user),
                        "field": "PASSWORD",
                        "value": "*",
                    }
                    per_line_errors.append(error)
                elif not values["password"]:
                    error = {
                        "row": users[user]["position"],
                        "username": user,
                        "message": MESSAGES[REQUIRED_PASSWORD],
                        "field": "PASSWORD",
                        "value": "*",
                    }
                    per_line_errors.append(error)
                else:
                    user_obj = FacilityUser(
                        username=user, facility=self.default_facility
                    )
                    # user_obj.id = user_obj.calculate_uuid() # Morango does not work properly with this
                    for field in values:
                        if values[field]:
                            setattr(user_obj, field, values[field])
                    new_users.append(user_obj)
        return (new_users, update_users, keeping_users, per_line_errors)
    def db_validate_list(self, db_list, users=False):
        """
        Run Django full_clean() on every object in db_list and return the
        validation errors as per-line error dicts.
        """
        errors = []
        # validating the users takes aprox 40% of the time
        if users:
            progress = (
                (100 / self.number_lines) * 0.4 * (len(db_list) / self.number_lines)
            )
        for obj in db_list:
            if users:
                self.progress_update(progress)
            try:
                obj.full_clean()
            except ValidationError as e:
                for message in e.message_dict:
                    error = {
                        "row": str(obj),
                        "message": e.message_dict[message][0],
                        "field": message,
                        "value": vars(obj)[message],
                    }
                    errors.append(error)
        return errors
    def build_classes_objects(self, classes):
        """
        Using current database info, builds the list of classes to be
        updated or created.
        It also returns an updated classes list, using the case insensitive
        names of the classes that were already in the database
        `classes` - Tuple containing two dictionaries: enrolled classes + assigned classes
        Returns:
        new_classes - List of database objects of classes to be created
        update_classes - List of database objects of classes to be updated
        fixed_classes - Same original classes tuple, but with the names normalized
        """
        new_classes = list()
        update_classes = list()
        total_classes = set([k for k in classes[0]] + [v for v in classes[1]])
        existing_classes = (
            Classroom.objects.filter(parent=self.default_facility)
            # .filter(name__in=total_classes) # can't be done if classes names are case insensitive
            .values_list("name", flat=True)
        )
        normalized_name_existing = {c.lower(): c for c in existing_classes}
        for classroom in total_classes:
            if classroom.lower() in normalized_name_existing:
                real_name = normalized_name_existing[classroom.lower()]
                class_obj = Classroom.objects.get(
                    name=real_name, parent=self.default_facility
                )
                update_classes.append(class_obj)
                if real_name != classroom:
                    # Replace the CSV spelling by the database spelling.
                    if classroom in classes[0]:
                        classes[0][real_name] = classes[0].pop(classroom)
                    if classroom in classes[1]:
                        classes[1][real_name] = classes[1].pop(classroom)
            else:
                class_obj = Classroom(name=classroom, parent=self.default_facility)
                class_obj.id = class_obj.calculate_uuid()
                new_classes.append(class_obj)
            self.progress_update(1)
        return (new_classes, update_classes, classes)
    def get_facility(self, options):
        """
        Return the facility given by --facility or the device default;
        abort the command when neither exists.
        """
        if options["facility"]:
            default_facility = Facility.objects.get(pk=options["facility"])
        else:
            default_facility = Facility.get_default_facility()
        if not default_facility:
            self.overall_error.append(MESSAGES[NO_FACILITY])
            raise CommandError(self.overall_error[-1])
        return default_facility
    def get_number_lines(self, filepath):
        """
        Count the lines of the CSV file (used to scale progress updates);
        returns None and records an overall error when the file can't be read.
        """
        try:
            with open(filepath) as f:
                number_lines = len(f.readlines())
        except (ValueError, FileNotFoundError, csv.Error) as e:
            number_lines = None
            self.overall_error.append(MESSAGES[FILE_READ_ERROR].format(e))
        return number_lines
    def get_delete(self, options, keeping_users, update_classes):
        """
        When --delete is set, compute the users to delete (everything in the
        facility except imported users, admins and --userid) and the classes
        to clear of members.  Otherwise return two empty lists.
        """
        if not options["delete"]:
            return ([], [])
        users_not_to_delete = [u.id for u in keeping_users]
        admins = self.default_facility.get_admins()
        users_not_to_delete += admins.values_list("id", flat=True)
        if options["userid"]:
            users_not_to_delete.append(options["userid"])
        users_to_delete = FacilityUser.objects.filter(
            facility=self.default_facility
        ).exclude(id__in=users_not_to_delete)
        # Classes not included in the csv will be cleared of users,
        # but not deleted to keep possible lessons and quizzes created for them:
        classes_not_to_clear = [c.id for c in update_classes]
        classes_to_clear = (
            Classroom.objects.filter(parent=self.default_facility)
            .exclude(id__in=classes_not_to_clear)
            .values_list("id", flat=True)
        )
        return (users_to_delete, classes_to_clear)
    def delete_users(self, users):
        # Hard-delete so the user is removed rather than soft-flagged.
        for user in users:
            user.delete(hard_delete=True)
    def clear_classes(self, classes):
        # Remove all memberships from the given classrooms.
        for classroom in classes:
            Membership.objects.filter(collection=classroom).delete()
    def get_user(self, username, users):
        """
        Return the in-memory user object for username, falling back to a
        database lookup for users that were neither created nor updated.
        """
        user = users.get(username, None)
        if not user:  # the user has not been created nor updated:
            user = FacilityUser.objects.get(
                username=username, facility=self.default_facility
            )
        return user
    def add_classes_memberships(self, classes, users, db_classes):
        """
        Enroll learners and assign coaches to the database classrooms.
        `classes` - (enrolled, assigned) dictionaries of class -> usernames
        `users` - username -> FacilityUser of created/updated users
        `db_classes` - saved Classroom objects (new + updated)
        """
        enrolled = classes[0]
        assigned = classes[1]
        classes = {k.name: k for k in db_classes}
        for classroom in enrolled:
            db_class = classes[classroom]
            for username in enrolled[classroom]:
                # db validation might have rejected a csv validated user:
                if username in users:
                    user = self.get_user(username, users)
                    if not user.is_member_of(db_class):
                        db_class.add_member(user)
        for classroom in assigned:
            db_class = classes[classroom]
            for username in assigned[classroom]:
                # db validation might have rejected a csv validated user:
                if username in users:
                    user = self.get_user(username, users)
                    db_class.add_coach(user)
    def add_roles(self, users, roles):
        """
        Grant each facility role (admin/coach kinds) to its listed users.
        """
        for role in roles.keys():
            for username in roles[role]:
                # db validation might have rejected a csv validated user:
                if username in users:
                    user = self.get_user(username, users)
                    self.default_facility.add_role(user, role)
    def exit_if_error(self):
        """
        Abort the command (recording empty reports in the job metadata)
        when any overall error has been collected.
        """
        if self.overall_error:
            classes_report = {"created": 0, "updated": 0, "cleared": 0}
            users_report = {"created": 0, "updated": 0, "deleted": 0}
            if self.job:
                self.job.extra_metadata["overall_error"] = self.overall_error
                self.job.extra_metadata["per_line_errors"] = 0
                self.job.extra_metadata["classes"] = classes_report
                self.job.extra_metadata["users"] = users_report
                self.job.extra_metadata["filename"] = ""
                self.job.save_meta()
            raise CommandError("File errors: {}".format(str(self.overall_error)))
            # NOTE(review): unreachable — CommandError is raised above
            sys.exit(1)
        return
    def remove_memberships(self, users, enrolled, assigned):
        """
        Drop classroom memberships and coach roles that are no longer listed
        in the CSV for the kept users.
        """
        users_enrolled = reverse_dict(enrolled)
        users_assigned = reverse_dict(assigned)
        for user in users:
            # enrolled:
            to_remove = user.memberships.filter(collection__kind=CLASSROOM)
            # Python 2 compatibility: match the byte-encoded username keys.
            username = (
                user.username
                if sys.version_info[0] >= 3
                else user.username.encode("utf-8")
            )
            if username in users_enrolled.keys():
                to_remove.exclude(
                    collection__name__in=users_enrolled[username]
                ).delete()
            else:
                to_remove.delete()
            # assigned:
            to_remove = user.roles.filter(collection__kind=CLASSROOM)
            if username in users_assigned.keys():
                to_remove.exclude(
                    collection__name__in=users_assigned[username]
                ).delete()
            else:
                to_remove.delete()
    def output_messages(
        self, per_line_errors, classes_report, users_report, filepath, errorlines
    ):
        # Show output error messages on loggers, job metadata or io errorlines for testing
        # freeze message translations:
        for line in per_line_errors:
            line["message"] = str(line["message"])
        self.overall_error = [str(msg) for msg in self.overall_error]
        if self.job:
            self.job.extra_metadata["overall_error"] = self.overall_error
            self.job.extra_metadata["per_line_errors"] = per_line_errors
            self.job.extra_metadata["classes"] = classes_report
            self.job.extra_metadata["users"] = users_report
            self.job.extra_metadata["filename"] = ntpath.basename(filepath)
            self.job.save_meta()
        else:
            logger.info("File errors: {}".format(str(self.overall_error)))
            logger.info("Data errors: {}".format(str(per_line_errors)))
            logger.info("Classes report: {}".format(str(classes_report)))
            logger.info("Users report: {}".format(str(users_report)))
            if errorlines:
                for line in per_line_errors:
                    errorlines.write(str(line))
                    errorlines.write("\n")
    def handle_async(self, *args, **options):
        """
        Entry point: validate the CSV, build and validate the database
        objects, apply them (unless --dryrun) and report the results.
        """
        # initialize stats data structures:
        self.overall_error = []
        db_new_classes = []
        db_update_classes = []
        classes_to_clear = []
        db_new_users = []
        db_update_users = []
        users_to_delete = []
        per_line_errors = []
        # set language for the translation of the messages
        locale = settings.LANGUAGE_CODE if not options["locale"] else options["locale"]
        translation.activate(locale)
        self.job = get_current_job()
        filepath = options["filepath"]
        self.default_facility = self.get_facility(options)
        self.number_lines = self.get_number_lines(filepath)
        self.exit_if_error()
        with self.start_progress(total=100) as self.progress_update:
            # validate csv headers:
            has_header = self.csv_headers_validation(filepath)
            if not has_header:
                self.overall_error.append(MESSAGES[INVALID_HEADER])
            self.exit_if_error()
            self.progress_update(1)  # state=csv_headers
            try:
                csv_file = open_csv_for_reading(filepath)
                with csv_file as f:
                    reader = csv.DictReader(f, strict=True)
                    per_line_errors, classes, users, roles = self.csv_values_validation(
                        reader, self.header_translation
                    )
            except (ValueError, FileNotFoundError, csv.Error) as e:
                self.overall_error.append(MESSAGES[FILE_READ_ERROR].format(e))
                self.exit_if_error()
            (
                db_new_users,
                db_update_users,
                keeping_users,
                more_line_errors,
            ) = self.build_users_objects(users)
            per_line_errors += more_line_errors
            (
                db_new_classes,
                db_update_classes,
                fixed_classes,
            ) = self.build_classes_objects(classes)
            classes = fixed_classes
            users_to_delete, classes_to_clear = self.get_delete(
                options, keeping_users, db_update_classes
            )
            per_line_errors += self.db_validate_list(db_new_users, users=True)
            per_line_errors += self.db_validate_list(db_update_users, users=True)
            # progress = 91%
            per_line_errors += self.db_validate_list(db_new_classes)
            per_line_errors += self.db_validate_list(db_update_classes)
            if not options["dryrun"]:
                self.delete_users(users_to_delete)
                # clear users from classes not included in the csv:
                Membership.objects.filter(collection__in=classes_to_clear).delete()
                # bulk_create and bulk_update are not possible with current Morango:
                db_users = db_new_users + db_update_users
                for user in db_users:
                    user.save()
                # assign roles to users:
                users_data = {u.username: u for u in db_users}
                self.add_roles(users_data, roles)
                db_created_classes = []
                for classroom in db_new_classes:
                    created_class = Classroom.objects.create(
                        name=classroom.name, parent=classroom.parent
                    )
                    db_created_classes.append(created_class)
                # hack to get ids created by Morango:
                db_new_classes = db_created_classes
                self.add_classes_memberships(
                    classes, users_data, db_new_classes + db_update_classes
                )
                self.remove_memberships(keeping_users, classes[0], classes[1])
            classes_report = {
                "created": len(db_new_classes),
                "updated": len(db_update_classes),
                "cleared": len(classes_to_clear),
            }
            users_report = {
                "created": len(db_new_users),
                "updated": len(db_update_users),
                "deleted": len(users_to_delete),
            }
            self.output_messages(
                per_line_errors,
                classes_report,
                users_report,
                filepath,
                options["errorlines"],
            )
        translation.deactivate()
| indirectlylit/kolibri | kolibri/core/auth/management/commands/bulkimportusers.py | Python | mit | 35,248 |
import matplotlib, os, errno
# IF WE ARE ON SERVER WITH NO DISPLAY, then we use Agg:
#print matplotlib.get_backend()
# NOTE: the backend must be chosen BEFORE pyplot is imported below;
# once pyplot is loaded the backend is locked in.
if not('DISPLAY' in os.environ):
    matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
def visualize_history(hi, show=True, save=False, save_path='', show_also='', custom_title=None):
    '''
    Visualize the history of a Keras model run: plot training and
    validation loss curves (and optionally one extra metric).

    Args:
        hi: history dictionary (e.g. ``model.fit(...).history`` or the
            result of ``loadHistory``); must contain 'loss' and 'val_loss'.
        show: if True, display the figure interactively.
        save: if True, write the figure to ``save_path`` (PNG and PDF).
        save_path: output path used when ``save`` is True.
        show_also: name of an additional metric (e.g. 'acc') to plot as a
            dotted line together with its 'val_' counterpart; '' disables it.
        custom_title: plot title; defaults to 'model loss'.

    Returns:
        The ``matplotlib.pyplot`` module, so callers can tweak the figure.

    Example calls:
        hi = model.fit(...)
        saveHistory(hi.history, 'tmp_saved_history.npy')
        visualize_history(loadHistory('tmp_saved_history.npy'))
    '''
    # list all data in history
    print(hi.keys())
    # summarize history for loss
    plt.plot(hi['loss'])
    plt.plot(hi['val_loss'])
    # '<>' was the deprecated Python 2 spelling of '!='; '!=' works on 2 and 3
    if show_also != '':
        plt.plot(hi[show_also], linestyle='dotted')
        plt.plot(hi['val_'+show_also], linestyle='dotted')
    if custom_title is None:
        plt.title('model loss')
    else:
        plt.title(custom_title)
    plt.ylabel('loss')
    plt.xlabel('epoch')
    if show_also == '':
        plt.legend(['train', 'test'], loc='upper left')
    else:
        plt.legend(['train', 'test', 'train-'+show_also, 'test-'+show_also], loc='upper left')
    if save:
        filename = save_path #+'loss.png'
        # create the target directory on demand, guarding against a race
        if not os.path.exists(os.path.dirname(filename)):
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
        plt.savefig(filename)
        plt.savefig(filename+'.pdf', format='pdf')
        # print-with-parens is valid on both Python 2 and 3 for one argument
        print("Saved image to "+filename)
    if show:
        plt.show()
    plt.clf()
    return plt
def visualize_histories(histories, names, plotvalues='loss', show=True, save=False, save_path='', custom_title=None, just_val=False):
    '''
    Visualize one metric from multiple Keras histories on a single figure.

    Args:
        histories: list of history dictionaries.
        names: display name for each history (same length as ``histories``).
        plotvalues: metric key to plot; its 'val_' counterpart is always drawn.
        show / save / save_path: display the figure and/or save it (PNG+PDF).
        custom_title: plot title; defaults to 'model <plotvalues>'.
        just_val: if True, plot only the validation curves.

    Returns:
        The ``matplotlib.pyplot`` module.

    Example usage:
        h1 = loadHistory('history1.npy')
        h2 = loadHistory('history2.npy')
        visualize_histories([h1, h2], ['history1', 'history2'])
    '''
    # (removed a redundant local `import matplotlib.pyplot as plt`;
    # plt is already imported at module level)
    if custom_title is None:
        custom_title = 'model ' + plotvalues
    if just_val:
        custom_title = custom_title + ' (just validation results)'
    leg = []
    # pair each history with its display name instead of a manual counter
    for hi, n in zip(histories, names):
        # list all data in history
        print(hi.keys())
        # summarize history for the chosen metric
        if not just_val:
            plt.plot(hi[plotvalues])
        plt.plot(hi['val_'+plotvalues])
        plt.title(custom_title)
        plt.ylabel('loss')
        plt.xlabel('epoch')
        if not just_val:
            leg.append(n + '')
        leg.append(n + '_val')
    #plt.legend(leg, loc='lower left')
    plt.legend(leg, loc='best')
    if save:
        plt.savefig(save_path) #+plotvalues+'.png')
        plt.savefig(save_path+'.pdf', format='pdf')
    if show:
        plt.show()
    plt.clf()
    return plt
def visualize_special_histories(histories, plotvalues='loss', show=True, save=False, save_path='', custom_title=None, just_val=False):
    '''
    Visualize the results of a k-fold crossvalidation training.
    <histories> holds the individual runs of the experiment; each run is
    drawn as a dashed line and the per-epoch average across runs is drawn
    on top in a solid color.

    Args:
        histories: list of history dicts, all with the same number of epochs.
        plotvalues: metric key to plot (its 'val_' counterpart is used too).
        show / save / save_path: display the figure and/or save it (PNG+PDF).
        custom_title: plot title; defaults to 'model <plotvalues>'.
        just_val: if True, plot only the validation curves.

    Returns:
        The ``matplotlib.pyplot`` module.
    '''
    train_color = 'grey'
    val_color = 'blue'
    avg_train_color = 'red'
    avg_val_color = 'green'
    # per-epoch averages across all folds, vectorized instead of the
    # previous epoch-by-epoch Python loop
    avg_train = np.mean([hi[plotvalues] for hi in histories], axis=0)
    avg_val = np.mean([hi['val_'+plotvalues] for hi in histories], axis=0)
    plt.figure()
    if custom_title is None:
        custom_title = 'model ' + plotvalues
    if just_val:
        custom_title = custom_title + ' (just validation results)'
    leg = []
    if not just_val:
        leg.append('average training')
    leg.append('average validation')
    if not just_val:
        leg.append('training errors')
    leg.append('validation errors')
    # draw the averages first so the legend entries line up
    if not just_val:
        plt.plot(avg_train, color=avg_train_color)
    plt.plot(avg_val, color=avg_val_color)
    # individual folds as dashed lines
    for hi in histories:
        # list all data in history
        print(hi.keys())
        if not just_val:
            plt.plot(hi[plotvalues], linestyle='dashed', color=train_color)
        plt.plot(hi['val_'+plotvalues], linestyle='dashed', color=val_color)
    # OK, but we also want the averages on top of the fold curves:
    if not just_val:
        plt.plot(avg_train, color=avg_train_color)
    plt.plot(avg_val, color=avg_val_color)
    plt.title(custom_title)
    plt.ylabel('loss')
    plt.xlabel('epoch')
    #plt.legend(leg, loc='lower left')
    plt.legend(leg, loc='best')
    if save:
        plt.savefig(save_path) #+plotvalues+'.png')
        plt.savefig(save_path+'.pdf', format='pdf')
    if show:
        plt.show()
    plt.clf()
    return plt
def visualize_whiskered_boxed(whiskered_boxes_data, names, show=True, save=False, save_path='', custom_title=''):
    '''
    We are visualizing results of a k-fold crossvalidation training.
    In <whiskered_boxes_data> we have data for whiskered box plots.

    Args:
        whiskered_boxes_data: list of value sequences, one per box.
        names: x-axis label for each box (also joined for a default title).
        show / save / save_path: display the figure and/or save it; note that
            save_path gets '<title>.png' appended before saving.
        custom_title: plot title; defaults to the comma-joined names.

    Returns:
        The ``matplotlib.pyplot`` module.
    '''
    # project-local helper that pads the visible y-range
    from DatasetHandler.DatasetVizualizators import zoomOutY
    plt.close()
    plt.figure(figsize=(5, 8))
    legend_on = True
    if custom_title == '':
        custom_title = ','.join(names)
    # mark values
    save_path += custom_title + '.png'
    # fixed y-range [0, 1]; the commented-out code below derived it from data
    y_max = 1.0
    y_min = 0.0
    #y_max = -100.0
    #y_min = 100.0
    #for i in whiskered_boxes_data:
    #    y_max = max(max(i),y_max)
    #    y_min = min(min(i),y_min)
    axes = plt.axes()
    import matplotlib.ticker as ticker
    # major ticks every 1/10th of the y-range, minor every 1/100th
    axes.yaxis.set_major_locator(ticker.MultipleLocator(np.abs(y_max-y_min)/10.0))
    axes.yaxis.set_minor_locator(ticker.MultipleLocator(np.abs(y_max-y_min)/100.0))
    #meanpointprops = dict(linewidth=0.0)
    meanpointprops = dict(linewidth=1.0)
    boxplot = plt.boxplot(whiskered_boxes_data, notch=False, showmeans=True, meanprops=meanpointprops)
    #plt.xticks(names)
    if (legend_on):
        # label one artist of each kind so the legend shows each entry once
        boxplot['medians'][0].set_label('median')
        boxplot['means'][0].set_label('mean')
        boxplot['fliers'][0].set_label('outlayers')
        #    boxplot['boxes'][0].set_label('boxes')
        #    boxplot['whiskers'][0].set_label('whiskers')
        boxplot['caps'][0].set_label('caps')
    #axes.set_xlim([0.7, 1.7])
    plt.legend(numpoints = 1)
    axes.set_title(custom_title)
    axes.set_xticklabels(names)
    zoomOutY(axes, [0.0,1.0], 0.1)
    ## save
    if save:
        plt.savefig(save_path) #+plotvalues+'.png')
        plt.savefig(save_path+'.pdf', format='pdf')
    if show:
        plt.show()
    plt.clf()
    return plt
def saveHistory(history_dict, filename):
    """Serialize a history dict (or dict of histories) into a .npy file.

    Args:
        history_dict: the Keras history dictionary to store.
        filename: target path; missing parent directories are created.
    """
    target_dir = os.path.dirname(filename)
    # `target_dir and ...` also handles bare filenames (empty dirname),
    # which previously crashed in os.makedirs('')
    if target_dir and not os.path.exists(target_dir):
        try:
            os.makedirs(target_dir)
        except OSError as exc:  # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise
    # np.save writes binary data, so the handle must be opened in 'wb'
    # (text mode corrupts the stream on Windows and fails on Python 3);
    # `with` also guarantees the file is closed.
    with open(filename, 'wb') as f:
        np.save(f, {'S': history_dict})
def loadHistory(filename):
    """Load a history dict previously stored by ``saveHistory``.

    Args:
        filename: path to the .npy file written by ``saveHistory``.

    Returns:
        The stored history dictionary.
    """
    # open in binary mode and close the handle deterministically;
    # allow_pickle=True is required since NumPy 1.16.3 to load the
    # pickled dict object stored by saveHistory
    with open(filename, 'rb') as f:
        loaded = np.load(f, allow_pickle=True)
    return loaded[()]['S']
| previtus/MGR-Project-Code | Downloader/VisualizeHistory.py | Python | mit | 7,469 |
# from django.contrib import admin
# from libraryapp.models import Author, Book, Interest, History, GoogleUser, \
# Category, Ratings, Quote
| andela-sjames/Django-ReactJS-Library-App | reactlibapp/libraryapp/admin.py | Python | mit | 145 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import User
class UserDetailView(LoginRequiredMixin, DetailView):
    """Display a single user's profile page; login required."""
    model = User
    # These next two lines tell the view to index lookups by username
    slug_field = 'username'
    slug_url_kwarg = 'username'
class UserRedirectView(LoginRequiredMixin, RedirectView):
    """Send the logged-in user to their own detail page."""
    permanent = False

    def get_redirect_url(self):
        # always target the requesting user's own profile
        username = self.request.user.username
        return reverse('users:detail', kwargs={'username': username})
class UserUpdateView(LoginRequiredMixin, UpdateView):
    """Let the logged-in user edit their own profile fields."""
    fields = ['name', 'phone']
    # we already imported User in the view code above, remember?
    model = User

    def get_success_url(self):
        # send the user back to their own page after a successful update
        kwargs = {'username': self.request.user.username}
        return reverse('users:detail', kwargs=kwargs)

    def get_object(self):
        # Only get the User record for the user making the request
        username = self.request.user.username
        return User.objects.get(username=username)
class UserListView(LoginRequiredMixin, ListView):
    """List all users; login required."""
    model = User
    # These next two lines tell the view to index lookups by username
    # NOTE(review): ListView performs no slug lookups, so these two
    # attributes appear copy-pasted from the DetailView — confirm and remove.
    slug_field = 'username'
    slug_url_kwarg = 'username'
| bilgorajskim/soman | server/soman/users/views.py | Python | mit | 1,466 |
from django.db import models
from django.contrib.auth.models import User
from video.models import Video
# Create your models here.
class Comment(models.Model):
    """A user's comment on a video."""
    # on_delete made explicit: CASCADE matches the historical implicit
    # default and is mandatory from Django 2.0 onwards
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    video = models.ForeignKey(Video, on_delete=models.CASCADE)
    content = models.TextField()
    # auto_now and auto_now_add are mutually exclusive in Django (check
    # E160); keep auto_now_add so 'time' records when the comment was
    # created rather than being overwritten on every save
    time = models.DateTimeField(auto_now_add=True)
| lumig242/Video-Share-System | comments/models.py | Python | mit | 335 |
'''
Created on 16 Sep 2016
@author: rizarse
'''
import jks, textwrap, base64
from os.path import expanduser
import os.path
import atexit
import shutil
from os import makedirs
class JksHandler(object):
    """Export helpers for Java keystores (JKS): dump private keys,
    certificate chains and CA certificates as PEM files, or print a
    keystore's contents for inspection."""

    def __init__(self, params):
        # params is unused; kept so the constructor signature stays stable
        pass

    @staticmethod
    def writePkAndCerts(ks, token):
        """Write key.pem, certificate.pem and ca.pem for keystore *ks*
        under ~/magistral/<token>/ and return the alias of the (last)
        private key seen, or None if the keystore holds none.

        The export directory is removed again at interpreter exit.
        """
        uid = None
        home = expanduser("~")
        cert_dir = home + '/magistral/' + token

        # BUG FIX: this callback was declared as deleteCerts(self, path)
        # while atexit.register passed a single argument, so the path
        # arrived in 'self' and the cleanup crashed with a TypeError at
        # interpreter exit.
        def deleteCerts(path):
            shutil.rmtree(path)

        atexit.register(deleteCerts, cert_dir)

        for alias, pk in ks.private_keys.items():
            uid = alias
            # only RSA private keys (and their chains) are exported
            if pk.algorithm_oid == jks.util.RSA_ENCRYPTION_OID:
                if not os.path.exists(cert_dir):
                    makedirs(cert_dir)
                # --- private key ---
                key = cert_dir + '/key.pem'
                if os.path.exists(key):
                    os.remove(key)
                with open(key, 'wb') as f:
                    f.write(bytearray(b"-----BEGIN RSA PRIVATE KEY-----\r\n"))
                    f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(pk.pkey).decode('ascii'), 64)), 'utf-8'))
                    f.write(bytearray(b"\r\n-----END RSA PRIVATE KEY-----"))
                # --- first two certificates of the chain ---
                counter = 0
                cert = cert_dir + '/certificate.pem'
                if os.path.exists(cert):
                    os.remove(cert)
                with open(cert, 'wb') as f:
                    for c in pk.cert_chain:
                        f.write(bytearray(b"-----BEGIN CERTIFICATE-----\r\n"))
                        f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(c[1]).decode('ascii'), 64)), 'utf-8'))
                        f.write(bytearray(b"\r\n-----END CERTIFICATE-----\r\n"))
                        counter = counter + 1
                        if counter == 2:
                            break
            # --- trusted CA certificates (written for every private key,
            # as before; note the directory only exists after an RSA key
            # was exported) ---
            ca = cert_dir + '/ca.pem'
            if os.path.exists(ca):
                os.remove(ca)
            with open(ca, 'wb') as f:
                for _, c in ks.certs.items():
                    f.write(bytearray(b"-----BEGIN CERTIFICATE-----\r\n"))
                    f.write(bytes("\r\n".join(textwrap.wrap(base64.b64encode(c.cert).decode('ascii'), 64)), 'utf-8'))
                    f.write(bytearray(b"\r\n-----END CERTIFICATE-----\r\n"))
        return uid

    @staticmethod
    def printJks(ks):
        """Pretty-print the contents of keystore *ks* to stdout."""

        def print_pem(der_bytes, _type_):
            # wrap the base64 payload at 64 characters between PEM markers
            print("-----BEGIN %s-----" % _type_)
            print("\r\n".join(textwrap.wrap(base64.b64encode(der_bytes).decode('ascii'), 64)))
            print("-----END %s-----" % _type_)

        for _, pk in ks.private_keys.items():
            print("Private key: %s" % pk.alias)
            if pk.algorithm_oid == jks.util.RSA_ENCRYPTION_OID:
                print_pem(pk.pkey, "RSA PRIVATE KEY")
            else:
                print_pem(pk.pkey_pkcs8, "PRIVATE KEY")
            for c in pk.cert_chain:
                print_pem(c[1], "CERTIFICATE")
            print()
        for _, c in ks.certs.items():
            print("Certificate: %s" % c.alias)
            print_pem(c.cert, "CERTIFICATE")
            print()
        for _, sk in ks.secret_keys.items():
            print("Secret key: %s" % sk.alias)
            print(" Algorithm: %s" % sk.algorithm)
            print(" Key size: %d bits" % sk.key_size)
            print(" Key: %s" % "".join("{:02x}".format(b) for b in bytearray(sk.key)))
            print()
| magistral-io/MagistralPython | src/magistral/client/util/JksHandler.py | Python | mit | 3,975 |
"""
homeassistant.components.sensor.netatmo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NetAtmo Weather Service service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.netatmo/
"""
import logging
from datetime import timedelta
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import (
CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME, TEMP_CELCIUS)
from homeassistant.helpers import validate_config
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
# pinned fork of lnetatmo, installed straight from GitHub
REQUIREMENTS = [
    'https://github.com/HydrelioxGitHub/netatmo-api-python/archive/'
    '43ff238a0122b0939a0dc4e8836b6782913fb6e2.zip'
    '#lnetatmo==0.4.0']
_LOGGER = logging.getLogger(__name__)
# sensor type -> [friendly name, unit of measurement, icon]
SENSOR_TYPES = {
    'temperature': ['Temperature', TEMP_CELCIUS, 'mdi:thermometer'],
    'co2': ['CO2', 'ppm', 'mdi:cloud'],
    'pressure': ['Pressure', 'mbar', 'mdi:gauge'],
    'noise': ['Noise', 'dB', 'mdi:volume-high'],
    'humidity': ['Humidity', '%', 'mdi:water-percent']
}
CONF_SECRET_KEY = 'secret_key'
ATTR_MODULE = 'modules'
# Return cached results if last scan was less then this time ago
# NetAtmo Data is uploaded to server every 10mn
# so this time should not be under
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Set up the NetAtmo sensor platform. """
    required_keys = [CONF_API_KEY, CONF_USERNAME, CONF_PASSWORD,
                     CONF_SECRET_KEY]
    if not validate_config({DOMAIN: config}, {DOMAIN: required_keys},
                           _LOGGER):
        return None

    import lnetatmo

    authorization = lnetatmo.ClientAuth(
        config.get(CONF_API_KEY, None),
        config.get(CONF_SECRET_KEY, None),
        config.get(CONF_USERNAME, None),
        config.get(CONF_PASSWORD, None))

    if not authorization:
        _LOGGER.error(
            "Connection error "
            "Please check your settings for NatAtmo API.")
        return False

    data = NetAtmoData(authorization)

    sensors = []
    try:
        # one sensor entity per (module, monitored condition) pair
        for module_name, monitored_conditions in config[ATTR_MODULE].items():
            if module_name not in data.get_module_names():
                _LOGGER.error('Module name: "%s" not found', module_name)
                continue
            for variable in monitored_conditions:
                if variable not in SENSOR_TYPES:
                    _LOGGER.error('Sensor type: "%s" does not exist', variable)
                else:
                    sensors.append(NetAtmoSensor(data, module_name, variable))
    except KeyError:
        # no 'modules' section configured: register nothing
        pass

    add_devices(sensors)
# pylint: disable=too-few-public-methods
class NetAtmoSensor(Entity):
    """ Implements a NetAtmo sensor. """

    def __init__(self, netatmo_data, module_name, sensor_type):
        meta = SENSOR_TYPES[sensor_type]
        self._name = "NetAtmo {} {}".format(module_name, meta[0])
        self.netatmo_data = netatmo_data
        self.module_name = module_name
        self.type = sensor_type
        self._state = None
        self._unit_of_measurement = meta[1]
        self.update()

    @property
    def name(self):
        """ Name of this sensor entity. """
        return self._name

    @property
    def icon(self):
        """ Icon associated with this sensor type. """
        return SENSOR_TYPES[self.type][2]

    @property
    def state(self):
        """ Returns the state of the device. """
        return self._state

    @property
    def unit_of_measurement(self):
        """ Unit of measurement of this entity, if any. """
        return self._unit_of_measurement

    def update(self):
        """ Gets the latest data from NetAtmo API and updates the states. """
        self.netatmo_data.update()
        readings = self.netatmo_data.data[self.module_name]
        # dispatch table instead of an if/elif chain; an unknown type
        # leaves the previous state untouched, exactly as before
        extractors = {
            'temperature': lambda d: round(d['Temperature'], 1),
            'humidity': lambda d: d['Humidity'],
            'noise': lambda d: d['Noise'],
            'co2': lambda d: d['CO2'],
            'pressure': lambda d: round(d['Pressure'], 1),
        }
        extract = extractors.get(self.type)
        if extract is not None:
            self._state = extract(readings)
class NetAtmoData(object):
    """ Gets the latest data from NetAtmo. """
    def __init__(self, auth):
        # auth: lnetatmo.ClientAuth instance used for every API call
        self.auth = auth
        self.data = None
    def get_module_names(self):
        """ Return all module available on the API as a list. """
        self.update()
        return self.data.keys()
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """ Call the NetAtmo API to update the data.

        Throttled to MIN_TIME_BETWEEN_UPDATES, since NetAtmo uploads
        new measurements only every ~10 minutes.
        """
        import lnetatmo
        # gets the latest data from NetAtmo; modules silent for more
        # than 3600 seconds are excluded from the result
        dev_list = lnetatmo.DeviceList(self.auth)
        self.data = dev_list.lastData(exclude=3600)
| nnic/home-assistant | homeassistant/components/sensor/netatmo.py | Python | mit | 5,202 |
__author__ = 'mwagner'
from PyQt4.Qt import Qt
from PyQt4.QtGui import QDialog, QIcon
from ..view.Ui_VertexDialog import Ui_VertexDialog
from ..model.VertexToolsError import *
class VertexDialog(QDialog, Ui_VertexDialog):
    """Main dialog of the vertex tools plugin, built from the Qt
    Designer form Ui_VertexDialog."""
    def __init__(self, plugin, parent=None):
        super(VertexDialog, self).__init__(parent)
        # free the dialog's resources as soon as it is closed
        self.setAttribute(Qt.WA_DeleteOnClose)
        self.plugin = plugin
        self.setupUi(self)
        self.helpButton.setIcon(self.plugin.get_icon("help.gif"))
        self.setWindowIcon(QIcon(":beninCad/info.png")) | allspatial/vertex-tools | controller/VertexDialog.py | Python | mit | 550 |
'''
ÏÂÀý չʾÁË traceback Ä£¿éÔÊÐíÄãÔÚ³ÌÐòÀï´òÓ¡Òì³£µÄ¸ú×Ù·µ»Ø(Traceback)ÐÅÏ¢, ÀàËÆÎ´²¶»ñÒ쳣ʱ½âÊÍÆ÷Ëù×öµÄ.
'''
# ×¢Òâ! µ¼Èë traceback »áÇåÀíµôÒ쳣״̬, ËùÒÔ
# ×îºÃ±ðÔÚÒì³£´¦Àí´úÂëÖе¼Èë¸ÃÄ£¿é
import traceback
try:
raise SyntaxError, "example"
except:
traceback.print_exc() | iamweilee/pylearn | traceback-example-1.py | Python | mit | 285 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RouteFiltersOperations(object):
"""RouteFiltersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Generated-code constructor: store the pipeline client, the
        # client configuration and the (de)serializers for use by the
        # operation methods below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Send the initial DELETE request; long-running polling is
        driven by :meth:`begin_delete`."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-05-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202: deletion accepted (async); 204: resource already gone
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # no continuation token: fire the initial DELETE request now
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # DELETE returns no body; only invoke the custom callback
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        # resume an interrupted operation from its continuation token
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        expand=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.RouteFilter"
        """Gets the specified route filter.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param expand: Expands referenced express route bgp peering resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RouteFilter, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_05_01.models.RouteFilter
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-05-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # optional $expand of referenced express route BGP peerings
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('RouteFilter', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        route_filter_parameters, # type: "_models.RouteFilter"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.RouteFilter"
        """Send the initial PUT request; long-running polling is driven
        by :meth:`begin_create_or_update`."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-05-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(route_filter_parameters, 'RouteFilter')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated an existing filter, 201 = created a new one
        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilter', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('RouteFilter', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        route_filter_name, # type: str
        route_filter_parameters, # type: "_models.RouteFilter"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteFilter"]
        """Creates or updates a route filter in a specified resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param route_filter_parameters: Parameters supplied to the create or update route filter
         operation.
        :type route_filter_parameters: ~azure.mgmt.network.v2020_05_01.models.RouteFilter
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteFilter or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.RouteFilter]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # no continuation token: fire the initial PUT request now
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                route_filter_parameters=route_filter_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # deserialize the final RouteFilter returned by the service
            deserialized = self._deserialize('RouteFilter', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        # resume an interrupted operation from its continuation token
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
def update_tags(
    self,
    resource_group_name,  # type: str
    route_filter_name,  # type: str
    parameters,  # type: "_models.TagsObject"
    **kwargs  # type: Any
):
    # type: (...) -> "_models.RouteFilter"
    """Updates tags of a route filter.

    Issues a single PATCH request (no long-running operation): only the
    resource tags are mutated, so the service answers synchronously.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param route_filter_name: The name of the route filter.
    :type route_filter_name: str
    :param parameters: Parameters supplied to update route filter tags.
    :type parameters: ~azure.mgmt.network.v2020_05_01.models.TagsObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: RouteFilter, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_05_01.models.RouteFilter
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
    # Map auth/not-found/conflict status codes to typed azure-core errors;
    # callers may extend the mapping through the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-05-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.update_tags.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the TagsObject body and send the PATCH request.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'TagsObject')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('RouteFilter', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
def list_by_resource_group(
    self,
    resource_group_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.RouteFilterListResult"]
    """Gets all route filters in a resource group.

    Returns a lazily-evaluated ``ItemPaged``: pages are fetched on demand
    as the iterator is consumed.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.RouteFilterListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilterListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-05-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page is built from the operation metadata URL; follow-up
        # pages reuse the service-provided next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_by_resource_group.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (continuation link, items).
        deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters'}  # type: ignore
def list(
    self,
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.RouteFilterListResult"]
    """Gets all route filters in a subscription.

    Subscription-wide variant of :meth:`list_by_resource_group`; the URL
    carries only the subscription id.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.RouteFilterListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilterListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-05-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page from metadata URL; later pages follow next_link.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (continuation link, items).
        deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeFilters'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_05_01/operations/_route_filters_operations.py | Python | mit | 27,222 |
from office365.directory.identities.identity_set import IdentitySet
from office365.entity import Entity
class CallRecord(Entity):
    """Represents a single peer-to-peer call or a group call between multiple participants,
    sometimes referred to as an online meeting."""

    @property
    def join_web_url(self):
        """Meeting URL associated to the call. May not be available for a peerToPeer call record type."""
        # None when the service did not include the property.
        return self.properties.get("joinWebUrl", None)

    @property
    def organizer(self):
        """The organizing party's identity."""
        # Falls back to an empty IdentitySet so callers can always chain
        # attribute access without a None check.
        return self.properties.get("organizer", IdentitySet())
| vgrem/Office365-REST-Python-Client | office365/communications/callrecords/call_record.py | Python | mit | 630 |
#Copyright 2008, Meka Robotics
#All rights reserved.
#http://mekabot.com
#Redistribution and use in source and binary forms, with or without
#modification, are permitted.
#THIS SOFTWARE IS PROVIDED BY THE Copyright HOLDERS AND CONTRIBUTORS
#"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
#FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
#Copyright OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
#INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES INCLUDING,
#BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
#LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
#ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#POSSIBILITY OF SUCH DAMAGE.
import time
import numpy.numarray as na
#import Numeric as nu
import math
import os
import sys
import yaml
import m3.unit_conversion as m3u
from m3qa.calibrate import *
from m3qa.calibrate_sensors import *
from m3qa.calibrate_actuator_ec_r2 import *
import m3.actuator_ec_pb2 as aec
import m3qa.config_head_s2r2 as s2r2
# ######################################## Default S2 ############################################################
# Each config_default_s2_jN couples the shared head-S2R2 calib/param tables
# from m3qa.config_head_s2r2 with joint-specific internal data (the
# calibration joint-travel limits; units follow the s2r2 config tables).
config_default_s2_j0={
    'calib':s2r2.config_head_s2r2_actuator_j0['calib'],
    'param':s2r2.config_head_s2r2_actuator_j0['param'],
    'param_internal':
    {
        'joint_limits': [-36.0,19.0],
    }
}
config_default_s2_j1={
    'calib':s2r2.config_head_s2r2_actuator_j1['calib'],
    'param':s2r2.config_head_s2r2_actuator_j1['param'],
    'param_internal':
    {
        'joint_limits': [-60.0,60.0],
    }
}
config_default_s2_j2={
    'calib':s2r2.config_head_s2r2_actuator_j2['calib'],
    'param':s2r2.config_head_s2r2_actuator_j2['param'],
    'param_internal':
    {
        'joint_limits': [-20.0,20.0],
    }
}
config_default_s2_j3={
    'calib':s2r2.config_head_s2r2_actuator_j3['calib'],
    'param':s2r2.config_head_s2r2_actuator_j3['param'],
    'param_internal':
    {
        'joint_limits': [-7.5,13.0],
    }
}
config_default_s2_j4={
    'calib':s2r2.config_head_s2r2_actuator_j4['calib'],
    'param':s2r2.config_head_s2r2_actuator_j4['param'],
    'param_internal':
    {
        'joint_limits': [-65.0,65.0],
    }
}
config_default_s2_j5={
    'calib':s2r2.config_head_s2r2_actuator_j5['calib'],
    'param':s2r2.config_head_s2r2_actuator_j5['param'],
    'param_internal':
    {
        'joint_limits': [-36.0,36.0],
    }
}
config_default_s2_j6={
    'calib':s2r2.config_head_s2r2_actuator_j6['calib'],
    'param':s2r2.config_head_s2r2_actuator_j6['param'],
    'param_internal':
    {
        'joint_limits': [-40.0,32.0],
    }
}
# ######################################## ENS S2 ############################################################
# Eyelids joint: additionally carries the PWM sweep range used by the
# PWM-based theta calibration (see do_task 'tt' for jid >= 7).
config_default_s2_j7={
    'calib':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['calib'],
    'param':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['param'],
    'param_internal':
    {
        'joint_limits': [0.0,191.0],
        'pwm_theta': [-800,800]
    }
}
# ###########################################################################
class M3Calibrate_Head_S2R2(M3CalibrateActuatorEcR2):
    """Calibration driver for the Meka S2R2 head actuators (Python 2).

    Selects the per-joint default calib/param tables by joint id and adds a
    theta ('tt') calibration task on top of the generic actuator tasks.
    """
    def __init__(self):
        M3CalibrateActuatorEcR2.__init__(self)
    def do_task(self,ct):
        # 'tt': reset the theta sensor, run the theta calibration, persist.
        # Joint 7 (eyelids) is calibrated with a PWM sweep instead (its
        # config carries a 'pwm_theta' range, see config_default_s2_j7).
        if ct=='tt':
            self.reset_sensor('theta')
            if self.jid>=7:
                self.calibrate_theta(use_pwm=True)
            else:
                self.calibrate_theta()
            self.write_config()
            return True
        # Everything else is delegated to the generic actuator tasks.
        if M3CalibrateActuatorEcR2.do_task(self,ct):
            return True
        return False
    def print_tasks(self):
        # Extend the base task menu with the head-specific entries.
        M3Calibrate.print_tasks(self)
        print 'et: ext_temp'
        print 'at: amp_temp'
        print 'sa: sensor analyze'
        print 'tt: calibrate theta'
        print 'zt: zero theta'
    def start(self,ctype):
        # Human-readable joint names, indexed by joint id (jid).
        self.joint_names=['Neck Tilt J0',
                          'Neck Pan J1',
                          'Head Roll J2',
                          'Head Tilt J3',
                          'Eye Tilt J4',
                          'Eye Pan Right J5',
                          'Eye Pan Left J6',
                          'Eyelids J7']
        self.config_default=[
            config_default_s2_j0,
            config_default_s2_j1,
            config_default_s2_j2,
            config_default_s2_j3,
            config_default_s2_j4,
            config_default_s2_j5,
            config_default_s2_j6,
            config_default_s2_j7]
        if not M3CalibrateActuatorEcR2.start(self,ctype):
            return False
        # The joint id is encoded as the '_jN' suffix of the EC component
        # name; use it to pick the per-joint default tables.
        self.jid=int(self.comp_ec.name[self.comp_ec.name.find('_j')+2:])
        self.calib_default=self.config_default[self.jid]['calib']
        self.param_default=self.config_default[self.jid]['param']
        self.param_internal=self.config_default[self.jid]['param_internal']
        print 'Calibrating joint',self.joint_names[self.jid]
        return True
| ahoarau/m3meka | python/scripts/m3qa/calibrate_head_s2r2.py | Python | mit | 4,572 |
# Bind the value 2015 to ``year`` and echo it on standard output.
year = 2015
print(year)
| codermoji-contrib/python | start/Intro to variables/002/setvar2.py | Python | mit | 24 |
from lintreview.review import Problems
from lintreview.review import Comment
from lintreview.tools.shellcheck import Shellcheck
from lintreview.utils import in_path
from unittest import TestCase
from unittest import skipIf
from nose.tools import eq_
shellcheck_missing = not(in_path('shellCheck'))
class Testshellcheck(TestCase):
    """Integration tests for the Shellcheck tool wrapper.

    Tests that actually invoke the shellcheck binary are skipped when it is
    not installed.
    """

    # Decorator skipping a test when the shellcheck binary is absent.
    needs_shellcheck = skipIf(shellcheck_missing, 'Needs shellcheck')

    # [0] is a clean script, [1] contains known shellcheck findings.
    fixtures = [
        'tests/fixtures/shellcheck/no_errors.sh',
        'tests/fixtures/shellcheck/has_errors.sh',
    ]

    def setUp(self):
        # Fresh Problems collector and tool instance per test.
        self.problems = Problems()
        self.tool = Shellcheck(self.problems)

    def test_match_file(self):
        # Only *.sh files should be handed to shellcheck.
        self.assertTrue(self.tool.match_file('test.sh'))
        self.assertTrue(self.tool.match_file('dir/name/test.sh'))
        self.assertFalse(self.tool.match_file('dir/name/test.py'))
        self.assertFalse(self.tool.match_file('test.py'))
        self.assertFalse(self.tool.match_file('test.js'))

    @needs_shellcheck
    def test_check_dependencies(self):
        self.assertTrue(self.tool.check_dependencies())

    @needs_shellcheck
    def test_process_files__one_file_pass(self):
        # A clean script must produce no problems.
        self.tool.process_files([self.fixtures[0]])
        eq_([], self.problems.all(self.fixtures[0]))

    @needs_shellcheck
    def test_process_files__one_file_fail(self):
        # The faulty fixture yields exactly two comments with the expected
        # line/column positions and shellcheck messages.
        self.tool.process_files([self.fixtures[1]])
        problems = self.problems.all(self.fixtures[1])
        eq_(2, len(problems))

        fname = self.fixtures[1]
        expected = Comment(
            fname,
            5,
            3,
            'a is referenced but not assigned.\nDouble quote to prevent '
            'globbing and word splitting.')
        eq_(expected, problems[0])

        expected = Comment(
            fname,
            5,
            5,
            'The order of the 2>&1 and the redirect matters. The 2>&1 has to '
            'be last.')
        eq_(expected, problems[1])

    @needs_shellcheck
    def test_process_files_two_files(self):
        # Problems are tracked per file.
        self.tool.process_files(self.fixtures)

        eq_([], self.problems.all(self.fixtures[0]))

        problems = self.problems.all(self.fixtures[1])
        eq_(2, len(problems))

    @needs_shellcheck
    def test_process_files_with_config(self):
        # Excluding rule codes via config must reduce the reported count.
        config = {
            'shell': 'bash',
            'exclude': 'SC2154,SC2069'
        }
        tool = Shellcheck(self.problems, config)
        tool.process_files([self.fixtures[1]])

        problems = self.problems.all(self.fixtures[1])
        eq_(1, len(problems), 'Changing standards changes error counts')
| adrianmoisey/lint-review | tests/tools/test_shellcheck.py | Python | mit | 2,600 |
import sys
sys.path.append('../')
import Graffity
import numpy
import PlotTools
import glob
import astropy.io.fits as pyfits
from scipy import optimize
from os import walk
from os.path import join
def fitGaussian(x, y):
    """Fit a zero-centered Gaussian ``a * exp(-x**2 / (2*sigma**2))`` to (x, y).

    :param x: sample abscissae (array-like)
    :param y: sample values (array-like)
    :returns: best-fit parameter array ``[amplitude, sigma]``
    """
    # Signed residuals: leastsq minimizes sum(r**2), so the objective is
    # identical to the original numpy.abs() form (|r|**2 == r**2), but the
    # residual stays smooth, which helps the finite-difference Jacobian.
    errfunc = lambda p, x, y: p[0] * numpy.exp(-(x ** 2.0) / (2 * p[1] ** 2.0)) - y
    coeffs = [300000.0, 25.0]  # initial guess: [amplitude, sigma]
    pfit, success = optimize.leastsq(errfunc, coeffs, args=(x, y))
    return pfit
# Four figures: [0] amplitude vs Strehl, [1] sigma vs Strehl,
# [2] fiber offset vs flux, [3] Strehl vs flux.
figs, axes = PlotTools.configurePlots(4)

observationFiles = numpy.array([])
startdir = '/gvstore1/forFrank/'
# Collect every reduced dual-sci P2VM product below the archive root.
for root, dirs, files in walk(startdir):
    if 'reduced_20180307' in root:
        for f in files:
            if 'dualscip2vmred.fits' in f:
                observationFiles = numpy.append(observationFiles, join(root, f))

# One plotting color per telescope index (0..3).
color = {0:'b', 1:'g', 2:'r', 3:'y'}

# Per-telescope accumulators, filled while walking the observations.
offsets = {}
offsets[0] = numpy.array([])
offsets[1] = numpy.array([])
offsets[2] = numpy.array([])
offsets[3] = numpy.array([])
strehls = {}
strehls[0] = numpy.array([])
strehls[1] = numpy.array([])
strehls[2] = numpy.array([])
strehls[3] = numpy.array([])
flux = {}
flux[0] = numpy.array([])
flux[1] = numpy.array([])
flux[2] = numpy.array([])
flux[3] = numpy.array([])

# Keep only IRS16NW science-object observations; accumulate per-telescope
# fiber offset, Strehl and mean science-channel flux.
for f in observationFiles:
    SObjName = pyfits.getheader(f).get("ESO INS SOBJ NAME")
    if SObjName == 'IRS16NW':
        Grav = Graffity.GRAVITY_Dual_Sci_P2VM(fileBase=f[:-20], processAcqCamData=True)
        print "Good - %s" %f
        for i in Grav.AcqCamDat.newStrehl.data.keys():
            # Radial fiber offset from the metrology dRA/dDEC components.
            FiberOffset = (Grav.MET_SOBJ_DRA[i]**2.0 + Grav.MET_SOBJ_DDEC[i]**2.0)**0.5
            offsets[i] = numpy.append(offsets[i], FiberOffset)
            strehls[i] = numpy.append(strehls[i], Grav.Strehl[i])
            flux[i] = numpy.append(flux[i], numpy.mean(Grav.AcqCamDat.TOTALFLUX_SC.data[i]))
            # Marker size scales exponentially with the Strehl ratio.
            axes[2].scatter([FiberOffset],[flux[i][-1]],
                   color=color[i], s=2*2**(10*strehls[i][-1]))
            axes[3].scatter([strehls[i][-1]], [flux[i][-1]], color=color[i])
        del(Grav)
    else:
        print "Bad"

# Bin the samples by Strehl ratio and fit a zero-centered Gaussian
# (amplitude, sigma) of flux vs fiber offset within each bin.
binSize=0.2
StrehlBins = numpy.arange(0.05, 0.65, binSize)
offsetRange = numpy.linspace(0.0, 50.0)
Amps = {}
sigs = {}
srs = {}
dFib = numpy.linspace(0, 50.0)
for i in color.keys():
    Amps[i] = []
    sigs[i] = []
    srs[i] = []
    for s in StrehlBins:
        current = (strehls[i] > s) & (strehls[i] < s+binSize)
        # Only fit bins with a handful of samples.
        if numpy.sum(current) > 5:
            #axes[0].clear()
            #axes[0].scatter(offsets[i][current], flux[i][current], color=color[i])
            fit = fitGaussian(offsets[i][current], flux[i][current])
            #print fit[1]
            #axes[0].plot(offsetRange, fit[0]*numpy.exp(-(offsetRange)**2.0/(2.0*fit[1])**2.0))
            #figs[0].show()
            #raw_input()
            Amps[i].append(fit[0])
            sigs[i].append(fit[1])
            srs[i].append(s)
            # Skip clearly-degenerate fits (very wide sigma); line width
            # encodes the Strehl bin.
            if sigs[i][-1] < 70:
                axes[2].plot(dFib,
                       Amps[i][-1]*numpy.exp(-(dFib)**2.0/(2.0*sigs[i][-1])**2.0),
                       color = color[i], lw=s)
    Amps[i] = numpy.array(Amps[i])
    sigs[i] = numpy.array(sigs[i])
    srs[i] = numpy.array(srs[i])
    axes[0].scatter(srs[i], Amps[i]/1000000.0, color=color[i], label="Telescope = %d"%(i+1))
    axes[1].scatter(srs[i][sigs[i] < 70], sigs[i][sigs[i] < 70],
           color=color[i], label="Telescope = %d"%(i+1))

# Label, title, show and save all four figures.
axes[0].set_xlabel("Strehl Ratio")
axes[0].set_ylabel("Amplitude")
axes[1].set_xlabel("Strehl Ratio")
axes[1].set_ylabel("Sigma (mas)")
axes[2].set_xlabel("Fiber Offset (mas)")
axes[2].set_ylabel("Total Flux (SC)")
axes[3].set_xlabel("Strehl Ratio")
axes[3].set_ylabel("Total Flux (SC)")
figs[0].suptitle("IRS16NW")
figs[1].suptitle("IRS16NW")
figs[2].suptitle("IRS16NW")
figs[3].suptitle("IRS16NW")
axes[0].legend(loc=4)
axes[1].legend(loc=1)
figs[0].show()
figs[0].savefig("Amplitude_V_Strehl.png")
figs[1].show()
figs[1].savefig("Sigma_V_Strehl.png")
figs[2].show()
figs[3].show()
figs[2].savefig("Offset_V_Flux.png")
figs[3].savefig("Strehl_V_Flux.png")
| soylentdeen/Graffity | src/BCI/IRS16NM.py | Python | mit | 4,007 |
"""Initial Structure (Creating test and page tables)
Revision ID: 6aab1ac34c3
Revises: None
Create Date: 2015-07-19 12:58:04.968533
"""
# revision identifiers, used by Alembic.
revision = '6aab1ac34c3'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``test`` and ``page`` tables.

    One ``test`` row per analysis run; each ``page`` row stores the timing
    and size metrics for a single page of that run (FK ``page.test_id`` ->
    ``test.id``).
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('test',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('data', sa.Text(), nullable=False),
    sa.Column('run_date', sa.DateTime(), nullable=True),
    sa.Column('browser_name', sa.String(length=20), nullable=True),
    sa.Column('browser_version', sa.String(length=10), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('page',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('test_id', sa.Integer(), nullable=False),
    sa.Column('page_id', sa.String(length=64), nullable=False),
    sa.Column('fqdn', sa.String(length=256), nullable=True),
    sa.Column('time_to_first_byte', sa.Float(), nullable=True),
    sa.Column('html_load_time', sa.Float(), nullable=True),
    sa.Column('video_load_time', sa.Float(), nullable=True),
    sa.Column('audio_load_time', sa.Float(), nullable=True),
    sa.Column('js_load_time', sa.Float(), nullable=True),
    sa.Column('css_load_time', sa.Float(), nullable=True),
    sa.Column('image_load_time', sa.Float(), nullable=True),
    sa.Column('page_load_time', sa.Float(), nullable=True),
    sa.Column('page_size', sa.Float(), nullable=True),
    sa.Column('image_size', sa.Float(), nullable=True),
    sa.Column('css_size', sa.Float(), nullable=True),
    sa.Column('text_size', sa.Float(), nullable=True),
    sa.Column('js_size', sa.Float(), nullable=True),
    sa.Column('audio_size', sa.Float(), nullable=True),
    sa.Column('video_size', sa.Float(), nullable=True),
    sa.ForeignKeyConstraint(['test_id'], ['test.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the tables created by :func:`upgrade`.

    ``page`` is dropped first because it holds the foreign key to ``test``.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('page')
    op.drop_table('test')
    ### end Alembic commands ###
| mrname/haralyzer-api | migrations/versions/6aab1ac34c3_.py | Python | mit | 2,147 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EventsApplicationInfo(Model):
    """Application info for an event result.

    :param version: Version of the application
    :type version: str
    """

    # msrest (de)serialization map: python attribute -> wire key and type.
    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, *, version: str=None, **kwargs) -> None:
        # Extra kwargs are forwarded to the msrest Model base class.
        super(EventsApplicationInfo, self).__init__(**kwargs)
        self.version = version
| Azure/azure-sdk-for-python | sdk/applicationinsights/azure-applicationinsights/azure/applicationinsights/models/events_application_info_py3.py | Python | mit | 917 |
# -*- coding: utf-8 -*-
from httoop.status.types import StatusException
from httoop.uri import URI
class RedirectStatus(StatusException):
    u"""REDIRECTIONS = 3xx

    A redirection to other URI(s) which are set in the Location-header.
    """

    # Default redirect target(s); subclasses/instances may override.
    location = None

    def __init__(self, location, *args, **kwargs):
        # Accept a single URI, a list/tuple of URIs, or None (no header).
        if not isinstance(location, (type(None), list, tuple)):
            location = [location]
        if location is not None:
            # Normalize every entry through URI() and join them into one
            # comma-separated Location header value.
            kwargs.setdefault('headers', {})['Location'] = ', '.join(str(URI(uri)) for uri in location)
        super(RedirectStatus, self).__init__(*args, **kwargs)

    def to_dict(self):
        # Extend the base representation with the Location header, if set.
        dct = super(RedirectStatus, self).to_dict()
        if self.headers.get('Location'):
            dct.update(dict(Location=self.headers['Location']))
        return dct
class MULTIPLE_CHOICES(RedirectStatus):
    u"""The server has multiple representations of the requested resource.

    And the client e.g. did not specify the Accept-header or
    the requested representation does not exists.
    """

    code = 300
class MOVED_PERMANENTLY(RedirectStatus):
    u"""The the server knows the target resource but the URI
    is incorrect (wrong domain, trailing slash, etc.).

    It can also be send if a resource have moved or
    renamed to prevent broken links."""

    code = 301
    # 301 responses may be cached by clients/proxies.
    cacheable = True
class FOUND(RedirectStatus):
    # 302 Found: temporary redirect to the URI in the Location header.
    code = 302
    cacheable = True
class SEE_OTHER(RedirectStatus):
    u"""The request has been processed but instead of serving a
    representation of the result or resource it links to another
    document which contains a static status message, etc. so
    the client is not forced to download the data.

    This is also useful for links like
    /release-latest.tar.gz -> /release-1.2.tar.gz"""

    code = 303
    cacheable = True
class NOT_MODIFIED(RedirectStatus):
    u"""The client already has the data which is provided through the
    information in the Etag or If-Modified-Since-header.

    The Date-header is required, the ETag-header and
    Content-Location-header are useful.
    Also the caching headers Expires, Cache-Control and Vary are
    required if they differ from those sent previously.
    TODO: what to do if the representation format has
    changed but not the representation itself?
    The response body has to be empty."""

    code = 304

    # A 304 response must not carry a message body.
    body = None

    def __init__(self, *args, **kwargs):
        # don't set location
        super(NOT_MODIFIED, self).__init__(None, *args, **kwargs)

    # Entity headers that are meaningless on a 304 response and should be
    # stripped before sending.
    header_to_remove = (
        "Allow", "Content-Encoding", "Content-Language",
        "Content-Length", "Content-MD5", "Content-Range",
        "Content-Type", "Expires", "Location"
    )
class USE_PROXY(RedirectStatus):
    # 305: resource must be accessed through the proxy in Location.
    code = 305
class TEMPORARY_REDIRECT(RedirectStatus):
    u"""The request has not processed because the requested
    resource is located at a different URI.

    The client should resent the request to the URI given in the Location-header.
    for GET this is the same as 303 but for POST, PUT and DELETE it is
    important that the request was not processed."""

    code = 307
    cacheable = True
class PERMANENT_REDIRECT(RedirectStatus):
    # 308: like 301, but the request method must not change on re-request.
    code = 308
    cacheable = True
| spaceone/httoop | httoop/status/redirect.py | Python | mit | 3,015 |
from typing import Sequence
from numbers import Number
from tabulate import tabulate
class Matrix(Sequence):
    """A 2-D matrix of floats stored as a list of row lists.

    Implements the ``Sequence`` protocol over its rows and supports
    matrix*matrix and matrix*vector products, scalar multiplication,
    addition, subtraction and unary negation.  Shape mismatches raise
    ``AssertionError`` with the message ``"Wrong data"``.
    """

    def __init__(self, matrix: Sequence[Sequence[float]]):
        # Fixed: the original asserted ``isinstance(matrix, Sequence)``
        # twice; the second check was clearly meant to validate the rows.
        assert (isinstance(matrix, Sequence) and
                all(isinstance(row, Sequence) for row in matrix)), "Wrong data"
        # Normalize every element to float so mixed int/float input behaves
        # uniformly.
        self.__matrix = [[float(x) for x in row] for row in matrix]

    @staticmethod
    def one(rows: int, columns: int):
        """Return an identity-like ``rows`` x ``columns`` nested list.

        Note: returns plain nested lists (not a Matrix), mirroring the
        original API.
        """
        return [
            [1 if i == j else 0 for j in range(columns)] for i in range(rows)
        ]

    @staticmethod
    def zero(rows: int, columns: int):
        """Return a ``rows`` x ``columns`` nested list of zeros."""
        return [[0] * columns for _ in range(rows)]

    def __repr__(self):
        return 'Matrix({})'.format(self.__matrix)

    def __str__(self):
        # Pretty table rendering (requires the ``tabulate`` package).
        return tabulate(self.__matrix)

    def __len__(self):
        """Number of rows."""
        return len(self.__matrix)

    def __getitem__(self, item):
        return self.__matrix.__getitem__(item)

    def __iter__(self):
        return iter(self.__matrix)

    def __mul__(self, other):
        """Multiply by a matrix (returns Matrix) or a flat vector
        (returns a plain list of row dot products)."""
        assert isinstance(other, Sequence)
        # Our column count must equal other's row (or element) count.
        assert len(self.__matrix[0]) == len(other), "Wrong data"
        if isinstance(other[0], Sequence):
            return Matrix([
                [
                    sum(self[i][k] * other[k][j] for k in range(len(other)))
                    for j in range(len(other[0]))
                ] for i in range(len(self))
            ])
        return [
            sum(x * y for x, y in zip(row, other)) for row in self
        ]

    def __rmul__(self, other):
        """Scalar multiplication (``number * matrix``)."""
        assert isinstance(other, Number)
        return Matrix([
            [other * x for x in row] for row in self.__matrix
        ])

    def __add__(self, other):
        """Element-wise sum with a same-shaped matrix/nested sequence."""
        assert (isinstance(other, Sequence) and
                isinstance(other[0], Sequence) and
                len(self) == len(other) and
                len(self[0]) == len(other[0])), "Wrong data"
        return Matrix([
            [x + y for x, y in zip(r1, r2)] for r1, r2 in zip(self.__matrix, other)
        ])

    def __neg__(self):
        return Matrix([
            [-x for x in row] for row in self.__matrix
        ])

    def __sub__(self, other):
        """Element-wise difference with a same-shaped operand.

        Fixed: the original assertion checked that every row of *other* was
        as long as other's row count (a square-ness test) instead of
        verifying that both operands share the same shape, as ``__add__``
        does; a mismatch was then silently truncated by ``zip``.
        """
        assert (isinstance(other, Sequence) and
                isinstance(other[0], Sequence) and
                len(self) == len(other) and
                len(self[0]) == len(other[0])), "Wrong data"
        return Matrix([
            [x - y for x, y in zip(r1, r2)] for r1, r2 in zip(self, other)
        ])

    @property
    def shape(self):
        """(rows, columns) of the matrix."""
        return len(self.__matrix), len(self.__matrix[0])
if __name__ == '__main__':
    # Manual smoke test: multiply a 2x3 matrix by a 3x3 matrix and print
    # the tabulated result.
    m = Matrix([[1, 2, 1], [2, 3, 0]])
    a = Matrix([[1, 0, 0], [2, 1, 0], [1, 1, 0]])
    # print(m, m.shape)
    # print(a, a.shape)
    print(m * a)
| FeodorM/Computer-Graphics | util/matrix.py | Python | mit | 2,886 |
#encoding: utf8
class MemoryStorage(object):
    """
    Simple (dummy) in-memory storage.

    In production better to use some database. Written to describe the api
    and testing.
    """
    def __init__(self, config):
        # configuration of channels and services provided by application
        self.config = config
        # Undelivered messages: {client: {channel: [(data, status_code)]}}
        self.storage = dict()
        # Subscriptions: {channel: [(client_url, api_key), ...]}
        self.clients = dict()
        self.load_clients()
        # The list of keys to interact with the application
        self.api_keys = list()
        self.load_api_keys()

    def load_clients(self):
        """
        Loading services and key data from config
        """
        channel_dict = self.config.get('channels', {})
        for channel in channel_dict:
            self.add_channel(channel)
            for user_data in channel_dict.get(channel, []):
                self.subscribe(user_data['url'], user_data['key'], channel)

    def load_api_keys(self):
        """
        Loading keys to store messages
        """
        self.api_keys = self.config.get('keys', [])

    def push(self, client, channel, data, status_code):
        """
        Adding the undeliverable message in the storage
        """
        # setdefault replaces the original nested membership checks.
        self.storage.setdefault(client, {}).setdefault(channel, []).append(
            (data, status_code))

    def waiting_clients(self):
        """
        Returns the services that have undelivered messages
        """
        # iter(dict) replaces the Python-2-only dict.iterkeys() and
        # behaves identically on both major versions.
        return iter(self.storage)

    def waiting_messages(self, client):
        """
        Returns a dict with the channels in which there are unsent messages
        for a given service
        """
        return self.storage.get(client, None)

    def get_client_key(self, client, channel):
        """
        Returns the key to send the data to the client in a given channel
        (None when the channel or client is unknown).
        """
        result = None
        for url, key in self.clients.get(channel, []):
            if url == client:
                result = key  # last matching subscription wins, as before
        return result

    def get_clients(self, channel):
        """
        Returns a list of services subscribed to the channel
        """
        return self.clients.get(channel, [])

    def drop_message(self, client, channel, i):
        """
        Deletes the message from storage
        """
        del self.storage[client][channel][i]

    def messages_in_channel(self, client, channel):
        """
        Returns the number of unsent messages for service in a given channel
        (None when the client or channel is unknown).
        """
        channels = self.storage.get(client)
        if channels is None or channel not in channels:
            return None
        return len(channels[channel])

    def __repr__(self):
        """
        In print we trust
        """
        return repr(self.storage)

    def add_channel(self, channel):
        """
        Try to create a channel in the storage
        """
        if channel in self.clients:
            return False
        self.clients[channel] = []
        return True

    def subscribe(self, client, api_key, channel):
        """
        Subscribe client to the channel.  False on an unknown channel or a
        duplicate (client, key) subscription.
        """
        if channel not in self.clients:
            return False
        pair = (client, api_key)
        if pair in self.clients[channel]:
            return False
        self.clients[channel].append(pair)
        return True

    def unsubscribe(self, client, channel):
        """
        Unsubscribe client from the channel.  Returns False when the
        channel is unknown or the client was not subscribed.

        NOTE(review): the original comment says pending messages for the
        client must be dropped here, but no code ever did that -- behavior
        preserved; confirm the intended semantics.
        """
        clients = self.clients.get(channel)
        if clients is None:
            return False
        for index, (url, _key) in enumerate(clients):
            if url == client:
                del clients[index]
                return True
        return False

    def drop_channel(self, channel):
        """
        Drop channel with all clients; returns the removed subscriber list
        or None when the channel did not exist.
        """
        return self.clients.pop(channel, None)
| Sulverus/aqueduct | aqueduct/storages/basic.py | Python | mit | 4,409 |
########################################
# Automatically generated, do not edit.
########################################
from pyvisdk.thirdparty import Enum
# Closed set of output formats for performance-metric queries:
# 'csv' or 'normal'.
PerfFormat = Enum(
    'csv',
    'normal',
)
| xuru/pyvisdk | pyvisdk/enums/perf_format.py | Python | mit | 209 |
from app import db
import datetime
from geoalchemy2 import Geometry
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy import Column
import sqlalchemy
from slugify import slugify
from flask.ext.login import UserMixin
class User(UserMixin, db.Model):
    """Application user backed by a data.world ('ddw') OAuth identity."""

    __tablename__ = "users"

    # Server-generated UUID primary key.
    id = Column(UUID(as_uuid=True),
        server_default=sqlalchemy.text("uuid_generate_v4()"), primary_key=True)
    # Token handed to Flask-Login as the session identity (see get_id).
    session_token = Column(UUID(as_uuid=True),
        server_default=sqlalchemy.text("uuid_generate_v4()"), unique=True)
    permissions_group = db.Column(db.String, default='user') # user, staff, admin
    # OAuth credentials/profile data mirrored from data.world.
    ddw_access_token = db.Column(db.String)
    ddw_token_expires_in = db.Column(db.Integer)
    ddw_avatar_url = db.Column(db.String)
    nickname = db.Column(db.String)
    social_id = db.Column(db.String, unique=True)
    ddw_user_created = db.Column(db.Date)
    ddw_user_updated = db.Column(db.Date)
    # Free-form JSONB payload.
    data = db.Column(postgresql.JSONB, nullable=True)

    def get_id(self):
        # Flask-Login identity: the session token rather than the primary
        # key, so rotating the token invalidates existing sessions.
        # ``unicode`` implies this module targets Python 2.
        return unicode(self.session_token)
from flask import Flask, render_template, send_from_directory, request
from settings import getDatabaseString
import os
import time
import psycopg2
# WSGI application instance; uploaded pictures are stored under ./uploads.
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads'
def genFilename(s):
    """Return *s* with a local timestamp inserted before its extension.

    Example: "photo.jpg" -> "photo05-02-2026h14m30s07.jpg".

    Uses os.path.splitext so names with multiple dots keep their full stem
    and real extension ("archive.tar.gz" -> "archive.tar<t>.gz") and names
    without any extension no longer raise IndexError; the previous
    split-on-first-dot implementation broke both cases.
    """
    base, ext = os.path.splitext(s)
    t = time.strftime("%d-%m-%Yh%Hm%Ms%S", time.localtime())
    return base + t + ext
def createDBTable():
    """Create the 'picture' table (timestamp primary key, file name).

    Returns 'success' on success and 'error happens' on any failure --
    the exact strings existing callers may compare against.

    Fixes over the previous version: the connection is now always closed
    (it used to leak on both paths), and the bare `except:` -- which also
    swallowed KeyboardInterrupt/SystemExit -- is narrowed to Exception.
    """
    conn = None
    try:
        conn = psycopg2.connect(getDatabaseString())
        cur = conn.cursor()
        sql_command = """
        CREATE TABLE picture (
        timestamp TIMESTAMP PRIMARY KEY DEFAULT CURRENT_TIMESTAMP,
        pic VARCHAR(150));"""
        cur.execute(sql_command)
        conn.commit()
        r = 'success'
    except Exception:
        r = "error happens"
    finally:
        if conn is not None:
            conn.close()
    return r
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously uploaded picture from the upload folder."""
    upload_dir = app.config['UPLOAD_FOLDER']
    return send_from_directory(upload_dir, filename)
@app.route("/", methods=['GET', 'POST'])
def home():
if request.method == 'GET':
return render_template('index.html')
elif request.method == 'POST':
file = request.files['file']
#Generate new filename
file.filename = genFilename(file.filename)
if file:
#Save file to server
file.save(os.path.join(app.config['UPLOAD_FOLDER'], file.filename))
file_url = request.url + 'uploads/' + file.filename
#Insert file url into the database
conn = psycopg2.connect(getDatabaseString())
cur = conn.cursor()
query = "INSERT INTO picture (pic) VALUES ('{}')"
query = query.format(file.filename)
print query
cur.execute(query)
conn.commit()
#Render on the page
return 'Your pic path is: ' + file_url
else:
return 'Invalid'
if __name__ == "__main__":
app.run(debug=True)
| DreamN/Pic-storage | app.py | Python | mit | 1,843 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
# Generic return type for operations, and the signature of the optional
# `cls` response-transformation callback accepted by every operation.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class InboundNatRulesOperations:
    """InboundNatRulesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    NOTE: this class is generated by AutoRest (see the file header);
    manual changes will be lost on regeneration.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_11_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.InboundNatRuleListResult"]:
        """Gets all the inbound nat rules in a load balancer.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either InboundNatRuleListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.InboundNatRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InboundNatRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first-page request (templated URL) or a
            # follow-up request from the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('InboundNatRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules'}  # type: ignore

    async def _delete_initial(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        inbound_nat_rule_name: str,
        **kwargs: Any
    ) -> None:
        # Low-level DELETE call; begin_delete drives this through the
        # long-running-operation poller.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202: accepted (202 means the operation continues async);
        # 204: the rule did not exist.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'}  # type: ignore

    async def begin_delete(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        inbound_nat_rule_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified load balancer inbound nat rule.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param inbound_nat_rule_name: The name of the inbound nat rule.
        :type inbound_nat_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Fresh operation: issue the initial DELETE, then poll.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                load_balancer_name=load_balancer_name,
                inbound_nat_rule_name=inbound_nat_rule_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        inbound_nat_rule_name: str,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> "_models.InboundNatRule":
        """Gets the specified load balancer inbound nat rule.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param inbound_nat_rule_name: The name of the inbound nat rule.
        :type inbound_nat_rule_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: InboundNatRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_11_01.models.InboundNatRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InboundNatRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('InboundNatRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        inbound_nat_rule_name: str,
        inbound_nat_rule_parameters: "_models.InboundNatRule",
        **kwargs: Any
    ) -> "_models.InboundNatRule":
        # Low-level PUT call; begin_create_or_update drives this through
        # the long-running-operation poller.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InboundNatRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(inbound_nat_rule_parameters, 'InboundNatRule')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200: updated existing rule; 201: created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('InboundNatRule', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('InboundNatRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        inbound_nat_rule_name: str,
        inbound_nat_rule_parameters: "_models.InboundNatRule",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.InboundNatRule"]:
        """Creates or updates a load balancer inbound nat rule.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param inbound_nat_rule_name: The name of the inbound nat rule.
        :type inbound_nat_rule_name: str
        :param inbound_nat_rule_parameters: Parameters supplied to the create or update inbound nat
         rule operation.
        :type inbound_nat_rule_parameters: ~azure.mgmt.network.v2018_11_01.models.InboundNatRule
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either InboundNatRule or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.InboundNatRule]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InboundNatRule"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Fresh operation: issue the initial PUT, then poll.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                load_balancer_name=load_balancer_name,
                inbound_nat_rule_name=inbound_nat_rule_name,
                inbound_nat_rule_parameters=inbound_nat_rule_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('InboundNatRule', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'inboundNatRuleName': self._serialize.url("inbound_nat_rule_name", inbound_nat_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/inboundNatRules/{inboundNatRuleName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_inbound_nat_rules_operations.py | Python | mit | 22,205 |
# -*- coding: utf-8 -*-
# Copyright © 2011 Varyant, LLC
import re
from wsgiref.util import request_uri
from wack.http.errors import HttpMethodNotAllowed, HttpNotFound
from wack.http.request import Request
from wack.http.response import Response
class Controllers(object):
    """
    WSGI application that maps URLs and HTTP methods to wack controller
    functions that have been mounted on a path of the URL tree via a
    regular expression.

    At runtime the Controllers instance matches a given URL path with the
    regular expressions it has been given, in the order they were given, looking
    for a match. The Controllers instance will only match the entire PATH_INFO
    variable or not. If a match is found, the Controllers instance consumes the
    PATH_INFO variable, appending it to the SCRIPT_NAME variable. It then
    matches the request method with the methods supplied. If the path matches
    but the method is not among those supplied, an HttpMethodNotAllowed
    exception is raised; if no path matches at all, an HttpNotFound exception
    is raised. Matched groups from the regular expression are passed into the
    controller's method handler as arguments.

    :ivar mappings: ordered list of (regex, list of methods, application) tuples
    :type mappings: list
    """
    def __init__(self):
        # Ordered list of (compiled regex, allowed methods, controller);
        # the first regex that matches PATH_INFO wins.
        self.mappings = []

    def __call__(self, environ, start_response):
        # WSGI entry point: dispatch to the first mapping whose regex matches.
        path_info = environ['PATH_INFO']
        script_name = environ['SCRIPT_NAME']
        request_method = environ['REQUEST_METHOD']
        for regex, method_list, controller in self.mappings:
            match = regex.match(path_info)
            if not match:
                continue
            # Path matched: a disallowed method is a 405, not a 404.
            if request_method not in method_list:
                raise HttpMethodNotAllowed({'uri': request_uri(environ, 0),
                                            'method': request_method})
            # Consume the path: shift PATH_INFO onto SCRIPT_NAME, the WSGI
            # convention for a router that fully resolves the URL.
            environ['PATH_INFO'] = ''
            environ['SCRIPT_NAME'] = script_name + path_info
            request = Request(environ)
            response = Response()
            args = match.groups()
            # Controllers may return None; normalize to an empty body.
            body = controller(request, response, *args) or ''
            start_response(*response.start_response_args(body))
            return body
        raise HttpNotFound({'uri': request_uri(environ, 0)})

    def mount(self, regex_str, method_list, controller):
        """
        Mount the controller function on the URL tree at the path specified by
        the regular expression. The controller will only be invoked for the
        given http methods.

        :param regex_str: regular expression of URL path; must be anchored
            with a leading ``^/`` and a trailing ``$``
        :param method_list: list of HTTP methods
        :param controller: python callable
        :raise: ValueError if the regular expression is invalid
        """
        if not regex_str.startswith('^/'):
            raise ValueError('regular expression must start with: ^/')
        if not regex_str.endswith('$'):
            raise ValueError('regular expression must end with: $')
        regex = re.compile(regex_str)
        # XXX introspection on controller to see if its interface matches?
        self.mappings.append((regex, method_list, controller))
| jlconnor/wsgistack | wack/controllers.py | Python | mit | 3,103 |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.tms.v20201229 import models
class TmsClient(AbstractClient):
    """Client for the Tencent Cloud Text Moderation Service (TMS)."""
    _apiVersion = '2020-12-29'
    _endpoint = 'tms.tencentcloudapi.com'
    _service = 'tms'

    def TextModeration(self, request):
        """Submit text content for intelligent moderation.

        Before use, enable the Text Content Moderation service in the
        Tencent Cloud console and configure the corresponding business
        settings.  This is a paid API; see the official pricing page.

        Capabilities:
        - detects potentially offensive, unsafe or inappropriate text
          (vulgarity, abuse, pornography, advertising, ...) via deep
          learning;
        - supports custom moderation policies (configured in the console)
          and custom block lists;
        - can associate account/device information with the request to
          flag risky accounts or devices.

        Limits: the original text must not exceed 10000 unicode
        characters; Chinese, English and Arabic numerals are supported;
        the default rate limit is 1000 requests/second.

        :param request: Request instance for TextModeration.
        :type request: :class:`tencentcloud.tms.v20201229.models.TextModerationRequest`
        :rtype: :class:`tencentcloud.tms.v20201229.models.TextModerationResponse`
        """
        try:
            params = request._serialize()
            body = self.call("TextModeration", params)
            response = json.loads(body)
            if "Error" not in response["Response"]:
                model = models.TextModerationResponse()
                model._deserialize(response["Response"])
                return model
            else:
                code = response["Response"]["Error"]["Code"]
                message = response["Response"]["Error"]["Message"]
                reqid = response["Response"]["RequestId"]
                raise TencentCloudSDKException(code, message, reqid)
        except Exception as e:
            if isinstance(e, TencentCloudSDKException):
                raise
            else:
                # BUG FIX: Python 3 exceptions have no `.message` attribute,
                # so `e.message` raised AttributeError and masked the real
                # error.  Report the exception type and its str() instead.
                raise TencentCloudSDKException(type(e).__name__, str(e))
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
# Generic return type for operations, and the signature of the optional
# `cls` response-transformation callback accepted by every operation.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PeeringServicePrefixesOperations:
"""PeeringServicePrefixesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.peering.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client plus (de)serializers shared by the operations below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def get(
        self,
        resource_group_name: str,
        peering_service_name: str,
        prefix_name: str,
        **kwargs
    ) -> "_models.PeeringServicePrefix":
        """Gets the peering service prefix.

        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param peering_service_name: The peering service name.
        :type peering_service_name: str
        :param prefix_name: The prefix name.
        :type prefix_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PeeringServicePrefix, or the result of cls(response)
        :rtype: ~azure.mgmt.peering.models.PeeringServicePrefix
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PeeringServicePrefix"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'peeringServiceName': self._serialize.url("peering_service_name", peering_service_name, 'str'),
            'prefixName': self._serialize.url("prefix_name", prefix_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Deserialize the service error body if possible; fall back
            # gracefully otherwise (failsafe).
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('PeeringServicePrefix', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Peering/peeringServices/{peeringServiceName}/prefixes/{prefixName}'}  # type: ignore
    async def create_or_update(
        self,
        resource_group_name: str,
        peering_service_name: str,
        prefix_name: str,
        peering_service_prefix: "_models.PeeringServicePrefix",
        **kwargs
    ) -> "_models.PeeringServicePrefix":
        """Creates or updates the peering prefix.

        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param peering_service_name: The peering service name.
        :type peering_service_name: str
        :param prefix_name: The prefix name.
        :type prefix_name: str
        :param peering_service_prefix: The IP prefix for an peering.
        :type peering_service_prefix: ~azure.mgmt.peering.models.PeeringServicePrefix
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PeeringServicePrefix, or the result of cls(response)
        :rtype: ~azure.mgmt.peering.models.PeeringServicePrefix
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PeeringServicePrefix"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'peeringServiceName': self._serialize.url("peering_service_name", peering_service_name, 'str'),
            'prefixName': self._serialize.url("prefix_name", prefix_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(peering_service_prefix, 'PeeringServicePrefix')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200: updated existing prefix; 201: created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('PeeringServicePrefix', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('PeeringServicePrefix', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Peering/peeringServices/{peeringServiceName}/prefixes/{prefixName}'}  # type: ignore
async def delete(
    self,
    resource_group_name: str,
    peering_service_name: str,
    prefix_name: str,
    **kwargs
) -> None:
    """removes the peering prefix.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param peering_service_name: The peering service name.
    :type peering_service_name: str
    :param prefix_name: The prefix name.
    :type prefix_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # NOTE(review): autorest-style generated operation -- prefer regenerating
    # from the API spec over hand-editing this method.
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # map well-known HTTP failures to azure-core exceptions; callers may
    # extend/override via an 'error_map' kwarg
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-08-01-preview"
    accept = "application/json"

    # Construct URL from the metadata template attached to this method below
    url = self.delete.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'peeringServiceName': self._serialize.url("peering_service_name", peering_service_name, 'str'),
        'prefixName': self._serialize.url("prefix_name", prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # only 200 and 204 are treated as success; anything else raises
    if response.status_code not in [200, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Peering/peeringServices/{peeringServiceName}/prefixes/{prefixName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/peering/azure-mgmt-peering/azure/mgmt/peering/aio/operations/_peering_service_prefixes_operations.py | Python | mit | 11,684 |
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)

# Walk down to the item step by step instead of one chained call:
# service -> sync map "Players" -> item "steph_curry"
sync_service = client.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
players_map = sync_service.sync_maps("Players")
map_item = players_map.sync_map_items("steph_curry").fetch()

print(map_item.data)
| teoreteetik/api-snippets | sync/rest/maps/retrieve-map-item/retrieve-map-item.6.x.py | Python | mit | 471 |
#!/usr/bin/env python2
"""
Hash every file under <base dir>/<letter>/* and write "md5,filename" rows
to md5sums.txt.

Note: This does not sort the output.
For sorting simply run:
$ sort md5sums.txt > md5sums_sorted.txt
"""
import sys
import glob
import csv
import hashlib
from multiprocessing import Pool


def md5_file(filename):
    """Return an (md5-hexdigest, filename) tuple for the given file.

    The file is opened in binary mode -- text mode would mangle line endings
    on Windows and fail on undecodable bytes under Python 3 -- and is read in
    1 MiB chunks so arbitrarily large files do not have to fit in memory.
    """
    digest = hashlib.md5()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return (digest.hexdigest(), filename)


# first-character buckets the collection is sharded into on disk
directories = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m",
               "n", "o", "p", "q", "r", "s", "t", "the", "u", "v", "w", "x", "y"]


def main():
    """Drive a worker pool over every bucket, streaming rows to md5sums.txt."""
    try:
        base_directory = sys.argv[1]
    except IndexError:
        print("{0}: Syntax: {0} <album covers base directory>".format(sys.argv[0]))
        sys.exit(0)
    pool = Pool(8)
    with open("md5sums.txt", "w") as f:
        writer = csv.writer(f)
        for d in directories:
            print("Calculating hashes for the {} directory.".format(d))
            image_files = glob.iglob("{}/{}/*".format(base_directory, d))
            # imap streams results as workers finish, keeping memory flat
            for hash_and_name in pool.imap(md5_file, image_files):
                writer.writerow(hash_and_name)


if __name__ == "__main__":
    main()
| jpypi/dup-image-search | md5/calculate_md5_hashes.py | Python | mit | 1,021 |
# Python - 3.6.0
# Codewars kata test snippet: the `Test` assertion framework and the solution
# under test `find_multiples` are injected by the Codewars runner -- they are
# intentionally not defined in this file.
Test.expect(find_multiples(5, 25) == [5, 10, 15, 20, 25], f'{str(find_multiples(5, 25))} should equal [5, 10, 15, 20, 25]')
Test.expect(find_multiples(1, 2) == [1, 2], f'{str(find_multiples(1, 2))} should equal [1, 2]')
| RevansChen/online-judge | Codewars/8kyu/find-multiples-of-a-number/Python/test.py | Python | mit | 238 |
#!usr/bin/python
'''
Get satellite data according to input file.
'''
from random import shuffle,random
import os,json
from utils.mapbox_static import MapboxStatic
from utils.coordinate_converter import CoordConvert
from modules.getFeatures import latLon,getBBox
from libs.foldernames import satDataFolder,testDataFolder
def get_satellite(inputFile,mapboxtoken=None,count=1000,zoomLevel=17,
        outputFolder='data',xpixel=480,ypixel=360,epsg=None,elements=None,
        randomImages=False):
    '''
    Get satellite data in order to input GIS information.
    Parameters:
        'inputFile': Input file (GeoJSON format or parsed into GeoJSON)
        'mapboxtoken': Access token for Mapbox (go to mapbox.com to create one)
        'count': Number of satellite images to be downloaded
        'zoomLevel': Zoom level (see libs/zoomLevel.csv for resolutions)
        'outputFolder': Folder to store output data in
        'xpixel': Number of pixels of satellite images (width)
        'ypixel': Number of pixels of satellite images (height)
        'epsg': EPSG code for coordinate system in GIS data (will try to find automatically
            if not provided)
        'elements': GIS data can also be input directly
        'randomImages': Get center of random polygons (False) or within Boundary Box of data (True)
    '''
    # fail fast on missing required arguments (Python 2 script)
    if (not inputFile) and (not elements):
        print "Error: Provide input file."
        exit()
    if not mapboxtoken:
        print "Error: Provide mapbox token (more informations on www.mapbox.com)."
        exit()
    #parser.add_argument('--sport',
    #    type=str, default='baseball',
    #    help='Sport tag, for example: baseball, tennis, or soccer.')
    # We need the elements -- load GeoJSON features unless passed in directly
    if not elements:
        print 'Loading %s...' % inputFile
        with open(inputFile, 'r') as f:
            elements = json.load(f)
    #get coordinate system
    myCoordConvert = CoordConvert()
    code=myCoordConvert.getCoordSystem(elements,epsg)
    # create folder structure <outputFolder>/<input file stem>/{sat,test} --
    # the [:-5] strips the '.json' extension from the file name
    subpath=outputFolder+"/"+os.path.split(inputFile)[-1][:-5]
    if not os.path.isdir(subpath):
        os.mkdir(subpath)
        print 'Directory',subpath,'created'
    if not os.path.isdir(subpath+satDataFolder):
        os.mkdir(subpath+satDataFolder)
        print 'Directory',subpath+satDataFolder,'created'
    if not os.path.isdir(subpath+testDataFolder):
        os.mkdir(subpath+testDataFolder)
        print 'Directory',subpath+testDataFolder,'created'
    #Write metadata (appended, so re-runs accumulate rows)
    with open(subpath+satDataFolder+"meta.csv","a+") as f:
        f.write("ZoomLevel,,"+str(zoomLevel)+"\n")
    #get bbox of all features if set to random
    if randomImages:
        xlist=[]
        ylist=[]
        for element in elements['features']:
            minxe,maxxe,minye,maxye=getBBox(element)
            xlist.append(minxe)
            xlist.append(maxxe)
            ylist.append(minye)
            ylist.append(maxye)
        minx=min(xlist)
        maxx=max(xlist)
        miny=min(ylist)
        maxy=max(ylist)
    element_list = []
    # NOTE: range() returns a list under Python 2 -- shuffle() below would
    # fail on Python 3's range object.
    index_list = range(len(elements['features'])) #featue map
    # Randomize elements list to make sure we don't download all pics from the
    shuffle(index_list)
    for i in index_list:
        element_list.append(elements['features'][i]) #feature map
    # Now we're gonna download the satellite images for these locations
    namespace= os.path.split(inputFile)[-1][:-5] #get input file name as namespace
    mapbox_static = MapboxStatic(
        namespace=namespace,
        root_folder=subpath+satDataFolder[0:-1])
    total_downloaded = 0
    c = 0
    print "------------------- Getting Satellite data -------------------"
    for element in element_list:
        if randomImages:
            # NOTE(review): a single random draw is reused for both axes, so
            # the sampled points all lie on the bbox diagonal -- probably
            # intended to be two independent draws; confirm before changing.
            randomValue=random()
            av_lon=minx+((maxx-minx)*randomValue)
            av_lat=miny+((maxy-miny)*randomValue)
            element_id_str=1000000+c #1000000 indicates random value
            with open(subpath+satDataFolder+"meta.csv","a+") as f:
                f.write(str(element_id_str)+","+str(av_lon)+","+str(av_lat)+"\n")
        else:
            element_id_str = index_list[c]
            #figure out center of polygon
            av_lon,av_lat=latLon(element)
        #Convert to standard format
        # NOTE(review): WGS84 lat/lon is EPSG:4326; the literal 4319 here looks
        # suspicious -- verify against CoordConvert.getCoordSystem return codes.
        if code != 4319: # if not already in wgs84 standard format
            lotlan= myCoordConvert.convert(av_lon,av_lat)
            longitude=lotlan[0]
            latitude=lotlan[1]
        else: #if already in wgs84 format
            latitude= av_lat
            longitude= av_lon
        #get url
        print "Coordinates WSG64: "+str(longitude)+','+str(latitude)
        if (av_lon != longitude) and (av_lat != latitude):
            print "Coordinates Native: "+str(av_lon)+','+str(av_lat)
        url = mapbox_static.get_url(
            latitude=latitude,
            longitude=longitude,
            mapbox_zoom=zoomLevel,
            access_token=mapboxtoken,
            width=xpixel,
            height=ypixel)
        #download data
        success = mapbox_static.download_tile(
            element_id=element_id_str,
            url=url,verbose=True)
        if success:
            total_downloaded += 1
        print total_downloaded,'/',count
        c += 1
        if total_downloaded >= count:
            break
| worldbank/cv4ag | modules/get_satellite.py | Python | mit | 4,605 |
#!/usr/bin/python3
import numpy as np
import cv2
from collections import deque
from obstacle_detector.distance_calculator import spline_dist
from obstacle_detector.perspective import inv_persp_new
from obstacle_detector.perspective import regress_perspecive
from obstacle_detector.depth_mapper import calculate_depth_map
from obstacle_detector.tm.image_shift_calculator import find_shift_value
def video_test(input_video_path=None, output_video_path=None):
    """Play back a drive video and display a depth map computed from
    temporally shifted frames (treating an older frame and the newest
    frame as a makeshift stereo pair).

    Falls back to interactive input() / 'output.avi' when paths are None.
    Press ESC to quit, 's' to save the current inverse-perspective crop.
    """
    # principal point and ROI geometry -- presumably calibrated for this
    # specific camera/video; TODO confirm units of roi_width/roi_length
    cx = 595
    cy = 303
    roi_width = 25
    roi_length = 90
    cap = cv2.VideoCapture(
        input_video_path \
        if input_video_path is not None \
        else input('enter video path: '))
    old_images = deque()
    original_frames = deque()
    ret, frame = cap.read()
    # prime both deques with 15 frames so the negative indexing below
    # ([-5], [-1]) is always valid
    for i in range(15):
        original_frames.append(frame)
        img, pts1 = inv_persp_new(
            frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
        old_images.append(img)
        ret, frame = cap.read()
    height, width, _ = frame.shape
    out_height, out_width, _ = img.shape
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    # NOTE(review): 'out' is opened but no frame is ever written in this
    # function -- either write the composite frames or drop the VideoWriter.
    out = cv2.VideoWriter(
        output_video_path \
        if output_video_path is not None \
        else 'output.avi',
        fourcc, 15.0, (out_width * 4, out_height))
    left = cv2.imread('aloeL.jpg')
    right = cv2.imread('aloeR.jpg')
    while(ret):
        # slide the frame window: drop the oldest frame, append the newest
        original_frames.popleft()
        ret, frame = cap.read()
        original_frames.append(frame)
        img, pts1 = inv_persp_new(
            frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
        old_images.popleft()
        old_images.append(img)
        # right halves of frame t-4 and frame t act as the stereo pair;
        # downsample + blur before matching
        left = original_frames[-5][:, width // 2:]
        right = original_frames[-1][:, width // 2:]
        left = cv2.pyrDown(left)
        left = cv2.blur(left, (3, 3))
        right = cv2.pyrDown(right)
        right = cv2.blur(right, (3, 3))
        depth = calculate_depth_map(left, right)
        cv2.imshow('left', left)
        cv2.imshow('right', right)
        cv2.imshow('depth', depth)
        # overlay the depth map on the left image for visual inspection
        depth = cv2.cvtColor(depth, cv2.COLOR_GRAY2BGR)
        res = cv2.addWeighted(left, 0.5, depth, 0.5, 0)
        cv2.imshow('res', res)
        # earlier experiments on the inverse-perspective images, kept for
        # reference:
        # left = old_images[-1][300:,:]
        # right = old_images[-9][300:,:]
        #
        # shift_value = find_shift_value(left, right, (30, 100, 60, 300))
        # right = np.roll(right, shift_value[1], axis=0)#shift_value[0])
        # right = np.roll(right, shift_value[0], axis=1)#shift_value[0])
        # left = left[100:-100,:]
        # right = right[100:-100,:]
        #
        # print(shift_value)
        #
        # left = np.rot90(left, 3)
        # right = np.rot90(right, 3)
        #
        # cv2.imshow('left', left)
        # cv2.imshow('right', right)
        #
        # shifted_map = cv2.equalizeHist(
        #     calculate_depth_map(
        #         left, right))
        # cv2.imshow(
        #     'shifted map', shifted_map)
        # diff = cv2.absdiff(left, right)
        # cv2.imshow('diff', diff)
        # dm = calculate_depth_map(left, right)
        # cv2.imshow('dm', dm)
        # dm = cv2.equalizeHist(dm)
        # cv2.imshow('eq dm', dm)
        # dm = cv2.cvtColor(dm, cv2.COLOR_GRAY2BGR)
        k = cv2.waitKey(1) & 0xff
        if k == 27:
            break
        elif k == ord('s'):
            cv2.imwrite('screen.png', img)
    cap.release()
    out.release()
    cv2.destroyAllWindows()


video_test('../../video/6.mp4', '../results/depth_map_out.avi')
| Sid1057/obstacle_detector | depth_test.py | Python | mit | 3,465 |
# A WavPack reader/tagger
#
# Copyright 2006 Joe Wreschnig <piman@sacredchao.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# $Id: wavpack.py 4153 2007-08-05 07:07:49Z piman $
"""WavPack reading and writing.
WavPack is a lossless format that uses APEv2 tags. Read
http://www.wavpack.com/ for more information.
"""
__all__ = ["WavPack", "Open", "delete"]
from mutagen.apev2 import APEv2File, error, delete
from mutagen._util import cdata
class WavPackHeaderError(error):
    """Raised when the stream does not start with a valid WavPack header."""
# sample-rate lookup table, indexed by bits 23..26 of the stream flags
# (see WavPackInfo.__init__: RATES[(flags >> 23) & 0xF])
RATES = [6000, 8000, 9600, 11025, 12000, 16000, 22050, 24000, 32000, 44100,
         48000, 64000, 88200, 96000, 192000]
class WavPackInfo(object):
    """WavPack stream information.

    Attributes:
    channels - number of audio channels (1 or 2)
    length - file length in seconds, as a float
    sample_rate - audio sampling rate in Hz
    version - WavPack stream version
    """

    def __init__(self, fileobj):
        # read the start of the WavPack block header; under Python 2 this is
        # a byte string, so startswith() compares raw bytes
        header = fileobj.read(28)
        if len(header) != 28 or not header.startswith("wvpk"):
            raise WavPackHeaderError("not a WavPack file")
        # total sample count and stream flags are little-endian unsigned ints
        samples = cdata.uint_le(header[12:16])
        flags = cdata.uint_le(header[24:28])
        self.version = cdata.short_le(header[8:10])
        # flag bit 0x4 set -> 1 channel, otherwise 2
        self.channels = bool(flags & 4) or 2
        # bits 23..26 of the flags index the RATES table above
        self.sample_rate = RATES[(flags >> 23) & 0xF]
        self.length = float(samples) / self.sample_rate

    def pprint(self):
        # one-line human-readable summary
        return "WavPack, %.2f seconds, %d Hz" % (self.length, self.sample_rate)
class WavPack(APEv2File):
    """WavPack audio file, tagged with APEv2."""

    _Info = WavPackInfo
    _mimes = ["audio/x-wavpack"]

    @staticmethod
    def score(filename, fileobj, header):
        # twice the usual weight when the WavPack magic is present
        return header.startswith("wvpk") * 2
| JustinTulloss/harmonize.fm | libs.py/mutagen/wavpack.py | Python | mit | 1,810 |
import sys
from traceback import format_exception
import colorama
def color(color_, settings):
    """Return the given escape sequence, or '' when colors are disabled."""
    return '' if settings.no_colors else color_
def exception(title, exc_info, settings):
    """Write a highlighted warning with the formatted traceback to stderr."""
    warn = color(colorama.Back.RED + colorama.Fore.WHITE
                 + colorama.Style.BRIGHT, settings)
    reset = color(colorama.Style.RESET_ALL, settings)
    trace = ''.join(format_exception(*exc_info))
    sys.stderr.write(
        u'{warn}[WARN] {title}:{reset}\n{trace}'
        u'{warn}----------------------------{reset}\n\n'.format(
            warn=warn, reset=reset, title=title, trace=trace))
def rule_failed(rule, exc_info, settings):
    """Report a failing rule through the generic exception logger."""
    title = 'Rule {}'.format(rule.name)
    exception(title, exc_info, settings)
def show_command(new_command, settings):
    """Print the suggested command to stderr in bold."""
    bold = color(colorama.Style.BRIGHT, settings)
    reset = color(colorama.Style.RESET_ALL, settings)
    sys.stderr.write('{bold}{command}{reset}\n'.format(
        command=new_command, bold=bold, reset=reset))
def confirm_command(new_command, settings):
    """Print the suggested command with an [enter/ctrl+c] prompt to stderr."""
    bold = color(colorama.Style.BRIGHT, settings)
    green = color(colorama.Fore.GREEN, settings)
    red = color(colorama.Fore.RED, settings)
    reset = color(colorama.Style.RESET_ALL, settings)
    prompt = '{bold}{command}{reset} [{green}enter{reset}/{red}ctrl+c{reset}]'.format(
        command=new_command, bold=bold, green=green, red=red, reset=reset)
    sys.stderr.write(prompt)
    sys.stderr.flush()
def failed(msg, settings):
    """Print a failure message to stderr in red."""
    red = color(colorama.Fore.RED, settings)
    reset = color(colorama.Style.RESET_ALL, settings)
    sys.stderr.write('{red}{msg}{reset}\n'.format(
        msg=msg, red=red, reset=reset))
| JianfengYao/thefuck | thefuck/logs.py | Python | mit | 1,661 |
#!/usr/bin/python
"""
Object
"""
import sys
from copy import copy as _copy
from collections import defaultdict
from .errors import ConfigurationError, ExecutionError, \
InvalidParameterDatatype
from .debugging import bacpypes_debugging, ModuleLogger
from .primitivedata import Atomic, BitString, Boolean, CharacterString, Date, \
Double, Integer, ObjectIdentifier, ObjectType, OctetString, Real, Time, \
Unsigned, Unsigned8, Unsigned16
from .constructeddata import AnyAtomic, Array, ArrayOf, List, ListOf, \
Choice, Element, Sequence
from .basetypes import AccessCredentialDisable, AccessCredentialDisableReason, \
AccessEvent, AccessPassbackMode, AccessRule, AccessThreatLevel, \
AccessUserType, AccessZoneOccupancyState, AccumulatorRecord, Action, \
ActionList, AddressBinding, AssignedAccessRights, AuditOperationFlags, AuditLevel, \
AuthenticationFactor, \
AuthenticationFactorFormat, AuthenticationPolicy, AuthenticationStatus, \
AuthorizationException, AuthorizationMode, BackupState, BDTEntry, BinaryPV, \
COVSubscription, CalendarEntry, ChannelValue, ClientCOV, \
CredentialAuthenticationFactor, DailySchedule, DateRange, DateTime, \
Destination, DeviceObjectPropertyReference, DeviceObjectReference, \
DeviceStatus, DoorAlarmState, DoorSecuredStatus, DoorStatus, DoorValue, \
EngineeringUnits, EventNotificationSubscription, EventParameter, \
EventState, EventTransitionBits, EventType, FaultParameter, FaultType, \
FileAccessMethod, FDTEntry, IPMode, HostNPort, LifeSafetyMode, LifeSafetyOperation, LifeSafetyState, \
LightingCommand, LightingInProgress, LightingTransition, LimitEnable, \
LockStatus, LogMultipleRecord, LogRecord, LogStatus, LoggingType, \
Maintenance, NameValue, NetworkNumberQuality, NetworkPortCommand, \
NetworkSecurityPolicy, NetworkType, NodeType, NotifyType, \
ObjectPropertyReference, ObjectTypesSupported, OptionalCharacterString, \
Polarity, PortPermission, Prescale, PriorityArray, ProcessIdSelection, \
ProgramError, ProgramRequest, ProgramState, PropertyAccessResult, \
PropertyIdentifier, ProtocolLevel, Recipient, Reliability, RestartReason, \
RouterEntry, Scale, SecurityKeySet, SecurityLevel, Segmentation, \
ServicesSupported, SetpointReference, ShedLevel, ShedState, SilencedState, \
SpecialEvent, StatusFlags, TimeStamp, VTClass, VTSession, VMACEntry, \
WriteStatus, OptionalUnsigned, PriorityFilter, ValueSource, \
OptionalPriorityFilter, OptionalReal, AuditNotification, PropertyReference, \
AuditLogRecord, ObjectSelector, OptionalBinaryPV, BinaryLightingPV, \
COVMultipleSubscription, LiftGroupMode, LandingCallStatus, LiftCarDirection, \
EscalatorOperationDirection, EscalatorMode, LiftFault, AssignedLandingCalls, \
LiftCarCallList, LiftCarDoorCommand, LiftCarDriveStatus, LiftCarMode, \
LandingDoorStatus, StageLimitValue, NameValueCollection, Relationship, \
TimerState, TimerStateChangeValue, TimerTransition
from .apdu import EventNotificationParameters, ReadAccessSpecification, \
ReadAccessResult
# some debugging
_debug = 0   # set non-zero to enable the `if _debug:` trace calls below
_log = ModuleLogger(globals())
#
# PropertyError
#
class PropertyError(AttributeError):
    """Raised when attribute access names an unknown object property."""
# a dictionary of object types and classes:
# maps (objectType, vendor_id) -> Object subclass, filled in by
# register_object_type()
registered_object_types = {}

#
#   register_object_type
#

@bacpypes_debugging
def register_object_type(cls=None, vendor_id=0):
    """Class decorator that registers an Object subclass under its objectType.

    Usable bare (@register_object_type) or with arguments
    (@register_object_type(vendor_id=...)); in the latter case cls is None
    and a decorator closure is returned.
    """
    if _debug: register_object_type._debug("register_object_type %s vendor_id=%s", repr(cls), vendor_id)

    # if cls isn't given, return a decorator
    if not cls:
        def _register(xcls):
            if _debug: register_object_type._debug("_register %s (vendor_id=%s)", repr(cls), vendor_id)
            return register_object_type(xcls, vendor_id)
        if _debug: register_object_type._debug(" - returning decorator")

        return _register

    # make sure it's an Object derived class
    if not issubclass(cls, Object):
        raise RuntimeError("Object derived class required")

    # build a property dictionary by going through the class and all its parents;
    # MRO order means the most-derived definition of a property identifier wins
    _properties = {}
    for c in cls.__mro__:
        if _debug: register_object_type._debug("    - c: %r", c)
        for prop in getattr(c, 'properties', []):
            if prop.identifier not in _properties:
                _properties[prop.identifier] = prop

    # if the object type hasn't been provided, make an immutable one
    if 'objectType' not in _properties:
        _properties['objectType'] = ReadableProperty('objectType', ObjectType, cls.objectType, mutable=False)

    # store this in the class
    cls._properties = _properties

    # now save this in all our types
    registered_object_types[(cls.objectType, vendor_id)] = cls

    # return the class as a decorator
    return cls
#
# get_object_class
#
@bacpypes_debugging
def get_object_class(object_type, vendor_id=0):
    """Return the class associated with an object type.

    A vendor-specific registration takes precedence; when none exists and
    vendor_id is non-zero, fall back to the standard (vendor 0) class.
    Returns None when neither is registered.
    """
    if _debug: get_object_class._debug("get_object_class %r vendor_id=%r", object_type, vendor_id)

    # find the klass as given
    cls = registered_object_types.get((object_type, vendor_id))
    if _debug: get_object_class._debug("    - direct lookup: %s", repr(cls))

    # if the class isn't found and the vendor id is non-zero, try the standard class for the type
    if (not cls) and vendor_id:
        cls = registered_object_types.get((object_type, 0))
        if _debug: get_object_class._debug("    - default lookup: %s", repr(cls))

    return cls
#
# get_datatype
#
@bacpypes_debugging
def get_datatype(object_type, propid, vendor_id=0):
    """Return the datatype for the property of an object.

    Returns None when the object type is not registered or the property
    identifier is unknown for that class.
    """
    if _debug: get_datatype._debug("get_datatype %r %r vendor_id=%r", object_type, propid, vendor_id)

    # get the related class
    cls = get_object_class(object_type, vendor_id)
    if not cls:
        return None

    # get the property
    prop = cls._properties.get(propid)
    if not prop:
        return None

    # return the datatype
    return prop.datatype
#
# Property
#
@bacpypes_debugging
class Property(object):
    """Definition of one property of a BACnet object.

    Bundles the property identifier, its datatype, and its access rules
    (optional/mutable), and implements the read/write plumbing used by
    Object instances, including datatype validation, array handling, and
    property-monitor callbacks.
    """

    def __init__(self, identifier, datatype, default=None, optional=True, mutable=True):
        if _debug:
            Property._debug("__init__ %s %s default=%r optional=%r mutable=%r",
                identifier, datatype, default, optional, mutable
                )

        # keep the arguments
        self.identifier = identifier

        # check the datatype
        self.datatype = datatype
        if not issubclass(datatype, (Atomic, Sequence, Choice, Array, List, AnyAtomic)):
            raise TypeError("invalid datatype for property: %s" % (identifier,))

        self.optional = optional
        self.mutable = mutable
        self.default = default

    def ReadProperty(self, obj, arrayIndex=None):
        """Return the current value of this property of obj; with arrayIndex
        the property datatype must be an Array and the indexed element is
        returned."""
        if _debug:
            Property._debug("ReadProperty(%s) %s arrayIndex=%r",
                self.identifier, obj, arrayIndex
                )

        # get the value
        value = obj._values[self.identifier]
        if _debug: Property._debug("    - value: %r", value)

        # access an array
        if arrayIndex is not None:
            if not issubclass(self.datatype, Array):
                raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')

            if value is not None:
                try:
                    # dive in, the water's fine
                    value = value[arrayIndex]
                except IndexError:
                    raise ExecutionError(errorClass='property', errorCode='invalidArrayIndex')

        # all set
        return value

    def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
        """Validate and store a new value for this property of obj.

        direct=True bypasses the optional/mutable/datatype checks (used for
        internal initialization).  Registered property monitors are called
        with (old_value, new_value) after a successful store.
        """
        if _debug:
            Property._debug("WriteProperty(%s) %s %r arrayIndex=%r priority=%r direct=%r",
                self.identifier, obj, value, arrayIndex, priority, direct
                )

        if direct:
            if _debug: Property._debug("    - direct write")
        else:
            # see if it must be provided
            if not self.optional and value is None:
                raise ValueError("%s value required" % (self.identifier,))

            # see if it can be changed
            if not self.mutable:
                if _debug: Property._debug("    - property is immutable")
                raise ExecutionError(errorClass='property', errorCode='writeAccessDenied')

            # if changing the length of the array, the value is unsigned
            if arrayIndex == 0:
                if not Unsigned.is_valid(value):
                    raise InvalidParameterDatatype("length of %s must be unsigned" % (
                        self.identifier,
                        ))

            # if it's atomic, make sure it's valid
            elif issubclass(self.datatype, AnyAtomic):
                if _debug: Property._debug("    - property is any atomic, checking value")
                if not isinstance(value, Atomic):
                    raise InvalidParameterDatatype("%s must be an atomic instance" % (
                        self.identifier,
                        ))

            elif issubclass(self.datatype, Atomic):
                if _debug: Property._debug("    - property is atomic, checking value")
                if not self.datatype.is_valid(value):
                    raise InvalidParameterDatatype("%s must be of type %s" % (
                        self.identifier, self.datatype.__name__,
                        ))

            # if it's an array, make sure it's valid regarding arrayIndex provided
            elif issubclass(self.datatype, Array):
                if _debug: Property._debug("    - property is array, checking subtype and index")

                # changing a single element
                if arrayIndex is not None:
                    # if it's atomic, make sure it's valid
                    if issubclass(self.datatype.subtype, Atomic):
                        if _debug: Property._debug("    - subtype is atomic, checking value")
                        if not self.datatype.subtype.is_valid(value):
                            raise InvalidParameterDatatype("%s must be of type %s" % (
                                self.identifier, self.datatype.__name__,
                                ))
                    # constructed type
                    elif not isinstance(value, self.datatype.subtype):
                        raise InvalidParameterDatatype("%s must be of type %s" % (
                            self.identifier, self.datatype.subtype.__name__
                            ))

                # replacing the array
                elif isinstance(value, list):
                    # check validity regarding subtype
                    for item in value:
                        # if it's atomic, make sure it's valid
                        if issubclass(self.datatype.subtype, Atomic):
                            if _debug: Property._debug("    - subtype is atomic, checking value")
                            if not self.datatype.subtype.is_valid(item):
                                raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                                    self.identifier, self.datatype.subtype.__name__,
                                    ))
                        # constructed type
                        elif not isinstance(item, self.datatype.subtype):
                            raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                                self.identifier, self.datatype.subtype.__name__
                                ))

                    # value is mutated into a new array
                    value = self.datatype(value)

            # if it's a list, make sure it's valid regarding arrayIndex provided
            elif issubclass(self.datatype, List):
                if _debug: Property._debug("    - property is list, checking subtype")

                # changing a single element
                if arrayIndex is not None:
                    raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')

                # replacing the array
                if not isinstance(value, list):
                    raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                        self.identifier, self.datatype.subtype.__name__
                        ))

                # check validity regarding subtype
                for item in value:
                    # if it's atomic, make sure it's valid
                    if issubclass(self.datatype.subtype, Atomic):
                        if _debug: Property._debug("    - subtype is atomic, checking value")
                        if not self.datatype.subtype.is_valid(item):
                            raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                                self.identifier, self.datatype.subtype.__name__,
                                ))
                    # constructed type
                    elif not isinstance(item, self.datatype.subtype):
                        raise InvalidParameterDatatype("elements of %s must be of type %s" % (
                            self.identifier, self.datatype.subtype.__name__
                            ))

                # value is mutated into a new list
                value = self.datatype(value)

            # some kind of constructed data
            elif not isinstance(value, self.datatype):
                if _debug: Property._debug("    - property is not atomic and wrong type")
                raise InvalidParameterDatatype("%s must be of type %s" % (
                    self.identifier, self.datatype.__name__,
                    ))

        # local check if the property is monitored
        is_monitored = self.identifier in obj._property_monitors

        if arrayIndex is not None:
            if not issubclass(self.datatype, Array):
                raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')

            # check the array
            arry = obj._values[self.identifier]
            if arry is None:
                raise RuntimeError("%s uninitialized array" % (self.identifier,))

            if is_monitored:
                # snapshot the whole array so monitors see the pre-write state
                old_value = _copy(arry)

            # seems to be OK, let the array object take over
            if _debug: Property._debug("    - forwarding to array")
            try:
                arry[arrayIndex] = value
            except IndexError:
                raise ExecutionError(errorClass='property', errorCode='invalidArrayIndex')
            except TypeError:
                raise ExecutionError(errorClass='property', errorCode='valueOutOfRange')

            # check for monitors, call each one with the old and new value
            if is_monitored:
                for fn in obj._property_monitors[self.identifier]:
                    if _debug: Property._debug("    - monitor: %r", fn)
                    fn(old_value, arry)

        else:
            if is_monitored:
                old_value = obj._values.get(self.identifier, None)

            # seems to be OK
            obj._values[self.identifier] = value

            # check for monitors, call each one with the old and new value
            if is_monitored:
                for fn in obj._property_monitors[self.identifier]:
                    if _debug: Property._debug("    - monitor: %r", fn)
                    fn(old_value, value)
#
# StandardProperty
#
@bacpypes_debugging
class StandardProperty(Property):
    """Base class for properties whose identifier is one of the standard
    BACnet property enumerations; must be instantiated via one of the
    OptionalProperty/ReadableProperty/WritableProperty subclasses."""

    def __init__(self, identifier, datatype, default=None, optional=True, mutable=True):
        if _debug:
            StandardProperty._debug("__init__ %s %s default=%r optional=%r mutable=%r",
                identifier, datatype, default, optional, mutable
                )

        # use one of the subclasses
        if not isinstance(self, (OptionalProperty, ReadableProperty, WritableProperty)):
            raise ConfigurationError(self.__class__.__name__ + " must derive from OptionalProperty, ReadableProperty, or WritableProperty")

        # validate the identifier to be one of the standard property enumerations
        if identifier not in PropertyIdentifier.enumerations:
            raise ConfigurationError("unknown standard property identifier: %s" % (identifier,))

        # continue with the initialization
        Property.__init__(self, identifier, datatype, default, optional, mutable)
#
# OptionalProperty
#
@bacpypes_debugging
class OptionalProperty(StandardProperty):
    """A standard property that may be omitted from an object; defaults to
    not mutable."""

    def __init__(self, identifier, datatype, default=None, optional=True, mutable=False):
        if _debug:
            OptionalProperty._debug(
                "__init__ %s %s default=%r optional=%r mutable=%r",
                identifier, datatype, default, optional, mutable,
            )
        # hand everything to the standard-property initializer
        StandardProperty.__init__(
            self, identifier, datatype, default, optional, mutable,
        )
#
# ReadableProperty
#
@bacpypes_debugging
class ReadableProperty(StandardProperty):
    """A standard property that is required to be present and readable via
    BACnet services; defaults to not mutable."""

    def __init__(self, identifier, datatype, default=None, optional=False, mutable=False):
        if _debug:
            ReadableProperty._debug(
                "__init__ %s %s default=%r optional=%r mutable=%r",
                identifier, datatype, default, optional, mutable,
            )
        # hand everything to the standard-property initializer
        StandardProperty.__init__(
            self, identifier, datatype, default, optional, mutable,
        )
#
# WritableProperty
#
@bacpypes_debugging
class WritableProperty(StandardProperty):
    """A standard property that is required to be present, readable, and
    writable via BACnet services."""

    def __init__(self, identifier, datatype, default=None, optional=False, mutable=True):
        if _debug:
            WritableProperty._debug(
                "__init__ %s %s default=%r optional=%r mutable=%r",
                identifier, datatype, default, optional, mutable,
            )
        # hand everything to the standard-property initializer
        StandardProperty.__init__(
            self, identifier, datatype, default, optional, mutable,
        )
#
# ObjectIdentifierProperty
#
@bacpypes_debugging
class ObjectIdentifierProperty(ReadableProperty):
    """Readable property that normalizes object-identifier writes: a bare
    instance number becomes (objectType, instance), and a 2-tuple must
    carry the object's own type."""

    def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
        if _debug:
            ObjectIdentifierProperty._debug(
                "WriteProperty %r %r arrayIndex=%r priority=%r",
                obj, value, arrayIndex, priority,
            )

        # normalize anything that isn't None
        if value is not None:
            if isinstance(value, int):
                value = (obj.objectType, value)
            elif isinstance(value, tuple) and len(value) == 2:
                if value[0] != obj.objectType:
                    raise ValueError("%s required" % (obj.objectType,))
            else:
                raise TypeError("object identifier")

        return Property.WriteProperty(self, obj, value, arrayIndex, priority, direct)
#
# Object
#
@bacpypes_debugging
class Object:
    """Base class for all BACnet objects; property access is routed through
    the Property instances collected by register_object_type()."""

    _debug_contents = ('_app',)
    # subclasses that support COV set this True -- presumably checked by the
    # application layer; TODO confirm where it is consumed
    _object_supports_cov = False

    properties = \
        [ ObjectIdentifierProperty('objectIdentifier', ObjectIdentifier, optional=False)
        , ReadableProperty('objectName', CharacterString, optional=False)
        , OptionalProperty('description', CharacterString)
        , OptionalProperty('profileName', CharacterString)
        , ReadableProperty('propertyList', ArrayOf(PropertyIdentifier))
        , OptionalProperty('auditLevel', AuditLevel)
        , OptionalProperty('auditableOperations', AuditOperationFlags)
        , OptionalProperty('tags', ArrayOf(NameValue))
        , OptionalProperty('profileLocation', CharacterString)
        # NOTE(review): 'profileName' appears twice in this list (see above);
        # harmless because register_object_type() de-duplicates by identifier,
        # but one entry should probably be removed.
        , OptionalProperty('profileName', CharacterString)
        ]

    # filled in per-class by register_object_type()
    _properties = {}
    def __init__(self, **kwargs):
        """Create an object, with default property values as needed.

        Keyword arguments are property identifiers with initial values;
        an unknown name raises PropertyError.
        """
        if _debug: Object._debug("__init__(%s) %r", self.__class__.__name__, kwargs)

        # map the python names into property names and make sure they
        # are appropriate for this object
        initargs = {}
        for key, value in kwargs.items():
            if key not in self._properties:
                raise PropertyError(key)
            initargs[key] = value

        # object is detached from an application until it is added
        self._app = None

        # start with a clean dict of values
        self._values = {}

        # empty list of property monitors
        self._property_monitors = defaultdict(list)

        # initialize the object
        for propid, prop in self._properties.items():
            if propid in initargs:
                if _debug: Object._debug("    - setting %s from initargs", propid)
                # defer to the property object for error checking
                prop.WriteProperty(self, initargs[propid], direct=True)
            elif prop.default is not None:
                if _debug: Object._debug("    - setting %s from default", propid)
                # default values bypass property interface
                self._values[propid] = prop.default
            else:
                if not prop.optional:
                    if _debug: Object._debug("    - %s value required", propid)
                # properties without an initial value still get a None slot
                self._values[propid] = None

        if _debug: Object._debug("    - done __init__")
def _attr_to_property(self, attr):
"""Common routine to translate a python attribute name to a property name and
return the appropriate property."""
# get the property
prop = self._properties.get(attr)
if not prop:
raise PropertyError(attr)
# found it
return prop
def __getattr__(self, attr):
if _debug: Object._debug("__getattr__ %r", attr)
# do not redirect private attrs or functions
if attr.startswith('_') or attr[0].isupper() or (attr == 'debug_contents'):
return object.__getattribute__(self, attr)
# defer to the property to get the value
prop = self._attr_to_property(attr)
if _debug: Object._debug(" - deferring to %r", prop)
# defer to the property to get the value
return prop.ReadProperty(self)
def __setattr__(self, attr, value):
if _debug: Object._debug("__setattr__ %r %r", attr, value)
if attr.startswith('_') or attr[0].isupper() or (attr == 'debug_contents'):
return object.__setattr__(self, attr, value)
# defer to the property to normalize the value
prop = self._attr_to_property(attr)
if _debug: Object._debug(" - deferring to %r", prop)
return prop.WriteProperty(self, value, direct=True)
def add_property(self, prop):
"""Add a property to an object. The property is an instance of
a Property or one of its derived classes. Adding a property
disconnects it from the collection of properties common to all of the
objects of its class."""
if _debug: Object._debug("add_property %r", prop)
# make a copy of the properties dictionary
self._properties = _copy(self._properties)
# save the property reference and default value (usually None)
self._properties[prop.identifier] = prop
self._values[prop.identifier] = prop.default
def delete_property(self, prop):
"""Delete a property from an object. The property is an instance of
a Property or one of its derived classes, but only the property
is relavent. Deleting a property disconnects it from the collection of
properties common to all of the objects of its class."""
if _debug: Object._debug("delete_property %r", prop)
# make a copy of the properties dictionary
self._properties = _copy(self._properties)
# delete the property from the dictionary and values
del self._properties[prop.identifier]
if prop.identifier in self._values:
del self._values[prop.identifier]
def ReadProperty(self, propid, arrayIndex=None):
if _debug: Object._debug("ReadProperty %r arrayIndex=%r", propid, arrayIndex)
# get the property
prop = self._properties.get(propid)
if not prop:
raise PropertyError(propid)
# defer to the property to get the value
return prop.ReadProperty(self, arrayIndex)
def WriteProperty(self, propid, value, arrayIndex=None, priority=None, direct=False):
if _debug: Object._debug("WriteProperty %r %r arrayIndex=%r priority=%r", propid, value, arrayIndex, priority)
# get the property
prop = self._properties.get(propid)
if not prop:
raise PropertyError(propid)
# defer to the property to set the value
return prop.WriteProperty(self, value, arrayIndex, priority, direct)
def get_datatype(self, propid):
"""Return the datatype for the property of an object."""
if _debug: Object._debug("get_datatype %r", propid)
# get the property
prop = self._properties.get(propid)
if not prop:
raise PropertyError(propid)
# return the datatype
return prop.datatype
def _dict_contents(self, use_dict=None, as_class=dict):
"""Return the contents of an object as a dict."""
if _debug: Object._debug("dict_contents use_dict=%r as_class=%r", use_dict, as_class)
# make/extend the dictionary of content
if use_dict is None:
use_dict = as_class()
klasses = list(self.__class__.__mro__)
klasses.reverse()
# build a list of property identifiers "bottom up"
property_names = []
properties_seen = set()
for c in klasses:
for prop in getattr(c, 'properties', []):
if prop.identifier not in properties_seen:
property_names.append(prop.identifier)
properties_seen.add(prop.identifier)
# extract the values
for property_name in property_names:
# get the value
property_value = self._properties.get(property_name).ReadProperty(self)
if property_value is None:
continue
# if the value has a way to convert it to a dict, use it
if hasattr(property_value, "dict_contents"):
property_value = property_value.dict_contents(as_class=as_class)
# save the value
use_dict.__setitem__(property_name, property_value)
# return what we built/updated
return use_dict
def debug_contents(self, indent=1, file=sys.stdout, _ids=None):
"""Print out interesting things about the object."""
klasses = list(self.__class__.__mro__)
klasses.reverse()
# print special attributes "bottom up"
previous_attrs = ()
for c in klasses:
attrs = getattr(c, '_debug_contents', ())
# if we have seen this list already, move to the next class
if attrs is previous_attrs:
continue
for attr in attrs:
file.write("%s%s = %s\n" % (" " * indent, attr, getattr(self, attr)))
previous_attrs = attrs
# build a list of property identifiers "bottom up"
property_names = []
properties_seen = set()
for c in klasses:
for prop in getattr(c, 'properties', []):
if prop.identifier not in properties_seen:
property_names.append(prop.identifier)
properties_seen.add(prop.identifier)
# print out the values
for property_name in property_names:
property_value = self._values.get(property_name, None)
# printing out property values that are None is tedious
if property_value is None:
continue
if hasattr(property_value, "debug_contents"):
file.write("%s%s\n" % (" " * indent, property_name))
property_value.debug_contents(indent+1, file, _ids)
else:
file.write("%s%s = %r\n" % (" " * indent, property_name, property_value))
#
# Standard Object Types
#
@register_object_type
class AccessCredentialObject(Object):
    """Standard BACnet Access Credential object type."""

    objectType = 'accessCredential'

    properties = [
        WritableProperty('globalIdentifier', Unsigned),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('credentialStatus', BinaryPV),
        ReadableProperty('reasonForDisable', ListOf(AccessCredentialDisableReason)),
        ReadableProperty('authenticationFactors', ArrayOf(CredentialAuthenticationFactor)),
        ReadableProperty('activationTime', DateTime),
        ReadableProperty('expirationTime', DateTime),
        ReadableProperty('credentialDisable', AccessCredentialDisable),
        OptionalProperty('daysRemaining', Integer),
        OptionalProperty('usesRemaining', Integer),
        OptionalProperty('absenteeLimit', Unsigned),
        OptionalProperty('belongsTo', DeviceObjectReference),
        ReadableProperty('assignedAccessRights', ArrayOf(AssignedAccessRights)),
        OptionalProperty('lastAccessPoint', DeviceObjectReference),
        OptionalProperty('lastAccessEvent', AccessEvent),
        OptionalProperty('lastUseTime', DateTime),
        OptionalProperty('traceFlag', Boolean),
        OptionalProperty('threatAuthority', AccessThreatLevel),
        OptionalProperty('extendedTimeEnable', Boolean),
        OptionalProperty('authorizationExemptions', ListOf(AuthorizationException)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        # not mapped in this revision:
        # OptionalProperty('masterExemption', Boolean),
        # OptionalProperty('passbackExemption', Boolean),
        # OptionalProperty('occupancyExemption', Boolean),
    ]
@register_object_type
class AccessDoorObject(Object):
    """Standard BACnet Access Door object type (supports COV reporting)."""

    objectType = 'accessDoor'
    _object_supports_cov = True

    properties = [
        WritableProperty('presentValue', DoorValue),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('priorityArray', PriorityArray),
        ReadableProperty('relinquishDefault', DoorValue),
        OptionalProperty('doorStatus', DoorStatus),
        OptionalProperty('lockStatus', LockStatus),
        OptionalProperty('securedStatus', DoorSecuredStatus),
        OptionalProperty('doorMembers', ArrayOf(DeviceObjectReference)),
        ReadableProperty('doorPulseTime', Unsigned),
        ReadableProperty('doorExtendedPulseTime', Unsigned),
        OptionalProperty('doorUnlockDelayTime', Unsigned),
        ReadableProperty('doorOpenTooLongTime', Unsigned),
        OptionalProperty('doorAlarmState', DoorAlarmState),
        OptionalProperty('maskedAlarmValues', ListOf(DoorAlarmState)),
        OptionalProperty('maintenanceRequired', Maintenance),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('alarmValues', ListOf(DoorAlarmState)),
        OptionalProperty('faultValues', ListOf(DoorAlarmState)),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('currentCommandPriority', OptionalUnsigned),
        OptionalProperty('valueSource', ValueSource),
        OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16)),
        OptionalProperty('lastCommandTime', TimeStamp),
        OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16)),
        OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter),
    ]
@register_object_type
class AccessPointObject(Object):
    """Standard BACnet Access Point object type (supports COV reporting)."""

    objectType = 'accessPoint'
    _object_supports_cov = True

    properties = [
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('authenticationStatus', AuthenticationStatus),
        ReadableProperty('activeAuthenticationPolicy', Unsigned),
        ReadableProperty('numberOfAuthenticationPolicies', Unsigned),
        OptionalProperty('authenticationPolicyList', ArrayOf(AuthenticationPolicy)),
        OptionalProperty('authenticationPolicyNames', ArrayOf(CharacterString)),
        ReadableProperty('authorizationMode', AuthorizationMode),
        OptionalProperty('verificationTime', Unsigned),
        OptionalProperty('lockout', Boolean),
        OptionalProperty('lockoutRelinquishTime', Unsigned),
        OptionalProperty('failedAttempts', Unsigned),
        OptionalProperty('failedAttemptEvents', ListOf(AccessEvent)),
        OptionalProperty('maxFailedAttempts', Unsigned),
        OptionalProperty('failedAttemptsTime', Unsigned),
        OptionalProperty('threatLevel', AccessThreatLevel),
        OptionalProperty('occupancyUpperLimitEnforced', Boolean),
        OptionalProperty('occupancyLowerLimitEnforced', Boolean),
        OptionalProperty('occupancyCountAdjust', Boolean),
        OptionalProperty('accompanimentTime', Unsigned),
        ReadableProperty('accessEvent', AccessEvent),
        ReadableProperty('accessEventTag', Unsigned),
        ReadableProperty('accessEventTime', TimeStamp),
        ReadableProperty('accessEventCredential', DeviceObjectReference),
        OptionalProperty('accessEventAuthenticationFactor', AuthenticationFactor),
        ReadableProperty('accessDoors', ArrayOf(DeviceObjectReference)),
        ReadableProperty('priorityForWriting', Unsigned),
        OptionalProperty('musterPoint', Boolean),
        OptionalProperty('zoneTo', DeviceObjectReference),
        OptionalProperty('zoneFrom', DeviceObjectReference),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('transactionNotificationClass', Unsigned),
        OptionalProperty('accessAlarmEvents', ListOf(AccessEvent)),
        OptionalProperty('accessTransactionEvents', ListOf(AccessEvent)),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AccessRightsObject(Object):
    """Standard BACnet Access Rights object type."""

    objectType = 'accessRights'

    properties = [
        WritableProperty('globalIdentifier', Unsigned),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('enable', Boolean),
        ReadableProperty('negativeAccessRules', ArrayOf(AccessRule)),
        ReadableProperty('positiveAccessRules', ArrayOf(AccessRule)),
        OptionalProperty('accompaniment', DeviceObjectReference),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AccessUserObject(Object):
    """Standard BACnet Access User object type."""

    objectType = 'accessUser'

    properties = [
        WritableProperty('globalIdentifier', Unsigned),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('userType', AccessUserType),
        OptionalProperty('userName', CharacterString),
        OptionalProperty('userExternalIdentifier', CharacterString),
        OptionalProperty('userInformationReference', CharacterString),
        OptionalProperty('members', ListOf(DeviceObjectReference)),
        OptionalProperty('memberOf', ListOf(DeviceObjectReference)),
        ReadableProperty('credentials', ListOf(DeviceObjectReference)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AccessZoneObject(Object):
    """Standard BACnet Access Zone object type."""

    objectType = 'accessZone'

    properties = [
        WritableProperty('globalIdentifier', Unsigned),
        ReadableProperty('occupancyState', AccessZoneOccupancyState),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        ReadableProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        OptionalProperty('occupancyCount', Unsigned),
        OptionalProperty('occupancyCountEnable', Boolean),
        OptionalProperty('adjustValue', Integer),
        OptionalProperty('occupancyUpperLimit', Unsigned),
        OptionalProperty('occupancyLowerLimit', Unsigned),
        OptionalProperty('credentialsInZone', ListOf(DeviceObjectReference)),
        OptionalProperty('lastCredentialAdded', DeviceObjectReference),
        OptionalProperty('lastCredentialAddedTime', DateTime),
        OptionalProperty('lastCredentialRemoved', DeviceObjectReference),
        OptionalProperty('lastCredentialRemovedTime', DateTime),
        OptionalProperty('passbackMode', AccessPassbackMode),
        OptionalProperty('passbackTimeout', Unsigned),
        ReadableProperty('entryPoints', ListOf(DeviceObjectReference)),
        ReadableProperty('exitPoints', ListOf(DeviceObjectReference)),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('alarmValues', ListOf(AccessZoneOccupancyState)),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AccumulatorObject(Object):
    """Standard BACnet Accumulator object type."""

    objectType = 'accumulator'

    properties = [
        ReadableProperty('presentValue', Unsigned),
        OptionalProperty('deviceType', CharacterString),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('scale', Scale),
        ReadableProperty('units', EngineeringUnits),
        OptionalProperty('prescale', Prescale),
        ReadableProperty('maxPresValue', Unsigned),
        OptionalProperty('valueChangeTime', DateTime),
        OptionalProperty('valueBeforeChange', Unsigned),
        OptionalProperty('valueSet', Unsigned),
        OptionalProperty('loggingRecord', AccumulatorRecord),
        OptionalProperty('loggingObject', ObjectIdentifier),
        OptionalProperty('pulseRate', Unsigned),
        OptionalProperty('highLimit', Unsigned),
        OptionalProperty('lowLimit', Unsigned),
        OptionalProperty('limitMonitoringInterval', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('limitEnable', LimitEnable),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('faultHighLimit', Unsigned),
        OptionalProperty('faultLowLimit', Unsigned),
    ]
@register_object_type
class AlertEnrollmentObject(Object):
    """Standard BACnet Alert Enrollment object type."""

    objectType = 'alertEnrollment'

    properties = [
        ReadableProperty('presentValue', ObjectIdentifier),
        ReadableProperty('eventState', EventState),
        ReadableProperty('eventDetectionEnable', Boolean),
        ReadableProperty('notificationClass', Unsigned),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
    ]
@register_object_type
class AnalogInputObject(Object):
    """Standard BACnet Analog Input object type (supports COV reporting)."""

    objectType = 'analogInput'
    _object_supports_cov = True

    properties = [
        ReadableProperty('presentValue', Real),
        OptionalProperty('deviceType', CharacterString),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        OptionalProperty('updateInterval', Unsigned),
        ReadableProperty('units', EngineeringUnits),
        OptionalProperty('minPresValue', Real),
        OptionalProperty('maxPresValue', Real),
        OptionalProperty('resolution', Real),
        OptionalProperty('covIncrement', Real),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('highLimit', Real),
        OptionalProperty('lowLimit', Real),
        OptionalProperty('deadband', Real),
        OptionalProperty('limitEnable', LimitEnable),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('interfaceValue', OptionalReal),
        OptionalProperty('faultHighLimit', Real),
        OptionalProperty('faultLowLimit', Real),
    ]
@register_object_type
class AnalogOutputObject(Object):
    """Standard BACnet Analog Output object type (supports COV reporting)."""

    objectType = 'analogOutput'
    _object_supports_cov = True

    properties = [
        WritableProperty('presentValue', Real),
        OptionalProperty('deviceType', CharacterString),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('units', EngineeringUnits),
        OptionalProperty('minPresValue', Real),
        OptionalProperty('maxPresValue', Real),
        OptionalProperty('resolution', Real),
        ReadableProperty('priorityArray', PriorityArray),
        ReadableProperty('relinquishDefault', Real),
        OptionalProperty('covIncrement', Real),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('highLimit', Real),
        OptionalProperty('lowLimit', Real),
        OptionalProperty('deadband', Real),
        OptionalProperty('limitEnable', LimitEnable),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('interfaceValue', OptionalReal),
        OptionalProperty('currentCommandPriority', OptionalUnsigned),
        OptionalProperty('valueSource', ValueSource),
        OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16)),
        OptionalProperty('lastCommandTime', TimeStamp),
        OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16)),
        OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter),
    ]
@register_object_type
class AnalogValueObject(Object):
    """Standard BACnet Analog Value object type (supports COV reporting).

    NOTE(review): the original declaration listed 'minPresValue',
    'maxPresValue', and 'resolution' twice; the duplicates were removed.
    This is behavior-neutral because the property map is keyed by the
    property identifier, so the later entries simply overwrote the first.
    """
    objectType = 'analogValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', Real)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , OptionalProperty('minPresValue', Real)
        , OptionalProperty('maxPresValue', Real)
        , OptionalProperty('resolution', Real)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Real)
        , OptionalProperty('covIncrement', Real)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('highLimit', Real)
        , OptionalProperty('lowLimit', Real)
        , OptionalProperty('deadband', Real)
        , OptionalProperty('limitEnable', LimitEnable)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('faultHighLimit', Real)
        , OptionalProperty('faultLowLimit', Real)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class AuditLogObject(Object):
    """Standard BACnet Audit Log object type."""

    objectType = 'auditLog'

    properties = [
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        WritableProperty('enable', Boolean),
        ReadableProperty('bufferSize', Unsigned),        # Unsigned32
        ReadableProperty('logBuffer', ListOf(AuditLogRecord)),
        ReadableProperty('recordCount', Unsigned),       # Unsigned64
        ReadableProperty('totalRecordCount', Unsigned),  # Unsigned64
        OptionalProperty('memberOf', DeviceObjectReference),
        OptionalProperty('deleteOnForward', Boolean),
        OptionalProperty('issueConfirmedNotifications', Boolean),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AuditReporterObject(Object):
    """Standard BACnet Audit Reporter object type."""

    objectType = 'auditReporter'

    properties = [
        ReadableProperty('statusFlags', StatusFlags),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('eventState', EventState),
        ReadableProperty('auditLevel', AuditLevel),
        ReadableProperty('auditSourceReporter', Boolean),
        ReadableProperty('auditableOperations', AuditOperationFlags),
        ReadableProperty('auditablePriorityFilter', PriorityFilter),
        ReadableProperty('issueConfirmedNotifications', Boolean),
        OptionalProperty('monitoredObjects', ArrayOf(ObjectSelector)),
        OptionalProperty('maximumSendDelay', Unsigned),
        OptionalProperty('sendNow', Boolean),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class AveragingObject(Object):
    """Standard BACnet Averaging object type."""

    objectType = 'averaging'

    properties = [
        ReadableProperty('minimumValue', Real),
        OptionalProperty('minimumValueTimestamp', DateTime),
        ReadableProperty('averageValue', Real),
        OptionalProperty('varianceValue', Real),
        ReadableProperty('maximumValue', Real),
        OptionalProperty('maximumValueTimestamp', DateTime),
        WritableProperty('attemptedSamples', Unsigned),
        ReadableProperty('validSamples', Unsigned),
        ReadableProperty('objectPropertyReference', DeviceObjectPropertyReference),
        WritableProperty('windowInterval', Unsigned),
        WritableProperty('windowSamples', Unsigned),
    ]
@register_object_type
class BinaryInputObject(Object):
    """Standard BACnet Binary Input object type (supports COV reporting)."""

    objectType = 'binaryInput'
    _object_supports_cov = True

    properties = [
        ReadableProperty('presentValue', BinaryPV),
        OptionalProperty('deviceType', CharacterString),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('polarity', Polarity),
        OptionalProperty('inactiveText', CharacterString),
        OptionalProperty('activeText', CharacterString),
        OptionalProperty('changeOfStateTime', DateTime),
        OptionalProperty('changeOfStateCount', Unsigned),
        OptionalProperty('timeOfStateCountReset', DateTime),
        OptionalProperty('elapsedActiveTime', Unsigned),
        OptionalProperty('timeOfActiveTimeReset', DateTime),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('alarmValue', BinaryPV),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('interfaceValue', OptionalBinaryPV),
    ]
@register_object_type
class BinaryLightingOutputObject(Object):
    """Standard BACnet Binary Lighting Output object type."""

    objectType = 'binaryLightingOutput'

    properties = [
        WritableProperty('presentValue', BinaryLightingPV),
        ReadableProperty('statusFlags', StatusFlags),
        OptionalProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('blinkWarnEnable', Boolean),
        ReadableProperty('egressTime', Unsigned),
        ReadableProperty('egressActive', Boolean),
        OptionalProperty('feedbackValue', BinaryLightingPV),
        ReadableProperty('priorityArray', PriorityArray),
        ReadableProperty('relinquishDefault', BinaryLightingPV),
        OptionalProperty('power', Real),
        OptionalProperty('polarity', Polarity),
        OptionalProperty('elapsedActiveTime', Unsigned),  # Unsigned32
        OptionalProperty('timeOfActiveTimeReset', DateTime),
        OptionalProperty('strikeCount', Unsigned),
        OptionalProperty('timeOfStrikeCountReset', DateTime),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        ReadableProperty('currentCommandPriority', OptionalUnsigned),
        OptionalProperty('valueSource', ValueSource),
        OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16)),
        OptionalProperty('lastCommandTime', TimeStamp),
        OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16)),
        OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter),
    ]
@register_object_type
class BinaryOutputObject(Object):
    """Standard BACnet Binary Output object type (supports COV reporting)."""

    objectType = 'binaryOutput'
    _object_supports_cov = True

    properties = [
        WritableProperty('presentValue', BinaryPV),
        OptionalProperty('deviceType', CharacterString),
        ReadableProperty('statusFlags', StatusFlags),
        ReadableProperty('eventState', EventState),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('polarity', Polarity),
        OptionalProperty('inactiveText', CharacterString),
        OptionalProperty('activeText', CharacterString),
        OptionalProperty('changeOfStateTime', DateTime),
        OptionalProperty('changeOfStateCount', Unsigned),
        OptionalProperty('timeOfStateCountReset', DateTime),
        OptionalProperty('elapsedActiveTime', Unsigned),
        OptionalProperty('timeOfActiveTimeReset', DateTime),
        OptionalProperty('minimumOffTime', Unsigned),
        OptionalProperty('minimumOnTime', Unsigned),
        ReadableProperty('priorityArray', PriorityArray),
        ReadableProperty('relinquishDefault', BinaryPV),
        OptionalProperty('timeDelay', Unsigned),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('feedbackValue', BinaryPV),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference),
        OptionalProperty('eventAlgorithmInhibit', Boolean),
        OptionalProperty('timeDelayNormal', Unsigned),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
        OptionalProperty('interfaceValue', OptionalBinaryPV),
        OptionalProperty('currentCommandPriority', OptionalUnsigned),
        OptionalProperty('valueSource', ValueSource),
        OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16)),
        OptionalProperty('lastCommandTime', TimeStamp),
        OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16)),
        OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter),
    ]
@register_object_type
class BinaryValueObject(Object):
    """BACnet Binary Value object (objectType 'binaryValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).  The object
    supports change-of-value reporting.
    """
    objectType = 'binaryValue'
    _object_supports_cov = True
    properties = \
        [ WritableProperty('presentValue', BinaryPV)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , OptionalProperty('inactiveText', CharacterString)
        , OptionalProperty('activeText', CharacterString)
        , OptionalProperty('changeOfStateTime', DateTime)
        , OptionalProperty('changeOfStateCount', Unsigned)
        , OptionalProperty('timeOfStateCountReset', DateTime)
        , OptionalProperty('elapsedActiveTime', Unsigned)
        , OptionalProperty('timeOfActiveTimeReset', DateTime)
        , OptionalProperty('minimumOffTime', Unsigned)
        , OptionalProperty('minimumOnTime', Unsigned)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', BinaryPV)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('alarmValue', BinaryPV)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class BitStringValueObject(Object):
    """BACnet BitString Value object (objectType 'bitstringValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'bitstringValue'
    properties = \
        [ ReadableProperty('presentValue', BitString)
        , OptionalProperty('bitText', ArrayOf(CharacterString))
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', BitString)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('alarmValues', ArrayOf(BitString))
        , OptionalProperty('bitMask', BitString)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class CalendarObject(Object):
    """BACnet Calendar object (objectType 'calendar').

    presentValue is true when the current date matches an entry in
    dateList.
    """
    objectType = 'calendar'
    properties = [
        ReadableProperty('presentValue', Boolean),
        ReadableProperty('dateList', ListOf(CalendarEntry)),
    ]
@register_object_type
class ChannelObject(Object):
    """BACnet Channel object (objectType 'channel').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'channel'
    properties = \
        [ WritableProperty('presentValue', ChannelValue)
        , ReadableProperty('lastPriority', Unsigned)
        , ReadableProperty('writeStatus', WriteStatus)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , WritableProperty('listOfObjectPropertyReferences', ArrayOf(DeviceObjectPropertyReference))
        , OptionalProperty('executionDelay', ArrayOf(Unsigned))
        , OptionalProperty('allowGroupDelayInhibit', Boolean)
        , WritableProperty('channelNumber', Unsigned)
        , WritableProperty('controlGroups', ArrayOf(Unsigned))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class CharacterStringValueObject(Object):
    """BACnet CharacterString Value object (objectType 'characterstringValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'characterstringValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', CharacterString)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('alarmValues', ArrayOf(OptionalCharacterString))
        , OptionalProperty('faultValues', ArrayOf(OptionalCharacterString))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class CommandObject(Object):
    """BACnet Command object (objectType 'command').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'command'
    properties = \
        [ WritableProperty('presentValue', Unsigned)
        , ReadableProperty('inProcess', Boolean)
        , ReadableProperty('allWritesSuccessful', Boolean)
        , ReadableProperty('action', ArrayOf(ActionList))
        , OptionalProperty('actionText', ArrayOf(CharacterString))
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('valueSource', ValueSource)
        ]
@register_object_type
class CredentialDataInputObject(Object):
    """BACnet Credential Data Input object (objectType 'credentialDataInput').

    Property table for a credential-reader input; supports
    change-of-value reporting.
    """
    objectType = 'credentialDataInput'
    _object_supports_cov = True
    properties = [
        ReadableProperty('presentValue', AuthenticationFactor),
        ReadableProperty('statusFlags', StatusFlags),
        OptionalProperty('reliability', Reliability),
        ReadableProperty('outOfService', Boolean),
        ReadableProperty('supportedFormats', ArrayOf(AuthenticationFactorFormat)),
        OptionalProperty('supportedFormatClasses', ArrayOf(Unsigned)),
        ReadableProperty('updateTime', TimeStamp),
        OptionalProperty('eventDetectionEnable', Boolean),
        OptionalProperty('notificationClass', Unsigned),
        OptionalProperty('eventEnable', EventTransitionBits),
        OptionalProperty('ackedTransitions', EventTransitionBits),
        OptionalProperty('notifyType', NotifyType),
        OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3)),
        OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3)),
        OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3)),
        OptionalProperty('reliabilityEvaluationInhibit', Boolean),
    ]
@register_object_type
class DatePatternValueObject(Object):
    """BACnet Date Pattern Value object (objectType 'datePatternValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'datePatternValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', Date)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Date)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class DateValueObject(Object):
    """BACnet Date Value object (objectType 'dateValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'dateValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', Date)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Date)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class DateTimePatternValueObject(Object):
    """BACnet DateTime Pattern Value object (objectType 'datetimePatternValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'datetimePatternValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', DateTime)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('isUTC', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', DateTime)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class DateTimeValueObject(Object):
    """BACnet DateTime Value object (objectType 'datetimeValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'datetimeValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', DateTime)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', DateTime)
        , OptionalProperty('isUTC', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class DeviceObject(Object):
    """BACnet Device object (objectType 'device').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'device'
    properties = \
        [ ReadableProperty('systemStatus', DeviceStatus)
        , ReadableProperty('vendorName', CharacterString)
        , ReadableProperty('vendorIdentifier', Unsigned)
        , ReadableProperty('modelName', CharacterString)
        , ReadableProperty('firmwareRevision', CharacterString)
        , ReadableProperty('applicationSoftwareVersion', CharacterString)
        , OptionalProperty('location', CharacterString)
        , ReadableProperty('protocolVersion', Unsigned)
        , ReadableProperty('protocolRevision', Unsigned)
        , ReadableProperty('protocolServicesSupported', ServicesSupported)
        , ReadableProperty('protocolObjectTypesSupported', ObjectTypesSupported)
        , ReadableProperty('objectList', ArrayOf(ObjectIdentifier))
        , OptionalProperty('structuredObjectList', ArrayOf(ObjectIdentifier))
        , ReadableProperty('maxApduLengthAccepted', Unsigned)
        , ReadableProperty('segmentationSupported', Segmentation)
        , OptionalProperty('vtClassesSupported', ListOf(VTClass))
        , OptionalProperty('activeVtSessions', ListOf(VTSession))
        , OptionalProperty('localTime', Time)
        , OptionalProperty('localDate', Date)
        , OptionalProperty('utcOffset', Integer)
        , OptionalProperty('daylightSavingsStatus', Boolean)
        , OptionalProperty('apduSegmentTimeout', Unsigned)
        , ReadableProperty('apduTimeout', Unsigned)
        , ReadableProperty('numberOfApduRetries', Unsigned)
        , OptionalProperty('timeSynchronizationRecipients', ListOf(Recipient))
        , OptionalProperty('maxMaster', Unsigned)
        , OptionalProperty('maxInfoFrames', Unsigned)
        , ReadableProperty('deviceAddressBinding', ListOf(AddressBinding))
        , ReadableProperty('databaseRevision', Unsigned)
        , OptionalProperty('configurationFiles', ArrayOf(ObjectIdentifier))
        , OptionalProperty('lastRestoreTime', TimeStamp)
        , OptionalProperty('backupFailureTimeout', Unsigned)
        , OptionalProperty('backupPreparationTime', Unsigned)
        , OptionalProperty('restorePreparationTime', Unsigned)
        , OptionalProperty('restoreCompletionTime', Unsigned)
        , OptionalProperty('backupAndRestoreState', BackupState)
        , OptionalProperty('activeCovSubscriptions', ListOf(COVSubscription))
        , OptionalProperty('maxSegmentsAccepted', Unsigned)
        , OptionalProperty('slaveProxyEnable', ArrayOf(Boolean))
        , OptionalProperty('autoSlaveDiscovery', ArrayOf(Boolean))
        , OptionalProperty('slaveAddressBinding', ListOf(AddressBinding))
        , OptionalProperty('manualSlaveAddressBinding', ListOf(AddressBinding))
        , OptionalProperty('lastRestartReason', RestartReason)
        , OptionalProperty('timeOfDeviceRestart', TimeStamp)
        , OptionalProperty('restartNotificationRecipients', ListOf(Recipient))
        , OptionalProperty('utcTimeSynchronizationRecipients', ListOf(Recipient))
        , OptionalProperty('timeSynchronizationInterval', Unsigned)
        , OptionalProperty('alignIntervals', Boolean)
        , OptionalProperty('intervalOffset', Unsigned)
        , OptionalProperty('serialNumber', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('activeCovMultipleSubscriptions', ListOf(COVMultipleSubscription))
        , OptionalProperty('auditNotificationRecipient', Recipient)
        , OptionalProperty('deviceUUID', OctetString) # size 16
        , OptionalProperty('deployedProfileLocation', CharacterString)
        ]
@register_object_type
class ElevatorGroupObject(Object):
    """BACnet Elevator Group object (objectType 'elevatorGroup').

    Property table for a group of lift/escalator objects in one
    machine room.
    """
    objectType = 'elevatorGroup'
    properties = [
        ReadableProperty('machineRoomID', ObjectIdentifier),
        ReadableProperty('groupID', Unsigned8),
        ReadableProperty('groupMembers', ArrayOf(ObjectIdentifier)),
        OptionalProperty('groupMode', LiftGroupMode),
        OptionalProperty('landingCalls', ListOf(LandingCallStatus)),
        OptionalProperty('landingCallControl', LandingCallStatus),
    ]
@register_object_type
class EscalatorObject(Object):
    """BACnet Escalator object (objectType 'escalator').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'escalator'
    properties = \
        [ ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('elevatorGroup', ObjectIdentifier)
        , ReadableProperty('groupID', Unsigned8)
        , ReadableProperty('installationID', Unsigned8)
        , OptionalProperty('powerMode', Boolean)
        , ReadableProperty('operationDirection', EscalatorOperationDirection)
        , OptionalProperty('escalatorMode', EscalatorMode)
        , OptionalProperty('energyMeter', Real)
        , OptionalProperty('energyMeterRef', DeviceObjectReference)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('faultSignals', ListOf(LiftFault))
        , ReadableProperty('passengerAlarm', Boolean)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class EventEnrollmentObject(Object):
    """BACnet Event Enrollment object (objectType 'eventEnrollment').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'eventEnrollment'
    properties = \
        [ ReadableProperty('eventType', EventType)
        , ReadableProperty('notifyType', NotifyType)
        , ReadableProperty('eventParameters', EventParameter)
        , ReadableProperty('objectPropertyReference', DeviceObjectPropertyReference)
        , ReadableProperty('eventState', EventState)
        , ReadableProperty('eventEnable', EventTransitionBits)
        , ReadableProperty('ackedTransitions', EventTransitionBits)
        , ReadableProperty('notificationClass', Unsigned)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , ReadableProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('reliability', Reliability)
        , OptionalProperty('faultType', FaultType)
        , OptionalProperty('faultParameters', FaultParameter)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
#-----
class EventLogRecordLogDatum(Choice):
    """The datum of one event-log record: a log-status change, an event
    notification, or a time change (in seconds, as a Real)."""
    choiceElements = [
        Element('logStatus', LogStatus, 0),
        Element('notification', EventNotificationParameters, 1),
        Element('timeChange', Real, 2),
    ]
class EventLogRecord(Sequence):
    """One record of an Event Log buffer: a timestamp plus its datum."""
    sequenceElements = [
        Element('timestamp', DateTime, 0),
        Element('logDatum', EventLogRecordLogDatum, 1),
    ]
@register_object_type
class EventLogObject(Object):
    """BACnet Event Log object (objectType 'eventLog').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; logBuffer holds EventLogRecord entries.
    """
    objectType = 'eventLog'
    properties = \
        [ ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , WritableProperty('enable', Boolean)
        , OptionalProperty('startTime', DateTime)
        , OptionalProperty('stopTime', DateTime)
        , ReadableProperty('stopWhenFull', Boolean)
        , ReadableProperty('bufferSize', Unsigned)
        , ReadableProperty('logBuffer', ListOf(EventLogRecord))
        , WritableProperty('recordCount', Unsigned)
        , ReadableProperty('totalRecordCount', Unsigned)
        , OptionalProperty('notificationThreshold', Unsigned)
        , OptionalProperty('recordsSinceNotification', Unsigned)
        , OptionalProperty('lastNotifyRecord', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
#-----
@register_object_type
class FileObject(Object):
    """BACnet File object (objectType 'file').

    Property table describing a file accessible through atomic
    read/write file services.
    """
    objectType = 'file'
    properties = [
        ReadableProperty('fileType', CharacterString),
        ReadableProperty('fileSize', Unsigned),
        ReadableProperty('modificationDate', DateTime),
        WritableProperty('archive', Boolean),
        ReadableProperty('readOnly', Boolean),
        ReadableProperty('fileAccessMethod', FileAccessMethod),
        OptionalProperty('recordCount', Unsigned),
    ]
#-----
@register_object_type
class GlobalGroupObject(Object):
    """BACnet Global Group object (objectType 'globalGroup').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class (readable/writable/optional).
    """
    objectType = 'globalGroup'
    properties = \
        [ ReadableProperty('groupMembers', ArrayOf(DeviceObjectPropertyReference))
        , OptionalProperty('groupMemberNames', ArrayOf(CharacterString))
        , ReadableProperty('presentValue', ArrayOf(PropertyAccessResult))
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , ReadableProperty('memberStatusFlags', StatusFlags)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , OptionalProperty('updateInterval', Unsigned)
        , OptionalProperty('requestedUpdateInterval', Unsigned)
        , OptionalProperty('covResubscriptionInterval', Unsigned)
        , OptionalProperty('clientCovIncrement', ClientCOV)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('covuPeriod', Unsigned)
        , OptionalProperty('covuRecipients', ListOf(Recipient))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class GroupObject(Object):
    """BACnet Group object (objectType 'group').

    presentValue collects the read results for the member
    specifications in listOfGroupMembers.
    """
    objectType = 'group'
    properties = [
        ReadableProperty('listOfGroupMembers', ListOf(ReadAccessSpecification)),
        ReadableProperty('presentValue', ListOf(ReadAccessResult)),
    ]
@register_object_type
class IntegerValueObject(Object):
    """BACnet Integer Value object (objectType 'integerValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'integerValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', Integer)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Integer)
        , OptionalProperty('covIncrement', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('highLimit', Integer)
        , OptionalProperty('lowLimit', Integer)
        , OptionalProperty('deadband', Unsigned)
        , OptionalProperty('limitEnable', LimitEnable)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('minPresValue', Integer)
        , OptionalProperty('maxPresValue', Integer)
        , OptionalProperty('resolution', Integer)
        , OptionalProperty('faultHighLimit', Integer)
        , OptionalProperty('faultLowLimit', Integer)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class LargeAnalogValueObject(Object):
    """BACnet Large Analog Value object (objectType 'largeAnalogValue').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; presentValue is a Double and the object
    supports change-of-value reporting.
    """
    objectType = 'largeAnalogValue'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', Double)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , OptionalProperty('priorityArray', PriorityArray)
        # Fixed: relinquishDefault was declared Integer, but every other
        # value-bearing property of this object (presentValue,
        # min/maxPresValue, resolution, fault limits) is Double, and the
        # BACnet standard defines Relinquish_Default of a Large Analog
        # Value as Double.
        , OptionalProperty('relinquishDefault', Double)
        # NOTE(review): covIncrement is declared Unsigned here; the
        # standard appears to call for Double for this object — confirm
        # before changing, as callers may rely on the current datatype.
        , OptionalProperty('covIncrement', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('highLimit', Double)
        , OptionalProperty('lowLimit', Double)
        , OptionalProperty('deadband', Double)
        , OptionalProperty('limitEnable', LimitEnable)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('minPresValue', Double)
        , OptionalProperty('maxPresValue', Double)
        , OptionalProperty('resolution', Double)
        , OptionalProperty('faultHighLimit', Double)
        , OptionalProperty('faultLowLimit', Double)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class LifeSafetyPointObject(Object):
    """BACnet Life Safety Point object (objectType 'lifeSafetyPoint').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    """
    objectType = 'lifeSafetyPoint'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', LifeSafetyState)
        , ReadableProperty('trackingValue', LifeSafetyState)
        , OptionalProperty('deviceType', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , ReadableProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , WritableProperty('mode', LifeSafetyMode)
        , ReadableProperty('acceptedModes', ListOf(LifeSafetyMode))
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('lifeSafetyAlarmValues', ListOf(LifeSafetyState))
        , OptionalProperty('alarmValues', ListOf(LifeSafetyState))
        , OptionalProperty('faultValues', ListOf(LifeSafetyState))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , ReadableProperty('silenced', SilencedState)
        , ReadableProperty('operationExpected', LifeSafetyOperation)
        , OptionalProperty('maintenanceRequired', Maintenance)
        , OptionalProperty('setting', Unsigned)
        , OptionalProperty('directReading', Real)
        , OptionalProperty('units', EngineeringUnits)
        , OptionalProperty('memberOf', ListOf(DeviceObjectReference))
        , OptionalProperty('floorNumber', Unsigned8)
        , OptionalProperty('valueSource', ValueSource)
        ]
@register_object_type
class LifeSafetyZoneObject(Object):
    """BACnet Life Safety Zone object (objectType 'lifeSafetyZone').

    Declarative table mapping each BACnet property identifier to its
    datatype and access class; supports change-of-value reporting.
    Unlike LifeSafetyPointObject, maintenanceRequired here is a Boolean
    and the zone lists its members via zoneMembers.
    """
    objectType = 'lifeSafetyZone'
    _object_supports_cov = True
    properties = \
        [ ReadableProperty('presentValue', LifeSafetyState)
        , ReadableProperty('trackingValue', LifeSafetyState)
        , OptionalProperty('deviceType', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , ReadableProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , WritableProperty('mode', LifeSafetyMode)
        , ReadableProperty('acceptedModes', ListOf(LifeSafetyMode))
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('lifeSafetyAlarmValues', ListOf(LifeSafetyState))
        , OptionalProperty('alarmValues', ListOf(LifeSafetyState))
        , OptionalProperty('faultValues', ListOf(LifeSafetyState))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , ReadableProperty('silenced', SilencedState)
        , ReadableProperty('operationExpected', LifeSafetyOperation)
        , OptionalProperty('maintenanceRequired', Boolean)
        , ReadableProperty('zoneMembers', ListOf(DeviceObjectReference))
        , OptionalProperty('memberOf', ListOf(DeviceObjectReference))
        , OptionalProperty('floorNumber', Unsigned8)
        , OptionalProperty('valueSource', ValueSource)
        ]
@register_object_type
class LiftObject(Object):
    """Property table for the BACnet 'lift' object type (elevator car)."""
    objectType = 'lift'
    properties = \
        [ ReadableProperty('trackingValue', Real)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('elevatorGroup', ObjectIdentifier)
        , ReadableProperty('groupID', Unsigned8)
        , ReadableProperty('installationID', Unsigned8)
        , OptionalProperty('floorText', ArrayOf(CharacterString))
        , OptionalProperty('carDoorText', ArrayOf(CharacterString))
        , OptionalProperty('assignedLandingCalls', ArrayOf(AssignedLandingCalls))
        , OptionalProperty('makingCarCall', ArrayOf(Unsigned8))
        , OptionalProperty('registeredCarCall', ArrayOf(LiftCarCallList))
        , OptionalProperty('carPosition', Unsigned8)
        , OptionalProperty('carMovingDirection', LiftCarDirection)
        , OptionalProperty('carAssignedDirection', LiftCarDirection)
        , OptionalProperty('carDoorStatus', ArrayOf(DoorStatus))
        , OptionalProperty('carDoorCommand', ArrayOf(LiftCarDoorCommand))
        , OptionalProperty('carDoorZone', Boolean)
        , OptionalProperty('carMode', LiftCarMode)
        , OptionalProperty('carLoad', Real)
        , OptionalProperty('carLoadUnits', EngineeringUnits)
        , OptionalProperty('nextStoppingFloor', Unsigned)
        , ReadableProperty('passengerAlarm', Boolean)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('energyMeter', Real)
        , OptionalProperty('energyMeterRef', DeviceObjectReference)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('carDriveStatus', LiftCarDriveStatus)
        , OptionalProperty('faultSignals', ListOf(LiftFault))
        , OptionalProperty('landingDoorStatus', ArrayOf(LandingDoorStatus))
        , OptionalProperty('higherDeck', ObjectIdentifier)
        , OptionalProperty('lowerDeck', ObjectIdentifier)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class LightingOutputObject(Object):
    """Property table for the BACnet 'lightingOutput' object type
    (commandable lighting level with fade/ramp/step behavior)."""
    objectType = 'lightingOutput'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ WritableProperty('presentValue', Real)
        , ReadableProperty('trackingValue', Real)
        , WritableProperty('lightingCommand', LightingCommand)
        , ReadableProperty('inProgress', LightingInProgress)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('blinkWarnEnable', Boolean)
        , ReadableProperty('egressTime', Unsigned)
        , ReadableProperty('egressActive', Boolean)
        , ReadableProperty('defaultFadeTime', Unsigned)
        , ReadableProperty('defaultRampRate', Real)
        , ReadableProperty('defaultStepIncrement', Real)
        , OptionalProperty('transition', LightingTransition)
        , OptionalProperty('feedbackValue', Real)
        , ReadableProperty('priorityArray', PriorityArray)
        , ReadableProperty('relinquishDefault', Real)
        , OptionalProperty('power', Real)
        , OptionalProperty('instantaneousPower', Real)
        , OptionalProperty('minActualValue', Real)
        , OptionalProperty('maxActualValue', Real)
        , ReadableProperty('lightingCommandDefaultPriority', Unsigned)
        , OptionalProperty('covIncrement', Real)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class LoadControlObject(Object):
    """Property table for the BACnet 'loadControl' object type
    (demand-limiting / load-shedding control)."""
    objectType = 'loadControl'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', ShedState)
        , OptionalProperty('stateDescription', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , WritableProperty('requestedShedLevel', ShedLevel)
        , WritableProperty('startTime', DateTime)
        , WritableProperty('shedDuration', Unsigned)
        , WritableProperty('dutyWindow', Unsigned)
        , WritableProperty('enable', Boolean)
        , OptionalProperty('fullDutyBaseline', Real)
        , ReadableProperty('expectedShedLevel', ShedLevel)
        , ReadableProperty('actualShedLevel', ShedLevel)
        , WritableProperty('shedLevels', ArrayOf(Unsigned))
        , ReadableProperty('shedLevelDescriptions', ArrayOf(CharacterString))
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('valueSource', ValueSource)
        ]
@register_object_type
class LoopObject(Object):
    """Property table for the BACnet 'loop' object type (PID control loop:
    setpoint, controlled/manipulated variable references, P/I/D constants)."""
    objectType = 'loop'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Real)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('updateInterval', Unsigned)
        , ReadableProperty('outputUnits', EngineeringUnits)
        , ReadableProperty('manipulatedVariableReference', ObjectPropertyReference)
        , ReadableProperty('controlledVariableReference', ObjectPropertyReference)
        , ReadableProperty('controlledVariableValue', Real)
        , ReadableProperty('controlledVariableUnits', EngineeringUnits)
        , ReadableProperty('setpointReference', SetpointReference)
        , ReadableProperty('setpoint', Real)
        , ReadableProperty('action', Action)
        , OptionalProperty('proportionalConstant', Real)
        , OptionalProperty('proportionalConstantUnits', EngineeringUnits)
        , OptionalProperty('integralConstant', Real)
        , OptionalProperty('integralConstantUnits', EngineeringUnits)
        , OptionalProperty('derivativeConstant', Real)
        , OptionalProperty('derivativeConstantUnits', EngineeringUnits)
        , OptionalProperty('bias', Real)
        , OptionalProperty('maximumOutput', Real)
        , OptionalProperty('minimumOutput', Real)
        , ReadableProperty('priorityForWriting', Unsigned)
        , OptionalProperty('covIncrement', Real)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('errorLimit', Real)
        , OptionalProperty('deadband', Real)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('lowDiffLimit', OptionalReal)
        ]
@register_object_type
class MultiStateInputObject(Object):
    """Property table for the BACnet 'multiStateInput' object type
    (read-only enumerated input; states numbered 1..numberOfStates)."""
    objectType = 'multiStateInput'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Unsigned)
        , OptionalProperty('deviceType', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('numberOfStates', Unsigned)
        , OptionalProperty('stateText', ArrayOf(CharacterString))
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('alarmValues', ListOf(Unsigned))
        , OptionalProperty('faultValues', ListOf(Unsigned))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('interfaceValue', OptionalUnsigned)
        ]
@register_object_type
class MultiStateOutputObject(Object):
    """Property table for the BACnet 'multiStateOutput' object type
    (commandable enumerated output with a priority array)."""
    objectType = 'multiStateOutput'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ WritableProperty('presentValue', Unsigned)
        , OptionalProperty('deviceType', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('numberOfStates', Unsigned)
        , OptionalProperty('stateText', ArrayOf(CharacterString))
        , ReadableProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('feedbackValue', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('interfaceValue', OptionalUnsigned)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class MultiStateValueObject(Object):
    """Property table for the BACnet 'multiStateValue' object type
    (enumerated value object; priority array is optional here)."""
    objectType = 'multiStateValue'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Unsigned)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('numberOfStates', Unsigned)
        , OptionalProperty('stateText', ArrayOf(CharacterString))
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('alarmValues', ListOf(Unsigned))
        , OptionalProperty('faultValues', ListOf(Unsigned))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class NetworkPortObject(Object):
    """Property table for the BACnet 'networkPort' object type (object type 56).

    Trailing '#n' comments give the standard property identifier numbers.
    Covers BACnet/IP, BACnet/IPv6, and MS/TP-specific configuration properties.
    """
    objectType = 'networkPort' #56
    properties = \
        [ ReadableProperty('statusFlags', StatusFlags) #111
        , ReadableProperty('reliability', Reliability) #103
        , ReadableProperty('outOfService', Boolean) #81
        , ReadableProperty('networkType', NetworkType) #427
        , ReadableProperty('protocolLevel', ProtocolLevel) #482
        , OptionalProperty('referencePort', Unsigned) #483
        , ReadableProperty('networkNumber', Unsigned16) #425
        , ReadableProperty('networkNumberQuality', NetworkNumberQuality) #426 (was mislabeled 427, which is networkType)
        , ReadableProperty('changesPending', Boolean) #416
        , OptionalProperty('command', NetworkPortCommand) #417
        , OptionalProperty('macAddress', OctetString) #423
        , ReadableProperty('apduLength', Unsigned) #388
        , ReadableProperty('linkSpeed', Real) #420
        , OptionalProperty('linkSpeeds', ArrayOf(Real)) #421
        , OptionalProperty('linkSpeedAutonegotiate', Boolean) #422
        , OptionalProperty('networkInterfaceName', CharacterString) #424
        , OptionalProperty('bacnetIPMode', IPMode) #408
        , OptionalProperty('ipAddress', OctetString) #400
        , OptionalProperty('bacnetIPUDPPort', Unsigned16) #412
        , OptionalProperty('ipSubnetMask', OctetString) #411
        , OptionalProperty('ipDefaultGateway', OctetString) #401
        , OptionalProperty('bacnetIPMulticastAddress', OctetString) #409
        , OptionalProperty('ipDNSServer', ArrayOf(OctetString)) #406
        , OptionalProperty('ipDHCPEnable', Boolean) #402
        , OptionalProperty('ipDHCPLeaseTime', Unsigned) #403
        , OptionalProperty('ipDHCPLeaseTimeRemaining', Unsigned) #404
        , OptionalProperty('ipDHCPServer', OctetString) #405
        , OptionalProperty('bacnetIPNATTraversal', Boolean) #410
        , OptionalProperty('bacnetIPGlobalAddress', HostNPort) #407
        , OptionalProperty('bbmdBroadcastDistributionTable', ListOf(BDTEntry)) #414
        , OptionalProperty('bbmdAcceptFDRegistrations', Boolean) #413
        , OptionalProperty('bbmdForeignDeviceTable', ListOf(FDTEntry)) #415
        , OptionalProperty('fdBBMDAddress', HostNPort) #418
        , OptionalProperty('fdSubscriptionLifetime', Unsigned16) #419
        , OptionalProperty('bacnetIPv6Mode', IPMode) #435
        , OptionalProperty('ipv6Address', OctetString) #436
        , OptionalProperty('ipv6PrefixLength', Unsigned8) #437
        , OptionalProperty('bacnetIPv6UDPPort', Unsigned16) #438
        , OptionalProperty('ipv6DefaultGateway', OctetString) #439
        , OptionalProperty('bacnetIPv6MulticastAddress', OctetString) #440
        , OptionalProperty('ipv6DNSServer', OctetString) #441
        , OptionalProperty('ipv6AutoAddressingEnabled', Boolean) #442
        , OptionalProperty('ipv6DHCPLeaseTime', Unsigned) #443
        , OptionalProperty('ipv6DHCPLeaseTimeRemaining', Unsigned) #444
        , OptionalProperty('ipv6DHCPServer', OctetString) #445
        , OptionalProperty('ipv6ZoneIndex', CharacterString) #446
        , OptionalProperty('maxMaster', Unsigned8) #64
        , OptionalProperty('maxInfoFrames', Unsigned8) #63
        , OptionalProperty('slaveProxyEnable', Boolean) #172
        , OptionalProperty('manualSlaveAddressBinding', ListOf(AddressBinding)) #170
        , OptionalProperty('autoSlaveDiscovery', Boolean) #169
        , OptionalProperty('slaveAddressBinding', ListOf(AddressBinding)) #171
        , OptionalProperty('virtualMACAddressTable', ListOf(VMACEntry)) #429
        , OptionalProperty('routingTable', ListOf(RouterEntry)) #428
        , OptionalProperty('eventDetectionEnable', Boolean) #353
        , OptionalProperty('notificationClass', Unsigned) #17
        , OptionalProperty('eventEnable', EventTransitionBits) #35
        , OptionalProperty('ackedTransitions', EventTransitionBits) #0
        , OptionalProperty('notifyType', NotifyType) #72
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventState', EventState) #36
        , ReadableProperty('reliabilityEvaluationInhibit', Boolean) #357
        ]
@register_object_type
class NetworkSecurityObject(Object):
    """Property table for the BACnet 'networkSecurity' object type
    (key sets, security policies, and timing windows)."""
    objectType = 'networkSecurity'
    properties = \
        [ WritableProperty('baseDeviceSecurityPolicy', SecurityLevel)
        , WritableProperty('networkAccessSecurityPolicies', ArrayOf(NetworkSecurityPolicy))
        , WritableProperty('securityTimeWindow', Unsigned)
        , WritableProperty('packetReorderTime', Unsigned)
        , ReadableProperty('distributionKeyRevision', Unsigned)
        , ReadableProperty('keySets', ArrayOf(SecurityKeySet))
        , WritableProperty('lastKeyServer', AddressBinding)
        , WritableProperty('securityPDUTimeout', Unsigned)
        , ReadableProperty('updateKeySetTimeout', Unsigned)
        , ReadableProperty('supportedSecurityAlgorithms', ListOf(Unsigned))
        , WritableProperty('doNotHide', Boolean)
        ]
@register_object_type
class NotificationClassObject(Object):
    """Property table for the BACnet 'notificationClass' object type
    (event notification priorities, ack requirements, and recipients)."""
    objectType = 'notificationClass'
    properties = \
        [ ReadableProperty('notificationClass', Unsigned)
        , ReadableProperty('priority', ArrayOf(Unsigned))
        , ReadableProperty('ackRequired', EventTransitionBits)
        , ReadableProperty('recipientList', ListOf(Destination))
        , OptionalProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , ReadableProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class NotificationForwarderObject(Object):
    """Property table for the BACnet 'notificationForwarder' object type
    (re-distributes event notifications to subscribed recipients)."""
    objectType = 'notificationForwarder'
    properties = \
        [ ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('recipientList', ListOf(Destination))
        , WritableProperty('subscribedRecipients', ListOf(EventNotificationSubscription))
        , ReadableProperty('processIdentifierFilter', ProcessIdSelection)
        , OptionalProperty('portFilter', ArrayOf(PortPermission))
        , ReadableProperty('localForwardingOnly', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class OctetStringValueObject(Object):
    """Property table for the BACnet 'octetstringValue' object type.

    NOTE: the lowercase 's' in 'octetstringValue' is the registered key used
    by this library — do not "correct" the spelling.
    """
    objectType = 'octetstringValue'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', OctetString)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', OctetString)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class PositiveIntegerValueObject(Object):
    """Property table for the BACnet 'positiveIntegerValue' object type
    (Unsigned value with optional limit/fault alarming)."""
    objectType = 'positiveIntegerValue'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Unsigned)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Unsigned)
        , OptionalProperty('covIncrement', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('highLimit', Unsigned)
        , OptionalProperty('lowLimit', Unsigned)
        , OptionalProperty('deadband', Unsigned)
        , OptionalProperty('limitEnable', LimitEnable)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('minPresValue', Unsigned)
        , OptionalProperty('maxPresValue', Unsigned)
        , OptionalProperty('resolution', Unsigned)
        , OptionalProperty('faultHighLimit', Unsigned)
        , OptionalProperty('faultLowLimit', Unsigned)
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class ProgramObject(Object):
    """Property table for the BACnet 'program' object type
    (program state/change control and halt diagnostics)."""
    objectType = 'program'
    properties = \
        [ ReadableProperty('programState', ProgramState)
        , WritableProperty('programChange', ProgramRequest)
        , OptionalProperty('reasonForHalt', ProgramError)
        , OptionalProperty('descriptionOfHalt', CharacterString)
        , OptionalProperty('programLocation', CharacterString)
        , OptionalProperty('instanceOf', CharacterString)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class PulseConverterObject(Object):
    """Property table for the BACnet 'pulseConverter' object type
    (pulse count scaled to an engineering value)."""
    objectType = 'pulseConverter'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Real)
        , OptionalProperty('inputReference', ObjectPropertyReference)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , ReadableProperty('scaleFactor', Real)
        , WritableProperty('adjustValue', Real)
        , ReadableProperty('count', Unsigned)
        , ReadableProperty('updateTime', DateTime)
        , ReadableProperty('countChangeTime', DateTime)
        , ReadableProperty('countBeforeChange', Unsigned)
        , OptionalProperty('covIncrement', Real)
        , OptionalProperty('covPeriod', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('highLimit', Real)
        , OptionalProperty('lowLimit', Real)
        , OptionalProperty('deadband', Real)
        , OptionalProperty('limitEnable', LimitEnable)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class ScheduleObject(Object):
    """Property table for the BACnet 'schedule' object type (weekly and
    exception schedules writing to referenced object properties)."""
    objectType = 'schedule'
    properties = \
        [ ReadableProperty('presentValue', AnyAtomic)
        , ReadableProperty('effectivePeriod', DateRange)
        , OptionalProperty('weeklySchedule', ArrayOf(DailySchedule, 7))
        , OptionalProperty('exceptionSchedule', ArrayOf(SpecialEvent))
        , ReadableProperty('scheduleDefault', AnyAtomic)
        , ReadableProperty('listOfObjectPropertyReferences', ListOf(DeviceObjectPropertyReference))
        , ReadableProperty('priorityForWriting', Unsigned)
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class StagingObject(Object):
    """Property table for the BACnet 'staging' object type
    (maps a value into discrete stages written to target references)."""
    objectType = 'staging'
    properties = \
        [ WritableProperty('presentValue', Real)
        , ReadableProperty('presentStage', Unsigned)
        , ReadableProperty('stages', ArrayOf(StageLimitValue))
        , OptionalProperty('stageNames', ArrayOf(CharacterString))
        , ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , ReadableProperty('outOfService', Boolean)
        , ReadableProperty('units', EngineeringUnits)
        , ReadableProperty('targetReferences', ArrayOf(DeviceObjectReference))
        , ReadableProperty('priorityForWriting', Unsigned) # 1..16
        , OptionalProperty('defaultPresentValue', Real)
        , ReadableProperty('minPresValue', Real)
        , ReadableProperty('maxPresValue', Real)
        , OptionalProperty('covIncrement', Real)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('valueSource', ValueSource)
        ]
@register_object_type
class StructuredViewObject(Object):
    """Property table for the BACnet 'structuredView' object type
    (hierarchical grouping of subordinate object references)."""
    objectType = 'structuredView'
    properties = \
        [ ReadableProperty('nodeType', NodeType)
        , OptionalProperty('nodeSubtype', CharacterString)
        , ReadableProperty('subordinateList', ArrayOf(DeviceObjectReference))
        , OptionalProperty('subordinateAnnotations', ArrayOf(CharacterString))
        , OptionalProperty('subordinateTags', ArrayOf(NameValueCollection))
        , OptionalProperty('subordinateNodeTypes', ArrayOf(NodeType))
        , OptionalProperty('subordinateRelationships', ArrayOf(Relationship))
        , OptionalProperty('defaultSubordinateRelationship', Relationship)
        , OptionalProperty('represents', DeviceObjectReference)
        ]
@register_object_type
class TimePatternValueObject(Object):
    """Property table for the BACnet 'timePatternValue' object type
    (Time value that may contain wildcard fields)."""
    objectType = 'timePatternValue'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Time)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Time)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class TimeValueObject(Object):
    """Property table for the BACnet 'timeValue' object type
    (a specific Time value; no wildcard fields, unlike timePatternValue)."""
    objectType = 'timeValue'
    _object_supports_cov = True  # marks this object type as supporting COV subscriptions
    properties = \
        [ ReadableProperty('presentValue', Time)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , OptionalProperty('priorityArray', PriorityArray)
        , OptionalProperty('relinquishDefault', Time)
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('currentCommandPriority', OptionalUnsigned)
        , OptionalProperty('valueSource', ValueSource)
        , OptionalProperty('valueSourceArray', ArrayOf(ValueSource, 16))
        , OptionalProperty('lastCommandTime', TimeStamp)
        , OptionalProperty('commandTimeArray', ArrayOf(TimeStamp, 16))
        , OptionalProperty('auditablePriorityFilter', OptionalPriorityFilter)
        ]
@register_object_type
class TimerObject(Object):
    """Timer object type definition: an Unsigned present value plus timer
    state/transition properties, the list of state-change values and object
    references the timer drives, and intrinsic event-reporting properties."""

    objectType = 'timer'

    properties = \
        [ ReadableProperty('presentValue', Unsigned)
        , ReadableProperty('statusFlags', StatusFlags)
        , OptionalProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , OptionalProperty('outOfService', Boolean)
        , ReadableProperty('timerState', TimerState)
        , ReadableProperty('timerRunning', Boolean)
        , OptionalProperty('updateTime', DateTime)
        , OptionalProperty('lastStateChange', TimerTransition)
        , OptionalProperty('expirationTime', DateTime)
        , OptionalProperty('initialTimeout', Unsigned)
        , OptionalProperty('defaultTimeout', Unsigned)
        , OptionalProperty('minPresValue', Unsigned)
        , OptionalProperty('maxPresValue', Unsigned)
        , OptionalProperty('resolution', Unsigned)
        , OptionalProperty('stateChangeValues', ArrayOf(TimerStateChangeValue, 7))
        , OptionalProperty('listOfObjectPropertyReferences', ListOf(DeviceObjectPropertyReference))
        , OptionalProperty('priorityForWriting', Unsigned)   # 1..16
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('timeDelay', Unsigned)
        , OptionalProperty('timeDelayNormal', Unsigned)
        , OptionalProperty('alarmValues', ListOf(TimerState))
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class TrendLogObject(Object):
    """Trend Log object type definition: buffer/logging control properties
    plus intrinsic event-reporting properties.

    Fix: the original property list declared 'statusFlags', 'reliability'
    and 'eventState' twice each, with identical definitions; the redundant
    second declarations have been removed so every property identifier
    appears exactly once.
    """

    objectType = 'trendLog'

    properties = \
        [ ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , WritableProperty('enable', Boolean)
        , OptionalProperty('startTime', DateTime)
        , OptionalProperty('stopTime', DateTime)
        , OptionalProperty('logDeviceObjectProperty', DeviceObjectPropertyReference)
        , OptionalProperty('logInterval', Unsigned)
        , OptionalProperty('covResubscriptionInterval', Unsigned)
        , OptionalProperty('clientCovIncrement', ClientCOV)
        , ReadableProperty('stopWhenFull', Boolean)
        , ReadableProperty('bufferSize', Unsigned)
        , ReadableProperty('logBuffer', ListOf(LogRecord))
        , WritableProperty('recordCount', Unsigned)
        , ReadableProperty('totalRecordCount', Unsigned)
        , ReadableProperty('loggingType', LoggingType)
        , OptionalProperty('alignIntervals', Boolean)
        , OptionalProperty('intervalOffset', Unsigned)
        , OptionalProperty('trigger', Boolean)
        , OptionalProperty('notificationThreshold', Unsigned)
        , OptionalProperty('recordsSinceNotification', Unsigned)
        , OptionalProperty('lastNotifyRecord', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
@register_object_type
class TrendLogMultipleObject(Object):
    """Trend Log Multiple object type definition: like Trend Log but logs an
    array of device-object-property references into LogMultipleRecord
    entries, plus intrinsic event-reporting properties."""

    objectType = 'trendLogMultiple'

    properties = \
        [ ReadableProperty('statusFlags', StatusFlags)
        , ReadableProperty('eventState', EventState)
        , OptionalProperty('reliability', Reliability)
        , WritableProperty('enable', Boolean)
        , OptionalProperty('startTime', DateTime)
        , OptionalProperty('stopTime', DateTime)
        , ReadableProperty('logDeviceObjectProperty', ArrayOf(DeviceObjectPropertyReference))
        , ReadableProperty('loggingType', LoggingType)
        , ReadableProperty('logInterval', Unsigned)
        , OptionalProperty('alignIntervals', Boolean)
        , OptionalProperty('intervalOffset', Unsigned)
        , OptionalProperty('trigger', Boolean)
        , ReadableProperty('stopWhenFull', Boolean)
        , ReadableProperty('bufferSize', Unsigned)
        , ReadableProperty('logBuffer', ListOf(LogMultipleRecord))
        , WritableProperty('recordCount', Unsigned)
        , ReadableProperty('totalRecordCount', Unsigned)
        , OptionalProperty('notificationThreshold', Unsigned)
        , OptionalProperty('recordsSinceNotification', Unsigned)
        , OptionalProperty('lastNotifyRecord', Unsigned)
        , OptionalProperty('notificationClass', Unsigned)
        , OptionalProperty('eventEnable', EventTransitionBits)
        , OptionalProperty('ackedTransitions', EventTransitionBits)
        , OptionalProperty('notifyType', NotifyType)
        , OptionalProperty('eventTimeStamps', ArrayOf(TimeStamp, 3))
        , OptionalProperty('eventMessageTexts', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventMessageTextsConfig', ArrayOf(CharacterString, 3))
        , OptionalProperty('eventDetectionEnable', Boolean)
        , OptionalProperty('eventAlgorithmInhibitRef', ObjectPropertyReference)
        , OptionalProperty('eventAlgorithmInhibit', Boolean)
        , OptionalProperty('reliabilityEvaluationInhibit', Boolean)
        ]
| JoelBender/bacpypes | py34/bacpypes/object.py | Python | mit | 139,323 |
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
import miscClin
import tsvIO
import sys
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# remapDict maps a (lower-cased) clinical feature name to a dictionary that
# translates the raw string values of that feature into canonical codes:
# numeric where an ordering exists, cleaned-up strings otherwise, "NA" for
# unknown/unusable values.
remapDict = {}

# days in a year (Gregorian mean); used to convert day offsets into years
DAYS_PER_YEAR = 365.2425

# ------------------------------------------------------------------------- ##
# DEPENDING ON WHICH TUMOR TYPE IS BEING PROCESSED, THESE SWITCHES MAY
# NEED TO BE CHANGED ...
remapDict["anatomic_organ_subdivision"] = {}
if (1):
    # colorectal: subdivisions ordered from rectum (0) to cecum (8)
    remapDict["anatomic_organ_subdivision"]["na"] = "NA"
    remapDict["anatomic_organ_subdivision"]["rectum"] = 0
    remapDict["anatomic_organ_subdivision"]["rectosigmoid_junction"] = 1
    remapDict["anatomic_organ_subdivision"]["sigmoid_colon"] = 2
    remapDict["anatomic_organ_subdivision"]["descending_colon"] = 3
    remapDict["anatomic_organ_subdivision"]["splenic_flexure"] = 4
    remapDict["anatomic_organ_subdivision"]["transverse_colon"] = 5
    remapDict["anatomic_organ_subdivision"]["hepatic_flexure"] = 6
    remapDict["anatomic_organ_subdivision"]["ascending_colon"] = 7
    remapDict["anatomic_organ_subdivision"]["cecum"] = 8
if (0):
    # laterality-style subdivisions (disabled switch)
    remapDict["anatomic_organ_subdivision"]["na"] = "NA"
    remapDict["anatomic_organ_subdivision"]["bilateral"] = "bilateral"
    remapDict["anatomic_organ_subdivision"]["left"] = "left"
    remapDict["anatomic_organ_subdivision"]["right"] = "right"
if (0):
    # brain tumors (disabled switch)
    remapDict["anatomic_organ_subdivision"][""] = "NA"
    remapDict["anatomic_organ_subdivision"]["na"] = "NA"
    remapDict["anatomic_organ_subdivision"]["brain"] = "brain"

remapDict["histological_type"] = {}
if (0):
    # colorectal histologies: 0 = adenocarcinoma, 1 = mucinous (disabled switch)
    remapDict["histological_type"]["na"] = "NA"
    remapDict["histological_type"]["colon_adenocarcinoma"] = 0
    remapDict["histological_type"]["rectal_adenocarcinoma"] = 0
    remapDict["histological_type"]["colon_mucinous_adenocarcinoma"] = 1
    remapDict["histological_type"]["rectal_mucinous_adenocarcinoma"] = 1
if (0):
    # GBM histologies (disabled switch)
    remapDict["histological_type"]["na"] = "NA"
    remapDict["histological_type"][
        "untreated_primary_(de_novo)_gbm"] = "de_novo"
    remapDict["histological_type"]["treated_primary_gbm"] = "primary"

remapDict["ethnicity"] = {}
remapDict["ethnicity"]["hispanic_or_latino"] = "hispanic"
remapDict["ethnicity"]["not_hispanic_or_latino"] = "not_hispanic"
# ------------------------------------------------------------------------- ##
# tumor grade: gx/gb are treated as unknown; g1..g4 map to 1..4
remapDict["tumor_grade"] = {}
remapDict["tumor_grade"]["na"] = "NA"
remapDict["tumor_grade"]["gx"] = "NA"
remapDict["tumor_grade"]["gb"] = "NA"
remapDict["tumor_grade"]["g1"] = 1
remapDict["tumor_grade"]["g2"] = 2
remapDict["tumor_grade"]["g3"] = 3
remapDict["tumor_grade"]["g4"] = 4
remapDict["tumor_grade"]["high grade"] = 3  # ???
remapDict["tumor_grade"]["high_grade"] = 3  # ???

if (0):
    # tumor stage: sub-stages a/b/c add 0.2/0.4/0.6 (disabled switch)
    remapDict["tumor_stage"] = {}
    remapDict["tumor_stage"]["na"] = "NA"
    remapDict["tumor_stage"]["i"] = 1
    remapDict["tumor_stage"]["ia"] = 1.2
    remapDict["tumor_stage"]["ib"] = 1.4
    remapDict["tumor_stage"]["ic"] = 1.6
    remapDict["tumor_stage"]["ii"] = 2
    remapDict["tumor_stage"]["iia"] = 2.2
    remapDict["tumor_stage"]["iib"] = 2.4
    remapDict["tumor_stage"]["iic"] = 2.6
    remapDict["tumor_stage"]["iii"] = 3
    remapDict["tumor_stage"]["iiia"] = 3.2
    remapDict["tumor_stage"]["iiib"] = 3.4
    remapDict["tumor_stage"]["iiic"] = 3.6
    remapDict["tumor_stage"]["iv"] = 4
    remapDict["tumor_stage"]["iva"] = 4.2
    remapDict["tumor_stage"]["ivb"] = 4.4
    remapDict["tumor_stage"]["ivc"] = 4.6

# breast grouping stage: roman numerals onto a numeric scale, tis = 0.5
remapDict["breast_tumor_pathologic_grouping_stage"] = {}
remapDict["breast_tumor_pathologic_grouping_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_grouping_stage"]["x"] = "NA"
remapDict["breast_tumor_pathologic_grouping_stage"]["tis"] = 0.5
remapDict["breast_tumor_pathologic_grouping_stage"]["i"] = 1
remapDict["breast_tumor_pathologic_grouping_stage"]["ia"] = 1.2
remapDict["breast_tumor_pathologic_grouping_stage"]["ib"] = 1.4
remapDict["breast_tumor_pathologic_grouping_stage"]["ii"] = 2
remapDict["breast_tumor_pathologic_grouping_stage"]["iia"] = 2.2
remapDict["breast_tumor_pathologic_grouping_stage"]["iib"] = 2.4
remapDict["breast_tumor_pathologic_grouping_stage"]["iic"] = 2.6
remapDict["breast_tumor_pathologic_grouping_stage"]["iii"] = 3
remapDict["breast_tumor_pathologic_grouping_stage"]["iiia"] = 3.2
remapDict["breast_tumor_pathologic_grouping_stage"]["iiib"] = 3.4
remapDict["breast_tumor_pathologic_grouping_stage"]["iiic"] = 3.6
remapDict["breast_tumor_pathologic_grouping_stage"]["iv"] = 4

# primary tumor (T) pathologic spread: tN codes, sub-stage a/b/c = +0.2/+0.4/+0.6
remapDict["primary_tumor_pathologic_spread"] = {}
remapDict["primary_tumor_pathologic_spread"]["na"] = "NA"
remapDict["primary_tumor_pathologic_spread"]["tx"] = "NA"
remapDict["primary_tumor_pathologic_spread"]["t0"] = 0
remapDict["primary_tumor_pathologic_spread"]["tis"] = 0.5
remapDict["primary_tumor_pathologic_spread"]["t1"] = 1
remapDict["primary_tumor_pathologic_spread"]["t1a"] = 1.2
remapDict["primary_tumor_pathologic_spread"]["t1b"] = 1.4
remapDict["primary_tumor_pathologic_spread"]["t2"] = 2
remapDict["primary_tumor_pathologic_spread"]["t2a"] = 2.2
remapDict["primary_tumor_pathologic_spread"]["t2b"] = 2.4
remapDict["primary_tumor_pathologic_spread"]["t3"] = 3
remapDict["primary_tumor_pathologic_spread"]["t3a"] = 3.2
remapDict["primary_tumor_pathologic_spread"]["t3b"] = 3.4
remapDict["primary_tumor_pathologic_spread"]["t3c"] = 3.6
remapDict["primary_tumor_pathologic_spread"]["t4"] = 4
remapDict["primary_tumor_pathologic_spread"]["t4a"] = 4.2
remapDict["primary_tumor_pathologic_spread"]["t4b"] = 4.4
# breast pathologic T stage: tN codes onto a numeric scale where the letter
# sub-stage adds 0.2 per step (a = +0.2, b = +0.4, c = +0.6, d = +0.8)
remapDict["breast_tumor_pathologic_t_stage"] = {}
remapDict["breast_tumor_pathologic_t_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_t_stage"]["tx"] = "NA"
remapDict["breast_tumor_pathologic_t_stage"]["t1"] = 1
remapDict["breast_tumor_pathologic_t_stage"]["t1a"] = 1.2
remapDict["breast_tumor_pathologic_t_stage"]["t1b"] = 1.4
remapDict["breast_tumor_pathologic_t_stage"]["t1c"] = 1.6
remapDict["breast_tumor_pathologic_t_stage"]["t2"] = 2
remapDict["breast_tumor_pathologic_t_stage"]["t2a"] = 2.2
remapDict["breast_tumor_pathologic_t_stage"]["t2b"] = 2.4
remapDict["breast_tumor_pathologic_t_stage"]["t2c"] = 2.6
remapDict["breast_tumor_pathologic_t_stage"]["t3"] = 3
# bug fix: "t3a" was mapped to 3.4 (colliding with "t3b"); per the sub-stage
# pattern used by every other entry in this table it must be 3.2
remapDict["breast_tumor_pathologic_t_stage"]["t3a"] = 3.2
remapDict["breast_tumor_pathologic_t_stage"]["t3b"] = 3.4
remapDict["breast_tumor_pathologic_t_stage"]["t3c"] = 3.6
remapDict["breast_tumor_pathologic_t_stage"]["t4"] = 4
remapDict["breast_tumor_pathologic_t_stage"]["t4a"] = 4.2
remapDict["breast_tumor_pathologic_t_stage"]["t4b"] = 4.4
remapDict["breast_tumor_pathologic_t_stage"]["t4c"] = 4.6
remapDict["breast_tumor_pathologic_t_stage"]["t4d"] = 4.8
# estrogen receptor status: keep positive/negative/indeterminate; tests that
# were not performed or not available become NA
remapDict["breast_carcinoma_estrogen_receptor_status"] = {}
remapDict["breast_carcinoma_estrogen_receptor_status"]["na"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"]["not_performed"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"][
    "performed_but_not_available"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"][
    "indeterminate"] = "indeterminate"
remapDict["breast_carcinoma_estrogen_receptor_status"]["positive"] = "positive"
remapDict["breast_carcinoma_estrogen_receptor_status"]["negative"] = "negative"

# lymph node (N) pathologic spread: nN codes, sub-stage a/b/c = +0.2/+0.4/+0.6
remapDict["lymphnode_pathologic_spread"] = {}
remapDict["lymphnode_pathologic_spread"]["na"] = "NA"
remapDict["lymphnode_pathologic_spread"]["nx"] = "NA"
remapDict["lymphnode_pathologic_spread"]["n0"] = 0
remapDict["lymphnode_pathologic_spread"]["n1"] = 1
remapDict["lymphnode_pathologic_spread"]["n1a"] = 1.2
remapDict["lymphnode_pathologic_spread"]["n1b"] = 1.4
remapDict["lymphnode_pathologic_spread"]["n1c"] = 1.6
remapDict["lymphnode_pathologic_spread"]["n2"] = 2
remapDict["lymphnode_pathologic_spread"]["n2a"] = 2.2
remapDict["lymphnode_pathologic_spread"]["n2b"] = 2.4
remapDict["lymphnode_pathologic_spread"]["n2c"] = 2.6
remapDict["lymphnode_pathologic_spread"]["n3"] = 3
remapDict["lymphnode_pathologic_spread"]["n3a"] = 3.2
# breast pathologic N stage: both "pn"-prefixed and bare "n" codes are mapped
# onto the same numeric scale (i-/i+/mol+ micro-findings get fractional codes).
#
# bug fix: the original code re-assigned
# remapDict["breast_tumor_pathologic_n_stage"] = {} a second time after the
# "pn*" entries were filled in, silently discarding all of them; the two
# groups are now merged into a single dictionary.
remapDict["breast_tumor_pathologic_n_stage"] = {}
remapDict["breast_tumor_pathologic_n_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["pnx"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["pn0"] = 0
remapDict["breast_tumor_pathologic_n_stage"]["pn0(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["pn0(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["pn1"] = 1
remapDict["breast_tumor_pathologic_n_stage"]["pn1mi"] = 1.1
remapDict["breast_tumor_pathologic_n_stage"]["pn1a"] = 1.2
remapDict["breast_tumor_pathologic_n_stage"]["pn1b"] = 1.4
remapDict["breast_tumor_pathologic_n_stage"]["pn1c"] = 1.6
remapDict["breast_tumor_pathologic_n_stage"]["pn2"] = 2
remapDict["breast_tumor_pathologic_n_stage"]["pn2a"] = 2.2
remapDict["breast_tumor_pathologic_n_stage"]["pn2b"] = 2.4
remapDict["breast_tumor_pathologic_n_stage"]["pn3"] = 3
remapDict["breast_tumor_pathologic_n_stage"]["pn3a"] = 3.2
remapDict["breast_tumor_pathologic_n_stage"]["pn3b"] = 3.4
remapDict["breast_tumor_pathologic_n_stage"]["pn3c"] = 3.6
remapDict["breast_tumor_pathologic_n_stage"]["nx"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["n0"] = 0
remapDict["breast_tumor_pathologic_n_stage"]["n0(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["n0_(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["n0(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["n0_(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["n0_(mol+)"] = 0.3
remapDict["breast_tumor_pathologic_n_stage"]["n1"] = 1
remapDict["breast_tumor_pathologic_n_stage"]["n1mi"] = 1.1
remapDict["breast_tumor_pathologic_n_stage"]["n1a"] = 1.2
remapDict["breast_tumor_pathologic_n_stage"]["n1b"] = 1.4
remapDict["breast_tumor_pathologic_n_stage"]["n1c"] = 1.6
remapDict["breast_tumor_pathologic_n_stage"]["n2"] = 2
remapDict["breast_tumor_pathologic_n_stage"]["n2a"] = 2.2
remapDict["breast_tumor_pathologic_n_stage"]["n2b"] = 2.4
remapDict["breast_tumor_pathologic_n_stage"]["n3"] = 3
remapDict["breast_tumor_pathologic_n_stage"]["n3a"] = 3.2
remapDict["breast_tumor_pathologic_n_stage"]["n3b"] = 3.4
remapDict["breast_tumor_pathologic_n_stage"]["n3c"] = 3.6
# distant metastasis (M) pathologic spread: mN codes, sub-stage a/b = +0.2/+0.4
remapDict["distant_metastasis_pathologic_spread"] = {}
remapDict["distant_metastasis_pathologic_spread"]["na"] = "NA"
remapDict["distant_metastasis_pathologic_spread"]["mx"] = "NA"
remapDict["distant_metastasis_pathologic_spread"]["m0"] = 0
remapDict["distant_metastasis_pathologic_spread"]["m1"] = 1
remapDict["distant_metastasis_pathologic_spread"]["m1a"] = 1.2
remapDict["distant_metastasis_pathologic_spread"]["m1b"] = 1.4

# breast clinical M stage: same scale as above; cm0_(i+) treated as NA
remapDict["breast_tumor_clinical_m_stage"] = {}
remapDict["breast_tumor_clinical_m_stage"]["na"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["mx"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["cm0_(i+)"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["m0"] = 0
remapDict["breast_tumor_clinical_m_stage"]["m1"] = 1
remapDict["breast_tumor_clinical_m_stage"]["m1a"] = 1.2
remapDict["breast_tumor_clinical_m_stage"]["m1b"] = 1.4

# residual tumor: r0/r1/r2 onto 0/1/2, rx unknown
remapDict["residual_tumor"] = {}
remapDict["residual_tumor"]["na"] = "NA"
remapDict["residual_tumor"]["rx"] = "NA"
remapDict["residual_tumor"]["r0"] = 0
remapDict["residual_tumor"]["r1"] = 1
remapDict["residual_tumor"]["r2"] = 2

# HER2 IHC level: "N+" strings onto plain integers
remapDict["her2_immunohistochemistry_level_result"] = {}
remapDict["her2_immunohistochemistry_level_result"]["na"] = "NA"
remapDict["her2_immunohistochemistry_level_result"]["0"] = 0
remapDict["her2_immunohistochemistry_level_result"]["1+"] = 1
remapDict["her2_immunohistochemistry_level_result"]["2+"] = 2
remapDict["her2_immunohistochemistry_level_result"]["3+"] = 3

# IHC positive-cell scores: "N+" strings onto plain integers
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"] = {}
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["na"] = "NA"
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["0"] = 0
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["1+"] = 1
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["2+"] = 2
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["3+"] = 3
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["4+"] = 4

remapDict["immunohistochemistry_positive_cell_score"] = {}
remapDict["immunohistochemistry_positive_cell_score"]["na"] = "NA"
remapDict["immunohistochemistry_positive_cell_score"]["0"] = 0
remapDict["immunohistochemistry_positive_cell_score"]["1+"] = 1
remapDict["immunohistochemistry_positive_cell_score"]["2+"] = 2
remapDict["immunohistochemistry_positive_cell_score"]["3+"] = 3
remapDict["immunohistochemistry_positive_cell_score"]["4+"] = 4

# percent-of-cells categories: each 10% bucket onto its decade index
# ("<10%" = 0, "10-19%" = 1, ... "90-99%" = 9)
remapDict["progesterone_receptor_level_cell_percent_category"] = {}
remapDict["progesterone_receptor_level_cell_percent_category"]["na"] = "NA"
remapDict["progesterone_receptor_level_cell_percent_category"]["<10%"] = 0
remapDict["progesterone_receptor_level_cell_percent_category"]["10-19%"] = 1
remapDict["progesterone_receptor_level_cell_percent_category"]["20-29%"] = 2
remapDict["progesterone_receptor_level_cell_percent_category"]["30-39%"] = 3
remapDict["progesterone_receptor_level_cell_percent_category"]["40-49%"] = 4
remapDict["progesterone_receptor_level_cell_percent_category"]["50-59%"] = 5
remapDict["progesterone_receptor_level_cell_percent_category"]["60-69%"] = 6
remapDict["progesterone_receptor_level_cell_percent_category"]["70-79%"] = 7
remapDict["progesterone_receptor_level_cell_percent_category"]["80-89%"] = 8
remapDict["progesterone_receptor_level_cell_percent_category"]["90-99%"] = 9

remapDict["er_level_cell_percentage_category"] = {}
remapDict["er_level_cell_percentage_category"]["na"] = "NA"
remapDict["er_level_cell_percentage_category"]["<10%"] = 0
remapDict["er_level_cell_percentage_category"]["10-19%"] = 1
remapDict["er_level_cell_percentage_category"]["20-29%"] = 2
remapDict["er_level_cell_percentage_category"]["30-39%"] = 3
remapDict["er_level_cell_percentage_category"]["40-49%"] = 4
remapDict["er_level_cell_percentage_category"]["50-59%"] = 5
remapDict["er_level_cell_percentage_category"]["60-69%"] = 6
remapDict["er_level_cell_percentage_category"]["70-79%"] = 7
remapDict["er_level_cell_percentage_category"]["80-89%"] = 8
remapDict["er_level_cell_percentage_category"]["90-99%"] = 9

remapDict["her2_erbb_pos_finding_cell_percent_category"] = {}
remapDict["her2_erbb_pos_finding_cell_percent_category"]["na"] = "NA"
remapDict["her2_erbb_pos_finding_cell_percent_category"]["<10%"] = 0
remapDict["her2_erbb_pos_finding_cell_percent_category"]["10-19%"] = 1
remapDict["her2_erbb_pos_finding_cell_percent_category"]["20-29%"] = 2
remapDict["her2_erbb_pos_finding_cell_percent_category"]["30-39%"] = 3
remapDict["her2_erbb_pos_finding_cell_percent_category"]["40-49%"] = 4
remapDict["her2_erbb_pos_finding_cell_percent_category"]["50-59%"] = 5
remapDict["her2_erbb_pos_finding_cell_percent_category"]["60-69%"] = 6
remapDict["her2_erbb_pos_finding_cell_percent_category"]["70-79%"] = 7
remapDict["her2_erbb_pos_finding_cell_percent_category"]["80-89%"] = 8
remapDict["her2_erbb_pos_finding_cell_percent_category"]["90-99%"] = 9

# axillary lymph node staging method: "other" variants become NA
remapDict["axillary_lymph_node_stage_method_type"] = {}
remapDict["axillary_lymph_node_stage_method_type"]["na"] = "NA"
remapDict["axillary_lymph_node_stage_method_type"]["OTHER_(SPECIFY)"] = "NA"
remapDict["axillary_lymph_node_stage_method_type"]["other_(specify)"] = "NA"
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def stringInList_CaseInsens ( aString, aList ):
    """Return 1 if aString equals any element of aList ignoring case, else 0."""
    target = aString.upper()
    matched = any ( target == candidate.upper() for candidate in aList )
    return ( 1 if matched else 0 )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def remapCategoricalFeatures(allClinDict):
print " "
print " in remapCategoricalFeatures "
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
aKey2 = aKey.lower()
if (aKey2 in remapDict.keys()):
numRemap = 0
print " "
print " looking at <%s> " % aKey2
tmpV = allClinDict[aKey]
# print " part of original vector : ", tmpV[:10]
newV = [0] * len(tmpV)
for kk in range(len(tmpV)):
bKey2 = tmpV[kk].lower()
if (bKey2.startswith("stage_")):
bKey2 = bKey2[6:]
if (bKey2.startswith("stage ")):
bKey2 = bKey2[6:]
try:
newV[kk] = remapDict[aKey2][bKey2]
if (newV[kk] != "NA" and newV[kk] != "na"):
if (newV[kk].lower() != bKey2):
# print " remapping ... ", aKey, aKey2, kk,
# bKey2, remapDict[aKey2][bKey2]
numRemap += 1
except:
if (0):
print " WARNING in remapCategoricalFeatures ... nothing to remap to ??? "
print " <%s> <%s> %d <%s> " % (aKey, aKey2, kk, bKey2)
print " <%s> " % remapDict[aKey2]
# sys.exit(-1)
newV[kk] = bKey2
if (numRemap > 0):
print " --> using remapped values for <%s> " % aKey
print " mapping dictionary : ", remapDict[aKey2]
print " part of original vector : ", tmpV[:10]
print " part of new vector : ", newV[:10]
allClinDict[aKey] = newV
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getNumPatients(allClinDict):
    """Return the length of the first feature vector in allClinDict, i.e.
    the number of patients."""
    firstKey = next(iter(allClinDict))
    return ( len(allClinDict[firstKey]) )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def findProperKey(allClinDict, aString):
    """Find the feature key in allClinDict whose name contains aString
    (case-insensitive substring match).

    Returns "NO KEY" when nothing matches and the key itself when the match
    is unique.  With multiple matches, falls back to a "perfect" match:
    either the whole key equals aString, or -- for ':'-delimited feature
    names like "N:CLIN:days_to_death:::::" -- the third token equals
    aString.  Multiple matches with no perfect match abort the program.
    """
    keyList = allClinDict.keys()
    foundList = []
    for aKey in keyList:
        if ( aKey.lower().find(aString.lower()) >=0 ):
            foundList += [ aKey ]
    if ( len(foundList) == 0 ):
        return ( "NO KEY" )
    elif ( len(foundList) == 1 ):
        return ( foundList[0] )
    else:
        ## look for a perfect match ...
        for mString in foundList:
            mTokens = mString.split(':')
            if ( len(mTokens) == 1 ):
                # bare key: must equal the search string (ignoring case)
                if ( mTokens[0].lower() == aString.lower() ):
                    return ( mString )
            elif ( len(mTokens) > 2 ):
                # typed key: compare the feature-name token
                try:
                    if ( mTokens[2].lower() == aString.lower() ):
                        return ( mString )
                except:
                    print " findProperKey: ERROR in try ??? ", mString
                    print foundList
        print " "
        print " ERROR in findProperKey ??? multiple matches "
        print " but none of them are perfect matches ... "
        print aString
        print foundList
        print " "
        sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def computeLymphnodesFraction(allClinDict):
    """Derive N:SAMP:fraction_lymphnodes_positive_by_he as
    number_of_lymphnodes_positive_by_he / number_of_lymphnodes_examined.
    The value is NA when either input is NA or zero nodes were examined;
    the whole step is skipped if either input feature is missing."""
    aKey = findProperKey ( allClinDict, "number_of_lymphnodes_positive_by_he" )
    bKey = findProperKey ( allClinDict, "number_of_lymphnodes_examined" )
    if (aKey not in allClinDict.keys()):
        print " "
        print " skipping computeLymphnodesFraction "
        return (allClinDict)
    if (bKey not in allClinDict.keys()):
        print " "
        print " skipping computeLymphnodesFraction "
        return (allClinDict)
    print " "
    print " in computeLymphnodesFraction ... "
    numClin = getNumPatients(allClinDict)
    newV = [0] * numClin
    for kk in range(numClin):
        if (allClinDict[bKey][kk] == "NA"):
            newV[kk] = "NA"
        elif (allClinDict[aKey][kk] == "NA"):
            newV[kk] = "NA"
        elif (int(allClinDict[bKey][kk]) == 0):
            # avoid dividing by zero when no nodes were examined
            newV[kk] = "NA"
        else:
            newV[kk] = float(allClinDict[aKey][kk]) / \
                float(allClinDict[bKey][kk])
    allClinDict["N:SAMP:fraction_lymphnodes_positive_by_he:::::"] = newV
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addTag2Key ( aKey, aTag ):
    """Append aTag (prefixed with '_' unless it already starts with one) to
    the third ':'-separated token of a feature key; keys with fewer than
    seven tokens simply get the tag appended to the whole key."""
    suffix = aTag if ( aTag[0] == "_" ) else ( "_" + aTag )
    pieces = aKey.split(':')
    if ( len(pieces) >= 7 ):
        pieces[2] = pieces[2] + suffix
        return ( ':'.join(pieces) )
    return ( aKey + suffix )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkBarcodes(allClinDict):
    """Sanity check: patient barcodes must not contain an underscore;
    abort the whole run (sys.exit) if one does."""
    zKey = findProperKey (allClinDict, "bcr_patient_barcode" )
    numClin = getNumPatients(allClinDict)
    for ii in range(numClin):
        if ( allClinDict[zKey][ii].find("_") >= 0 ):
            print " BAD barcode !!! ", ii, allClinDict[zKey][ii]
            sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# fields of interest:
# days_to_birth
# days_to_initial_pathologic_diagnosis <-- this is always 0
# days_to_submitted_specimen_dx
# days_to_last_followup
# days_to_last_known_alive
# days_to_death
# also:
# new_tumor_event_after_initial_treatment
# days_to_new_tumor_event_after_initial_treatment
def addFollowupInfo(allClinDict):
    """Normalize follow-up time fields and derive specimen-relative copies.

    STEP #1: make days_to_last_known_alive consistent: set from
    days_to_death when available, else from days_to_last_followup; the
    feature is created if it does not already exist.

    STEP #2: if days_to_submitted_specimen_dx exists, add "_relSS" variants
    of every days_to_* / age_at_* feature, shifted to be relative to the
    submitted-specimen date instead of initial pathologic diagnosis.

    STEP #3: same but relative to days_to_sample_procurement, producing
    "_relSP" variants.

    A derived feature is kept only when more than 30 patients end up with
    non-NA values.  Returns the (mutated) allClinDict.
    """
    print " "
    print " in addFollowupInfo ... "

    # ------------------------------------------------------------------------
    # FIRST: if there is a days_to_last_known_alive, then check that it is
    # used consistently, otherwise create it
    zKey = findProperKey (allClinDict, "bcr_patient_barcode")
    aKey = findProperKey (allClinDict, "days_to_last_known_alive")
    bKey = findProperKey (allClinDict, "days_to_last_followup")
    cKey = findProperKey (allClinDict, "days_to_death")

    haveA = (aKey in allClinDict.keys())
    haveB = (bKey in allClinDict.keys())
    haveC = (cKey in allClinDict.keys())
    print " "
    print " STEP #1 "
    print " have flags A, B, and C : ", haveA, haveB, haveC
    ## print " allClinDict.keys() : "
    ## print allClinDict.keys()

    if (haveA):
        # if we have the "days_to_last_known_alive" field, check that it
        # is consistent with the other two fields ...
        numClin = getNumPatients(allClinDict)
        numNotNA = 0
        for kk in range(numClin):
            ## if we have a 'days_to_death' field and it is not NA, then set 'days_to_last_known_alive' to that value
            if (haveC):
                if (str(allClinDict[cKey][kk]).upper() != "NA"):
                    allClinDict[aKey][kk] = allClinDict[cKey][kk]
            ## if we have a 'days_to_last_followup' field and it is not NA, then ...
            if (haveB):
                if (str(allClinDict[bKey][kk]).upper() != "NA"):
                    if (str(allClinDict[aKey][kk]).upper() == "NA"):
                        allClinDict[aKey][kk] = allClinDict[bKey][kk]
            if (str(allClinDict[aKey][kk]).upper() != "NA"):
                numNotNA += 1
        print " UPDATED days_to_last_known_alive (%d) : " % numNotNA
        print allClinDict[aKey]

    else:
        # create it ...
        if ( aKey == "NO KEY" ): aKey = "N:CLIN:days_to_last_known_alive:::::"
        numClin = getNumPatients(allClinDict)
        newVec = [0] * numClin
        numNotNA = 0
        for kk in range(numClin):
            newVec[kk] = "NA"
            # days_to_death wins; days_to_last_followup only fills NA slots
            if (haveC):
                if (str(allClinDict[cKey][kk]).upper() != "NA"):
                    newVec[kk] = allClinDict[cKey][kk]
            if (haveB):
                if (str(allClinDict[bKey][kk]).upper() != "NA"):
                    if (str(newVec[kk]).upper() == "NA"):
                        newVec[kk] = allClinDict[bKey][kk]
            if (str(newVec[kk]).upper() != "NA"):
                numNotNA += 1
        print " NEW days_to_last_known_alive (%d) : " % numNotNA
        ## print newVec
        allClinDict[aKey] = newVec

    # ------------------------------------------------------------------------
    # SECOND: if there is a "days_to_submitted_specimen_dx", then create
    # a set of "days_to_" features that instead of being relative
    # to "initial_pathologic_diagnosis" are relative to "submitted_specimen"
    print " "
    print " STEP #2 "
    aKey = findProperKey (allClinDict, "days_to_submitted_specimen_dx")
    tKey = findProperKey (allClinDict, "days_to_initial_pathologic_diagnosis")
    if (aKey in allClinDict.keys()):
        haveA = 1
    else:
        print " do not have [days_to_submitted_specimen_dx] in allClinDict "
        haveA = 0
    if (tKey in allClinDict.keys()):
        haveT = 1
    else:
        print " do not have [days_to_initial_pathologic_diagnosis] in allClinDict "
        haveT = 0

    # NOTE(review): this bare try/except silently swallows any error in the
    # whole derivation loop (e.g. non-numeric values) -- consider narrowing
    try:
        numClin = getNumPatients(allClinDict)
        for bKey in allClinDict.keys():
            if (haveA == 0): continue
            if (bKey == aKey): continue
            if (bKey.find("days_to_") >= 0):
                newKey = addTag2Key ( bKey, "relSS" )
                print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
                newVec = [0] * numClin
                numNotNA = 0
                for kk in range(numClin):
                    ## initialize to NA
                    newVec[kk] = "NA"
                    ## skip if an important value is NA
                    if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
                    if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
                    if (haveT):
                        if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
                    ## deltaDays is either (days_to_submitted_specimen_dx) - (days_to_initial_pathologic_diagnosis)
                    ## or just (days_to_submitted_specimen_dx)
                    if (haveT):
                        deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
                    else:
                        deltaDays = allClinDict[aKey][kk]
                    ## and then we subtract 'delta days' from the original key to make the new relative key
                    newVec[kk] = allClinDict[bKey][kk] - deltaDays
                    print " STEP2a ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
                    numNotNA += 1
                if ( numNotNA > 30 ):
                    print " adding new key (%d) : " % numNotNA, newKey
                    ## print newVec[:5]
                    ## print newVec[-5:]
                    allClinDict[newKey] = newVec
                else:
                    # NOTE: the "%d" below is printed literally (tuple print,
                    # not %-formatting)
                    print " NOT adding new key (%d) : ", numNotNA, newKey
            if (bKey.find("age_at_") >= 0):
                ## make sure that this is not a "stage_at_" feature !!!
                if ( bKey.find("stage_at_") >= 0 ): continue
                newKey = addTag2Key ( bKey, "relSS" )
                print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
                newVec = [0] * numClin
                numNotNA = 0
                for kk in range(numClin):
                    ## initialize to NA
                    newVec[kk] = "NA"
                    ## skip if an important value is NA
                    if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
                    if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
                    if (haveT):
                        if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
                    ## deltaDays is either (days_to_submitted_specimen_dx) - (days_to_initial_pathologic_diagnosis)
                    ## or just (days_to_submitted_specimen_dx)
                    if (haveT):
                        deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
                    else:
                        deltaDays = allClinDict[aKey][kk]
                    ## and then we subtract 'delta days' from the original key to make the new relative key
                    ## 04mar14 : actually we need to ADD here because "age" should go UP with deltaDays ...
                    newVec[kk] = allClinDict[bKey][kk] + ( float(deltaDays) / DAYS_PER_YEAR )
                    print " STEP2b ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
                    numNotNA += 1
                if ( numNotNA > 30 ):
                    print " adding new key (%d) : " % numNotNA, newKey
                    ## print newVec[:5]
                    ## print newVec[-5:]
                    allClinDict[newKey] = newVec
                else:
                    print " NOT adding new key (%d) : ", numNotNA, newKey
    except:
        print " --> failed in this try (x) "
        doNothing = 1

    # ------------------------------------------------------------------------
    # THIRD: if there is a "days_to_sample_procurement", then create
    # a set of "days_to_" features that instead of being relative
    # to "initial_pathologic_diagnosis" are relative to "sample_procurement
    print " "
    print " STEP #3 "
    aKey = findProperKey (allClinDict, "days_to_sample_procurement")
    tKey = findProperKey (allClinDict, "days_to_initial_pathologic_diagnosis")
    if (aKey in allClinDict.keys()):
        haveA = 1
    else:
        print " do not have [days_to_sample_procurement] in allClinDict "
        haveA = 0
    if (tKey in allClinDict.keys()):
        haveT = 1
    else:
        haveT = 0
        print " do not have a [days_to_initial_pathologic_diagnosis] key "

    # NOTE(review): same bare try/except pattern as STEP #2
    try:
        numClin = getNumPatients(allClinDict)
        for bKey in allClinDict.keys():
            if (haveA == 0): continue
            if (bKey == aKey): continue
            if (bKey.find("days_to_") >= 0):
                ## make sure that this is not one of the relSS features just added !!!
                if ( bKey.find("relSS") >= 0 ): continue
                newKey = addTag2Key ( bKey, "relSP" )
                print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
                newVec = [0] * numClin
                numNotNA = 0
                for kk in range(numClin):
                    ## initialize to NA
                    newVec[kk] = "NA"
                    ## skip if an important value is NA
                    if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
                    if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
                    if (haveT):
                        if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
                    ## deltaDays is either (days_to_sample_procurement) - (days_to_initial_pathologic_diagnosis)
                    ## or just (days_to_sample_procurement)
                    if (haveT):
                        deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
                    else:
                        deltaDays = allClinDict[aKey][kk]
                    ## and then we subtract 'delta days' from the original key to make the new relative key
                    newVec[kk] = allClinDict[bKey][kk] - deltaDays
                    print " STEP3a ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
                    numNotNA += 1
                if ( numNotNA > 30 ):
                    print " adding new key (%d) : " % numNotNA, newKey
                    ## print newVec[:5]
                    ## print newVec[-5:]
                    allClinDict[newKey] = newVec
                else:
                    print " NOT adding new key (%d) : ", numNotNA, newKey
            if (bKey.find("age_at_") >= 0):
                ## make sure that this is not one of the relSS features just added !!!
                if ( bKey.find("relSS") >= 0 ): continue
                ## make sure that this is not a "stage_at_" feature !!!
                if ( bKey.find("stage_at_") >= 0 ): continue
                newKey = addTag2Key ( bKey, "relSP" )
                print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
                newVec = [0] * numClin
                numNotNA = 0
                for kk in range(numClin):
                    ## initialize to NA
                    newVec[kk] = "NA"
                    ## skip if an important value is NA
                    print " checking for important information ... ", aKey, bKey, tKey
                    print allClinDict[aKey][kk]
                    print allClinDict[bKey][kk]
                    if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
                    if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
                    if (haveT):
                        print allClinDict[tKey][kk]
                        if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
                    ## deltaDays is either (days_to_sample_procurement) - (days_to_initial_pathologic_diagnosis)
                    ## or just (days_to_sample_procurement)
                    if (haveT):
                        deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
                    else:
                        deltaDays = allClinDict[aKey][kk]
                    print " computed deltaDays : ", deltaDays
                    ## and then we subtract 'delta days', scaled to years ...
                    ## 03mar14 : actually we need to ADD here ...
                    newVec[kk] = allClinDict[bKey][kk] + ( float(deltaDays) / DAYS_PER_YEAR )
                    print " STEP3b ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
                    numNotNA += 1
                if ( numNotNA > 30 ):
                    print " adding new key (%d) : " % numNotNA, newKey
                    ## print newVec[:5]
                    ## print newVec[-5:]
                    allClinDict[newKey] = newVec
                else:
                    print " NOT adding new key (%d) : ", numNotNA, newKey
    except:
        print " --> failed in this try (y) "
        doNothing = 1

    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# fields of interest:
# days_to_birth
# days_to_initial_pathologic_diagnosis <-- this is always 0
# days_to_submitted_specimen_dx
# days_to_last_followup
# days_to_last_known_alive
# days_to_death
# also:
# new_tumor_event_after_initial_treatment
# days_to_new_tumor_event_after_initial_treatment
def checkFollowupInfo(allClinDict):
    # Sanity-check the follow-up / vital-status fields and patch obvious
    # inconsistencies IN PLACE; returns the (modified) allClinDict.
    #   - a "Dead" patient with no days_to_death is re-labeled "Alive"
    #   - an "Alive" patient with neither days_to_last_known_alive nor
    #     days_to_last_followup gets both set to 0
    print " "
    print " in checkFollowupInfo ... "
    # FIRST: if there is a days_to_last_known_alive, then check that it is
    # used consistently, otherwise create it
    zKey = findProperKey (allClinDict, "bcr_patient_barcode")
    aKey = findProperKey (allClinDict, "days_to_last_known_alive")
    bKey = findProperKey (allClinDict, "days_to_last_followup")
    cKey = findProperKey (allClinDict, "days_to_death")
    dKey = findProperKey (allClinDict, "vital_status")
    # NOTE(review): findProperKey presumably returns a sentinel for a missing
    # field, so these membership tests detect absent columns -- confirm
    haveA = (aKey in allClinDict.keys())
    haveB = (bKey in allClinDict.keys())
    haveC = (cKey in allClinDict.keys())
    haveD = (dKey in allClinDict.keys())
    print " have flags A, B, C and D : ", haveA, haveB, haveC, haveD
    if ( not haveD ):
        print " skipping this function ... requires vital_status "
        return (allClinDict)
    ## print " allClinDict.keys() : "
    ## print allClinDict.keys()
    numClin = getNumPatients(allClinDict)
    # range of days_to_last_known_alive is typically something like [0,3196]
    for kk in range(numClin):
        if (str(allClinDict[dKey][kk]).upper() == "DEAD"):
            if (str(allClinDict[cKey][kk]).upper() == "NA"):
                print " ERROR !!! need to know when this person died !!! ", allClinDict[zKey][kk]
                print kk
                print aKey, allClinDict[aKey][kk]
                print bKey, allClinDict[bKey][kk]
                print cKey, allClinDict[cKey][kk]
                print dKey, allClinDict[dKey][kk]
                print " UPDATING vital_status to Alive ... "
                print " "
                ## because we do not have a days_to_death value, we are going to call this person "Alive"
                allClinDict[dKey][kk] = "Alive"
        if (str(allClinDict[dKey][kk]).upper() == "ALIVE"):
            if (str(allClinDict[aKey][kk]).upper() == "NA"):
                if (str(allClinDict[bKey][kk]).upper() == "NA"):
                    print " ERROR !!! no information about follow-up ??? ", allClinDict[zKey][kk]
                    print kk
                    print aKey, allClinDict[aKey][kk]
                    print bKey, allClinDict[bKey][kk]
                    print cKey, allClinDict[cKey][kk]
                    print dKey, allClinDict[dKey][kk]
                    print " UPDATING days_to_last_known_alive and days_to_last_followup to 0 "
                    print " "
                    allClinDict[aKey][kk] = 0
                    allClinDict[bKey][kk] = 0
                else:
                    # NOTE(review): reconstructed nesting -- reached when
                    # days_to_last_known_alive is NA but days_to_last_followup
                    # is not, which the author apparently treated as impossible
                    print " ERROR in checkFollowupInfo ... how did we get here ??? "
                    sys.exit(-1)
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# derive the preferred stage tumor stage from the comparison of the
# reported one with the derived one
def PreferredStage(reported, computed):
    # Pick between the DCC-reported tumor stage and the TNM-derived one,
    # based on how testTumorStage() classifies their relationship.
    verdict = testTumorStage(reported, computed)
    # cases where the reported stage wins (including a disagreement, where
    # the reported value is assumed to be the valid one)
    if verdict in ("AGREE",
                   "Stage cannot be derived from TNM",
                   "Stage more specific than TNM",
                   "DISAGREE"):
        return (reported)
    # cases where the TNM-derived stage is preferable
    # (For SupTab1 use return(computed) instead of return(repStage(computed)))
    if verdict in ("Derived stage is more specific",
                   "Stage can be derived from TNM"):
        return (repStage(computed))
    return ("Error: Lack a preferred stage")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Return first element of a vector, or if input is string, the string itself
def repStage(substage):
    # Return a single representative stage: the input itself for a string,
    # or the first (lowest / preferred) entry for a list of candidates.
    # Fix: the old version used `type(x) is str/list`, so any other input
    # (e.g. a unicode string in py2, or a subclass) silently returned None;
    # isinstance() is used now and unexpected inputs pass through unchanged.
    if (isinstance(substage, list)):
        # ambiguous TNM derivations come back as a list of candidate stages
        return (substage[0])
    if (isinstance(substage, str)):
        return (substage)
    # unexpected type: return it unchanged rather than yielding None
    return (substage)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Characterize the difference between reported and inferred tumor stage
def testTumorStage(reported, computed):
    # Characterize the difference between the reported and the TNM-derived
    # tumor stage, returning one of the verdict strings consumed by
    # PreferredStage().  `computed` may be a single stage string or a list
    # of candidate stages (ambiguous TNM combinations).
    # Fix: the original used bitwise &/| on comparison results; plain
    # short-circuiting and/or is the correct idiom (same truth table here).
    # Agreement includes "in" relationship and NA equivalence
    if (isinstance(computed, list)):
        if (reported in computed):
            return ("AGREE")
    if (isinstance(computed, str)):
        if (reported == computed):
            return ("AGREE")
    if ((reported == "STAGE IVA") or (reported == "STAGE IVB")) and (computed == "STAGE IV"):
        return ("Stage more specific than TNM")
    if (reported == "NA") and (computed != "NA"):
        return ("Stage can be derived from TNM")
    if (reported != "NA") and (computed == "NA"):
        return ("Stage cannot be derived from TNM")
    # e.g. reported "STAGE III" vs derived "STAGE IIIB"
    if (repStage(computed).startswith(reported)):
        return ("Derived stage is more specific")
    return ("DISAGREE")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Derive Tumor Stage from TNM and AJCC table, 7th edition
# sometimes if we get something like "M1" when it should actually be "M1A"
# or "M1B", we will pick the first/lowest thing that it could be ...
def getTumorStage(T, N, M):
    # Derive tumor stage from TNM per the AJCC table, 7th edition.
    # Ambiguous combinations (e.g. an unspecified "M1") return a list of
    # candidate stages, lowest first; un-assessable inputs return "NA".
    # NOTE: this function is deliberately DISABLED -- it prints a warning
    # and exits immediately, so everything below the sys.exit() is dead
    # code kept for reference.
    print " WARNING ... this function should NOT be called ... "
    sys.exit(-1)
    # ---- unreachable while the guard above is in place ----
    T = T.upper()
    N = N.upper()
    M = M.upper()
    # any distant metastasis means stage IV regardless of T and N
    if (M == "M1"):
        # Seems to be TCGA choice if IVA, IVB not specified
        return ("STAGE IV")
    if (M == "M1A"):
        return ("STAGE IVA")
    if (M == "M1B"):
        return ("STAGE IVB")
    # X means "cannot be assessed"
    if (T == "TX"):
        T = "NA"
    if (N == "NX"):
        N = "NA"
    if (M == "MX"):
        M = "NA"
    if (T == "NA" or N == "NA" or M == "NA"):
        return ("NA")
    if (T == "T0" and N == "N0" and M == "M0"):
        return ("STAGE 0")
    # NOTE(review): T was upper-cased above, so "Tis" ("TIS" != "Tis") can
    # never match -- this branch looks unreachable
    if (T == "Tis" and N == "N0" and M == "M0"):
        return ("STAGE 0")
    # ---- stage I: small tumors, no nodes, no metastasis ----
    if (T == "T1" and N == "N0" and M == "M0"):
        return ("STAGE I")
    if (T == "T1A" and N == "N0" and M == "M0"):
        return ("STAGE I")
    if (T == "T1B" and N == "N0" and M == "M0"):
        return ("STAGE I")
    if (T == "T2" and N == "N0" and M == "M0"):
        return ("STAGE I")
    if (T == "T2A" and N == "N0" and M == "M0"):
        return ("STAGE I")
    if (T == "T2B" and N == "N0" and M == "M0"):
        return ("STAGE I")
    # ---- stage II: larger tumors, still node-negative ----
    if (T == "T3" and N == "N0" and M == "M0"):
        return ("STAGE IIA")
    if (T == "T3A" and N == "N0" and M == "M0"):
        return ("STAGE IIA")
    if (T == "T3B" and N == "N0" and M == "M0"):
        return ("STAGE IIB")
    if (T == "T3C" and N == "N0" and M == "M0"):
        return ("STAGE IIB")
    if (T == "T4A" and N == "N0" and M == "M0"):
        return ("STAGE IIB")
    if (T == "T4B" and N == "N0" and M == "M0"):
        return ("STAGE IIC")
    if (T == "T4" and N == "N0" and M == "M0"):
        return (["STAGE IIB", "STAGE IIC"])
    # ---- stage III: node-positive combinations ----
    if (T == "T1" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1A" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1B" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1" and N == "N1A" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1" and N == "N1B" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1" and N == "N1C" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1" and N == "N2" and M == "M0"):
        return (["STAGE IIIA", "STAGE IIIB"])  # CHOICE IIIA, IIIB
    if (T == "T1B" and N == "N2" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T1" and N == "N2A" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T1" and N == "N3" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2A" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2B" and N == "N1" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2" and N == "N1A" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2" and N == "N1B" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T2" and N == "N1C" and M == "M0"):
        return ("STAGE IIIA")
    if (T == "T3" and N == "N1" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3A" and N == "N1" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3B" and N == "N1" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3" and N == "N1A" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3" and N == "N1B" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3" and N == "N1C" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T4A" and N == "N1" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T4A" and N == "N1A" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T4A" and N == "N1B" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T4A" and N == "N1C" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T4" and N == "N1" and M == "M0"):
        return (["STAGE IIIB", "STAGE IIIC"])
    if (T == "T2" and N == "N2" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T2A" and N == "N2" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T2B" and N == "N2" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T2" and N == "N2A" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T2" and N == "N2B" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T2" and N == "N2C" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T2" and N == "N3" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T2B" and N == "N3" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T3" and N == "N2" and M == "M0"):
        return (["STAGE IIIB", "STAGE IIIC"])  # CHOICE IIIB, IIIC
    if (T == "T3" and N == "N2A" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3" and N == "N2C" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T1" and N == "N2B" and M == "M0"):
        return ("STAGE IIIB")
    if (T == "T3" and N == "N2B" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T3" and N == "N3" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4" and N == "N2" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4A" and N == "N2" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4A" and N == "N2A" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4A" and N == "N2B" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4A" and N == "N2C" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4" and N == "N3" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4" and N == "N3A" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4A" and N == "N3" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N1" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N2" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N1A" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N1B" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N1C" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N2C" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N2A" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N2B" and M == "M0"):
        return ("STAGE IIIC")
    # NOTE(review): exact duplicate of the preceding condition (T4B/N2B/M0)
    if (T == "T4B" and N == "N2B" and M == "M0"):
        return ("STAGE IIIC")
    if (T == "T4B" and N == "N3A" and M == "M0"):
        return ("STAGE IIIC")
    # We reach this point if all values are non-NA, but combination is not in
    # AJCC tumor table
    print " ERROR in getTumorStage ??? ", T, N, M
    return ("Not in AJCC Table?")
    sys.exit(-1)  # unreachable: follows the return above
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkTumorStage(allClinDict):
    # Compare each patient's reported tumor_stage against the TNM spread
    # fields.  All reconciliation logic is currently disabled (the if(0)
    # blocks below), so this function is effectively a no-op walk that
    # returns allClinDict unchanged.
    if ("tumor_stage" not in allClinDict.keys()):
        print " skipping checkTumorStage ... "
        return (allClinDict)
    else:
        print " running checkTumorStage ... "
    numClin = getNumPatients(allClinDict)
    print " total number of patients : ", numClin
    # disabled debug dump of stage/spread-related key names
    if (0):
        stringList = ["tumor", "stage", "spread"]
        for aKey in allClinDict.keys():
            for aString in stringList:
                if (aKey.find(aString) >= 0):
                    print aKey
    # all of these fields must be present, otherwise bail out
    reqKeyList = [
        "bcr_patient_barcode", "tumor_stage", "primary_tumor_pathologic_spread",
        "lymphnode_pathologic_spread", "distant_metastasis_pathologic_spread"]
    numNotFound = 0
    for aKey in reqKeyList:
        if (aKey not in allClinDict.keys()):
            numNotFound += 1
    if (numNotFound > 0):
        print " skipping checkTumorStage ... "
        return (allClinDict)
    # NOTE(review): other functions in this file call findProperKey for the
    # same purpose -- confirm getProperKey is the intended helper here
    pKey = getProperKey ( allClinDict, "bcr_patient_barcode" )
    sKey = getProperKey ( allClinDict, "tumor_stage" )
    tKey = getProperKey ( allClinDict, "primary_tumor_pathologic_spread" )
    nKey = getProperKey ( allClinDict, "lymphnode_pathologic_spread" )
    mKey = getProperKey ( allClinDict, "distant_metastasis_pathologic_spread" )
    for ii in range(numClin):
        aCode = allClinDict[pKey][ii]
        curTumorStage = allClinDict[sKey][ii]
        curT = allClinDict[tKey][ii]
        curN = allClinDict[nKey][ii]
        curM = allClinDict[mKey][ii]
        # print " checking tumor stage for <%s> <%s> <%s> <%s> <%s> " % (
        # aCode, curTumorStage, curN, curM, curT )
        ## removing this 15aug2014 ...
        # disabled: normalize the reported stage string to "STAGE ..." form
        if ( 0 ):
            curTumorStage = curTumorStage.upper()
            curTumorStage = curTumorStage.strip()
            if (curTumorStage != "NA"):
                if (not curTumorStage.startswith("STAGE ")):
                    curTumorStage = "STAGE " + curTumorStage
        # as of 09nov12, NOT attempting to derive tumor stage from T, N, and M
        if (0):
            # get AJCC-derived tumor stage, compare to DCC value, and decide
            # which to use
            ajccStage = getTumorStage(curT, curN, curM)
            newStage = PreferredStage(curTumorStage, ajccStage)
            allClinDict[sKey][ii] = newStage
            # report
            if (type(ajccStage) is list):
                ajccString = ' OR '.join(ajccStage)
            else:
                ajccString = ajccStage
            print aCode.upper() + ', TNM:' + curT.upper() + ' ' + curN.upper() + ' ' + curM.upper() + ', DCC Stage:' \
                + curTumorStage + ', AJCC Stage:' + ajccString + ', Comparison:' \
                + \
                testTumorStage(curTumorStage, ajccStage) + \
                ', Will use: ' + newStage
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkVitalStatus(allClinDict):
vsKey = findProperKey ( allClinDict, "vital_status" )
bcKey = findProperKey ( allClinDict, "bcr_patient_barcode" )
if ( vsKey == "NO KEY" ):
print " skipping checkVitalStatus ... "
return (allClinDict)
print " running checkVitalStatus ... "
numClin = getNumPatients(allClinDict)
print " total number of patients : ", numClin
numLC = 0
numDC = 0
for ii in range(numClin):
aCode = allClinDict[bcKey][ii]
curStatus = allClinDict[vsKey][ii]
doChange = 1
try:
newStatus = curStatus
except:
try:
if (curStatus == 0):
newStatus = "Alive"
numLC += 1
elif (curStatus == 1):
newStatus = "Dead"
numDC += 1
except:
doChange = 0
if (doChange):
allClinDict[vsKey][ii] = newStatus
if (numLC + numDC > 0):
print " WARNING: changed some vital status fields ... %d %d " % (numLC, numDC)
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def updateAge(allClinDict):
    # Recompute age_at_initial_pathologic_diagnosis with two-decimal
    # precision from days_to_birth (stored as a NEGATIVE day count),
    # overwriting the stored integer age in place.
    pKey = findProperKey ( allClinDict, "bcr_patient_barcode" )
    aKey = findProperKey ( allClinDict, "age_at_initial_pathologic_diagnosis" )
    bKey = findProperKey ( allClinDict, "days_to_birth" )
    print " running updateAge ... "
    numClin = getNumPatients(allClinDict)
    print " total number of patients : ", numClin
    for ii in range(numClin):
        try:
            aCode = allClinDict[pKey][ii]
            curAge = allClinDict[aKey][ii]
            curD2B = allClinDict[bKey][ii]
            # days_to_birth is negative, so (0 - days) gives age in days
            newAge = float(0 - int(curD2B)) / DAYS_PER_YEAR
            # now we want to limit the 'precision' to two decimal places
            # (truncation plus +0.49 approximates rounding for positive ages)
            newAge = float(int((100. * newAge) + 0.49)) / 100.
            # sanity check: the truncated recomputed age should match the
            # recorded integer age
            if (abs(curAge - int(newAge)) > 0):
                print " ERROR in updateAge ??? ", curAge, curD2B, newAge, aCode
            allClinDict[aKey][ii] = newAge
        except:
            # broad except deliberately skips patients with "NA" / missing
            # values (int("NA") raises) -- NOTE(review): it also hides any
            # other error in this loop body
            doNothing = 1
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeParens(oneKey):
    # Strip parenthesized substrings "(...)" out of every label in the
    # vector, tidying up stray leading/trailing underscores afterwards.
    # The stripped vector is only adopted if no two distinct labels
    # collapsed onto the same stripped value; otherwise the input is
    # returned untouched.
    print(" in removeParens ... ")
    seenOriginal = []
    seenStripped = []
    strippedVals = []
    for label in oneKey:
        if label not in seenOriginal:
            seenOriginal.append(label)
        parenAt = label.find("(")
        if parenAt >= 0:
            print(" --> found open paren ... at %d in <%s> " % (parenAt, label))
            # copy characters, suppressing everything from "(" through ")"
            copying = 1
            cleaned = ""
            for ch in label:
                if ch == "(":
                    copying = 0
                if copying:
                    cleaned += ch
                if ch == ")":
                    copying = 1
            # tidy up underscores left behind by the removal
            if cleaned.startswith("_"):
                cleaned = cleaned[1:]
            if cleaned.endswith("_"):
                cleaned = cleaned[:-1]
            strippedVals.append(cleaned)
            if cleaned not in seenStripped:
                seenStripped.append(cleaned)
        else:
            strippedVals.append(label)
            if label not in seenStripped:
                seenStripped.append(label)
    print(seenOriginal)
    print(seenStripped)
    # only keep the stripped labels if they are still pairwise distinct
    if len(seenStripped) == len(seenOriginal):
        print(" --> removing parenthetical strings ")
        print(seenOriginal)
        print(seenStripped)
        return (strippedVals)
    else:
        print(" NOT removing parenthetical strings ")
        return (oneKey)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeSpecialChars(oneKey):
    # Clean up the values of a categorical feature vector (in place for the
    # "UNKNOWN"->"NA" and grade-prefix fixes, then via a rebuilt copy for the
    # character substitutions).  Returns the cleaned vector, or the original
    # if cleaning would make two distinct labels collide.
    okExceptions = ['4+']
    # ran into a strange case here where most of the elements were strings
    # like 'grade_1' but some were just integers like 3 :(
    # so ... these next few blocks of code are TOTAL HACKS ...
    # first pass: count how many non-NA values parse as integers
    numInt = 0
    numNot = 0
    aString = ''
    aInt = 9999
    for ii in range(len(oneKey)):
        aLabel = str(oneKey[ii])
        if (aLabel.upper() == "NA"):
            continue
        if (aLabel.upper() == "UNKNOWN"):
            # normalize UNKNOWN to NA in place
            oneKey[ii] = "NA"
            continue
        try:
            iVal = int(aLabel)
            numInt += 1
            aInt = iVal
        except:
            numNot += 1
            aString = aLabel
    print " number of integers = %d number NOT = %d " % (numInt, numNot)
    if (numInt > 0 and numNot > 0):
        # for now, we are just checking for 'grade' strings that are sometimes
        # 'grade_3' and sometimes just '3'
        if (aString.lower().startswith("grade_")):
            for ii in range(len(oneKey)):
                aLabel = str(oneKey[ii])
                if (aLabel.upper() == "NA"):
                    continue
                if (not aLabel.lower().startswith("grade_")):
                    try:
                        iVal = int(aLabel)
                        aString = "Grade_%d" % iVal
                        oneKey[ii] = aString
                    except:
                        print " FAILED to prepend grade ??? ", aLabel
                        sys.exit(-1)
        # or if there are at least twice as many strings as integers, then we
        # will cast the integers to strings ...
        elif (numInt < (numNot / 2)):
            for ii in range(len(oneKey)):
                aLabel = str(oneKey[ii])
                if (aLabel.upper() == "NA"):
                    continue
                try:
                    iVal = int(aLabel)
                    oneKey[ii] = str(iVal)
                except:
                    doNothing = 1
        elif (aString not in okExceptions):
            # mixed ints and strings with no known fix: warn and fall through
            if ( 1 ):
                print " WARNING ... something odd about this feature ... ", aInt, aString
                print oneKey
                ## return ([])
                ## sys.exit(-1)
    # second pass: replace special characters with underscores, collapsing
    # any resulting "__" runs to a single "_"
    origUniqList = []
    newUniqList = []
    newKey = []
    for aLabel in oneKey:
        if (aLabel not in origUniqList):
            origUniqList += [aLabel]
        bLabel = ""
        try:
            # NOTE(review): a non-string entry (e.g. a leftover int) raises
            # here and aborts the whole program via sys.exit below
            for ii in range(len(aLabel)):
                if (aLabel[ii] == ' '):
                    bLabel += "_"
                elif (aLabel[ii] == "'"):
                    bLabel += "_"
                elif (aLabel[ii] == '"'):
                    bLabel += "_"
                elif (aLabel[ii] == ':'):
                    bLabel += "_"
                elif (aLabel[ii] == '/'):
                    bLabel += "_"
                # elif ( aLabel[ii] == '-' ):
                ## bLabel += "_"
                elif (aLabel[ii] == '.'):
                    bLabel += "_"
                elif (aLabel[ii] == ','):
                    bLabel += "_"
                else:
                    bLabel += aLabel[ii]
        except:
            print " ERROR in removeSpecialChars ??? "
            print " oneKey = <%s> " % (oneKey)
            print " aLabel = <%s> " % (aLabel)
            sys.exit(-1)
        # collapse double underscores produced by adjacent replacements
        ii = bLabel.find("__")
        while (ii >= 0):
            bLabel = bLabel[:ii] + bLabel[ii + 1:]
            ii = bLabel.find("__")
        newKey += [bLabel]
        if (bLabel not in newUniqList):
            newUniqList += [bLabel]
    print origUniqList
    print newUniqList
    # only adopt the cleaned labels if no two originals collided
    if (len(newUniqList) == len(origUniqList)):
        return (newKey)
    else:
        # NOTE(review): message copy-pasted from removeParens; this branch
        # is about special-character cleanup, not parentheses
        print " NOT removing parenthetical strings "
        return (oneKey)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getCommonPrefix(aLabel, bLabel):
    # Return the longest case-insensitive common prefix of the two labels,
    # reported in the case of the first argument.
    # FIX: the old loop tested characters before checking the bounds, so it
    # raised IndexError whenever one string was a (case-insensitive) prefix
    # of the other -- the post-loop length checks were unreachable.
    nn = 0
    limit = min(len(aLabel), len(bLabel))
    while (nn < limit and aLabel[nn].lower() == bLabel[nn].lower()):
        nn += 1
    return (aLabel[:nn])
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getCommonSuffix(aLabel, bLabel):
    # Return the longest case-insensitive common suffix of the two labels.
    # If one label is entirely a suffix of the other, that whole label is
    # returned (in its own case); no match returns "".
    # FIX: the old loop indexed with growing negative offsets before
    # checking bounds, raising IndexError when both strings matched to
    # exhaustion (e.g. equal strings) -- the length guards came too late.
    nn = 0
    limit = min(len(aLabel), len(bLabel))
    while (nn < limit and aLabel[-(nn + 1)].lower() == bLabel[-(nn + 1)].lower()):
        nn += 1
    if (nn >= len(aLabel)):
        return (aLabel)
    if (nn >= len(bLabel)):
        return (bLabel)
    if (nn == 0):
        return ("")
    return (aLabel[-nn:])
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeCommonPrefix(oneKey, labelList):
    # Try to shorten categorical labels by stripping a shared prefix
    # (> 4 chars) found between any pair of labels; both the full data
    # vector (oneKey) and the unique-label list are rewritten together.
    # Returns the possibly-updated (oneKey, labelList) pair.
    print " in removeCommonPrefix : ", labelList
    madeChange = 0
    nLabel = len(labelList)
    for ii in range(nLabel):
        for jj in range(ii + 1, nLabel):
            commonPrefix = getCommonPrefix(labelList[ii], labelList[jj])
            # if the commonPrefix is *ever* the entire string, then
            # we cannot really use this ...
            if (commonPrefix == labelList[ii]):
                continue
            if (commonPrefix == labelList[jj]):
                continue
            if (len(commonPrefix) > 4):
                print ii, jj, commonPrefix
                # rewrite the full data vector ...
                newKey = []
                for cLabel in oneKey:
                    # NOTE(review): commonPrefix keeps the case of
                    # labelList[ii], but it is compared against the
                    # lower-cased label -- mixed-case prefixes never match
                    if (cLabel.lower().startswith(commonPrefix)):
                        dLabel = cLabel[len(commonPrefix):]
                        if (len(dLabel) < 4):
                            # too short after stripping: keep the original
                            dLabel = cLabel
                        else:
                            madeChange += 1
                    else:
                        dLabel = cLabel
                    if (dLabel[0] == '_'):
                        dLabel = dLabel[1:]
                    newKey += [dLabel]
                # ... and the unique-label list the same way
                newList = []
                for cLabel in labelList:
                    if (cLabel.lower().startswith(commonPrefix)):
                        dLabel = cLabel[len(commonPrefix):]
                        if (len(dLabel) < 4):
                            dLabel = cLabel
                        else:
                            madeChange += 1
                    else:
                        dLabel = cLabel
                    if (dLabel[0] == '_'):
                        dLabel = dLabel[1:]
                    newList += [dLabel]
                # NOTE(review): these lengths are always equal by
                # construction; presumably a uniqueness check was intended
                if (len(labelList) == len(newList)):
                    labelList = newList
                    oneKey = newKey
    if (madeChange > 0):
        print " after removeCommonPrefix : ", madeChange, labelList
    return (oneKey, labelList)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeCommonSuffix(oneKey, labelList):
    # Mirror of removeCommonPrefix: strip a shared suffix (> 4 chars) found
    # between any pair of labels from both the data vector and the
    # unique-label list.  Returns the possibly-updated (oneKey, labelList).
    print " in removeCommonSuffix : ", labelList
    madeChange = 0
    nLabel = len(labelList)
    for ii in range(nLabel):
        for jj in range(ii + 1, nLabel):
            commonSuffix = getCommonSuffix(labelList[ii], labelList[jj])
            # if the commonSuffix is *ever* the entire string, then
            # we cannot really use this ...
            if (commonSuffix == labelList[ii]):
                continue
            if (commonSuffix == labelList[jj]):
                continue
            if (len(commonSuffix) > 4):
                print ii, jj, commonSuffix
                # rewrite the full data vector ...
                newKey = []
                for cLabel in oneKey:
                    # NOTE(review): commonSuffix keeps original case but is
                    # compared against the lower-cased label (see prefix twin)
                    if (cLabel.lower().endswith(commonSuffix)):
                        dLabel = cLabel[:-len(commonSuffix)]
                        if (len(dLabel) < 4):
                            # too short after stripping: keep the original
                            dLabel = cLabel
                        else:
                            madeChange += 1
                    else:
                        dLabel = cLabel
                    if (dLabel[-1] == '_'):
                        dLabel = dLabel[:-1]
                    newKey += [dLabel]
                # ... and the unique-label list the same way
                newList = []
                for cLabel in labelList:
                    if (cLabel.lower().endswith(commonSuffix)):
                        dLabel = cLabel[:-len(commonSuffix)]
                        if (len(dLabel) < 4):
                            dLabel = cLabel
                        else:
                            madeChange += 1
                    else:
                        dLabel = cLabel
                    if (dLabel[-1] == '_'):
                        dLabel = dLabel[:-1]
                    newList += [dLabel]
                # NOTE(review): always true by construction (see prefix twin)
                if (len(labelList) == len(newList)):
                    labelList = newList
                    oneKey = newKey
    # disabled debug trap from when this function was first written
    if (0):
        print " removeCommonSuffix has not yet been fully tested ... "
        print labelList
        print oneKey
        sys.exit(-1)
    if (madeChange > 0):
        print " after removeCommonSuffix : ", madeChange, labelList
    return (oneKey, labelList)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def abbrevCategStrings(allClinDict):
    # Walk every NOMINAL feature and try to shorten its label strings:
    # first removeSpecialChars(), then (for long labels) removeParens().
    # The common-prefix/suffix step is currently disabled.  Returns the
    # modified allClinDict.
    print " "
    print " in abbrevCategStrings ... "
    print " "
    keyList = allClinDict.keys()
    keyList.sort()
    for aKey in keyList:
        if (aKey.find("bcr_patient_barcode") >= 0):
            # barcodes are identifiers, never abbreviated
            print " all barcodes : "
            print allClinDict[aKey]
            print " done "
            continue
        (keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
        print aKey, keyType, nCount, nNA
        if (keyType == "NOMINAL"):
            # remove weird characters from the strings ...
            print " calling removeSpecialChars ... <%s> " % (aKey)
            allClinDict[aKey] = removeSpecialChars(allClinDict[aKey])
            # if we get nothing back, then skip ...
            if (allClinDict[aKey] == []):
                print " WARNING ... got nothing back ??? ", aKey
                continue
            # otherwise, look at cardinality, type, etc ...
            (keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
            maxLen = 0
            skipFlag = 0
            for aLabel in labelList:
                try:
                    maxLen = max(maxLen, len(aLabel))
                except:
                    # non-string label (e.g. an int slipped through)
                    print " what is up with this key ??? ", aKey, labelList
                    skipFlag = 1
            if (skipFlag):
                continue
            if (maxLen > 10):
                ## print aKey, labelList, maxLen
                # first try at making the labels a bit shorter by removing
                # parenthetical elements ...
                allClinDict[aKey] = removeParens(allClinDict[aKey])
                (keyType, nCount, nNA, nCard, labelList,
                 labelCount) = miscClin.lookAtKey(allClinDict[aKey])
                maxLen = 0
                for aLabel in labelList:
                    maxLen = max(maxLen, len(aLabel))
                ## print aKey, labelList, maxLen
                # removing this step for now (04dec12)
                if (0):
                    # next try to remove common prefixes or suffixes ...
                    if (maxLen > 10):
                        (allClinDict[aKey], labelList) = removeCommonPrefix(
                            allClinDict[aKey], labelList)
                        (allClinDict[aKey], labelList) = removeCommonSuffix(
                            allClinDict[aKey], labelList)
                        maxLen = 0
                        for aLabel in labelList:
                            maxLen = max(maxLen, len(aLabel))
                        ## print aKey, labelList, maxLen
                if (maxLen > 25):
                    print " --> strings are still rather long, but not sure what to do about this ... "
                    print labelList, maxLen
    print " "
    print " "
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkPrefix(labelList, aPrefix):
    # Decide whether (nearly) all non-NA labels share the given prefix:
    # returns 1 if at most two non-NA labels lack it, else 0.
    # aPrefix is expected to be upper-case (labels are upper-cased here).
    nLabel = len(labelList)
    nHas = 0
    for aLabel in labelList:
        bLabel = aLabel.upper()
        if (bLabel == "NA"):
            # NA labels do not count toward the total
            nLabel -= 1
            continue
        if (bLabel.startswith(aPrefix)):
            nHas += 1
    # allow up to two non-conforming labels
    if ((nHas + 2) >= nLabel): return (1)
    return (0)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def translateArabic(aLabel, usePrefix):
    # Map a label like "T1A" / "GRADE_3" to a number: the leading integer
    # plus a fractional offset for an A/B/C sub-stage (+0.2/+0.4/+0.6).
    # Un-assessable labels ("X", "B" = borderline) and labels that do not
    # carry the expected prefix map to "NA".
    # FIX: a label consisting of exactly the prefix (empty remainder)
    # crashed the old version with IndexError; it now returns "NA".
    pLen = len(usePrefix)
    bLabel = aLabel.upper()
    if (not bLabel.startswith(usePrefix)):
        return ("NA")
    bLabel = bLabel[pLen:]
    if (bLabel == ""):
        # nothing left after the prefix -- old code crashed on bLabel[0]
        return ("NA")
    if (bLabel[0] == "_"):
        bLabel = bLabel[1:]
    # find the longest leading substring that parses as an integer
    curN = None
    for iLen in range(len(bLabel), 0, -1):
        try:
            curN = int(bLabel[:iLen])
            break
        except ValueError:
            pass
    if (curN is None):
        # X means that it could not be assessed, so returning NA;
        # B means 'borderline' but returning NA ...
        if (bLabel == "X" or bLabel == "B"):
            return ("NA")
        print(" ERROR ??? <%s> <%s> --> returning NA " % (bLabel, usePrefix))
        return ("NA")
    # a trailing A/B/C sub-stage becomes a fractional offset
    rLen = len(str(curN))
    if (len(bLabel) > rLen):
        suffix = bLabel[rLen:]
        if (suffix == "A"):
            curN += 0.2
        elif (suffix == "B"):
            curN += 0.4
        elif (suffix == "C"):
            curN += 0.6
        else:
            print(" left over in translateArabic <%s> <%s> <%s> " % (suffix, aLabel, usePrefix))
    return (curN)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def translateRoman(aLabel, usePrefix):
    # Map a roman-numeral label like "STAGE_IIB" to a number: the roman
    # value plus a fractional offset for an A/B/C sub-stage
    # (+0.2/+0.4/+0.6).  "TIS" maps to "NA"; unknown labels abort.
    # Note: a bare "X" matches roman numeral X and maps to 10 (the explicit
    # "X" -> NA check below is unreachable; preserved from the original).
    # FIX: a label consisting of exactly the prefix (empty remainder)
    # crashed the old version with IndexError; it now returns "NA".
    romanList = ["VIII", "III", "VII", "IV", "IX", "II", "VI", "I", "V", "X"]
    numbrList = [8, 3, 7, 4, 9, 2, 6, 1, 5, 10]
    pLen = len(usePrefix)
    bLabel = aLabel.upper()
    if (not bLabel.startswith(usePrefix)):
        return ("NA")
    bLabel = bLabel[pLen:]
    if (bLabel == ""):
        # nothing left after the prefix -- old code crashed on bLabel[0]
        return ("NA")
    if (bLabel[0] == "_"):
        bLabel = bLabel[1:]
    # longest-match first: romanList is ordered so e.g. "III" wins over "II"
    curN = None
    curR = None
    for kk in range(len(romanList)):
        if (bLabel.startswith(romanList[kk])):
            curN = numbrList[kk]
            curR = romanList[kk]
            break
    if (curN is None):
        if (bLabel == "X" or bLabel == "TIS"):
            return ("NA")
        print("%s %s %s" % (" ERROR ??? ", bLabel, usePrefix))
        sys.exit(-1)
    # a trailing A/B/C sub-stage becomes a fractional offset
    rLen = len(curR)
    if (len(bLabel) > rLen):
        suffix = bLabel[rLen:]
        if (suffix == "A"):
            curN += 0.2
        elif (suffix == "B"):
            curN += 0.4
        elif (suffix == "C"):
            curN += 0.6
        else:
            print(" left over in translateRoman <%s> <%s> <%s> " % (suffix, aLabel, usePrefix))
    return (curN)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkRomanNumerals(labelList, usePrefix):
    # Heuristically decide whether the labels use roman numerals after the
    # given prefix: strip the prefix and one trailing sub-stage character
    # (A/B/C/X/0), then count roman (I/V/X) vs other characters,
    # ignoring underscores.  Returns 1 for roman, 0 otherwise.
    # FIX: a label reduced to "" crashed the old version at bLabel[-1];
    # empty remainders are now skipped.  The old unreachable trailing
    # "return (0)" (notR>0 and notR<=yesR always implies yesR>0) was
    # folded into the warning branch.
    skipList = ["_"]
    stripList = ["A", "B", "C", "X", "0"]
    romanList = ["I", "V", "X"]
    pLen = len(usePrefix)
    yesR = 0
    notR = 0
    for aLabel in labelList:
        bLabel = aLabel.upper()
        if (bLabel.startswith(usePrefix)):
            bLabel = bLabel[pLen:]
        if (bLabel != "" and bLabel[-1] in stripList):
            bLabel = bLabel[:-1]
        for ch in bLabel:
            if (ch in romanList):
                yesR += 1
            elif (ch not in skipList):
                notR += 1
    if (notR == 0):
        return (1)
    if (notR > yesR):
        return (0)
    # mixed counts with notR <= yesR: treat as roman but warn
    print(" ??? strange counts in checkRomanNumerals ??? ")
    return (1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# this function tries to create a numerical feature for a categorical
# feature ...
def addNumericalFeatures(allClinDict):
    # For each stage/grade/spread-like NOMINAL feature (cardinality 3..14),
    # derive a numeric companion feature "N:...:<name>_derived" by
    # translating labels via translateRoman/translateArabic.  The new
    # feature is only added when enough values are non-NA.
    print " "
    print " "
    print " in addNumericalFeatures ... "
    keyList = allClinDict.keys()
    keyList.sort()
    # CAREFUL: add on things only at the end of this list ...
    # (index 4 = "GRADE" and index 5 = "G" are referenced by position below)
    prefixList = ["T", "N", "M", "STAGE", "GRADE", "G", "PT"]
    nPrefix = len(prefixList)
    prefixBits = [0] * nPrefix
    for aKey in keyList:
        if (aKey == "bcr_patient_barcode"):
            continue
        (keyType, nCount, nNA, nCard, labelList,
         labelCount) = miscClin.lookAtKey(allClinDict[aKey])
        if (keyType == "NOMINAL"):
            if (nCard > 2 and nCard < 15):
                tmpKey = aKey.lower()
                if (tmpKey.find("stage") >= 0 or tmpKey.find("grade") >= 0 or
                        tmpKey.find("pathologic_spread") >= 0):
                    print " considering this categorical feature ... ", aKey, keyType, nCard, labelList, labelCount
                    # which of the known prefixes do (nearly) all labels share?
                    for iP in range(nPrefix):
                        aPrefix = prefixList[iP]
                        prefixBits[iP] = checkPrefix(labelList, aPrefix)
                    # if the 'GRADE' bit gets set, then unset the 'G' bit
                    if (prefixBits[4]):
                        prefixBits[5] = 0
                    # print prefixBits
                    usePrefix = ""
                    if (sum(prefixBits) == 1):
                        for iP in range(nPrefix):
                            if (prefixBits[iP]):
                                usePrefix = prefixList[iP]
                    elif (sum(prefixBits) > 1):
                        print " ERROR ??? how can it have multiple prefix bits ON ??? "
                        sys.exit(-1)
                    # print " usePrefix <%s> " % usePrefix
                    isRoman = checkRomanNumerals(labelList, usePrefix)
                    # print " isRoman = %d " % isRoman
                    # build the derived feature name, preserving any
                    # "T:CLIN:name:..." style feature-name structure
                    if (aKey[1] == ":"):
                        tokenList = aKey.split(':')
                        newLabel = "N:" + \
                            tokenList[1] + ":" + tokenList[2] + "_derived"
                        for ii in range(3, len(tokenList)):
                            newLabel += ":" + tokenList[ii]
                    else:
                        newLabel = "N:CLIN:" + aKey + "_derived"
                    if (newLabel in allClinDict.keys()):
                        print " this feature label already exists ??? ", newLabel
                        sys.exit(-1)
                    # translate every value of the feature
                    curV = allClinDict[aKey]
                    numClin = len(curV)
                    tmpV = [0] * numClin
                    for kk in range(numClin):
                        if (curV[kk] == "NA"):
                            tmpV[kk] = "NA"
                        elif (isRoman):
                            tmpV[kk] = translateRoman(curV[kk], usePrefix)
                        else:
                            tmpV[kk] = translateArabic(curV[kk], usePrefix)
                        if (0):
                            if (tmpV[kk] == 0):
                                print " why is tmpV[kk] still ZERO ??? ", kk, numClin, curV[kk], usePrefix, tmpV[kk]
                    # only keep the derived feature if enough values are non-NA
                    numNA = 0
                    notNA = 0
                    for kk in range(numClin):
                        if (tmpV[kk] == "NA"):
                            numNA += 1
                        else:
                            notNA += 1
                    if (numNA > 10 * notNA):
                        print " --> NOT adding this new feature <%s> " % newLabel, list(set(tmpV)), numNA, notNA, usePrefix, isRoman
                    else:
                        print " --> ADDING new feature !!! <%s> " % newLabel, list(set(tmpV)), numNA, notNA, usePrefix, isRoman
                        allClinDict[newLabel] = tmpV
    print " "
    print " "
    print " "
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getMappingDict(featName, auxName):
    # Read an optional tab-separated "<feature>.map" file from ../<auxName>/
    # mapping each value of a categorical feature to one or more derived
    # values.  The first line must start with the feature name followed by
    # the derived-feature names.  Returns (mapDict, newNames); any failure
    # (no file, bad name, ...) returns (partial mapDict, []).
    mapDict = {}
    if (featName[1] == ':'):
        # "X:CLIN:name:..." style feature label: use the bare name token
        if (featName[6] == ':'):
            tokenList = featName.split(':')
            tmpFeatName = tokenList[2]
        else:
            tmpFeatName = featName
    # NOTE(review): if featName has no ':' at position 1, tmpFeatName is
    # never bound; the NameError below is swallowed by the bare except,
    # which amounts to "no mapping file" -- confirm this is intended
    try:
        mapFilename = "../" + auxName + "/" + tmpFeatName + ".map"
        # py2-only builtin file(); use open() if this is ever ported
        fh = file(mapFilename)
        firstLine = 1
        for aLine in fh:
            aLine = aLine.strip()
            ## aLine = aLine.upper()
            tokenList = aLine.split('\t')
            if (firstLine):
                # header line: feature name, then the derived-feature names
                if (tokenList[0].upper() == tmpFeatName.upper()):
                    numNew = len(tokenList) - 1
                    newNames = tokenList[1:]
                    print newNames
                    firstLine = 0
                else:
                    print " ERROR ??? invalid mapping file ??? "
                    print mapFilename
                    print tokenList
                    print tmpFeatName
                    print " FAILING out of TRY "
                    # NOTE(review): SystemExit is caught by the bare except
                    # below (py2), so this actually returns (mapDict, [])
                    # instead of exiting
                    sys.exit(-1)
            else:
                # data line: original value -> list of derived values
                mapDict[str(tokenList[0])] = tokenList[1:]
        fh.close()
        print " mapping dictionary read from <%s> : " % mapFilename
        print mapDict
        print " "
        return (mapDict, newNames)
    except:
        return (mapDict, [])
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getMapping ( mapDict, curV, ii ):
    # Case-insensitive lookup of curV in the mapping dictionary, returning
    # the ii-th derived value for the matching key.  A missing value is a
    # fatal configuration error: dump the dictionary and abort.
    wanted = curV.lower()
    for aKey in mapDict.keys():
        if aKey.lower() == wanted:
            return (mapDict[aKey][ii])
    print("%s %s %s" % (" FAILED TO GET MAPPING ??? ", curV, ii))
    print(mapDict)
    sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addDerivedFeatures(allClinDict, auxName):
    """For each categorical feature (cardinality 3..24) that has a mapping
    file in ../<auxName>/, create one new feature per mapped column and add
    it to allClinDict (in place).  Features with >10x more NAs than values
    are skipped.  Returns the (mutated) dictionary.
    """
    print " "
    print " "
    print " in addDerivedFeatures ... "

    keyList = allClinDict.keys()
    keyList.sort()
    for aKey in keyList:
        if (aKey == "bcr_patient_barcode"):
            continue
        (keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
        print " considering key <%s> " % (aKey), keyType, nCard
        if (keyType == "NOMINAL"):
            if (nCard > 2 and nCard < 25):
                tmpKey = aKey.lower()
                # de-duplicate label strings while preserving order
                tmpList = []
                for aLabel in labelList:
                    tmpL = str(aLabel)
                    if (tmpL not in tmpList):
                        tmpList += [tmpL]
                labelList = tmpList
                if (1):
                    print " considering this categorical feature ... ", aKey, keyType, nCard, labelList, labelCount
                    (mapDict, newNames) = getMappingDict(aKey, auxName)

                    # if there is no mapping file, then we won't be making any
                    # new features ...
                    if (len(newNames) == 0):
                        continue

                    # sanity check ...  (disabled)
                    if (0):
                        for bKey in mapDict.keys():
                            if ( stringInList_CaseInsens ( bKey, labelList ) ):
                                print " ERROR ??? mapping does not match this feature ??? "
                                print mapDict
                                print labelList
                                sys.exit(-1)

                    # warn about feature values that have no entry in the map;
                    # NOTE(review): the "continue" only skips to the next label,
                    # it does not actually prevent derived features being added.
                    if (1):
                        for bLabel in labelList:
                            try:
                                if ( not stringInList_CaseInsens ( bLabel, mapDict.keys() ) ):
                                    print " ************************************************** "
                                    print " ERROR ??? feature value not in mapDict ??? ", bLabel
                                    print " labelList : ", labelList
                                    print " mapDict : ", mapDict
                                    print " --> WILL NOT ADD ANY DERIVED FEATURES AT THIS TIME "
                                    print " ************************************************** "
                                    continue
                                    # sys.exit(-1)
                            except:
                                doNothing = 1

                    # if there is no mapping file, then we won't be making any
                    # new features ...
                    if (len(newNames) == 0):
                        continue

                    # but if we do have one or more mappings, then we need
                    # to create those features ...
                    for ithName in range(len(newNames)):
                        aName = newNames[ithName]
                        print " looping over %d mappings ... " % len(newNames), ithName, aName

                        # the first thing we need to figure out is whether this is another
                        # categorical feature, or a numerical one ...
                        isNum = 1
                        uVec = []
                        for bKey in mapDict.keys():
                            curVal = mapDict[bKey][ithName]
                            if (curVal == "NA"):
                                continue
                            # NOTE(review): this condition looks inverted — uVec starts
                            # empty so it can never grow; likely intended to be
                            # "not stringInList_CaseInsens(...)" — confirm before relying
                            # on the len(uVec) checks below.
                            if ( stringInList_CaseInsens ( curVal, uVec ) ):
                                uVec += [curVal]
                            try:
                                fVal = float(curVal)
                            except:
                                isNum = 0
                        print " is numerical ??? ", isNum

                        if (len(uVec) == 1):
                            print " mapping to a constant ??? "
                            sys.exit(-1)
                        elif (len(uVec) == 2):
                            print " mapping is binary "
                            # if the mapping produces a binary feature, then
                            # over-ride the numerical feature
                            if (isNum):
                                print " over-riding the fact that the features LOOKS numerical ... "
                                isNum = 0

                        # an "N:" feature-name prefix claims the mapping is numeric
                        if (aName[1] == ":"):
                            if (aName[0] == "N"):
                                if (not isNum):
                                    print " ERROR ??? new feature does not look to be numerical ???? "
                                    print aName, uVec
                                    sys.exit(-1)

                        # start setting up the new feature ...
                        newLabel = aName
                        if (newLabel in allClinDict.keys()):
                            print " this feature label already exists ??? ", newLabel
                            sys.exit(-1)

                        curV = allClinDict[aKey]
                        numClin = len(curV)
                        tmpV = [0] * numClin
                        for kk in range(numClin):
                            if (curV[kk].upper() == "NA"):
                                tmpV[kk] = "NA"
                            else:
                                try:
                                    tmpV[kk] = getMapping ( mapDict, curV[kk], ithName )
                                    ## tmpV[kk] = mapDict[curV[kk]][ithName]
                                except:
                                    print " ERROR ??? failed to map ??? setting to NA but MUST FIX !!! "
                                    print kk, curV[kk], ithName
                                    print mapDict
                                    if (1):
                                        tmpV[kk] = "NA"
                                    else:
                                        sys.exit(-1)

                        # skip features dominated by NAs (>10x more NA than data)
                        numNA = 0
                        notNA = 0
                        for kk in range(numClin):
                            if (tmpV[kk] == "NA"):
                                numNA += 1
                            else:
                                notNA += 1
                        if (numNA > 10 * notNA):
                            print " --> NOT adding this new feature <%s> " % newLabel, list(set(tmpV)), numNA, notNA
                        else:
                            print " --> ADDING new feature !!! <%s> " % newLabel, list(set(tmpV)), numNA, notNA
                            allClinDict[newLabel] = tmpV

    print " "
    print " "
    print " "
    print " RETURNING from addDerivedFeatures ... "
    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# this function creates N binary indicator vectors based on a single nominal
# (categorical) variable of cardinality N -- the indicator vectors still
# contain strings ("I0" and "I1") so that we can tell that they are still not
# truly "numeric" vectors ...
def addIndicatorFeatures(allClinDict):
    """For each categorical feature with 3..26 categories, add one binary
    indicator feature per category, named "B:<TYPE>:I(<label>|<feature>)".
    Mutates and returns allClinDict.
    """
    # features whose names contain one of these substrings are treated as
    # clinical ("CLIN"); anything else defaults to "SAMP"
    magicStrings = ["patient", "person", "vital", "surviv", "race", "ethnic",
                    "prior", "gender", "age_at", "ageat", "radiation", "chemo",
                    "therapy", "treat", "performance", "days_to_", "daysto",
                    "year_of", "yearof", "surgical", "recurrence", "pregnancies"]

    keyList = allClinDict.keys()
    keyList.sort()
    for aKey in keyList:
        if (aKey == "bcr_patient_barcode"):
            continue
        (keyType, nCount, nNA, nCard, labelList,
         labelCount) = miscClin.lookAtKey(allClinDict[aKey])
        if (keyType == "NOMINAL"):
            if (nCard > 2 and nCard < 27):
                print " "
                print " "
                print " in addIndicatorFeatures ... ", aKey, keyType, nCard, labelList, labelCount
                for aLabel in labelList:
                    # sometimes even though we have a "categorical" feature, some of the
                    # categories appear to be integers or floating point values
                    # ... normalize them all to strings
                    if (type(aLabel) is float):
                        print " we seem to have a floating point value ??? ", aLabel
                        iVal = int(aLabel + 0.001)
                        xVal = float(aLabel) - iVal
                        print iVal, xVal
                        if (abs(xVal) < 0.001):
                            # close enough to an integer: render without decimals
                            aLabel = "%d" % iVal
                        else:
                            aLabel = str(aLabel)
                    elif (type(aLabel) is int):
                        iVal = int(aLabel)
                        aLabel = "%d" % iVal

                    print " "
                    ## print aKey, aLabel

                    # build the new indicator-feature name
                    try:
                        # 012345678901234567890123456789...
                        # C:CLIN:<label>
                        # C:CLIN:<label>:a:b:c:d:e
                        if (aKey[1] == ":" and aKey[6] == ":"):
                            # if this feature name already has a prefix (eg
                            # "C:CLIN:")
                            featType = aKey[2:7]
                            i1 = aKey[7:].find(':')
                            if (i1 < 0):
                                # if there are no further ':'
                                firstName = aKey[7:]
                                secondName = ":::::"
                            else:
                                # if there are ...
                                firstName = aKey[7:7 + i1]
                                secondName = aKey[7 + i1 + 1:]
                            print " (a) got to here ... ", featType, aLabel, firstName, secondName
                            newLabel = "B:" + featType + \
                                "I(" + aLabel + "|" + firstName + ")" + \
                                secondName
                            print " (b) got to here ... ", newLabel
                            if (newLabel.find("|):") > 0):
                                # "|):" means the feature name came out empty
                                print " (a) BAILING !!!! ", newLabel
                                sys.exit(-1)
                        else:
                            # here we really need to have some way to guess whether this
                            # should be a CLIN or a SAMP feature ...
                            typeString = "UNK"
                            for aString in magicStrings:
                                if (aKey.find(aString) >= 0):
                                    typeString = "CLIN"
                            if (typeString == "UNK"):
                                print " defaulting to type SAMP for this feature : <%s> " % (aKey)
                                typeString = "SAMP"
                            print " (c) got to here ... ", typeString, aLabel
                            newLabel = "B:" + typeString + ":" + \
                                "I(" + aLabel + "|" + aKey + ")"
                    except:
                        print " (b) BAILING !!! "
                        print " ERROR in addIndicatorFeatures ??? ", aLabel, aKey
                        sys.exit(-1)

                    # make sure there are no blanks ...
                    newLabel = tsvIO.replaceBlanks(newLabel, "_")

                    if (newLabel in allClinDict.keys()):
                        print " this indicator variable already exists so I will not make a new one ... ", newLabel
                        continue

                    # build the indicator vector: 1 where the value matches
                    # aLabel (case-insensitive), "NA" where NA, else 0
                    curV = allClinDict[aKey]
                    numClin = len(curV)
                    tmpV = [0] * numClin
                    print " ... looping over %d values ... default new value is zero " % (numClin)
                    for kk in range(numClin):
                        print kk, allClinDict[aKey][kk], aLabel, type(allClinDict[aKey][kk]), type(aLabel)
                        if (allClinDict[aKey][kk] == "NA"):
                            tmpV[kk] = "NA"
                        elif (str(allClinDict[aKey][kk]).lower() == str(aLabel).lower()):
                            tmpV[kk] = 1

                    print " adding new feature : ", newLabel
                    allClinDict[newLabel] = tmpV

    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addPairwiseIndicatorFeatures(allClinDict):
    """For each categorical feature with 3..9 categories, add one binary
    feature per unordered pair of categories: 1 for the first label, 0 for
    the second, "NA" for everything else.  Mutates and returns allClinDict.
    """
    # features whose names contain one of these substrings are treated as
    # clinical ("CLIN"); anything else defaults to "SAMP"
    magicStrings = ["patient", "person", "vital", "surviv", "race", "ethnic",
                    "prior", "gender", "age_at", "ageat", "radiation", "chemo",
                    "therapy", "treat", "performance", "days_to_", "daysto",
                    "year_of", "yearof", "surgical", "recurrence", "pregnancies"]

    print " "
    print " "
    keyList = allClinDict.keys()
    keyList.sort()
    for aKey in keyList:
        if (aKey == "bcr_patient_barcode"):
            continue
        (keyType, nCount, nNA, nCard, labelList,
         labelCount) = miscClin.lookAtKey(allClinDict[aKey])
        if (keyType == "NOMINAL"):
            # we do this only for categorical features with 3-9 categories
            if (nCard > 2 and nCard < 10):
                print " "
                print " in addPairwiseIndicatorFeatures ... ", aKey, keyType, nCard, labelList, labelCount
                for ak in range(len(labelList)):
                    aLabel = labelList[ak]
                    for bk in range(ak + 1, len(labelList)):
                        bLabel = labelList[bk]
                        print " aLabel=<%s> bLabel=<%s> " % (aLabel, bLabel)
                        try:
                            if (aKey[1] == ":" and aKey[6] == ":"):
                                # feature already carries an "X:TYPE:" prefix;
                                # splice the pair indicator into the name
                                i1 = aKey[7:].find(':')
                                if (i1 < 0):
                                    i1 = len(aKey)
                                    i2 = len(aKey)
                                else:
                                    i1 = i1 + 7
                                    # NOTE(review): i2 is an offset relative to
                                    # (i1+1) but is used below as an absolute
                                    # index into aKey — looks wrong; confirm.
                                    i2 = aKey[(i1 + 1):].find(':')
                                    if (i2 < 0):
                                        i2 = len(aKey)
                                if (i2 > 0 and i2 < len(aKey)):
                                    newLabel = "B:" + \
                                        aKey[2:7] + "I(" + aLabel + "," + bLabel + "|" + aKey[7:i1] + ")" + aKey[i2:]
                                else:
                                    newLabel = "B:" + \
                                        aKey[2:7] + "I(" + aLabel + "," + bLabel + "|" + aKey[7:i1] + ")" + "::::"
                            else:
                                # here we really need to have some way to guess whether this
                                # should be a CLIN or a SAMP feature ...
                                typeString = "UNK"
                                for aString in magicStrings:
                                    if (aKey.find(aString) >= 0):
                                        typeString = "CLIN"
                                if (typeString == "UNK"):
                                    print " defaulting to type SAMP for this feature : <%s> " % (aKey)
                                    typeString = "SAMP"
                                newLabel = "B:" + typeString + ":" + \
                                    "I(" + aLabel + "," + bLabel + "|" + aKey + ")" + \
                                    "::::"
                        except:
                            print " NOT continuing in addPairwiseIndicatorFeatures !!! ", aLabel, bLabel, aKey
                            continue

                        # make sure there are no blanks ...
                        newLabel = tsvIO.replaceBlanks(newLabel, "_")
                        print " --> new label: <%s> " % newLabel

                        if (newLabel in allClinDict.keys()):
                            print " this indicator variable already exists so I will not make a new one ... ", newLabel
                            continue

                        # 1 for aLabel, 0 for bLabel, "NA" otherwise
                        curV = allClinDict[aKey]
                        numClin = len(curV)
                        tmpV = ["NA"] * numClin
                        for kk in range(numClin):
                            if (allClinDict[aKey][kk].lower() == aLabel.lower()):
                                tmpV[kk] = 1
                            elif (allClinDict[aKey][kk].lower() == bLabel.lower()):
                                tmpV[kk] = 0

                        print " adding new feature : ", newLabel
                        # print tmpV
                        allClinDict[newLabel] = tmpV

    return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
if __name__ == "__main__":

    # Pipeline driver: read a clinical TSV, run a series of clean-up /
    # feature-derivation passes (lettered checkpoints A..Q verify the
    # barcode key after each step), and write the cleaned TSV back out.
    if ( (len(sys.argv)!=4) and (len(sys.argv)!=5) ):
        print " Usage : %s <input TSV> <output TSV> <public/private> [auxName] " % sys.argv[0]
        print " ERROR -- bad command line arguments "
        sys.exit(-1)

    tsvNameIn = sys.argv[1]
    tsvNameOut = sys.argv[2]
    ppString = sys.argv[3]
    if ( len(sys.argv) == 5 ):
        auxName = sys.argv[4]
    else:
        auxName = "aux"

    # test out readTSV ...
    ## tsvName = "coad_read_clinical.27jan.tsv"
    print " "
    print " ****************************************************************** "
    print " reading input file <%s> " % tsvNameIn
    allClinDict = tsvIO.readTSV(tsvNameIn)
    print " A "
    checkBarcodes(allClinDict)

    # take a look ...
    (naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)

    if (1):
        # remove constant-value keys ...
        allClinDict = miscClin.removeConstantKeys(allClinDict)
        print " B "
        checkBarcodes(allClinDict)

    if (1):
        # remove uninformative keys ...
        allClinDict = miscClin.removeUninformativeKeys(allClinDict)
        print " C "
        checkBarcodes(allClinDict)

    # check the tumor stage based on the other T/N/M definitions, update if possible
    # (and if the original setting was "NA")
    if (1):
        allClinDict = checkTumorStage(allClinDict)
        print " D "
        checkBarcodes(allClinDict)

    # new as of 16aug13 ... vital_status strings are inconsistent between
    # 'living' or 'alive' or 'deceased' or 'dead' ...
    # --> standard should be "Alive" or "Dead"
    if (1):
        allClinDict = checkVitalStatus(allClinDict)
        print " E "
        checkBarcodes(allClinDict)

    # new as of 13sep13 ... makig 'age' a continuous feature that
    # exactly matches the days_to_birth ...
    if (1):
        allClinDict = updateAge(allClinDict)
        print " F "
        checkBarcodes(allClinDict)

    # remap some categorical features to numerical features ...
    # oh, this shouldn't still be here, should it ??? 15aug2014
    if (0):
        allClinDict = remapCategoricalFeatures(allClinDict)
        print " G "
        checkBarcodes(allClinDict)

    # add the lymphnodes_positive fraction ...
    allClinDict = computeLymphnodesFraction(allClinDict)
    print " H "
    checkBarcodes(allClinDict)

    # fill in some missing information that we have collected from elsewhere
    # ...
    if (0):
        allClinDict = addMissingInfo(allClinDict)
        print " I "
        checkBarcodes(allClinDict)

    # NEW: look at some of the "days_to_" fields and do some fix-ups ...
    if (1):
        allClinDict = addFollowupInfo(allClinDict)
        print " J "
        checkBarcodes(allClinDict)

    # new as of 04dec13 ... checking that vital_status and various days_to_???
    # features are consistent ...
    if (1):
        allClinDict = checkFollowupInfo(allClinDict)
        print " K "
        checkBarcodes(allClinDict)

    # take a look at the updated dictionary ...
    (naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)

    if (1):
        # remove constant-value keys ...
        allClinDict = miscClin.removeConstantKeys(allClinDict)
        print " L "
        checkBarcodes(allClinDict)

    if (0):
        # removing this ... 02Feb2012 SMR
        # filter out keys with too little information ...
        # or maybe leave nearly everything in ;-)
        categorical_naFracThresh = 0.90
        numerical_naFracThresh = 0.90
        classSize_minFracThresh = 0.
        classSize_maxFracThresh = 0.995
        allClinDict = miscClin.filterClinDict(allClinDict,
                                              categorical_naFracThresh,
                                              numerical_naFracThresh,
                                              classSize_minFracThresh,
                                              classSize_maxFracThresh)
        print " M "
        checkBarcodes(allClinDict)

    # try to abbreviate clinical feature strings
    allClinDict = abbrevCategStrings(allClinDict)
    print " N "
    checkBarcodes(allClinDict)

    if (0):
        # automatically generate indicator features for remaining categorical
        # features
        allClinDict = addIndicatorFeatures(allClinDict)
        print " O "
        checkBarcodes(allClinDict)

    # new 10Feb2012 : add pairwise indicator features
    allClinDict = addPairwiseIndicatorFeatures(allClinDict)
    print " P "
    checkBarcodes(allClinDict)

    # new 09Jan2013 : try to add numeric features that map the non-binary categorical features ...
    # as of 06Aug2014, this is only done for "private" runs
    if ( ppString == "private" ):
        allClinDict = addDerivedFeatures(allClinDict, auxName)
        print " Q "
        checkBarcodes(allClinDict)

    # look at pairwise MI ...  (disabled)
    if (0):
        print " "
        print " ****************************************** "
        print " * looking at pairwise Mutual Information * "
        print " ****************************************** "
        print " "
        miscClin.pairwiseMI(allClinDict, "miNetwork.A.13feb12")

    # look at the data again and re-pick the 'best' key order ...
    (naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)
    bestKeyOrder = miscClin.getBestKeyOrder(allClinDict, naCounts)

    doWriteTSV = 1
    if (doWriteTSV):
        outName = tsvNameOut
        tsvIO.writeTSV_clinical(allClinDict, bestKeyOrder, outName)
        if (1):
            # also write a variant with numeric features flipped/transposed
            outName = tsvNameOut[:-4] + ".flipNumeric.tsv"
            tsvIO.writeTSV_clinicalFlipNumeric(
                allClinDict, bestKeyOrder, outName)

    print " "
    print " "
    print " FINISHED "
    print " "
    print " "
| cancerregulome/gidget | commands/feature_matrix_construction/main/cleanClinTSV.py | Python | mit | 98,495 |
from __future__ import (absolute_import, print_function, division)
import itertools
import time
import hyperframe.frame
from hpack.hpack import Encoder, Decoder
from netlib import utils
from netlib.http import url
import netlib.http.headers
import netlib.http.response
import netlib.http.request
from netlib.http.http2 import framereader
class TCPHandler(object):
    """Minimal stand-in for a TCP connection handler: simply holds a read
    file-like object and an optional write file-like object."""

    def __init__(self, rfile, wfile=None):
        self.rfile, self.wfile = rfile, wfile
class HTTP2Protocol(object):
    """Minimal HTTP/2 implementation on top of hyperframe (framing) and
    hpack (header compression).

    Reads/writes frames through a TCPHandler's rfile/wfile pair and can act
    as either client or server (is_server).  Requests and responses are
    netlib Request/Response objects with a ``stream_id`` attribute attached.
    """

    # RFC 7540 section 7 error codes, bidirectional name<->value map
    ERROR_CODES = utils.BiDi(
        NO_ERROR=0x0,
        PROTOCOL_ERROR=0x1,
        INTERNAL_ERROR=0x2,
        FLOW_CONTROL_ERROR=0x3,
        SETTINGS_TIMEOUT=0x4,
        STREAM_CLOSED=0x5,
        FRAME_SIZE_ERROR=0x6,
        REFUSED_STREAM=0x7,
        CANCEL=0x8,
        COMPRESSION_ERROR=0x9,
        CONNECT_ERROR=0xa,
        ENHANCE_YOUR_CALM=0xb,
        INADEQUATE_SECURITY=0xc,
        HTTP_1_1_REQUIRED=0xd
    )

    # the fixed client connection preface (RFC 7540 section 3.5)
    CLIENT_CONNECTION_PREFACE = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'

    # protocol-default values for the SETTINGS parameters
    HTTP2_DEFAULT_SETTINGS = {
        hyperframe.frame.SettingsFrame.HEADER_TABLE_SIZE: 4096,
        hyperframe.frame.SettingsFrame.ENABLE_PUSH: 1,
        hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: None,
        hyperframe.frame.SettingsFrame.INITIAL_WINDOW_SIZE: 2 ** 16 - 1,
        hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE: 2 ** 14,
        hyperframe.frame.SettingsFrame.MAX_HEADER_LIST_SIZE: None,
    }

    def __init__(
        self,
        tcp_handler=None,
        rfile=None,
        wfile=None,
        is_server=False,
        dump_frames=False,
        encoder=None,
        decoder=None,
        unhandled_frame_cb=None,
    ):
        # either a ready-made tcp_handler, or raw rfile/wfile to wrap
        self.tcp_handler = tcp_handler or TCPHandler(rfile, wfile)
        self.is_server = is_server
        self.dump_frames = dump_frames
        self.encoder = encoder or Encoder()
        self.decoder = decoder or Decoder()
        self.unhandled_frame_cb = unhandled_frame_cb

        # per-connection copy: mutated as SETTINGS frames arrive
        self.http2_settings = self.HTTP2_DEFAULT_SETTINGS.copy()
        self.current_stream_id = None
        self.connection_preface_performed = False

    def read_request(
        self,
        __rfile,  # unused; kept for interface compatibility with HTTP/1 readers
        include_body=True,
        body_size_limit=None,
        allow_empty=False,
    ):
        """Read a single request transmission and return a netlib Request
        (with ``stream_id`` set)."""
        if body_size_limit is not None:
            raise NotImplementedError()

        self.perform_connection_preface()

        timestamp_start = time.time()
        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
            self.tcp_handler.rfile.reset_timestamps()

        stream_id, headers, body = self._receive_transmission(
            include_body=include_body,
        )

        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp

        timestamp_end = time.time()

        # pull the HTTP/2 pseudo-headers out; :authority is intentionally
        # left in the header set, the other pseudo-headers are removed
        authority = headers.get(':authority', b'')
        method = headers.get(':method', 'GET')
        scheme = headers.get(':scheme', 'https')
        path = headers.get(':path', '/')

        headers.clear(":method")
        headers.clear(":scheme")
        headers.clear(":path")

        host = None
        port = None

        # classify the request-target form (RFC 7230 section 5.3)
        if path == '*' or path.startswith("/"):
            first_line_format = "relative"
        elif method == 'CONNECT':
            first_line_format = "authority"
            if ":" in authority:
                host, port = authority.split(":", 1)
            else:
                host = authority
        else:
            first_line_format = "absolute"
            # FIXME: verify if path or :host contains what we need
            scheme, host, port, _ = url.parse(path)
            scheme = scheme.decode('ascii')
            host = host.decode('ascii')

        if host is None:
            host = 'localhost'
        if port is None:
            port = 80 if scheme == 'http' else 443
        port = int(port)

        request = netlib.http.request.Request(
            first_line_format,
            method.encode('ascii'),
            scheme.encode('ascii'),
            host.encode('ascii'),
            port,
            path.encode('ascii'),
            b"HTTP/2.0",
            headers,
            body,
            timestamp_start,
            timestamp_end,
        )
        request.stream_id = stream_id

        return request

    def read_response(
        self,
        __rfile,  # unused; kept for interface compatibility with HTTP/1 readers
        request_method=b'',
        body_size_limit=None,
        include_body=True,
        stream_id=None,
    ):
        """Read a single response transmission and return a netlib Response
        (with ``stream_id`` set)."""
        if body_size_limit is not None:
            raise NotImplementedError()

        self.perform_connection_preface()

        timestamp_start = time.time()
        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
            self.tcp_handler.rfile.reset_timestamps()

        stream_id, headers, body = self._receive_transmission(
            stream_id=stream_id,
            include_body=include_body,
        )

        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp

        if include_body:
            timestamp_end = time.time()
        else:
            timestamp_end = None

        response = netlib.http.response.Response(
            b"HTTP/2.0",
            int(headers.get(':status', 502)),
            b'',
            headers,
            body,
            timestamp_start=timestamp_start,
            timestamp_end=timestamp_end,
        )
        response.stream_id = stream_id

        return response

    def assemble(self, message):
        """Serialize a Request or Response into a list of raw frame bytes."""
        if isinstance(message, netlib.http.request.Request):
            return self.assemble_request(message)
        elif isinstance(message, netlib.http.response.Response):
            return self.assemble_response(message)
        else:
            raise ValueError("HTTP message not supported.")

    def assemble_request(self, request):
        """Serialize a Request: inject pseudo-headers, then emit header
        frames followed by data frames."""
        assert isinstance(request, netlib.http.request.Request)

        # derive :authority from SNI (preferred) or the connected address
        authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host
        if self.tcp_handler.address.port != 443:
            authority += ":%d" % self.tcp_handler.address.port

        headers = request.headers.copy()

        # pseudo-headers must come first (inserted in reverse order)
        if ':authority' not in headers:
            headers.insert(0, b':authority', authority.encode('ascii'))
        headers.insert(0, b':scheme', request.scheme.encode('ascii'))
        headers.insert(0, b':path', request.path.encode('ascii'))
        headers.insert(0, b':method', request.method.encode('ascii'))

        if hasattr(request, 'stream_id'):
            stream_id = request.stream_id
        else:
            stream_id = self._next_stream_id()

        return list(itertools.chain(
            self._create_headers(headers, stream_id, end_stream=(request.body is None or len(request.body) == 0)),
            self._create_body(request.body, stream_id)))

    def assemble_response(self, response):
        """Serialize a Response: inject :status, then emit header frames
        followed by data frames."""
        assert isinstance(response, netlib.http.response.Response)

        headers = response.headers.copy()

        if ':status' not in headers:
            headers.insert(0, b':status', str(response.status_code).encode('ascii'))

        if hasattr(response, 'stream_id'):
            stream_id = response.stream_id
        else:
            stream_id = self._next_stream_id()

        return list(itertools.chain(
            self._create_headers(headers, stream_id, end_stream=(response.body is None or len(response.body) == 0)),
            self._create_body(response.body, stream_id),
        ))

    def perform_connection_preface(self, force=False):
        """Run the connection preface once (client or server side)."""
        if force or not self.connection_preface_performed:
            if self.is_server:
                self.perform_server_connection_preface(force)
            else:
                self.perform_client_connection_preface(force)

    def perform_server_connection_preface(self, force=False):
        """Server side: consume the client magic, send our SETTINGS, then
        read the client's SETTINGS."""
        if force or not self.connection_preface_performed:
            self.connection_preface_performed = True

            magic_length = len(self.CLIENT_CONNECTION_PREFACE)
            magic = self.tcp_handler.rfile.safe_read(magic_length)
            assert magic == self.CLIENT_CONNECTION_PREFACE

            frm = hyperframe.frame.SettingsFrame(settings={
                hyperframe.frame.SettingsFrame.ENABLE_PUSH: 0,
                hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1,
            })
            self.send_frame(frm, hide=True)
            self._receive_settings(hide=True)

    def perform_client_connection_preface(self, force=False):
        """Client side: send the magic + an empty SETTINGS frame, then read
        the server's SETTINGS and its ACK of ours."""
        if force or not self.connection_preface_performed:
            self.connection_preface_performed = True

            self.tcp_handler.wfile.write(self.CLIENT_CONNECTION_PREFACE)

            self.send_frame(hyperframe.frame.SettingsFrame(), hide=True)
            self._receive_settings(hide=True)  # server announces own settings
            self._receive_settings(hide=True)  # server acks my settings

    def send_frame(self, frm, hide=False):
        """Serialize and write a single frame; optionally suppress dumping."""
        raw_bytes = frm.serialize()
        self.tcp_handler.wfile.write(raw_bytes)
        self.tcp_handler.wfile.flush()
        if not hide and self.dump_frames:  # pragma no cover
            print(frm.human_readable(">>"))

    def read_frame(self, hide=False):
        """Read the next frame, transparently answering PINGs, applying
        incoming SETTINGS, and topping up the flow-control window for DATA."""
        while True:
            frm = framereader.http2_read_frame(self.tcp_handler.rfile)
            if not hide and self.dump_frames:  # pragma no cover
                print(frm.human_readable("<<"))

            if isinstance(frm, hyperframe.frame.PingFrame):
                # echo PINGs back with ACK and keep reading
                raw_bytes = hyperframe.frame.PingFrame(flags=['ACK'], payload=frm.payload).serialize()
                self.tcp_handler.wfile.write(raw_bytes)
                self.tcp_handler.wfile.flush()
                continue
            if isinstance(frm, hyperframe.frame.SettingsFrame) and 'ACK' not in frm.flags:
                self._apply_settings(frm.settings, hide)
            if isinstance(frm, hyperframe.frame.DataFrame) and frm.flow_controlled_length > 0:
                self._update_flow_control_window(frm.stream_id, frm.flow_controlled_length)
            return frm

    def check_alpn(self):
        """Assert that ALPN negotiated 'h2'; raise otherwise."""
        alp = self.tcp_handler.get_alpn_proto_negotiated()
        if alp != b'h2':
            raise NotImplementedError(
                "HTTP2Protocol can not handle unknown ALP: %s" % alp)
        return True

    def _handle_unexpected_frame(self, frm):
        """Forward frames we don't process to the callback (SETTINGS frames
        are already handled in read_frame and are silently dropped here)."""
        if isinstance(frm, hyperframe.frame.SettingsFrame):
            return
        if self.unhandled_frame_cb:
            self.unhandled_frame_cb(frm)

    def _receive_settings(self, hide=False):
        """Read frames until a SETTINGS frame arrives."""
        while True:
            frm = self.read_frame(hide)
            if isinstance(frm, hyperframe.frame.SettingsFrame):
                break
            else:
                self._handle_unexpected_frame(frm)

    def _next_stream_id(self):
        """Allocate the next stream id (even for servers, odd for clients,
        per RFC 7540 section 5.1.1)."""
        if self.current_stream_id is None:
            if self.is_server:
                # servers must use even stream ids
                self.current_stream_id = 2
            else:
                # clients must use odd stream ids
                self.current_stream_id = 1
        else:
            self.current_stream_id += 2
        return self.current_stream_id

    def _apply_settings(self, settings, hide=False):
        """Store the peer's SETTINGS values and acknowledge them."""
        for setting, value in settings.items():
            old_value = self.http2_settings[setting]
            if not old_value:
                old_value = '-'
            self.http2_settings[setting] = value

        frm = hyperframe.frame.SettingsFrame(flags=['ACK'])
        self.send_frame(frm, hide)

    def _update_flow_control_window(self, stream_id, increment):
        """Grant the peer `increment` more bytes on both the connection
        (stream 0) and the given stream."""
        frm = hyperframe.frame.WindowUpdateFrame(stream_id=0, window_increment=increment)
        self.send_frame(frm)
        frm = hyperframe.frame.WindowUpdateFrame(stream_id=stream_id, window_increment=increment)
        self.send_frame(frm)

    def _create_headers(self, headers, stream_id, end_stream=True):
        """HPACK-encode `headers` and split into a HEADERS frame plus
        CONTINUATION frames as needed; returns serialized frame bytes."""
        def frame_cls(chunks):
            # first chunk (offset 0) -> HEADERS, the rest -> CONTINUATION
            for i in chunks:
                if i == 0:
                    yield hyperframe.frame.HeadersFrame, i
                else:
                    yield hyperframe.frame.ContinuationFrame, i

        header_block_fragment = self.encoder.encode(headers.fields)

        chunk_size = self.http2_settings[hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE]
        chunks = range(0, len(header_block_fragment), chunk_size)
        frms = [frm_cls(
            flags=[],
            stream_id=stream_id,
            data=header_block_fragment[i:i + chunk_size]) for frm_cls, i in frame_cls(chunks)]

        # END_HEADERS on the last frame; END_STREAM goes on the first
        frms[-1].flags.add('END_HEADERS')
        if end_stream:
            frms[0].flags.add('END_STREAM')

        if self.dump_frames:  # pragma no cover
            for frm in frms:
                print(frm.human_readable(">>"))

        return [frm.serialize() for frm in frms]

    def _create_body(self, body, stream_id):
        """Split `body` into DATA frames sized to MAX_FRAME_SIZE; returns
        serialized frame bytes (or b'' for an empty body)."""
        if body is None or len(body) == 0:
            return b''

        chunk_size = self.http2_settings[hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE]
        chunks = range(0, len(body), chunk_size)
        frms = [hyperframe.frame.DataFrame(
            flags=[],
            stream_id=stream_id,
            data=body[i:i + chunk_size]) for i in chunks]
        frms[-1].flags.add('END_STREAM')

        if self.dump_frames:  # pragma no cover
            for frm in frms:
                print(frm.human_readable(">>"))

        return [frm.serialize() for frm in frms]

    def _receive_transmission(self, stream_id=None, include_body=True):
        """Collect one full transmission: header frames until END_HEADERS,
        then DATA frames until END_STREAM.  Returns (stream_id, Headers,
        body bytes)."""
        if not include_body:
            raise NotImplementedError()

        body_expected = True

        header_blocks = b''
        body = b''

        # phase 1: accumulate the (possibly continued) header block
        while True:
            frm = self.read_frame()
            if (
                (isinstance(frm, hyperframe.frame.HeadersFrame) or isinstance(frm, hyperframe.frame.ContinuationFrame)) and
                (stream_id is None or frm.stream_id == stream_id)
            ):
                stream_id = frm.stream_id
                header_blocks += frm.data
                if 'END_STREAM' in frm.flags:
                    body_expected = False
                if 'END_HEADERS' in frm.flags:
                    break
            else:
                self._handle_unexpected_frame(frm)

        # phase 2: accumulate the body, unless END_STREAM already arrived
        while body_expected:
            frm = self.read_frame()
            if isinstance(frm, hyperframe.frame.DataFrame) and frm.stream_id == stream_id:
                body += frm.data
                if 'END_STREAM' in frm.flags:
                    break
            else:
                self._handle_unexpected_frame(frm)

        headers = netlib.http.headers.Headers(
            (k.encode('ascii'), v.encode('ascii')) for k, v in self.decoder.decode(header_blocks)
        )

        return stream_id, headers, body
| tdickers/mitmproxy | netlib/http/http2/connections.py | Python | mit | 14,926 |
from discord import Embed
from bot import Command, categories
class GuildList(Command):
    """Bot-owner-only command that DMs the caller the list of guilds the
    bot is currently in, paginated into <=2000-character embeds."""

    def __init__(self, bot):
        super().__init__(bot)
        self.name = 'guildlist'
        self.aliases = ['guilds']
        self.category = categories.SETTINGS
        self.bot_owner_only = True

    async def handle(self, cmd):
        # when invoked from a guild channel, answer there that the list
        # will be sent privately
        if not cmd.is_pm:
            await cmd.answer('$[guildlist-answer]')

        if len(self.bot.guilds) == 0:
            await cmd.answer('$[guildlist-none]', to_author=True)
            return

        await cmd.answer('$[guildlist-msg]', locales={'amount': len(self.bot.guilds)}, to_author=True)

        resp_list = ''
        for guild in self.bot.guilds:
            item = '- {} ({})'.format(guild.name, guild.id)
            if len('{}\n{}'.format(resp_list, item)) > 2000:
                # page full: flush it and start the next page with the
                # current item (the original code discarded the item here,
                # silently dropping guilds from the listing)
                await cmd.answer(Embed(description=resp_list), withname=False, to_author=True)
                resp_list = item
            else:
                resp_list = '{}\n{}'.format(resp_list, item)

        # Send remaining list
        if resp_list != '':
            await cmd.answer(Embed(description=resp_list), withname=False, to_author=True)
| jvicu2001/alexis-bot | bot/modules/guildlist.py | Python | mit | 1,174 |
# -*- coding: utf-8 -*-
import cgi
import logging
import os
import uuid
from collections import namedtuple
import requests
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.runtime.client_request import ClientRequest
from office365.runtime.utilities.http_method import HttpMethod
from office365.runtime.utilities.request_options import RequestOptions
from office365.sharepoint.client_context import ClientContext
import time
class ConnectionFailed(Exception):
    """Raised when authentication against the SharePoint/OneDrive endpoint
    fails or does not yield the expected cookies."""
class Client(object):
    def __init__(self, host, port=0, auth=None, username=None, password=None, protocol='http', path=None):
        """Build the SharePoint site base URL from host/port/path and log in.

        `path` is expected to look like "/personal/<site>/<folder>/...";
        only its first two components (the site) go into baseurl, the full
        value is kept in self.path for later URL building.
        NOTE(review): `auth` is accepted but never used — confirm intent.
        """
        if not port:
            # default port from the protocol when none given
            port = 443 if protocol == 'https' else 80
        self.path = path or ''
        if not self.path.endswith('/'):
            self.path = '%s/' % self.path
        self.username = username
        self.password = password
        # oneDrive: need to split /site/ and path
        # in our config site is /personal/UF_OCX_msf_geneva_msf_org/
        # path is /Documents/Tests/
        self.baseurl = '{0}://{1}:{2}{3}/'.format(protocol, host, port, '/'.join(self.path.split('/')[0:3]) )
        self.login()
    def login(self):
        """Authenticate against the site and prepare self.request.

        Acquires a user token via office365's AuthenticationContext and
        stores a ClientRequest/ClientContext pair on self.  Raises
        ConnectionFailed when the token cannot be acquired or when the
        expected FedAuth/rtFa cookies are missing.
        """
        ctx_auth = AuthenticationContext(self.baseurl)
        # cgi.escape on the password mirrors what the SAML request expects
        if ctx_auth.acquire_token_for_user(self.username, cgi.escape(self.password)):
            self.request = ClientRequest(ctx_auth)
            self.request.context = ClientContext(self.baseurl, ctx_auth)
            if not ctx_auth.provider.FedAuth or not ctx_auth.provider.rtFa:
                raise ConnectionFailed(ctx_auth.get_last_error())
        else:
            raise ConnectionFailed(ctx_auth.get_last_error())
def change_oc(self, baseurl, dir):
if dir == 'OCA':
dir = '/personal/UF_OCA_msf_geneva_msf_org/'
elif dir == 'OCB':
dir = '/personal/UF_OCB_msf_geneva_msf_org/'
elif dir == 'OCG':
dir = '/personal/UF_OCG_msf_geneva_msf_org/'
elif dir == 'OCP':
dir = '/personal/UF_OCP_msf_geneva_msf_org/'
self.baseurl = baseurl + dir
    def delete(self, remote_path):
        """Delete the file at self.path + remote_path via the SharePoint
        REST API.  Returns True on success, raises Exception otherwise."""
        webUri = '%s%s' % (self.path, remote_path)
        request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')" % (self.baseurl, webUri)
        options = RequestOptions(request_url)
        options.method = HttpMethod.Delete
        # SharePoint expects DELETE tunnelled through POST via this header
        options.set_header("X-HTTP-Method", "DELETE")
        self.request.context.authenticate_request(options)
        self.request.context.ensure_form_digest(options)
        result = requests.post(url=request_url, data="", headers=options.headers, auth=options.auth)
        if result.status_code not in (200, 201):
            raise Exception(result.content)
        return True
    def list(self, remote_path):
        """List the files in the folder `remote_path` (server-relative).

        Returns the list of file entries from the OData 'verbose' JSON
        payload (result['d']['results']).
        """
        #webUri = '%s%s' % (self.path, remote_path)
        #request_url = "%s_api/web/getfilebyserverrelativeurl('%s')/files" % (self.baseurl, webUri)
        request_url = "%s_api/web/getfolderbyserverrelativeurl('%s')/files" % (self.baseurl, remote_path)
        options = RequestOptions(request_url)
        options.method = HttpMethod.Get
        options.set_header("X-HTTP-Method", "GET")
        options.set_header('accept', 'application/json;odata=verbose')
        self.request.context.authenticate_request(options)
        self.request.context.ensure_form_digest(options)
        result = requests.get(url=request_url, headers=options.headers, auth=options.auth)
        #result = requests.post(url=request_url, data="", headers=options.headers, auth=options.auth)
        # NOTE(review): no HTTP status check before .json() — a non-JSON
        # error response will raise here; the old check is left disabled below.
        result = result.json()
        '''if result.status_code not in (200, 201):
            print 'Error code: '
            print result.status_code
            raise Exception(result.content)
        '''
        #return True
        files=[]
        for i in range(len(result['d']['results'])):
            item = result['d']['results'][i]
            files.append(item)
        return files
    def download(self, remote_path, filename):
        """Stream the file at server-relative `remote_path` into the local
        file `filename`.

        Retries up to 5 times on request errors, re-authenticating and
        waiting 3s between attempts; re-raises after the last attempt.
        Returns the local filename on success.
        """
        request_url = "%s_api/web/getfilebyserverrelativeurl('%s')/$value" % (self.baseurl, remote_path)
        options = RequestOptions(request_url)
        options.method = HttpMethod.Get
        options.set_header("X-HTTP-Method", "GET")
        options.set_header('accept', 'application/json;odata=verbose')
        retry = 5
        while retry:
            try:
                self.request.context.authenticate_request(options)
                self.request.context.ensure_form_digest(options)
                with requests.get(url=request_url, headers=options.headers, auth=options.auth, stream=True, timeout=120) as r:
                    if r.status_code not in (200, 201):
                        # convert HTTP errors into RequestException so the
                        # retry logic below handles them uniformly
                        error = self.parse_error(r)
                        raise requests.exceptions.RequestException(error)
                    with open(filename, 'wb') as file:
                        for chunk in r.iter_content(chunk_size=8192):
                            if chunk:
                                file.write(chunk)
            except requests.exceptions.RequestException:
                time.sleep(3)
                self.login()
                retry -= 1
                if not retry:
                    raise
                continue
            # success: leave the retry loop
            retry = 0
        return filename
def upload(self, fileobj, remote_path, buffer_size=None, log=False, progress_obj=False):
iid = uuid.uuid1()
if progress_obj:
log = True
if log:
logger = logging.getLogger('cloud.backup')
try:
size = os.path.getsize(fileobj.name)
except:
size = None
offset = -1
if not buffer_size:
buffer_size = 10* 1024 * 1024
x = ""
webUri = '%s%s' % (self.path, remote_path)
while True:
if offset == -1:
request_url = "%s/_api/web/GetFolderByServerRelativeUrl('%s')/Files/add(url='%s',overwrite=true)" % (self.baseurl, self.path, remote_path)
offset = 0
elif not offset:
if len(x) == buffer_size:
request_url="%s/_api/web/getfilebyserverrelativeurl('%s')/startupload(uploadId=guid'%s')" % (self.baseurl, webUri, iid)
else:
request_url = "%s/_api/web/GetFolderByServerRelativeUrl('%s')/Files/add(url='%s',overwrite=true)" % (self.baseurl, self.path, remote_path)
elif len(x) == buffer_size:
request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')/continueupload(uploadId=guid'%s',fileOffset=%s)" % (self.baseurl, webUri, iid, offset)
else:
request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')/finishupload(uploadId=guid'%s',fileOffset=%s)" % (self.baseurl, webUri, iid, offset)
offset += len(x)
options = RequestOptions(request_url)
options.method = HttpMethod.Post
self.request.context.authenticate_request(options)
self.request.context.ensure_form_digest(options)
result = requests.post(url=request_url, data=x, headers=options.headers, auth=options.auth)
if result.status_code not in (200, 201):
raise Exception(result.content)
if log and offset and offset % buffer_size*10 == 0:
percent_txt = ''
if size:
percent = round(offset*100/size)
percent_txt = '%d%%' % percent
if progress_obj:
progress_obj.write({'name': percent})
logger.info('OneDrive: %d bytes sent on %s bytes %s' % (offset, size or 'unknown', percent_txt))
x = fileobj.read(buffer_size)
if not x:
break
return True
def parse_error(self, result):
try:
if 'application/json' in result.headers.get('Content-Type'):
resp_content = result.json()
msg = resp_content['odata.error']['message']
error = []
if isinstance(msg, dict):
error = [msg['value']]
else:
error = [msg]
if resp_content['odata.error'].get('code'):
error.append('Code: %s' % resp_content['odata.error']['code'])
return ' '.join(error)
except:
pass
return result.text
| Unifield/ufload | ufload/webdav.py | Python | mit | 8,486 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import contents.models
class Migration(migrations.Migration):
    """Alter ``FrontPageImage.image`` to a nullable ImageField whose uploads
    are routed through ``contents.models.get_front_page_image_path``."""
    dependencies = [
        ('contents', '0017_auto_20170329_1504'),
    ]
    operations = [
        migrations.AlterField(
            model_name='frontpageimage',
            name='image',
            field=models.ImageField(null=True, upload_to=contents.models.get_front_page_image_path),
        ),
    ]
| andersonjonathan/Navitas | navitas/contents/migrations/0018_auto_20170329_1549.py | Python | mit | 483 |
"""
WSGI config for jt_portfolio project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jt_portfolio.settings")

# Build the WSGI application once and wrap it with WhiteNoise so static files
# are served by the WSGI layer (the original imported get_wsgi_application
# twice and instantiated the application twice).
application = DjangoWhiteNoise(get_wsgi_application())
| ilikesounds/jt_portfolio | jt_portfolio/wsgi.py | Python | mit | 582 |
from tempfile import TemporaryFile
import pytest
from imgpy import Img
@pytest.mark.parametrize('image', ({
    'sub': 'anima/bordered.gif',
    'convert': 'L',
    'mode': 'P'
}, {
    'sub': 'anima/clear.gif',
    'convert': 'L',
    'mode': 'P'
}, {
    'sub': 'fixed/bordered.jpg',
    'convert': 'L',
    'mode': 'L'
}, {
    'sub': 'fixed/clear.jpg',
    'convert': 'L',
    'mode': 'L'
}, ))
def test_convert(path, image):
    """Converting an image to mode 'L' and saving it must preserve width,
    height and frame count; the saved mode must match the fixture's expected
    ``mode`` (per the parametrize data, 'P' for the GIFs, 'L' for the JPEGs).

    ``path`` is a fixture resolving an image sub-path — defined elsewhere in
    the test suite.
    """
    with Img(fp=path(image['sub'])) as src, TemporaryFile() as tf:
        src.convert(image['convert'])
        src.save(fp=tf)
        # Re-open the saved copy and compare it against the in-memory source.
        with Img(fp=tf) as dest:
            assert (dest.width, dest.height, dest.mode, dest.frame_count) == (
                src.width, src.height, image['mode'], src.frame_count)
| embali/imgpy | tests/test_convert.py | Python | mit | 746 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ApplicationGatewaysOperations(object):
"""ApplicationGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        """Store the pipeline client, configuration and (de)serializers
        supplied by the generated service client."""
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Send the initial DELETE request of the long-running delete
        operation; polling to completion is handled by ``begin_delete``."""
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Callers may merge extra status-code → exception mappings.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified application gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only issue the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.ApplicationGateway"
        """Gets the specified application gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationGateway, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGateway
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Callers may merge extra status-code → exception mappings.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.ApplicationGateway"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.ApplicationGateway"
        """Send the initial PUT request of the long-running create-or-update
        operation; polling to completion is handled by
        ``begin_create_or_update``."""
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ApplicationGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated existing gateway, 201 = created; both carry a body.
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.ApplicationGateway"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGateway"]
        """Creates or updates the specified application gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param parameters: Parameters supplied to the create or update application gateway operation.
        :type parameters: ~azure.mgmt.network.v2019_08_01.models.ApplicationGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only issue the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def _update_tags_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.TagsObject"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.ApplicationGateway"
        """Send the PATCH request that starts the tags-update operation;
        polling is handled by ``begin_update_tags``."""
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._update_tags_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def begin_update_tags(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.TagsObject"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGateway"]
        """Updates the specified application gateway tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param parameters: Parameters supplied to update application gateway tags.
        :type parameters: ~azure.mgmt.network.v2019_08_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only issue the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._update_tags_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
        """Lists all application gateways in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string; send it as-is.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways'} # type: ignore
    def list_all(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
        """Gets all the application gateways in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string; send it as-is.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways'} # type: ignore
    def _start_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Send the initial POST to the gateway's ``/start`` endpoint;
        polling to completion is handled by ``begin_start``."""
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        # Construct URL
        url = self._start_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
    def begin_start(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Starts the specified application gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only make the initial POST when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = self._start_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )

        # Already consumed by the initial call; remove so the polling requests
        # below do not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # The operation returns no body, so only invoke a custom cls if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # 'final-state-via: location' tells ARMPolling to read the terminal
        # state from the Location header URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'}  # type: ignore
    def _stop_initial(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the raw POST that stops the application gateway.

        Internal first step of the ``begin_stop`` long-running operation.

        :param resource_group_name: The name of the resource group.
        :param application_gateway_name: The name of the application gateway.
        :raises ~azure.core.exceptions.HttpResponseError: for any status code
            other than 200 or 202.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map common ARM failure statuses onto specific azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"

        # Construct URL from the metadata template plus serialized path arguments.
        url = self._stop_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers (none beyond defaults; the operation has no body).
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = completed synchronously, 202 = accepted for asynchronous processing.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'}  # type: ignore
    def begin_stop(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Stops the specified application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only make the initial POST when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = self._stop_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )

        # Already consumed by the initial call; remove so the polling requests
        # below do not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # The operation returns no body, so only invoke a custom cls if given.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # 'final-state-via: location' tells ARMPolling to read the terminal
        # state from the Location header URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'}  # type: ignore
    def _backend_health_initial(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["_models.ApplicationGatewayBackendHealth"]
        """Issue the raw POST requesting backend health for the gateway.

        Internal first step of ``begin_backend_health``. Returns a deserialized
        model only on a 200 response; a 202 yields ``None`` (the result is
        obtained later by the poller).

        :param resource_group_name: The name of the resource group.
        :param application_gateway_name: The name of the application gateway.
        :param expand: Optional OData ``$expand`` expression.
        :raises ~azure.core.exceptions.HttpResponseError: for any status code
            other than 200 or 202.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ApplicationGatewayBackendHealth"]]
        # Map common ARM failure statuses onto specific azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"

        # Construct URL from the metadata template plus serialized path arguments.
        url = self._backend_health_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters ($expand only when supplied).
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Body is only present on a synchronous 200 completion.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _backend_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'}  # type: ignore
    def begin_backend_health(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealth"]
        """Gets the backend health of the specified application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGatewayBackendHealth or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHealth]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationGatewayBackendHealth"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only make the initial POST when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = self._backend_health_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                expand=expand,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )

        # Already consumed by the initial call; remove so the polling requests
        # below do not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response into the result model.
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # 'final-state-via: location' tells ARMPolling to read the terminal
        # state from the Location header URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_backend_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'}  # type: ignore
    def _backend_health_on_demand_initial(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        probe_request,  # type: "_models.ApplicationGatewayOnDemandProbe"
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["_models.ApplicationGatewayBackendHealthOnDemand"]
        """Issue the raw POST for an on-demand backend health probe.

        Internal first step of ``begin_backend_health_on_demand``. Serializes
        ``probe_request`` as the JSON request body. Returns a deserialized model
        only on a 200 response; a 202 yields ``None``.

        :param resource_group_name: The name of the resource group.
        :param application_gateway_name: The name of the application gateway.
        :param probe_request: Request body for the on-demand test probe operation.
        :param expand: Optional OData ``$expand`` expression.
        :raises ~azure.core.exceptions.HttpResponseError: for any status code
            other than 200 or 202.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ApplicationGatewayBackendHealthOnDemand"]]
        # Map common ARM failure statuses onto specific azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the metadata template plus serialized path arguments.
        url = self._backend_health_on_demand_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters ($expand only when supplied).
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the probe request model as the POST body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(probe_request, 'ApplicationGatewayOnDemandProbe')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Body is only present on a synchronous 200 completion.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _backend_health_on_demand_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'}  # type: ignore
    def begin_backend_health_on_demand(
        self,
        resource_group_name,  # type: str
        application_gateway_name,  # type: str
        probe_request,  # type: "_models.ApplicationGatewayOnDemandProbe"
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealthOnDemand"]
        """Gets the backend health for given combination of backend pool and http setting of the specified
        application gateway in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param probe_request: Request body for on-demand test probe operation.
        :type probe_request: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayOnDemandProbe
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGatewayBackendHealthOnDemand or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayBackendHealthOnDemand]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationGatewayBackendHealthOnDemand"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only make the initial POST when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = self._backend_health_on_demand_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                probe_request=probe_request,
                expand=expand,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )

        # Already consumed by the initial call; remove so the polling requests
        # below do not receive them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response into the result model.
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # 'final-state-via: location' tells ARMPolling to read the terminal
        # state from the Location header URL.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_backend_health_on_demand.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'}  # type: ignore
def list_available_server_variables(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available server variables.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_server_variables.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_server_variables.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableServerVariables'} # type: ignore
def list_available_request_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available request headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_request_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_request_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableRequestHeaders'} # type: ignore
def list_available_response_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available response headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_response_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_response_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableResponseHeaders'} # type: ignore
def list_available_waf_rule_sets(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableWafRuleSetsResult"
"""Lists all available web application firewall rule sets.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableWafRuleSetsResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableWafRuleSetsResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableWafRuleSetsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_waf_rule_sets.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableWafRuleSetsResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_waf_rule_sets.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets'} # type: ignore
def list_available_ssl_options(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableSslOptions"
"""Lists available Ssl options for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableSslOptions, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableSslOptions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableSslOptions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.list_available_ssl_options.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableSslOptions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_ssl_options.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default'} # type: ignore
    def list_available_ssl_predefined_policies(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
        """Lists all SSL predefined policies for configuring Ssl policy.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayAvailableSslPredefinedPolicies or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ApplicationGatewayAvailableSslPredefinedPolicies]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
        # Map common ARM failure statuses onto specific azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build either the first-page request (from the URL template) or a
            # follow-up request against the server-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_available_ssl_predefined_policies.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string; send it as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Turn one page response into (next page link, iterator of items).
            deserialized = self._deserialize('ApplicationGatewayAvailableSslPredefinedPolicies', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, raising on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list_available_ssl_predefined_policies.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies'}  # type: ignore
    def get_ssl_predefined_policy(
        self,
        predefined_policy_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ApplicationGatewaySslPredefinedPolicy"
        """Gets Ssl predefined policy with the specified policy name.

        :param predefined_policy_name: Name of Ssl predefined policy.
        :type predefined_policy_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationGatewaySslPredefinedPolicy, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_08_01.models.ApplicationGatewaySslPredefinedPolicy
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationGatewaySslPredefinedPolicy"]
        # Map HTTP status codes to the azure-core exception types callers expect;
        # callers may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"

        # Construct URL from the operation's metadata template.
        url = self.get_ssl_predefined_policy.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'predefinedPolicyName': self._serialize.url("predefined_policy_name", predefined_policy_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ApplicationGatewaySslPredefinedPolicy', pipeline_response)

        if cls:
            # Allow the caller's hook to post-process the raw pipeline response.
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_ssl_predefined_policy.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/operations/_application_gateways_operations.py | Python | mit | 73,707 |
#!/usr/bin/env python3
import unittest
from tests import testfunctions
from dftintegrate.fourier import vaspdata
class TestExtractingVASPDataToDatFiles(unittest.TestCase,
                                       testfunctions.TestFunctions):
    """End-to-end check that VASPData extraction reproduces the answer files."""

    # Data sections that VASPData is expected to write out for each case.
    SECTIONS = ('kpts_eigenvals', 'symops_trans', 'kmax')

    def setUp(self):
        print('Testing extracting VASP data to .dat files...')
        self.cases = [str(x) for x in range(1, 3)]
        self.root = './tests/fourier/extractvaspdata/'

    def test_runtestcases(self):
        for case in self.cases:
            print(' Testing case '+case+'...')
            # Run the extraction, then diff every section against the answers.
            vaspdata.VASPData(self.root+'tocheck/test'+case)
            for section in self.SECTIONS:
                expected = self.readfile(case, 'answer', section)
                produced = self.readfile(case, 'tocheck', section)
                self.assertEqual(expected, produced,
                                 msg=section+' case '+case)
| mmb90/dftintegrate | tests/fourier/extractvaspdata/test_extractvaspdata.py | Python | mit | 1,597 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AvailableEndpointServicesOperations:
    """AvailableEndpointServicesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        location: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.EndpointServicesListResult"]:
        """List what values of endpoint services are available for use.

        :param location: The location to check available endpoint services.
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either EndpointServicesListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.EndpointServicesListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.EndpointServicesListResult"]
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-06-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the initial GET request, or a request for the continuation
            # link returned by the previous page.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'location': self._serialize.url("location", location, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation links already embed the query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize('EndpointServicesListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/virtualNetworkAvailableEndpointServices'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_available_endpoint_services_operations.py | Python | mit | 5,328 |
def capitalize(string):
    """Return *string* with the first character of each space-separated word
    uppercased; everything else (spacing, digits, remaining letters) is kept
    exactly as given.

    Fixes two defects of the previous implementation:
    - str.replace rewrote EVERY occurrence of a word anywhere in the string,
      so 'aa a' became 'AA A' instead of 'Aa A';
    - str.capitalize also lowercased the rest of each word.
    """
    # split(' ') keeps empty chunks, so runs of spaces survive the join.
    return ' '.join(word[:1].upper() + word[1:] for word in string.split(' '))
if __name__ == '__main__':
    # Read one line from stdin, capitalize each word, and echo the result.
    string = input()
    capitalized_string = capitalize(string)
    print(capitalized_string)
| MrinmoiHossain/HackerRank | Python/Strings/Capitalize!.py | Python | mit | 259 |
# -*- coding: utf-8 -*-
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, as long as
# any reuse or further development of the software attributes the
# National Geospatial-Intelligence Agency (NGA) authorship as follows:
# 'This software (gamification-server)
# is provided to the public as a courtesy of the National
# Geospatial-Intelligence Agency.
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import json
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.utils.datastructures import SortedDict
from django.db.models.signals import post_save
from django.db import models
from gamification.badges.models import ProjectBadge, ProjectBadgeToUser
from jsonfield import JSONField
from mptt.models import MPTTModel, TreeForeignKey
TRUE_FALSE = [(0, 'False'), (1, 'True')]
class ProjectBase(models.Model):
    """
    A generic model for GeoQ objects: common name/description fields plus
    created/updated bookkeeping timestamps. Abstract — subclasses get the
    fields, no table is created for this model itself.
    """
    active = models.BooleanField(default=True, help_text='If checked, this project will be listed in the active list.')
    created_at = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=200, help_text='Name of the project.')
    description = models.TextField(help_text='Details of this project that will be listed on the viewing page.')
    updated_at = models.DateTimeField(auto_now=True)
    url = models.TextField(help_text='Project Information URL', null=True)

    def __unicode__(self):
        # Python 2 string representation (this codebase predates Python 3).
        return self.name

    class Meta:
        abstract = True
        # Newest objects first by default.
        ordering = ('-created_at',)
class Team(MPTTModel):
    """Hierarchical (django-mptt) grouping of users; teams nest via `parent`."""
    name = models.CharField(max_length=50)
    description = models.TextField(null=True, blank=True)
    members = models.ManyToManyField(User, null=True, blank=True)
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children')
    order = models.IntegerField(default=0, null=True, blank=True, help_text='Optionally specify the order teams should appear. Lower numbers appear sooner. By default, teams appear in the order they were created.')
    date_created = models.DateTimeField(auto_now_add=True)
    background_color = models.CharField(max_length=50, null=True, blank=True, help_text='Optional - Color to use for background of all team badges')
    icon = models.ImageField(upload_to='badge_images', null=True, blank=True, help_text='Optional - Image to show next to team names')

    def __str__(self):
        # e.g. "Blue Team (4)" — direct member count in parentheses.
        return "%s (%s)" % (self.name, str(len(self.members.all())))

    def get_all_users(self, include_self=True):
        """Collect members of this team and of every descendant team.

        May contain duplicates if a user belongs to several teams.
        """
        u = []
        if include_self:
            u += self.members.all()
        for team in self.get_descendants():
            u += team.members.all()
        return u

    class Meta:
        ordering = ['-order', '-date_created', 'id']

    class MPTTMeta:
        # Siblings in the tree are kept sorted by name on insertion.
        order_insertion_by = ['name']
class Project(ProjectBase):
    """
    Top-level organizational object.
    """

    # (value, label) choices for the project page's visual theme.
    THEMES = (
        ("", "None"),
        ("camping", "Camping"),
        ("camping2", "Camping Theme 2"),
        ("map", "Geospatial"),
    )

    private = models.BooleanField(default=False, help_text='If checked, hide this project from the list of projects and public badge APIs.')
    supervisors = models.ManyToManyField(User, blank=True, null=True, related_name="supervisors", help_text='Anyone other than site administrators that can add badges and update the site')
    teams = models.ManyToManyField(Team, blank=True, null=True)
    viewing_pass_phrase = models.CharField(max_length=200, null=True, blank=True, help_text='Phrase that must be entered to view this page.')
    project_closing_date = models.DateTimeField(null=True, blank=True, help_text='Date that project "closes" with countdown shown on project page. Badges can still be added after this.')
    # NOTE(review): default "none" is not one of the THEMES values ("" is) — confirm intended.
    visual_theme = models.CharField(max_length=20, default="none", choices=THEMES, help_text='Visual Theme used to style the project page')
    background_image = models.ImageField(upload_to='badge_images', null=True, blank=True, help_text='Optional - Override theme background with this image')
    properties = JSONField(null=True, blank=True, help_text='JSON key/value pairs associated with this object, e.g. {"badges_mode":"blue"}')
    query_token = models.CharField(max_length=200, null=True, blank=True, help_text='Token that must be entered by any server requesting data - not implemented yet.')
    allowed_api_hosts = models.TextField(null=True, blank=True, help_text='Comma-separated list of hosts (IPs or Hostnames) that can access this project via data requests - not implemented yet')

    @property
    def user_count(self):
        # Number of distinct users holding at least one badge in this project.
        return User.objects.filter(projectbadgetouser__projectbadge__project=self).distinct().count()

    @property
    def badge_count(self):
        # Total number of badge awards handed out within this project.
        return ProjectBadgeToUser.objects.filter(projectbadge__project=self).count()

    def get_absolute_url(self):
        return reverse('project-list', args=[self.name])
class Points(models.Model):
    """A point award tied to a user and the project badge it was earned under."""
    user = models.ForeignKey(User)
    projectbadge = models.ForeignKey(ProjectBadge)
    value = models.IntegerField(default=0)
    # NOTE(review): auto_now updates this on EVERY save — confirm the
    # "awarded" date is really meant to change when the row is edited.
    date_awarded = models.DateTimeField('date awarded',auto_now=True)
    description = models.CharField(max_length=200)

    def get_absolute_url(self):
        return reverse('points-list', args=[self.id])

    class Meta:
        # "Points" reads better than the auto-generated "Pointss" in the admin.
        verbose_name_plural = "Points"
class UserProfile(models.Model):
    """ from http://stackoverflow.com/questions/44109/extending-the-user-model-with-custom-fields-in-django; this is one mechanism for adding extra details (currently score for badges) to the User model """
    # Score assigned to every freshly created profile.
    defaultScore = 1
    user = models.OneToOneField(User)
    score = models.IntegerField(default=defaultScore)

    def __str__(self):
        return "%s's profile" % self.user
def create_user_profile(sender, instance, created, **kwargs):
    """post_save signal handler: ensure every newly created User has a UserProfile."""
    if created:
        # get_or_create guards against duplicates if the signal fires twice.
        profile, created = UserProfile.objects.get_or_create(user=instance)

# Attach the handler so profiles are created automatically with users.
post_save.connect(create_user_profile, sender=User)
import sys
if not 'syncdb' in sys.argv[1:2] and not 'migrate' in sys.argv[1:2]:
from meta_badges import * | ngageoint/gamification-server | gamification/core/models.py | Python | mit | 7,022 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
# URL routing table (old-style Django `patterns` with string view references,
# removed in Django 1.10).
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    # (r'^statinfo/$', 'appname.views.stat_info'),
    # Built-in auth views; logout redirects back to the login page.
    (r'^login/$', 'django.contrib.auth.views.login'),
    (r'^logout/$', 'django.contrib.auth.views.logout', {'next_page' : '/login'}),
    # Application pages: main menu and the content-manager workflow.
    (r'^menu/$', 'mypublisher.views.mainmenu'),
    (r'^cmanager/$', 'mypublisher.views.content_manager'),
    (r'^cmanager/upload$', 'mypublisher.views.render_upload_content'),
    (r'^cmanager/upload/save$', 'mypublisher.views.save_content'),
) | laxmikantG/mypublisher | mypublisher/urls.py | Python | mit | 629
#!/usr/bin/env python
# encoding: utf-8
"""
__init__.py
The MIT License (MIT)
Copyright (c) 2013 Matt Ryan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import os
import stomp
import json
import afutils.file_pattern as pattern
from aflib3 import AFLibraryEntry
class AFMQ:
    '''Represents a basic connection to an ActiveMQ
    service for AudioFile.

    Connects via STOMP (default broker host/port) and subscribes to the
    given queue on construction.
    '''
    def __init__(self, queue_name):
        self.queue_name = queue_name
        self.queue_handle = stomp.Connection()
        self.queue_handle.start()
        self.queue_handle.connect()
        self.queue_handle.subscribe(destination=queue_name, ack='auto')

    def __del__(self):
        # Best-effort cleanup; __del__ timing is up to the garbage collector.
        self.queue_handle.disconnect()

    def put(self, msg):
        # Enqueue a message on the configured destination.
        self.queue_handle.send(msg, destination=self.queue_name)
class BasicHandler:
    '''Represents an ActiveMQ handler that consumes information
    from the queue.

    Registers itself as the STOMP listener, so on_error/on_message are
    invoked by the stomp client thread. Subclasses override on_message.
    '''
    def __init__(self, aflib, queue_name):
        self.aflib = aflib
        self.queue_name = queue_name
        self.queue_handle = stomp.Connection()
        self.queue_handle.set_listener(queue_name, self)
        self.queue_handle.start()
        self.queue_handle.connect()
        self.queue_handle.subscribe(destination=queue_name, ack='auto')

    def __del__(self):
        self.queue_handle.stop()

    def on_error(self, headers, message):
        # Python 2 print statement (this module predates Python 3).
        print '%s: Received an error: "%s"' % (self.__class__, message)

    def on_message(self, headers, message):
        print '%s: Received message: "%s"' % (self.__class__, message)
class AddFileHandler(BasicHandler):
    '''Adds files to the AudioFile library as the files
    are posted into a queue.
    '''
    def __init__(self, aflib):
        BasicHandler.__init__(self, aflib, '/audiofile/library_additions')

    def on_message(self, headers, message):
        BasicHandler.on_message(self, headers, message)
        # Payload is a JSON array; its first two elements are forwarded to
        # add_mp3 — exact semantics defined by aflib3 (TODO confirm against producer).
        args = json.loads(message)
        self.aflib.add_mp3(args[0], args[1])
class RenameFileHandler(BasicHandler):
    '''Renames files from the old path to the new specified
    path as the information is put into a queue.
    '''
    def __init__(self, aflib):
        BasicHandler.__init__(self, aflib, '/audiofile/file_renames')

    def on_message(self, headers, message):
        BasicHandler.on_message(self, headers, message)
        # Payload: [song_dict, pattern_string] — rebuild the library entry,
        # derive the new path from the pattern, then rename on disk.
        args = json.loads(message)
        song = AFLibraryEntry()
        song.apply_dict(args[0])
        newpath = pattern.get_new_path(song, args[1])
        print 'Renaming "%s" as "%s"...' % (song.path, newpath)
        os.rename(song.path, newpath)
if __name__ == '__main__':
    # Import-only module; nothing to run directly.
    pass
| mattvryan/audiofile | afmq/__init__.py | Python | mit | 3,351 |
import pytest # noqa
import python_jsonschema_objects as pjo
def test_regression_156(markdown_examples):
    """Round-trip named classes and the oneOf wrapper built from MultipleObjects."""
    builder = pjo.ObjectBuilder(
        markdown_examples["MultipleObjects"], resolved=markdown_examples
    )
    ns = builder.build_classes(named_only=True)

    error = ns.ErrorResponse(message="Danger!", status=99)
    version = ns.VersionGetResponse(local=False, version="1.2.3")

    # Serialized instances must deserialize back into their named classes...
    ns.ErrorResponse.from_json(error.serialize())
    ns.VersionGetResponse.from_json(version.serialize())

    # ...and into the class generated for the top-level `oneOf` as well.
    for instance in (error, version):
        ns.Multipleobjects.from_json(instance.serialize())
def test_toplevel_oneof_gets_a_name(markdown_examples):
    """A top-level `oneOf` schema must still receive a generated title."""
    builder = pjo.ObjectBuilder(
        markdown_examples["MultipleObjects"], resolved=markdown_examples
    )
    generated = builder.build_classes(named_only=True)
    assert generated.Multipleobjects.__title__ is not None
| cwacek/python-jsonschema-objects | test/test_regression_156.py | Python | mit | 1,032 |
"""
virtstrap.project
-----------------
This module contains all the abstractions for dealing with a Project.
Using this object simplifies creating commands that are used to manage
the project.
"""
import os
import sys
from virtstrap import constants
from virtstrap.config import VirtstrapConfig
from virtstrap.utils import call_subprocess
VIRTSTRAP_DIR = constants.VIRTSTRAP_DIR
class Project(object):
    """A virtstrap project: wraps the project directory, its parsed
    configuration, and path helpers rooted at the virtstrap environment."""

    @classmethod
    def load(cls, options):
        """Creates a project and loads it's configuration immediately"""
        project = cls()
        project.load_settings(options)
        return project

    def __init__(self):
        self._options = None
        self._config = None

    def load_settings(self, options):
        """Resolve the project directory and parse its configuration file.

        Uses options.project_dir when given, otherwise walks up from the
        current directory looking for a virtstrap dir.
        """
        # Check if project directory is specified
        project_dir = getattr(options, 'project_dir', None)
        if not project_dir:
            project_dir = self._find_project_dir()
        project_dir = os.path.abspath(project_dir)
        self._project_dir = project_dir
        config_file = os.path.join(project_dir, options.config_file)
        config = VirtstrapConfig.from_file(config_file,
                profiles=options.profiles)
        # Project name defaults to the directory basename when unset.
        processor = ProjectNameProcessor(project_dir)
        project_name = config.process_section('project_name', processor)
        self._project_name = project_name
        self._config = config
        self._config_file = config_file
        self._options = options

    def _find_project_dir(self):
        # Indirection point so tests can stub out directory discovery.
        return find_project_dir()

    @property
    def name(self):
        return self._project_name

    @property
    def config_file(self):
        # None when the configured path does not exist on disk.
        if not os.path.isfile(self._config_file):
            return None
        return self._config_file

    def set_options(self, options):
        self._options = options

    def path(self, *paths):
        """Create a path relative to the project"""
        return os.path.join(self._project_dir, *paths)

    def env_path(self, *paths):
        """Create a path relative to the virtstrap-dir"""
        return os.path.join(self._project_dir,
                self._options.virtstrap_dir, *paths)

    def bin_path(self, *paths):
        """Create a path relative to the virtstrap-dir's bin directory"""
        # Windows virtualenvs use "Scripts" instead of "bin".
        bin_py = 'bin'
        if sys.platform == 'win32':
            bin_py = 'Scripts'
        return self.env_path(bin_py, *paths)

    def process_config_section(self, section, processor):
        return self._config.process_section(section, processor)

    def config(self, section):
        """Grabs processed section data"""
        return self._config.processed(section)

    def call_bin(self, command_name, args, **options):
        """Run an executable from the environment's bin directory."""
        command = [self.bin_path(command_name)]
        command.extend(args)
        return call_subprocess(command, **options)
class NoProjectFound(Exception):
    """Raised when no virtstrap project directory can be located."""
def find_project_dir(current_dir=None):
    """Walk up from *current_dir* (default: cwd) to the nearest directory
    containing a virtstrap dir; raise NoProjectFound at the filesystem root."""
    directory = current_dir or os.path.abspath(os.curdir)
    while True:
        if VIRTSTRAP_DIR in os.listdir(directory):
            candidate = os.path.join(directory, VIRTSTRAP_DIR)
            # Accept either a real directory or a symlink to one.
            if os.path.islink(candidate) or os.path.isdir(candidate):
                return directory
        parent = os.path.abspath(os.path.join(directory, os.pardir))
        if parent == directory:
            # Reached the root without finding a project.
            raise NoProjectFound('No project found')
        directory = parent
class ProjectNameProcessor(object):
    """Config-section processor that supplies a project name, falling back to
    the project directory's basename when none is configured."""

    def __init__(self, project_dir):
        self._project_dir = os.path.abspath(project_dir)

    def __call__(self, project_name):
        if project_name:
            return project_name
        return os.path.basename(self._project_dir)
| ravenac95/virtstrap | virtstrap-core/virtstrap/project.py | Python | mit | 3,644 |
"""Module containing kernel related classes"""
__author__ = 'wittawat'
from abc import ABCMeta, abstractmethod
import numpy as np
import scipy.signal as sig
class Kernel(object):
    """Abstract class for kernels"""
    # Python 2-style metaclass declaration; has no effect on Python 3.
    __metaclass__ = ABCMeta

    @abstractmethod
    def eval(self, X1, X2):
        """Evaluate the kernel on data X1 and X2, returning the full Gram
        matrix of all cross pairs (rows of X1 vs rows of X2)."""
        pass

    @abstractmethod
    def pair_eval(self, X, Y):
        """Evaluate k(x1, y1), k(x2, y2), ... — one value per paired row."""
        pass
class KHoPoly(Kernel):
    """Homogeneous polynomial kernel of the form
    (x.dot(y))**d
    """
    def __init__(self, degree):
        """
        :param degree: positive exponent d of the kernel.
        """
        assert degree > 0
        self.degree = degree

    def eval(self, X1, X2):
        """Return the n1 x n2 Gram matrix (X1 X2^T)**degree."""
        return X1.dot(X2.T)**self.degree

    def pair_eval(self, X, Y):
        """Evaluate k(x1, y1), k(x2, y2), ... for paired rows of X and Y.

        Fixed: previously referenced the undefined names X1/X2 and raised
        NameError on every call; the parameters are X and Y.
        """
        return np.sum(X*Y, 1)**self.degree

    def __str__(self):
        return 'KHoPoly(d=%d)'%self.degree
class KLinear(Kernel):
    """Linear kernel k(x, y) = x^T y (no parameters)."""

    def eval(self, X1, X2):
        # Gram matrix of all pairwise inner products.
        return np.dot(X1, X2.T)

    def pair_eval(self, X, Y):
        # Inner product of each row of X with the matching row of Y.
        return (X * Y).sum(1)

    def __str__(self):
        return "KLinear()"
class KGauss(Kernel):
    """Gaussian (RBF) kernel: k(x, y) = exp(-||x - y||^2 / sigma2)."""

    def __init__(self, sigma2):
        assert sigma2 > 0, 'sigma2 must be > 0. Was %s'%str(sigma2)
        self.sigma2 = sigma2

    def eval(self, X1, X2):
        """
        Evaluate the Gaussian kernel on the two 2d numpy arrays.

        Parameters
        ----------
        X1 : n1 x d numpy array
        X2 : n2 x d numpy array

        Return
        ------
        K : a n1 x n2 Gram matrix.
        """
        n1, d1 = X1.shape
        n2, d2 = X2.shape
        assert d1==d2, 'Dimensions of the two inputs must be the same'
        # ||x - y||^2 expanded as ||x||^2 - 2 x.y + ||y||^2, which avoids
        # materializing an n1 x n2 x d difference tensor.
        sq_norms1 = np.sum(X1**2, 1)[:, np.newaxis]
        sq_norms2 = np.sum(X2**2, 1)
        sqdists = sq_norms1 - 2*X1.dot(X2.T) + sq_norms2
        return np.exp(-sqdists/self.sigma2)

    def pair_eval(self, X, Y):
        """
        Evaluate k(x1, y1), k(x2, y2), ...

        Parameters
        ----------
        X, Y : n x d numpy array

        Return
        -------
        a numpy array with length n
        """
        n1, d1 = X.shape
        n2, d2 = Y.shape
        assert n1==n2, 'Two inputs must have the same number of instances'
        assert d1==d2, 'Two inputs must have the same dimension'
        sqdists = np.sum((X-Y)**2, 1)
        return np.exp(-sqdists/self.sigma2)

    def __str__(self):
        return "KGauss(%.3f)"%self.sigma2
class KTriangle(Kernel):
    """
    A triangular kernel defined on 1D. k(x, y) = B_1((x-y)/width) where B_1 is the
    B-spline function of order 1, i.e. the triangular "hat" function
    max(1 - |t|, 0).
    """

    def __init__(self, width):
        """
        :param width: half-width of the triangle's support; must be positive.
        """
        assert width > 0, 'width must be > 0'
        self.width = width

    def eval(self, X1, X2):
        """
        Evaluate the triangular kernel on the two 2d numpy arrays.

        Parameters
        ----------
        X1 : n1 x 1 numpy array
        X2 : n2 x 1 numpy array

        Return
        ------
        K : a n1 x n2 Gram matrix.
        """
        (n1, d1) = X1.shape
        (n2, d2) = X2.shape
        assert d1==1, 'd1 must be 1'
        assert d2==1, 'd2 must be 1'
        diff = (X1-X2.T)/self.width
        # B_1(t) = max(1 - |t|, 0), computed directly: this replaces the
        # former call to scipy.signal.bspline(diff, 1), which was deprecated
        # in SciPy 1.11 and removed in 1.13. The order-1 B-spline equals this
        # hat function, so the result is unchanged.
        K = np.maximum(1.0 - np.abs(diff), 0.0)
        return K

    def pair_eval(self, X, Y):
        """
        Evaluate k(x1, y1), k(x2, y2), ...

        Parameters
        ----------
        X, Y : n x 1 numpy array

        Return
        -------
        a numpy array with length n
        """
        (n1, d1) = X.shape
        (n2, d2) = Y.shape
        assert d1==1, 'd1 must be 1'
        assert d2==1, 'd2 must be 1'
        diff = (X-Y)/self.width
        # Same order-1 B-spline (hat) evaluated elementwise. NOTE: as in the
        # original, the result keeps X's (n, 1) shape rather than a flat
        # length-n vector.
        Kvec = np.maximum(1.0 - np.abs(diff), 0.0)
        return Kvec

    def __str__(self):
        return "KTriangle(w=%.3f)"%self.width
| wittawatj/fsic-test | fsic/kernel.py | Python | mit | 3,695 |
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HSS2_then_IsolatedLHS(HimesisPreConditionPatternLHS):
    # AToM3/Himesis generated pattern class; structure mirrors the tool output.
    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HSS2_then_IsolatedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True

        # Empty LHS pattern: no nodes, no edges.
        super(HSS2_then_IsolatedLHS, self).__init__(name='HSS2_then_IsolatedLHS', num_nodes=0, edges=[])

        # Add the edges
        self.add_edges([])

        # Set the graph attributes
        self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================

return True
"""
        self["name"] = """"""
        # Deterministic GUID derived from the rule name.
        self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'SS2_then')

        # Set the node attributes

        # Add the attribute equations
        self["equations"] = []

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returns the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================

        # Unconditional match: this LHS imposes no extra constraint.
        return True
| levilucio/SyVOLT | UMLRT2Kiltera_MM/Properties/from_thesis/HSS2_then_IsolatedLHS.py | Python | mit | 2,488 |
#!/usr/bin/env python
# NOTE: Python 2 script (uses print statements).
# Merges captured MATLAB output back into an annotated source document:
#   argv[1] = MATLAB session transcript, argv[2] = document with <?ml ... ?> blocks.
import sys, re

output = open(sys.argv[1])
output = output.read()
# The transcript is split on a sentinel assignment emitted once per code block,
# so output[i] holds the captured output of the i-th MATLAB block.
output = re.split('thisisalinebreak =',output)

f = open(sys.argv[2])
i = 1
matlab = False
for line in f:
    if line == "<?ml\n":
        # Entering an embedded MATLAB block: print that block's captured output.
        matlab = True
        j = 0
        for oline in output[i].split('\n'):
            # Skip the first lines and echoed assignments; keep result-looking
            # lines (indented values or lines without '=').
            if (j > 2) & (re.match('^(\s+[^\s]+|[^=]+)$',oline) != None):
                if oline.strip() != '':
                    print oline
            j += 1
        i += 1
    if line == "?>\n":
        # Leaving the MATLAB block; suppress the closing marker itself.
        matlab = False
        continue
    if not matlab:
        print line,
| newhouseb/MatTex | mattex.py | Python | mit | 582 |
#
# Copyright (c) 2017-2021 w-gao
#
import argparse
import logging
import struct
import sys
import socket
from contextlib import contextmanager
from random import randint
from typing import Generator
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.WARNING)
# constants
MC_QUERY_MAGIC = b'\xFE\xFD'
MC_QUERY_HANDSHAKE = b'\x09'
MC_QUERY_STATISTICS = b'\x00'
class QueryNetworkError(Exception):
    """Raised when the socket connection to the target server fails."""
class QueryFormatError(Exception):
    """Raised when the data returned from the server is malformed."""

    def __init__(self, raw_data=None):
        # Include the offending payload in the message when it is available.
        message = (
            f"Error parsing data: '{raw_data}'. Format has likely changed."
            if raw_data
            else "Error parsing data from the target server. Format has likely changed."
        )
        super().__init__(message)
class QueryServerData:
    """
    Holds the fields reported by a Minecraft: Bedrock Edition server via the
    Query protocol. Servers do not always provide complete or accurate
    information, so any attribute may be empty.
    """

    def __init__(self):
        # Unknown string fields default to None; counts to -1; lists to empty.
        self.motd = None
        self.hostname = None
        self.game_type = None
        self.game_id = None
        self.version = None
        self.server_engine = None
        self.plugins = []
        self.map = None
        self.num_players = -1
        self.max_players = -1
        self.whitelist = None
        self.host_ip = None
        self.host_port = None
        self.players = []

    def __str__(self):
        fields = ', '.join(f"{name}={value!r}" for name, value in vars(self).items())
        return f"{type(self).__name__}({fields})"
@contextmanager
def mcquery(host: str, port: int = 19132, timeout: int = 5) -> Generator[QueryServerData, None, None]:
    """
    A context manager that connects to the target host/port over UDP, runs the
    Query protocol handshake + statistics request, and yields the parsed
    server information. The socket is closed when the context manager exits.

    :param host: hostname or IP of the target server.
    :param port: UDP port of the server (19132 is the Bedrock default).
    :param timeout: socket timeout in seconds.
    :raises QueryNetworkError: if the socket connection fails.
    :raises QueryFormatError: if the server reply cannot be parsed.
    """
    soc = None
    try:
        logger.debug(f"Connecting to {host}:{port}...")
        soc = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        soc.settimeout(timeout)
        soc.connect((host, port))

        # Magic + packetType + sessionId (handshake has no payload).
        handshake = MC_QUERY_MAGIC + MC_QUERY_HANDSHAKE + struct.pack('>l', randint(1, 9999999))

        logger.debug("Sending handshake...")
        soc.send(handshake)
        # Strip the 5-byte header and trailing NUL to get the challenge token.
        token = soc.recv(65535)[5:-1].decode()

        if token is not None:
            payload = b'\x00\x00\x00\x00'

            logger.debug("Requesting statistics...")
            request_stat = MC_QUERY_MAGIC + MC_QUERY_STATISTICS + struct.pack('>l', randint(1, 9999999)) + struct.pack(
                '>l', int(token)) + payload
            soc.send(request_stat)
            # str() of the bytes keeps escape sequences like \x00 as literal
            # text, which _parse_data deliberately splits on.
            buff = str(soc.recv(65535)[5:])

            if buff is not None:
                logger.debug("Got data from server.")
                logger.debug("Parsing data...")
                yield _parse_data(buff)
                return

        raise QueryFormatError
    except socket.error as msg:
        raise QueryNetworkError(f"Failed to query: '{msg}'")
    finally:
        logger.debug("Closing connection...")
        # Fixed: if socket.socket() itself failed, `soc` is still None and the
        # unconditional close() raised AttributeError, masking the real error.
        if soc is not None:
            soc.close()
def _parse_data(raw_data: str) -> QueryServerData:
    """
    Internal helper: convert the raw statistics payload from the server
    into a populated QueryServerData instance.
    """
    stats = QueryServerData()
    sections = raw_data.split(r'\x01')
    if len(sections) != 2:
        raise QueryFormatError(raw_data)
    # First section holds key/value pairs, second holds the player list;
    # both are trimmed of their protocol framing.
    kv_part = sections[0].split(r'\x00')[2:-2]
    player_part = sections[1].split(r'\x00')[2:-2]  # player list
    # Consecutive entries of the key/value section form (key, value) pairs.
    data = {kv_part[i]: kv_part[i + 1] for i in range(0, len(kv_part), 2)}
    stats.hostname = data.get('hostname')
    stats.game_type = data.get('gametype')
    stats.game_id = data.get('game_id')
    stats.version = data.get('version')
    stats.server_engine = data.get('server_engine')
    stats.plugins = data.get('plugins', '').split(';')
    stats.map = data.get('map')
    stats.num_players = int(data.get('numplayers', -1))
    stats.max_players = int(data.get('maxplayers', -1))
    stats.whitelist = data.get('whitelist')
    stats.host_ip = data.get('hostip')
    stats.host_port = int(data.get('hostport', -1))
    stats.players = list(player_part)
    return stats
def main(args=None):
    """Command-line entry point: query one server and print its statistics."""
    parser = argparse.ArgumentParser(description="Query tool for Minecraft: Bedrock Edition servers.")
    parser.add_argument("host", type=str, help="The host of the server.")
    parser.add_argument("-p", "--port", type=int, default=19132, help="The port of the server.")
    parser.add_argument("-t", "--timeout", type=int, default=5, help="The time limit of the socket connection.")
    parser.add_argument("-d", "--debug", action='store_true', help="Enable debug logging.")
    options = parser.parse_args(args)
    if options.debug:
        logging.basicConfig(level=logging.DEBUG)
    try:
        with mcquery(options.host, port=options.port, timeout=options.timeout) as data:
            # Print one "Pretty label: value" line per field.
            for attr, value in data.__dict__.items():
                label = attr.capitalize().replace('_', ' ')
                print(f"{label}: {value}")
    except Exception as e:
        print(f"An error occurred during query: {e}")
# Script entry point: forward CLI arguments (minus the program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| w-gao/py-mcpe-query | src/mcquery/query.py | Python | mit | 5,863 |
# -*- coding: utf-8 -*-
from converters.circle import circle
from converters.currency import currency
from converters.electric import electric
from converters.force import force
from converters.pressure import pressure
from converters.speed import speed
from converters.temperature import temperature
class UnitsManager(object):
    """
    Registry of the unit converters available to this application.

    Iterating over an instance yields the registered converters in the
    order they were added.
    """

    _units = [
        circle,
        currency,
        electric,
        force,
        pressure,
        speed,
        temperature,
    ]

    def __iter__(self):
        return (conv for conv in self._units)

    def register(self, converter):
        """
        Add a new converter to this manager.

        Useful to plug in custom converters without editing the core of
        this application. Non-callable values (including None, for which
        callable() is already False) are silently ignored.
        """
        if callable(converter):
            self._units.append(converter)


UNITS = UnitsManager()
| mattgd/UnitConverter | units/__init__.py | Python | mit | 1,001 |
""" Various global variables """
import os
PROG_NAME = "gBuilder"
PROG_VERSION = "2.0.0"
# Filesystem locations, all rooted at the GINI_HOME environment variable.
# NOTE(review): os.environ["GINI_HOME"] raises KeyError when the variable is
# unset -- presumably the launcher guarantees it; confirm before reuse.
environ = {"os":"Windows",
           "path":os.environ["GINI_HOME"]+"/",
           "remotepath":"./",
           "images":os.environ["GINI_HOME"]+"/share/gbuilder/images/",
           "config":os.environ["GINI_HOME"]+"/etc/",
           "sav":os.environ["GINI_HOME"]+"/sav/",
           "tmp":os.environ["GINI_HOME"]+"/tmp/",
           "doc":os.environ["GINI_HOME"]+"/doc/"}
# Default user-configurable settings (UI appearance, connection endpoints).
options = {"names":True,
           "systray":False,
           "elasticMode":False, "keepElasticMode":False,
           "smoothing":True, "glowingLights":True, "style":"Plastique",
           "grid":True, "gridColor":"(240,240,240)",
           "background":environ["images"] + "background.jpg",
           "windowTheme":environ["images"] + "background2.jpg",
           "baseTheme":environ["images"] + "background3.jpg",
           "autorouting":True, "autogen":True, "autocompile":True,
           "graphing":True, "username":"",
           "server":"localhost", "session":"GINI", "autoconnect":True,
           "localPort":"10001", "remotePort":"10000",
           "restore":True}
# References to the main UI widgets; all start as None and are presumably
# populated at runtime by the application setup code -- verify before use.
mainWidgets = {"app":None,
               "main":None,
               "canvas":None,
               "tab":None,
               "popup":None,
               "log":None,
               "tm":None,
               "properties":None,
               "interfaces":None,
               "routes":None,
               "drop":None,
               "client":None}
defaultOptions = {"palette":None}
| sciyoshi/gini | frontend/src/gbuilder/Core/globals.py | Python | mit | 1,542 |
# vim:ts=4:sts=4:sw=4:expandtab
from satori.client.common import want_import
want_import(globals(), '*')
from satori.web.utils.decorators import contest_view
from django import forms
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from satori.web.utils.forms import SatoriDateTimeField,RenderObjectButton
from satori.web.utils.tables import *
@contest_view
def view(request, page_info):
    """Admin page for per-contestant submission-time extensions (Python 2).

    GET renders the grant form plus a table of currently granted SUBMIT
    privileges; POST either grants SUBMIT rights on one/all problems to a
    contestant (optionally time-limited) or revokes all of a contestant's
    extensions.
    """
    contest = page_info.contest
    # NOTE(review): is_admin is computed but never used below -- presumably
    # the @contest_view decorator already restricts access; confirm.
    is_admin = page_info.contest_is_admin
    all_contestants = Web.get_accepted_contestants(contest=contest,offset=0,limit=100000).contestants
    all_problems = [p.problem_mapping for p in Web.get_problem_mapping_list(contest=contest)]
    all_problems.sort(key=lambda p : p.code)
    # Form for granting a (possibly time-limited) SUBMIT privilege.
    class ExtendForm(forms.Form):
        contestant_choices = [[str(p.id), p.name] for p in all_contestants]
        problem_choices = [['all','All problems']]+[[p.id, p.code+': '+p.title] for p in all_problems]
        contestant = forms.ChoiceField(choices=contestant_choices,required=True)
        problem = forms.ChoiceField(choices=problem_choices,required=True)
        start = SatoriDateTimeField(required=False)
        finish = SatoriDateTimeField(required=False)
    # Table of existing SUBMIT grants, grouped per contestant and validity
    # window, with a Revoke button per contestant row.
    class ExtendTable(ResultTable):
        def default_limit(self):
            return 20
        def __init__(self,req,prefix=''):
            super(ExtendTable,self).__init__(req=req,prefix=prefix,default_sort=2,default_desc=True)
            privs = {}
            for m in all_problems:
                # NOTE(review): this Privilege.list() result is unused; the
                # loop below repeats the exact same query.
                k = Privilege.list(entity=m,right='SUBMIT')
                for r,t in Privilege.list(entity=m,right='SUBMIT').items():
                    # Render the validity window as "start/finish", with
                    # -infty/infty marking open-ended bounds.
                    if t.start_on:
                        start = str(t.start_on)
                    else:
                        start = '-infty'
                    if t.finish_on:
                        finish = str(t.finish_on)
                    else:
                        finish = 'infty'
                    times = start+'/'+finish
                    try:
                        c = Contestant(r.id)
                        n = c.name
                        # Group problem codes first by contestant, then by
                        # identical validity window.
                        if not privs.has_key(c):
                            privs[c] = {}
                        if not privs[c].has_key(times):
                            privs[c][times] = m.code
                        else:
                            privs[c][times] += ','+m.code
                    except:
                        # Role is presumably not a contestant: skip it.
                        # NOTE(review): bare except also hides real errors.
                        pass
            self.results = []
            # Flatten each contestant's windows into one "codes: window" string.
            for c,key in privs.items():
                s = ""
                for times,codes in key.items():
                    if s!="":
                        s+=", "
                    s += codes+': '+times
                self.results.append([c,s])
            self.total = len(self.results)
            self.fields.append(TableField(name='Contestant',value=(lambda table,i : table.results[i][0].name),id=1))
            self.fields.append(TableField(name='Status',value=(lambda table,i : table.results[i][1]),id=2))
            self.fields.append(TableField(name='',value='Revoke',render=(lambda table,i : RenderObjectButton(name='revoke',buttonname='Revoke',id=table.results[i][0].id,css='button button_small')),id=3))
    if request.method == "POST":
        # Revoke: drop the contestant's SUBMIT right on every problem.
        if 'revoke' in request.POST.keys():
            c = Contestant(int(request.POST['id']))
            for p in all_problems:
                Privilege.revoke(role=c,entity=p,right='SUBMIT')
            return HttpResponseRedirect(reverse('extends',args=[page_info.contest.id]))
        form = ExtendForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            c = Contestant(int(data['contestant']))
            start_on = data.get('start',None)
            finish_on = data.get('finish',None)
            # Grant on every problem or on the single selected mapping.
            if data['problem']=='all':
                for p in all_problems:
                    Privilege.grant(role=c,entity=p,right='SUBMIT',times=PrivilegeTimes(start_on=start_on,finish_on=finish_on))
            else:
                p = ProblemMapping(int(data['problem']))
                Privilege.grant(role=c,entity=p,right='SUBMIT',times=PrivilegeTimes(start_on=start_on,finish_on=finish_on))
            return HttpResponseRedirect(reverse('extends',args=[page_info.contest.id]))
    else:
        form = ExtendForm()
    extends = ExtendTable(req=request.GET,prefix='extend')
    return render_to_response('extends.html', {'page_info' : page_info, 'form' : form, 'extends' : extends})
| zielmicha/satori | satori.web/satori/web/views/contest/extends.py | Python | mit | 4,539 |
"""create_table
Revision ID: 42f4a0f4afd9
Revises: None
Create Date: 2014-11-15 16:53:22.716676
"""
# revision identifiers, used by Alembic.
revision = '42f4a0f4afd9'
down_revision = None
from alembic import op
import sqlalchemy as sa
#from guokr.platform.sqlalchemy.types import JSONType
def upgrade():
    """Create the initial schema: board -> round -> match, team, and the
    match_player association between match and team."""
    op.create_table(u'board',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('name', sa.Unicode(256), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    # A numbered round within a board.
    op.create_table(u'round',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('board_id', sa.Integer(),
                  sa.ForeignKey('board.id'), nullable=True),
        sa.Column('num', sa.Integer(), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    # A match belongs to a round and records where/when it is played.
    op.create_table(u'match',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('round_id', sa.Integer(),
                  sa.ForeignKey('round.id'), nullable=True),
        sa.Column('place', sa.Unicode(1024), nullable=False),
        sa.Column('introduction', sa.UnicodeText(), nullable=False),
        sa.Column('date_started', sa.DateTime(timezone=True),
                  nullable=False, index=True),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    op.create_table(u'team',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('name', sa.Unicode(256), nullable=False),
        sa.Column('introduction', sa.UnicodeText(), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    # Association of a team with a match, including its score and home flag.
    op.create_table(u'match_player',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('match_id', sa.Integer(),
                  sa.ForeignKey('match.id'), nullable=True),
        sa.Column('team_id', sa.Integer(),
                  sa.ForeignKey('team.id'), nullable=True),
        sa.Column('score', sa.Integer(), server_default='0', nullable=True),
        sa.Column('is_home', sa.Boolean(), nullable=False,
                  server_default=sa.sql.false()),
        # sa.Column('info', JSONType()),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
def downgrade():
    """Drop every table created by upgrade().

    NOTE(review): 'team' is dropped before 'match_player', which holds a
    foreign key to it -- presumably the target backend tolerates this (or
    FKs are unenforced); verify the drop order if this migration ever fails.
    """
    op.drop_table(u'team')
    op.drop_table(u'match_player')
    op.drop_table(u'match')
    op.drop_table(u'round')
    op.drop_table(u'board')
| qisanstudio/qsapp-score | src/score/migration/versions/42f4a0f4afd9_create_table.py | Python | mit | 3,217 |
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
import argparse
import os
import re
import subprocess
import sys
import traceback
import json
def main():
    """CLI entry point (Python 2 syntax).

    With --discovery, print a Zabbix low-level-discovery JSON document of
    hard-coded test macros; otherwise print the sum of the first two
    positional integers.
    """
    parser = argparse.ArgumentParser(description='Test discovery for Zabbix',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--discovery", dest="discovery", action='store_true', help="Режим обнаружения")
    parser.add_argument('integers', metavar='N', type=int, nargs='*',
        help='an integer for the accumulator')
    args = parser.parse_args()
    if args.discovery:
        # Static macro matrix used as Zabbix LLD test data.
        data = [
            {"{#T1}": "1","{#T2}": "1"},
            {"{#T1}": "1","{#T2}": "2"},
            {"{#T1}": "2","{#T2}": "1"},
            {"{#T1}": "2","{#T2}": "2"}
        ]
        result = json.dumps({"data": data})
        print result
    else:
        # NOTE(review): assumes at least two integers were supplied; fewer
        # raises IndexError (caught and printed by the __main__ handler).
        print str(args.integers[0] + args.integers[1])
# Run main(); on any failure dump the traceback to stdout (so Zabbix can
# capture it) and exit 1, otherwise exit 0. Python 2 except syntax.
if __name__ == "__main__":
    try:
        main()
    except Exception, ex:
        traceback.print_exc(file=sys.stdout)
        exit(1)
    exit(0)
| RemiZOffAlex/specialistoff.net | Zabbix/scripts/test.py | Python | mit | 1,067 |
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing.schema import Table, Column
from test.orm import _fixtures
from sqlalchemy import exc
from sqlalchemy.testing import fixtures
from sqlalchemy import Integer, String, ForeignKey, func
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, unitofwork, attributes,\
Session, class_mapper, sync, exc as orm_exc
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, Or
class AssertsUOW(object):
    """Mixin with helpers to build a UOWTransaction from a session's
    pending state and assert the size of the resulting flush plan."""

    def _get_test_uow(self, session):
        """Return a UOWTransaction with the session's pending changes registered."""
        uow = unitofwork.UOWTransaction(session)
        deleted = set(session._deleted)
        pending = set(session._new)
        dirty = set(session._dirty_states).difference(deleted)
        for state in pending | dirty:
            uow.register_object(state)
        for state in deleted:
            uow.register_object(state, isdelete=True)
        return uow

    def _assert_uow_size(self, session, expected):
        """Assert that the post-sort flush plan has exactly *expected* actions."""
        uow = self._get_test_uow(session)
        postsort_actions = uow._generate_actions()
        print(postsort_actions)
        eq_(len(postsort_actions), expected, postsort_actions)
class UOWTest(
        _fixtures.FixtureTest, testing.AssertsExecutionResults, AssertsUOW):
    """Base class for unit-of-work flush tests.

    run_inserts = None suppresses the fixture's automatic row inserts;
    each test creates exactly the rows it needs.
    """

    run_inserts = None
class RudimentaryFlushTest(UOWTest):
    """Exercise basic flush plans -- the ordering and exact SQL of the
    INSERT/UPDATE/DELETE statements emitted by Session.flush() -- for
    one-to-many, many-to-one and many-to-many mappings, plus flush-plan
    size checks via the AssertsUOW mixin."""
    # o2m save: the parent INSERT must precede the child INSERTs that
    # carry its generated primary key.
    def test_one_to_many_save(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address),
        })
        mapper(Address, addresses)
        sess = create_session()
        a1, a2 = Address(email_address='a1'), Address(email_address='a2')
        u1 = User(name='u1', addresses=[a1, a2])
        sess.add(u1)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "INSERT INTO users (name) VALUES (:name)",
                {'name': 'u1'}
            ),
            CompiledSQL(
                "INSERT INTO addresses (user_id, email_address) "
                "VALUES (:user_id, :email_address)",
                lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
            ),
            CompiledSQL(
                "INSERT INTO addresses (user_id, email_address) "
                "VALUES (:user_id, :email_address)",
                lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
            ),
        )
    # o2m delete: children must be DELETEd before the parent they reference.
    def test_one_to_many_delete_all(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address),
        })
        mapper(Address, addresses)
        sess = create_session()
        a1, a2 = Address(email_address='a1'), Address(email_address='a2')
        u1 = User(name='u1', addresses=[a1, a2])
        sess.add(u1)
        sess.flush()
        sess.delete(u1)
        sess.delete(a1)
        sess.delete(a2)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                [{'id':a1.id},{'id':a2.id}]
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.id = :id",
                {'id':u1.id}
            ),
        )
    # o2m delete of the parent only: child FKs are nulled out first so the
    # parent row can be removed.
    def test_one_to_many_delete_parent(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address),
        })
        mapper(Address, addresses)
        sess = create_session()
        a1, a2 = Address(email_address='a1'), Address(email_address='a2')
        u1 = User(name='u1', addresses=[a1, a2])
        sess.add(u1)
        sess.flush()
        sess.delete(u1)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "UPDATE addresses SET user_id=:user_id WHERE "
                "addresses.id = :addresses_id",
                lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
            ),
            CompiledSQL(
                "UPDATE addresses SET user_id=:user_id WHERE "
                "addresses.id = :addresses_id",
                lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.id = :id",
                {'id':u1.id}
            ),
        )
    # m2o save: the referenced User row is INSERTed before the Addresses.
    def test_many_to_one_save(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'user':relationship(User)
        })
        sess = create_session()
        u1 = User(name='u1')
        a1, a2 = Address(email_address='a1', user=u1), \
                Address(email_address='a2', user=u1)
        sess.add_all([a1, a2])
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "INSERT INTO users (name) VALUES (:name)",
                {'name': 'u1'}
            ),
            CompiledSQL(
                "INSERT INTO addresses (user_id, email_address) "
                "VALUES (:user_id, :email_address)",
                lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
            ),
            CompiledSQL(
                "INSERT INTO addresses (user_id, email_address) "
                "VALUES (:user_id, :email_address)",
                lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
            ),
        )
    # m2o delete: referencing Address rows go first, then the User.
    def test_many_to_one_delete_all(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'user':relationship(User)
        })
        sess = create_session()
        u1 = User(name='u1')
        a1, a2 = Address(email_address='a1', user=u1), \
                Address(email_address='a2', user=u1)
        sess.add_all([a1, a2])
        sess.flush()
        sess.delete(u1)
        sess.delete(a1)
        sess.delete(a2)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                [{'id':a1.id},{'id':a2.id}]
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.id = :id",
                {'id':u1.id}
            ),
        )
    # m2o: de-associating the target and deleting it UPDATEs the FKs to
    # NULL before the User DELETE.
    def test_many_to_one_delete_target(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'user':relationship(User)
        })
        sess = create_session()
        u1 = User(name='u1')
        a1, a2 = Address(email_address='a1', user=u1), \
                Address(email_address='a2', user=u1)
        sess.add_all([a1, a2])
        sess.flush()
        sess.delete(u1)
        a1.user = a2.user = None
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "UPDATE addresses SET user_id=:user_id WHERE "
                "addresses.id = :addresses_id",
                lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
            ),
            CompiledSQL(
                "UPDATE addresses SET user_id=:user_id WHERE "
                "addresses.id = :addresses_id",
                lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.id = :id",
                {'id':u1.id}
            ),
        )
    # m2o delete with every object expired: flush must re-load (unexpire)
    # the rows before it can emit the DELETEs.
    def test_many_to_one_delete_unloaded(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'parent':relationship(User)
        })
        parent = User(name='p1')
        c1, c2 = Address(email_address='c1', parent=parent), \
                Address(email_address='c2', parent=parent)
        session = Session()
        session.add_all([c1, c2])
        session.add(parent)
        session.flush()
        pid = parent.id
        c1id = c1.id
        c2id = c2.id
        session.expire(parent)
        session.expire(c1)
        session.expire(c2)
        session.delete(c1)
        session.delete(c2)
        session.delete(parent)
        # testing that relationships
        # are loaded even if all ids/references are
        # expired
        self.assert_sql_execution(
            testing.db,
            session.flush,
            AllOf(
                # [ticket:2002] - ensure the m2os are loaded.
                # the selects here are in fact unexpiring
                # each row - the m2o comes from the identity map.
                # the User row might be handled before or the addresses
                # are loaded so need to use AllOf
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c1id}
                ),
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c2id}
                ),
                CompiledSQL(
                    "SELECT users.id AS users_id, users.name AS users_name "
                    "FROM users WHERE users.id = :param_1",
                    lambda ctx: {'param_1': pid}
                ),
                CompiledSQL(
                    "DELETE FROM addresses WHERE addresses.id = :id",
                    lambda ctx: [{'id': c1id}, {'id': c2id}]
                ),
                CompiledSQL(
                    "DELETE FROM users WHERE users.id = :id",
                    lambda ctx: {'id': pid}
                ),
            ),
        )
    # m2o delete of children only: no SELECT should be emitted for the
    # (non-deleted) parent of a simple many-to-one.
    def test_many_to_one_delete_childonly_unloaded(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'parent':relationship(User)
        })
        parent = User(name='p1')
        c1, c2 = Address(email_address='c1', parent=parent), \
                Address(email_address='c2', parent=parent)
        session = Session()
        session.add_all([c1, c2])
        session.add(parent)
        session.flush()
        pid = parent.id
        c1id = c1.id
        c2id = c2.id
        session.expire(c1)
        session.expire(c2)
        session.delete(c1)
        session.delete(c2)
        self.assert_sql_execution(
            testing.db,
            session.flush,
            AllOf(
                # [ticket:2049] - we aren't deleting User,
                # relationship is simple m2o, no SELECT should be emitted for it.
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c1id}
                ),
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c2id}
                ),
            ),
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                lambda ctx: [{'id': c1id}, {'id': c2id}]
            ),
        )
    # Same as above but with the parent also expired.
    def test_many_to_one_delete_childonly_unloaded_expired(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'parent':relationship(User)
        })
        parent = User(name='p1')
        c1, c2 = Address(email_address='c1', parent=parent), \
                Address(email_address='c2', parent=parent)
        session = Session()
        session.add_all([c1, c2])
        session.add(parent)
        session.flush()
        pid = parent.id
        c1id = c1.id
        c2id = c2.id
        session.expire(parent)
        session.expire(c1)
        session.expire(c2)
        session.delete(c1)
        session.delete(c2)
        self.assert_sql_execution(
            testing.db,
            session.flush,
            AllOf(
                # the parent User is expired, so it gets loaded here.
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c1id}
                ),
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c2id}
                ),
            ),
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                lambda ctx: [{'id': c1id}, {'id': c2id}]
            ),
        )
    def test_natural_ordering(self):
        """test that unconnected items take relationship() into account regardless."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'parent':relationship(User)
        })
        sess = create_session()
        u1 = User(id=1, name='u1')
        a1 = Address(id=1, user_id=1, email_address='a2')
        sess.add_all([u1, a1])
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "INSERT INTO users (id, name) VALUES (:id, :name)",
                {'id':1, 'name':'u1'}),
            CompiledSQL(
                "INSERT INTO addresses (id, user_id, email_address) "
                "VALUES (:id, :user_id, :email_address)",
                {'email_address': 'a2', 'user_id': 1, 'id': 1}
            )
        )
        sess.delete(u1)
        sess.delete(a1)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                [{'id': 1}]
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.id = :id",
                [{'id': 1}]
            )
        )
    def test_natural_selfref(self):
        """test that unconnected items take relationship() into account regardless."""
        Node, nodes = self.classes.Node, self.tables.nodes
        mapper(Node, nodes, properties={
            'children':relationship(Node)
        })
        sess = create_session()
        n1 = Node(id=1)
        n2 = Node(id=2, parent_id=1)
        n3 = Node(id=3, parent_id=2)
        # insert order is determined from add order since they
        # are the same class
        sess.add_all([n1, n2, n3])
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "INSERT INTO nodes (id, parent_id, data) VALUES "
                "(:id, :parent_id, :data)",
                [{'parent_id': None, 'data': None, 'id': 1},
                {'parent_id': 1, 'data': None, 'id': 2},
                {'parent_id': 2, 'data': None, 'id': 3}]
            ),
        )
    # m2m: both endpoint rows INSERT before the association row; also
    # verifies the unloaded 'keywords' collection is not fetched on UPDATE.
    def test_many_to_many(self):
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                self.tables.items,
                                self.tables.item_keywords,
                                self.classes.Keyword,
                                self.classes.Item)
        mapper(Item, items, properties={
            'keywords':relationship(Keyword, secondary=item_keywords)
        })
        mapper(Keyword, keywords)
        sess = create_session()
        k1 = Keyword(name='k1')
        i1 = Item(description='i1', keywords=[k1])
        sess.add(i1)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            AllOf(
                CompiledSQL(
                    "INSERT INTO keywords (name) VALUES (:name)",
                    {'name':'k1'}
                ),
                CompiledSQL(
                    "INSERT INTO items (description) VALUES (:description)",
                    {'description':'i1'}
                ),
            ),
            CompiledSQL(
                "INSERT INTO item_keywords (item_id, keyword_id) "
                "VALUES (:item_id, :keyword_id)",
                lambda ctx:{'item_id':i1.id, 'keyword_id':k1.id}
            )
        )
        # test that keywords collection isn't loaded
        sess.expire(i1, ['keywords'])
        i1.description = 'i2'
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL("UPDATE items SET description=:description "
                        "WHERE items.id = :items_id",
                        lambda ctx:{'description':'i2', 'items_id':i1.id})
        )
    # Flush-plan size for a lone m2o object.
    def test_m2o_flush_size(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users)
        mapper(Address, addresses, properties={
            'user':relationship(User, passive_updates=True)
        })
        sess = create_session()
        u1 = User(name='ed')
        sess.add(u1)
        self._assert_uow_size(sess, 2)
    # Flush-plan size as an o2m association is added / loaded.
    def test_o2m_flush_size(self):
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address),
        })
        mapper(Address, addresses)
        sess = create_session()
        u1 = User(name='ed')
        sess.add(u1)
        self._assert_uow_size(sess, 2)
        sess.flush()
        u1.name='jack'
        self._assert_uow_size(sess, 2)
        sess.flush()
        a1 = Address(email_address='foo')
        sess.add(a1)
        sess.flush()
        u1.addresses.append(a1)
        self._assert_uow_size(sess, 6)
        sess.flush()
        sess = create_session()
        u1 = sess.query(User).first()
        u1.name='ed'
        self._assert_uow_size(sess, 2)
        u1.addresses
        self._assert_uow_size(sess, 6)
class SingleCycleTest(UOWTest):
def teardown(self):
engines.testing_reaper.rollback_all()
# mysql can't handle delete from nodes
# since it doesn't deal with the FKs correctly,
# so wipe out the parent_id first
testing.db.execute(
self.tables.nodes.update().values(parent_id=None)
)
super(SingleCycleTest, self).teardown()
def test_one_to_many_save(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node)
})
sess = create_session()
n2, n3 = Node(data='n2'), Node(data='n3')
n1 = Node(data='n1', children=[n2, n3])
sess.add(n1)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
{'parent_id': None, 'data': 'n1'}
),
AllOf(
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx: {'parent_id': n1.id, 'data': 'n2'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx: {'parent_id': n1.id, 'data': 'n3'}
),
)
)
def test_one_to_many_delete_all(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node)
})
sess = create_session()
n2, n3 = Node(data='n2', children=[]), Node(data='n3', children=[])
n1 = Node(data='n1', children=[n2, n3])
sess.add(n1)
sess.flush()
sess.delete(n1)
sess.delete(n2)
sess.delete(n3)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:[{'id':n2.id}, {'id':n3.id}]),
CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx: {'id':n1.id})
)
def test_one_to_many_delete_parent(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node)
})
sess = create_session()
n2, n3 = Node(data='n2', children=[]), Node(data='n3', children=[])
n1 = Node(data='n1', children=[n2, n3])
sess.add(n1)
sess.flush()
sess.delete(n1)
self.assert_sql_execution(
testing.db,
sess.flush,
AllOf(
CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
"WHERE nodes.id = :nodes_id",
lambda ctx: {'nodes_id':n3.id, 'parent_id':None}),
CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
"WHERE nodes.id = :nodes_id",
lambda ctx: {'nodes_id':n2.id, 'parent_id':None}),
),
CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:{'id':n1.id})
)
def test_many_to_one_save(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'parent':relationship(Node, remote_side=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
n2, n3 = Node(data='n2', parent=n1), Node(data='n3', parent=n1)
sess.add_all([n2, n3])
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
{'parent_id': None, 'data': 'n1'}
),
AllOf(
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx: {'parent_id': n1.id, 'data': 'n2'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx: {'parent_id': n1.id, 'data': 'n3'}
),
)
)
def test_many_to_one_delete_all(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'parent':relationship(Node, remote_side=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
n2, n3 = Node(data='n2', parent=n1), Node(data='n3', parent=n1)
sess.add_all([n2, n3])
sess.flush()
sess.delete(n1)
sess.delete(n2)
sess.delete(n3)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:[{'id':n2.id},{'id':n3.id}]),
CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx: {'id':n1.id})
)
def test_many_to_one_set_null_unloaded(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'parent':relationship(Node, remote_side=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
n2 = Node(data='n2', parent=n1)
sess.add_all([n1, n2])
sess.flush()
sess.close()
n2 = sess.query(Node).filter_by(data='n2').one()
n2.parent = None
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"UPDATE nodes SET parent_id=:parent_id WHERE "
"nodes.id = :nodes_id",
lambda ctx: {"parent_id":None, "nodes_id":n2.id}
)
)
def test_cycle_rowswitch(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node)
})
sess = create_session()
n2, n3 = Node(data='n2', children=[]), Node(data='n3', children=[])
n1 = Node(data='n1', children=[n2])
sess.add(n1)
sess.flush()
sess.delete(n2)
n3.id = n2.id
n1.children.append(n3)
sess.flush()
def test_bidirectional_mutations_one(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node,
backref=backref('parent',
remote_side=nodes.c.id))
})
sess = create_session()
n2, n3 = Node(data='n2', children=[]), Node(data='n3', children=[])
n1 = Node(data='n1', children=[n2])
sess.add(n1)
sess.flush()
sess.delete(n2)
n1.children.append(n3)
sess.flush()
sess.delete(n1)
sess.delete(n3)
sess.flush()
def test_bidirectional_multilevel_save(self):
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
'children':relationship(Node,
backref=backref('parent', remote_side=nodes.c.id)
)
})
sess = create_session()
n1 = Node(data='n1')
n1.children.append(Node(data='n11'))
n12 = Node(data='n12')
n1.children.append(n12)
n1.children.append(Node(data='n13'))
n1.children[1].children.append(Node(data='n121'))
n1.children[1].children.append(Node(data='n122'))
n1.children[1].children.append(Node(data='n123'))
sess.add(n1)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':None, 'data':'n1'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n11'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n12'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n13'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n121'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n122'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
"(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n123'}
),
)
def test_singlecycle_flush_size(self):
    """Track the unit-of-work size across a sequence of flushes against
    a self-referential mapping, before and after linking two nodes."""
    Node, nodes = self.classes.Node, self.tables.nodes
    mapper(
        Node,
        nodes,
        properties={'children': relationship(Node)},
    )
    session = create_session()

    root = Node(data='ed')
    session.add(root)
    self._assert_uow_size(session, 2)
    session.flush()

    # a plain column mutation keeps the uow at the same size
    root.data = 'jack'
    self._assert_uow_size(session, 2)
    session.flush()

    other = Node(data='foo')
    session.add(other)
    session.flush()

    # linking the two nodes grows the uow by one
    root.children.append(other)
    self._assert_uow_size(session, 3)
    session.flush()

    # fresh session: a reloaded row behaves the same way
    session = create_session()
    root = session.query(Node).first()
    root.data = 'ed'
    self._assert_uow_size(session, 2)
    root.children
    self._assert_uow_size(session, 2)
def test_delete_unloaded_m2o(self):
    """Delete a parent and its children while every instance is expired.

    Verifies that the flush still loads each many-to-one so dependency
    ordering deletes the children before the parent; the SELECTs below
    are the unexpire of each row, the m2o itself comes from the
    identity map.
    """
    Node, nodes = self.classes.Node, self.tables.nodes
    mapper(Node, nodes, properties={
        'parent':relationship(Node, remote_side=nodes.c.id)
    })
    parent = Node()
    c1, c2 = Node(parent=parent), Node(parent=parent)
    session = Session()
    session.add_all([c1, c2])
    session.add(parent)
    session.flush()
    # capture primary keys before expiring everything
    pid = parent.id
    c1id = c1.id
    c2id = c2.id
    session.expire(parent)
    session.expire(c1)
    session.expire(c2)
    session.delete(c1)
    session.delete(c2)
    session.delete(parent)
    # testing that relationships
    # are loaded even if all ids/references are
    # expired
    self.assert_sql_execution(
        testing.db,
        session.flush,
        AllOf(
            # ensure all three m2os are loaded.
            # the selects here are in fact unexpiring
            # each row - the m2o comes from the identity map.
            CompiledSQL(
                "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
                "nodes_parent_id, "
                "nodes.data AS nodes_data FROM nodes "
                "WHERE nodes.id = :param_1",
                lambda ctx: {'param_1': pid}
            ),
            CompiledSQL(
                "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
                "nodes_parent_id, "
                "nodes.data AS nodes_data FROM nodes "
                "WHERE nodes.id = :param_1",
                lambda ctx: {'param_1': c1id}
            ),
            CompiledSQL(
                "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
                "nodes_parent_id, "
                "nodes.data AS nodes_data FROM nodes "
                "WHERE nodes.id = :param_1",
                lambda ctx: {'param_1': c2id}
            ),
            AllOf(
                # children first (batched), parent last
                CompiledSQL(
                    "DELETE FROM nodes WHERE nodes.id = :id",
                    lambda ctx: [{'id': c1id}, {'id': c2id}]
                ),
                CompiledSQL(
                    "DELETE FROM nodes WHERE nodes.id = :id",
                    lambda ctx: {'id': pid}
                ),
            ),
        ),
    )
class SingleCyclePlusAttributeTest(fixtures.MappedTest,
                    testing.AssertsExecutionResults, AssertsUOW):
    """A self-referential 'nodes' table plus an unrelated one-to-many
    'foobars'; checks that flushing only the node graph does not pull
    the FooBar processors into the unit of work."""

    @classmethod
    def define_tables(cls, metadata):
        # self-referential parent/child table
        Table('nodes', metadata,
            Column('id', Integer, primary_key=True,
                        test_needs_autoincrement=True),
            Column('parent_id', Integer, ForeignKey('nodes.id')),
            Column('data', String(30))
        )
        # simple child table hanging off nodes
        Table('foobars', metadata,
            Column('id', Integer, primary_key=True,
                        test_needs_autoincrement=True),
            Column('parent_id', Integer, ForeignKey('nodes.id')),
        )

    def test_flush_size(self):
        """uow size stays minimal until a FooBar actually appears."""
        foobars, nodes = self.tables.foobars, self.tables.nodes
        class Node(fixtures.ComparableEntity):
            pass
        class FooBar(fixtures.ComparableEntity):
            pass
        mapper(Node, nodes, properties={
            'children':relationship(Node),
            'foobars':relationship(FooBar)
        })
        mapper(FooBar, foobars)
        sess = create_session()
        n1 = Node(data='n1')
        n2 = Node(data='n2')
        n1.children.append(n2)
        sess.add(n1)
        # ensure "foobars" doesn't get yanked in here
        self._assert_uow_size(sess, 3)
        n1.foobars.append(FooBar())
        # saveupdateall/deleteall for FooBar added here,
        # plus processstate node.foobars
        # currently the "all" procs stay in pairs
        self._assert_uow_size(sess, 6)
        sess.flush()
class SingleCycleM2MTest(fixtures.MappedTest,
                    testing.AssertsExecutionResults, AssertsUOW):
    """Self-referential many-to-many via 'node_to_nodes', plus a
    self-referential many-to-one 'favorite'; asserts that association
    rows are deleted ahead of the node rows they reference."""

    @classmethod
    def define_tables(cls, metadata):
        nodes = Table('nodes', metadata,
            Column('id', Integer,
                        primary_key=True,
                        test_needs_autoincrement=True),
            Column('data', String(30)),
            Column('favorite_node_id', Integer, ForeignKey('nodes.id'))
        )
        # association table for the self-referential m2m
        node_to_nodes =Table('node_to_nodes', metadata,
            Column('left_node_id', Integer,
                        ForeignKey('nodes.id'),primary_key=True),
            Column('right_node_id', Integer,
                        ForeignKey('nodes.id'),primary_key=True),
        )

    def test_many_to_many_one(self):
        """Build a small node graph, then delete nodes and verify the
        association rows are removed first, in the expected batches."""
        nodes, node_to_nodes = self.tables.nodes, self.tables.node_to_nodes
        class Node(fixtures.ComparableEntity):
            pass
        mapper(Node, nodes, properties={
            'children':relationship(Node, secondary=node_to_nodes,
                primaryjoin=nodes.c.id==node_to_nodes.c.left_node_id,
                secondaryjoin=nodes.c.id==node_to_nodes.c.right_node_id,
                backref='parents'
            ),
            'favorite':relationship(Node, remote_side=nodes.c.id)
        })
        sess = create_session()
        n1 = Node(data='n1')
        n2 = Node(data='n2')
        n3 = Node(data='n3')
        n4 = Node(data='n4')
        n5 = Node(data='n5')
        n4.favorite = n3
        n1.favorite = n5
        n5.favorite = n2
        n1.children = [n2, n3, n4]
        n2.children = [n3, n5]
        n3.children = [n5, n4]
        sess.add_all([n1, n2, n3, n4, n5])
        # can't really assert the SQL on this easily
        # since there's too many ways to insert the rows.
        # so check the end result
        sess.flush()
        eq_(
            sess.query(node_to_nodes.c.left_node_id,
                            node_to_nodes.c.right_node_id).\
                    order_by(node_to_nodes.c.left_node_id,
                            node_to_nodes.c.right_node_id).\
                    all(),
            sorted([
                    (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id),
                    (n2.id, n3.id), (n2.id, n5.id),
                    (n3.id, n5.id), (n3.id, n4.id)
                ])
        )
        sess.delete(n1)
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            # this is n1.parents firing off, as it should, since
            # passive_deletes is False for n1.parents
            CompiledSQL(
                "SELECT nodes.id AS nodes_id, nodes.data AS nodes_data, "
                "nodes.favorite_node_id AS nodes_favorite_node_id FROM "
                "nodes, node_to_nodes WHERE :param_1 = "
                "node_to_nodes.right_node_id AND nodes.id = "
                "node_to_nodes.left_node_id" ,
                lambda ctx:{'param_1': n1.id},
            ),
            # association rows referencing n1 go first...
            CompiledSQL(
                "DELETE FROM node_to_nodes WHERE "
                "node_to_nodes.left_node_id = :left_node_id AND "
                "node_to_nodes.right_node_id = :right_node_id",
                lambda ctx:[
                    {'right_node_id': n2.id, 'left_node_id': n1.id},
                    {'right_node_id': n3.id, 'left_node_id': n1.id},
                    {'right_node_id': n4.id, 'left_node_id': n1.id}
                ]
            ),
            # ...then the node row itself
            CompiledSQL(
                "DELETE FROM nodes WHERE nodes.id = :id",
                lambda ctx:{'id': n1.id}
            ),
        )
        for n in [n2, n3, n4, n5]:
            sess.delete(n)
        # load these collections
        # outside of the flush() below
        n4.children
        n5.children
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "DELETE FROM node_to_nodes WHERE node_to_nodes.left_node_id "
                "= :left_node_id AND node_to_nodes.right_node_id = "
                ":right_node_id",
                lambda ctx:[
                    {'right_node_id': n5.id, 'left_node_id': n3.id},
                    {'right_node_id': n4.id, 'left_node_id': n3.id},
                    {'right_node_id': n3.id, 'left_node_id': n2.id},
                    {'right_node_id': n5.id, 'left_node_id': n2.id}
                ]
            ),
            # nodes deleted in dependency batches: n4/n5 before n2/n3
            CompiledSQL(
                "DELETE FROM nodes WHERE nodes.id = :id",
                lambda ctx:[{'id': n4.id}, {'id': n5.id}]
            ),
            CompiledSQL(
                "DELETE FROM nodes WHERE nodes.id = :id",
                lambda ctx:[{'id': n2.id}, {'id': n3.id}]
            ),
        )
class RowswitchAccountingTest(fixtures.MappedTest):
    """Exercises "row switch" accounting: a DELETE and an INSERT against
    the same primary key within one flush are handled as an update of
    the existing row rather than separate statements."""

    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', Integer)
        )
        Table('child', metadata,
            Column('id', Integer, ForeignKey('parent.id'), primary_key=True),
            Column('data', Integer)
        )

    def _fixture(self):
        """Map Parent with a single Child under delete-orphan cascade."""
        parent, child = self.tables.parent, self.tables.child
        class Parent(fixtures.BasicEntity):
            pass
        class Child(fixtures.BasicEntity):
            pass
        mapper(Parent, parent, properties={
            'child':relationship(Child, uselist=False,
                                cascade="all, delete-orphan",
                                backref="parent")
        })
        mapper(Child, child)
        return Parent, Child

    def test_switch_on_update(self):
        """Merging a new Parent/Child pair onto an existing primary key
        replaces the child; the displaced child remains in the session
        until flushed away."""
        Parent, Child = self._fixture()
        sess = create_session(autocommit=False)
        p1 = Parent(id=1, child=Child())
        sess.add(p1)
        sess.commit()
        sess.close()
        p2 = Parent(id=1, child=Child())
        p3 = sess.merge(p2)
        # get_history returns (added, unchanged, deleted); [2][0] is the
        # child instance displaced by the merge
        old = attributes.get_history(p3, 'child')[2][0]
        assert old in sess
        sess.flush()
        # the merged child now belongs to this session
        assert p3.child._sa_instance_state.session_id == sess.hash_key
        assert p3.child in sess
        # repeat the merge/replace cycle a second time
        p4 = Parent(id=1, child=Child())
        p5 = sess.merge(p4)
        old = attributes.get_history(p5, 'child')[2][0]
        assert old in sess
        sess.flush()

    def test_switch_on_delete(self):
        """Changing the primary key then deleting still removes the row,
        and the in-memory id keeps its new value."""
        Parent, Child = self._fixture()
        sess = Session()
        p1 = Parent(id=1, data=2, child=None)
        sess.add(p1)
        sess.flush()
        p1.id = 5
        sess.delete(p1)
        eq_(p1.id, 5)
        sess.flush()
        eq_(sess.scalar(self.tables.parent.count()), 0)
class RowswitchM2OTest(fixtures.MappedTest):
    # tests for #3060 and related issues
    """Row-switch interaction with many-to-one and scalar attributes:
    when a collection member is replaced by a new object with the same
    primary key, explicitly-set None values must carry onto the
    existing row -- and a bare attribute *read* must not destroy that
    history (see the comments in the individual tests)."""
    @classmethod
    def define_tables(cls, metadata):
        Table(
            'a', metadata,
            Column('id', Integer, primary_key=True),
        )
        Table(
            'b', metadata,
            Column('id', Integer, primary_key=True),
            Column('aid', ForeignKey('a.id')),
            Column('cid', ForeignKey('c.id')),
            Column('data', String(50))
        )
        Table(
            'c', metadata,
            Column('id', Integer, primary_key=True),
        )

    def _fixture(self):
        """A -(delete-orphan)-> B -(m2o)-> C."""
        a, b, c = self.tables.a, self.tables.b, self.tables.c
        class A(fixtures.BasicEntity):
            pass
        class B(fixtures.BasicEntity):
            pass
        class C(fixtures.BasicEntity):
            pass
        mapper(A, a, properties={
            'bs': relationship(B, cascade="all, delete-orphan")
        })
        mapper(B, b, properties={
            'c': relationship(C)
        })
        mapper(C, c)
        return A, B, C

    def test_set_none_replaces_m2o(self):
        # we have to deal here with the fact that a
        # get of an unset attribute implicitly sets it to None
        # with no history.  So while we'd like "b.x = None" to
        # record that "None" was added and we can then actively set it,
        # a simple read of "b.x" ruins that; we'd have to dramatically
        # alter the semantics of get() such that it creates history, which
        # would incur extra work within the flush process to deal with
        # change that previously showed up as nothing.
        A, B, C = self._fixture()
        sess = Session()
        sess.add(
            A(id=1, bs=[B(id=1, c=C(id=1))])
        )
        sess.commit()
        a1 = sess.query(A).first()
        # row switch: new B(id=1) with c explicitly None
        a1.bs = [B(id=1, c=None)]
        sess.commit()
        assert a1.bs[0].c is None

    def test_set_none_w_get_replaces_m2o(self):
        """As above, but reading b2.c before assigning None; the read
        implicitly initializes the attribute, the outcome must match."""
        A, B, C = self._fixture()
        sess = Session()
        sess.add(
            A(id=1, bs=[B(id=1, c=C(id=1))])
        )
        sess.commit()
        a1 = sess.query(A).first()
        b2 = B(id=1)
        # NOTE: this read happens before the assignment on purpose
        assert b2.c is None
        b2.c = None
        a1.bs = [b2]
        sess.commit()
        assert a1.bs[0].c is None

    def test_set_none_replaces_scalar(self):
        # this case worked before #3060, because a straight scalar
        # set of None shows up.  Howver, as test_set_none_w_get
        # shows, we can't rely on this - the get of None will blow
        # away the history.
        A, B, C = self._fixture()
        sess = Session()
        sess.add(
            A(id=1, bs=[B(id=1, data='somedata')])
        )
        sess.commit()
        a1 = sess.query(A).first()
        a1.bs = [B(id=1, data=None)]
        sess.commit()
        assert a1.bs[0].data is None

    def test_set_none_w_get_replaces_scalar(self):
        """Scalar-column variant of the read-before-set case."""
        A, B, C = self._fixture()
        sess = Session()
        sess.add(
            A(id=1, bs=[B(id=1, data='somedata')])
        )
        sess.commit()
        a1 = sess.query(A).first()
        b2 = B(id=1)
        # read first, then set to None - must still produce NULL
        assert b2.data is None
        b2.data = None
        a1.bs = [b2]
        sess.commit()
        assert a1.bs[0].data is None
class BasicStaleChecksTest(fixtures.MappedTest):
    """Flush-time "stale data" checks: an UPDATE or DELETE matching
    fewer rows than the unit of work expected either raises
    StaleDataError / emits a warning, or passes silently when the
    mapper is configured with ``confirm_deleted_rows=False``."""

    @classmethod
    def define_tables(cls, metadata):
        Table('parent', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', Integer)
        )
        Table('child', metadata,
            Column('id', Integer, ForeignKey('parent.id'), primary_key=True),
            Column('data', Integer)
        )

    def _fixture(self, confirm_deleted_rows=True):
        """Map Parent/Child; ``confirm_deleted_rows`` is passed through
        to the Parent mapper so tests can toggle the stale-row check."""
        parent, child = self.tables.parent, self.tables.child
        class Parent(fixtures.BasicEntity):
            pass
        class Child(fixtures.BasicEntity):
            pass
        mapper(Parent, parent, properties={
            'child':relationship(Child, uselist=False,
                                cascade="all, delete-orphan",
                                backref="parent"),
        }, confirm_deleted_rows=confirm_deleted_rows)
        mapper(Child, child)
        return Parent, Child

    def test_update_single_missing(self):
        """An UPDATE matching zero rows raises StaleDataError."""
        Parent, Child = self._fixture()
        sess = Session()
        p1 = Parent(id=1, data=2)
        sess.add(p1)
        sess.flush()
        # delete the row behind the ORM's back so the UPDATE misses
        sess.execute(self.tables.parent.delete())
        p1.data = 3
        assert_raises_message(
            orm_exc.StaleDataError,
            # raw strings: \( and \) are regex escapes, not string escapes
            r"UPDATE statement on table 'parent' expected to "
            r"update 1 row\(s\); 0 were matched.",
            sess.flush
        )

    @testing.requires.sane_multi_rowcount
    def test_delete_multi_missing_warning(self):
        """A multi-row DELETE matching zero rows warns (rather than
        raising) when the DBAPI reports usable multi-rowcounts."""
        Parent, Child = self._fixture()
        sess = Session()
        p1 = Parent(id=1, data=2, child=None)
        p2 = Parent(id=2, data=3, child=None)
        sess.add_all([p1, p2])
        sess.flush()
        # remove the rows out-of-band, then delete the stale objects
        sess.execute(self.tables.parent.delete())
        sess.delete(p1)
        sess.delete(p2)
        assert_raises_message(
            exc.SAWarning,
            r"DELETE statement on table 'parent' expected to "
            r"delete 2 row\(s\); 0 were matched.",
            sess.flush
        )

    def test_delete_multi_missing_allow(self):
        """With confirm_deleted_rows=False the same situation passes
        without warning or error."""
        Parent, Child = self._fixture(confirm_deleted_rows=False)
        sess = Session()
        p1 = Parent(id=1, data=2, child=None)
        p2 = Parent(id=2, data=3, child=None)
        sess.add_all([p1, p2])
        sess.flush()
        sess.execute(self.tables.parent.delete())
        sess.delete(p1)
        sess.delete(p2)
        sess.flush()
class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
    """Verifies executemany batching of INSERTs during flush: rows are
    grouped while they share the same statement structure and a new
    batch starts whenever the column set or an SQL expression changes."""
    @classmethod
    def define_tables(cls, metadata):
        Table('t', metadata,
            Column('id', Integer, primary_key=True,
                        test_needs_autoincrement=True),
            Column('data', String(50)),
            Column('def_', String(50), server_default='def1')
        )

    def test_batch_interaction(self):
        """test batching groups same-structured, primary
        key present statements together.
        """
        t = self.tables.t
        class T(fixtures.ComparableEntity):
            pass
        mapper(T, t)
        sess = Session()
        sess.add_all([
            T(data='t1'),
            T(data='t2'),
            T(id=3, data='t3'),
            T(id=4, data='t4'),
            T(id=5, data='t5'),
            T(id=6, data=func.lower('t6')),   # SQL expression interrupts the batch
            T(id=7, data='t7'),
            T(id=8, data='t8'),
            T(id=9, data='t9', def_='def2'),  # explicit value for server-default col
            T(id=10, data='t10', def_='def3'),
            T(id=11, data='t11'),
        ])
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            # t1/t2 carry no primary key: emitted individually
            CompiledSQL(
                "INSERT INTO t (data) VALUES (:data)",
                {'data': 't1'}
            ),
            CompiledSQL(
                "INSERT INTO t (data) VALUES (:data)",
                {'data': 't2'}
            ),
            # t3-t5 share (id, data): one executemany batch
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                [{'data': 't3', 'id': 3},
                    {'data': 't4', 'id': 4},
                    {'data': 't5', 'id': 5}]
            ),
            # t6 uses an SQL expression, so it runs alone
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, lower(:lower_1))",
                {'lower_1': 't6', 'id': 6}
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                [{'data': 't7', 'id': 7}, {'data': 't8', 'id': 8}]
            ),
            # t9/t10 add the def_ column: new statement shape, new batch
            CompiledSQL(
                "INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
                [{'data': 't9', 'id': 9, 'def_':'def2'},
                    {'data': 't10', 'id': 10, 'def_':'def3'}]
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                {'data': 't11', 'id': 11}
            ),
        )
class LoadersUsingCommittedTest(UOWTest):
    """Test that events which occur within a flush()
    get the same attribute loading behavior as on the outside
    of the flush, and that the unit of work itself uses the
    "committed" version of primary/foreign key attributes
    when loading a collection for historical purposes (this typically
    has importance for when primary key values change).
    """

    def _mapper_setup(self, passive_updates=True):
        """Map User/Address with an ordered, bidirectional o2m and
        return a fresh non-autocommit session."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses': relationship(Address,
                order_by=addresses.c.email_address,
                passive_updates=passive_updates,
                backref='user')
        })
        mapper(Address, addresses)
        return create_session(autocommit=False)

    def test_before_update_m2o(self):
        """Expect normal many to one attribute load behavior
        (should not get committed value)
        from within public 'before_update' event"""
        sess = self._mapper_setup()
        Address, User = self.classes.Address, self.classes.User
        def before_update(mapper, connection, target):
            # if get committed is used to find target.user, then
            # it will be still be u1 instead of u2
            assert target.user.id == target.user_id == u2.id
        from sqlalchemy import event
        event.listen(Address, 'before_update', before_update)
        a1 = Address(email_address='a1')
        u1 = User(name='u1', addresses=[a1])
        sess.add(u1)
        u2 = User(name='u2')
        sess.add(u2)
        sess.commit()
        sess.expunge_all()
        # lookup an address and move it to the other user
        a1 = sess.query(Address).get(a1.id)
        # move address to another user's fk
        assert a1.user_id == u1.id
        a1.user_id = u2.id
        # flush fires before_update, which performs the assertion above
        sess.flush()

    def test_before_update_o2m_passive(self):
        """Expect normal one to many attribute load behavior
        (should not get committed value)
        from within public 'before_update' event"""
        self._test_before_update_o2m(True)

    def test_before_update_o2m_notpassive(self):
        """Expect normal one to many attribute load behavior
        (should not get committed value)
        from within public 'before_update' event with
        passive_updates=False
        """
        self._test_before_update_o2m(False)

    def _test_before_update_o2m(self, passive_updates):
        """Shared body for the two tests above; ``passive_updates``
        selects which collection-loading behavior is asserted inside
        the event."""
        sess = self._mapper_setup(passive_updates=passive_updates)
        Address, User = self.classes.Address, self.classes.User

        class AvoidReferencialError(Exception):
            """the test here would require ON UPDATE CASCADE on FKs
            for the flush to fully succeed; this exception is used
            to cancel the flush before we get that far.
            """

        def before_update(mapper, connection, target):
            if passive_updates:
                # we shouldn't be using committed value.
                # so, having switched target's primary key,
                # we expect no related items in the collection
                # since we are using passive_updates
                # this is a behavior change since #2350
                assert 'addresses' not in target.__dict__
                eq_(target.addresses, [])
            else:
                # in contrast with passive_updates=True,
                # here we expect the orm to have looked up the addresses
                # with the committed value (it needs to in order to
                # update the foreign keys).  So we expect addresses
                # collection to move with the user,
                # (just like they will be after the update)
                # collection is already loaded
                assert 'addresses' in target.__dict__
                eq_([a.id for a in target.addresses],
                        [a.id for a in [a1, a2]])
            raise AvoidReferencialError()
        from sqlalchemy import event
        event.listen(User, 'before_update', before_update)

        a1 = Address(email_address='jack1')
        a2 = Address(email_address='jack2')
        u1 = User(id=1, name='jack', addresses=[a1, a2])
        sess.add(u1)
        sess.commit()
        sess.expunge_all()

        # change the primary key; the event above fires during flush
        # and cancels it via AvoidReferencialError
        u1 = sess.query(User).get(u1.id)
        u1.id = 2
        try:
            sess.flush()
        except AvoidReferencialError:
            pass
| michaelBenin/sqlalchemy | test/orm/test_unitofworkv2.py | Python | mit | 55,891 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ConfigurationProfilePreferencesOperations(object):
"""ConfigurationProfilePreferencesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~automanage_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Stash the pipeline client, configuration and the two
    (de)serialization helpers used by every operation below."""
    self._client = client
    self._config = config
    self._serialize = serializer
    self._deserialize = deserializer
def create_or_update(
    self,
    configuration_profile_preference_name, # type: str
    resource_group_name, # type: str
    parameters, # type: "models.ConfigurationProfilePreference"
    **kwargs # type: Any
):
    # type: (...) -> "models.ConfigurationProfilePreference"
    """Creates a configuration profile preference.

    Issues a PUT against the ARM resource URL; both 200 (updated) and
    201 (created) are treated as success.

    :param configuration_profile_preference_name: Name of the configuration profile preference.
    :type configuration_profile_preference_name: str
    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param parameters: Parameters supplied to create or update configuration profile preference.
    :type parameters: ~automanage_client.models.ConfigurationProfilePreference
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ConfigurationProfilePreference, or the result of cls(response)
    :rtype: ~automanage_client.models.ConfigurationProfilePreference
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.ConfigurationProfilePreference"]
    # 404/409 map to dedicated exception types; callers may extend via 'error_map'
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"
    content_type = kwargs.pop("content_type", "application/json")

    # Construct URL
    url = self.create_or_update.metadata['url']  # type: ignore
    path_format_arguments = {
        'configurationProfilePreferenceName': self._serialize.url("configuration_profile_preference_name", configuration_profile_preference_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = 'application/json'

    # Serialize the resource into the PUT body and run the pipeline
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'ConfigurationProfilePreference')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # 200 and 201 carry the same resource representation; deserialize
    # once instead of duplicating per-status branches.
    deserialized = self._deserialize('ConfigurationProfilePreference', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automanage/configurationProfilePreferences/{configurationProfilePreferenceName}'}  # type: ignore
def get(
    self,
    configuration_profile_preference_name, # type: str
    resource_group_name, # type: str
    **kwargs # type: Any
):
    # type: (...) -> "models.ConfigurationProfilePreference"
    """Get information about a configuration profile preference.

    :param configuration_profile_preference_name: The configuration profile preference name.
    :type configuration_profile_preference_name: str
    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ConfigurationProfilePreference, or the result of cls(response)
    :rtype: ~automanage_client.models.ConfigurationProfilePreference
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.ConfigurationProfilePreference"]
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"

    # Expand the URL template with the caller-supplied identifiers.
    path_args = {
        'configurationProfilePreferenceName': self._serialize.url("configuration_profile_preference_name", configuration_profile_preference_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
    }
    request_url = self._client.format_url(self.get.metadata['url'], **path_args)  # type: ignore

    # Query string and headers for a JSON GET.
    query_params = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    headers = {
        'Accept': 'application/json',
    }  # type: Dict[str, Any]

    request = self._client.get(request_url, query_params, headers)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ConfigurationProfilePreference', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automanage/configurationProfilePreferences/{configurationProfilePreferenceName}'}  # type: ignore
def delete(
    self,
    resource_group_name, # type: str
    configuration_profile_preference_name, # type: str
    **kwargs # type: Any
):
    # type: (...) -> None
    """Delete a configuration profile preference.

    Issues a DELETE against the ARM resource URL; 200 and 204 are both
    accepted as success and nothing is returned.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param configuration_profile_preference_name: Name of the configuration profile preference.
    :type configuration_profile_preference_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # 404/409 map to dedicated exception types; callers may extend via 'error_map'
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"

    # Construct URL
    url = self.delete.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'configurationProfilePreferenceName': self._serialize.url("configuration_profile_preference_name", configuration_profile_preference_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers (no Accept/body needed for DELETE)
    header_parameters = {}  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automanage/configurationProfilePreferences/{configurationProfilePreferenceName}'}  # type: ignore
def update(
    self,
    configuration_profile_preference_name, # type: str
    resource_group_name, # type: str
    parameters, # type: "models.ConfigurationProfilePreferenceUpdate"
    **kwargs # type: Any
):
    # type: (...) -> "models.ConfigurationProfilePreference"
    """Updates a configuration profile preference.

    Issues a PATCH with a ConfigurationProfilePreferenceUpdate body and
    returns the updated resource (200 only).

    :param configuration_profile_preference_name: Name of the configuration profile preference.
    :type configuration_profile_preference_name: str
    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param parameters: Parameters supplied to create or update configuration profile preference.
    :type parameters: ~automanage_client.models.ConfigurationProfilePreferenceUpdate
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ConfigurationProfilePreference, or the result of cls(response)
    :rtype: ~automanage_client.models.ConfigurationProfilePreference
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.ConfigurationProfilePreference"]
    # 404/409 map to dedicated exception types; callers may extend via 'error_map'
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"
    content_type = kwargs.pop("content_type", "application/json")

    # Construct URL
    url = self.update.metadata['url']  # type: ignore
    path_format_arguments = {
        'configurationProfilePreferenceName': self._serialize.url("configuration_profile_preference_name", configuration_profile_preference_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = 'application/json'

    # Serialize the patch document and run the pipeline
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'ConfigurationProfilePreferenceUpdate')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ConfigurationProfilePreference', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automanage/configurationProfilePreferences/{configurationProfilePreferenceName}'}  # type: ignore
def list_by_resource_group(
    self,
    resource_group_name, # type: str
    **kwargs # type: Any
):
    # type: (...) -> Iterable["models.ConfigurationProfilePreferenceList"]
    """Retrieve a list of configuration profile preferences within a given resource group.

    Returns a lazy pager; no HTTP request is made until iteration.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ConfigurationProfilePreferenceList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~automanage_client.models.ConfigurationProfilePreferenceList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.ConfigurationProfilePreferenceList"]
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"

    def prepare_request(next_link=None):
        # First page: templated URL plus api-version. Subsequent
        # pages: GET the service-provided next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        if not next_link:
            # Construct URL
            url = self.list_by_resource_group.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page; the first tuple element is the
        # continuation link -- always None here, so paging stops
        # after a single page.
        deserialized = self._deserialize('ConfigurationProfilePreferenceList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            # NOTE(review): unlike the other operations in this class,
            # the error body is deserialized before map_error here --
            # presumably harmless since map_error raises only for
            # mapped status codes, but confirm against generator output.
            error = self._deserialize(models.ErrorResponse, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automanage/configurationProfilePreferences'}  # type: ignore
def list_by_subscription(
    self,
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.ConfigurationProfilePreferenceList"]
    """Retrieve a list of configuration profile preferences within a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ConfigurationProfilePreferenceList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~automanage_client.models.ConfigurationProfilePreferenceList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # NOTE: auto-generated Azure SDK operation; identical to
    # list_by_resource_group except the URL is scoped to the subscription.
    cls = kwargs.pop('cls', None)  # type: ClsType["models.ConfigurationProfilePreferenceList"]
    error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-30-preview"

    def prepare_request(next_link=None):
        # Build the initial GET request, or a follow-up request to the
        # server-supplied continuation link.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'
        if not next_link:
            # Construct URL
            url = self.list_by_subscription.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand its items to ItemPaged.
        deserialized = self._deserialize('ConfigurationProfilePreferenceList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize(models.ErrorResponse, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Automanage/configurationProfilePreferences'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/automanage/azure-mgmt-automanage/azure/mgmt/automanage/operations/_configuration_profile_preferences_operations.py | Python | mit | 22,341 |
import re
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from plumbum.commands import CommandNotFound, ConcreteCommand, shquote
from plumbum.lib import ProcInfo
from plumbum.machines.base import BaseMachine
from plumbum.machines.env import BaseEnv
from plumbum.machines.local import LocalPath
from plumbum.path.remote import RemotePath, RemoteWorkdir, StatRes
class RemoteEnv(BaseEnv):
    """The remote machine's environment; exposes a dict-like interface.

    Mutations (set/delete/update) are mirrored into the remote shell
    session with ``export``/``unset`` so subsequent commands see them.
    """

    __slots__ = ["_orig", "remote"]

    def __init__(self, remote):
        session = remote._session
        # GNU env has a -0 argument; use it if present. Otherwise,
        # fall back to calling printenv on each (possible) variable
        # from plain env.
        env0 = session.run("env -0; echo")
        if env0[0] == 0 and not env0[2].rstrip():
            # NUL-separated output is safe even for values containing newlines.
            _curr = dict(
                line.split("=", 1) for line in env0[1].split("\x00") if "=" in line
            )
        else:
            # No -0 support: take the variable *names* from plain `env`
            # and read each value individually with printenv.
            lines = session.run("env; echo")[1].splitlines()
            split = (line.split("=", 1) for line in lines)
            keys = (line[0] for line in split if len(line) > 1)
            runs = ((key, session.run(f'printenv "{key}"; echo')) for key in keys)
            _curr = {
                key: run[1].rstrip("\n")
                for (key, run) in runs
                if run[0] == 0 and run[1].rstrip("\n") and not run[2]
            }
        super().__init__(remote.path, ":", _curr=_curr)
        self.remote = remote
        # Snapshot of the environment at connect time, used by getdelta().
        self._orig = self._curr.copy()

    def __delitem__(self, name):
        BaseEnv.__delitem__(self, name)
        self.remote._session.run(f"unset {name}")

    def __setitem__(self, name, value):
        BaseEnv.__setitem__(self, name, value)
        self.remote._session.run(f"export {name}={shquote(value)}")

    def pop(self, name, *default):
        BaseEnv.pop(self, name, *default)
        self.remote._session.run(f"unset {name}")

    def update(self, *args, **kwargs):
        BaseEnv.update(self, *args, **kwargs)
        # Re-export the full environment so the shell session stays in sync.
        self.remote._session.run(
            "export " + " ".join(f"{k}={shquote(v)}" for k, v in self.getdict().items())
        )

    def expand(self, expr):
        """Expands any environment variables and home shortcuts found in ``expr``
        (like ``os.path.expanduser`` combined with ``os.path.expandvars``)

        :param expr: An expression containing environment variables (as ``$FOO``) or
                     home shortcuts (as ``~/.bashrc``)

        :returns: The expanded string"""
        return self.remote.expand(expr)

    def expanduser(self, expr):
        """Expand home shortcuts (e.g., ``~/foo/bar`` or ``~john/foo/bar``)

        :param expr: An expression containing home shortcuts

        :returns: The expanded string"""
        return self.remote.expanduser(expr)

    # def clear(self):
    #    BaseEnv.clear(self, *args, **kwargs)
    #    self.remote._session.run("export %s" % " ".join("%s=%s" % (k, v) for k, v in self.getdict()))

    def getdelta(self):
        """Returns the difference between the this environment and the original environment of
        the remote machine"""
        self._curr["PATH"] = self.path.join()
        delta = {}
        for k, v in self._curr.items():
            if k not in self._orig:
                delta[k] = str(v)
        for k, v in self._orig.items():
            if k not in self._curr:
                # Deleted variables are reported as an empty string.
                delta[k] = ""
            else:
                if v != self._curr[k]:
                    delta[k] = self._curr[k]
        return delta
class RemoteCommand(ConcreteCommand):
    """A command bound to a specific remote machine; invoking it runs the
    executable over that machine's connection."""

    __slots__ = ["remote", "executable"]
    QUOTE_LEVEL = 1  # arguments get one extra level of shell quoting for the remote shell

    def __init__(self, remote, executable, encoding="auto"):
        self.remote = remote
        # "auto" inherits the remote machine's configured encoding.
        ConcreteCommand.__init__(
            self, executable, remote.custom_encoding if encoding == "auto" else encoding
        )

    @property
    def machine(self):
        return self.remote

    def __repr__(self):
        return f"RemoteCommand({self.remote!r}, {self.executable!r})"

    def popen(self, args=(), **kwargs):
        # Delegate process creation to the owning remote machine.
        return self.remote.popen(self[args], **kwargs)

    def nohup(self, cwd=".", stdout="nohup.out", stderr=None, append=True):
        """Runs a command detached."""
        return self.machine.daemonic_popen(self, cwd, stdout, stderr, append)
class ClosedRemoteMachine(Exception):
    """Raised when an attribute is accessed on a remote machine that has been closed."""
    pass
class ClosedRemote:
    """Tombstone placed over a closed remote machine's session: any attribute
    access raises ClosedRemoteMachine instead of silently using a dead link."""

    __slots__ = ["_obj", "__weakref__"]

    def __init__(self, obj):
        # Keep the original object only for a helpful repr in the error message.
        self._obj = obj

    def close(self):
        # Closing twice is a harmless no-op.
        pass

    def __getattr__(self, name):
        raise ClosedRemoteMachine(f"{self._obj!r} has been closed")
class BaseRemoteMachine(BaseMachine):
    """Represents a *remote machine*; serves as an entry point to everything related to that
    remote machine, such as working directory and environment manipulation, command creation,
    etc.

    Attributes:

    * ``cwd`` - the remote working directory
    * ``env`` - the remote environment
    * ``custom_encoding`` - the remote machine's default encoding (assumed to be UTF8)
    * ``connect_timeout`` - the connection timeout

    There also is a _cwd attribute that exists if the cwd is not current (del if cwd is changed).
    """

    # allow inheritors to override the RemoteCommand class
    RemoteCommand = RemoteCommand

    @property
    def cwd(self):
        # Created lazily; subclasses delete _cwd to force re-creation.
        if not hasattr(self, "_cwd"):
            self._cwd = RemoteWorkdir(self)
        return self._cwd

    def __init__(self, encoding="utf8", connect_timeout=10, new_session=False):
        self.custom_encoding = encoding
        self.connect_timeout = connect_timeout
        self._session = self.session(new_session=new_session)
        self.uname = self._get_uname()
        self.env = RemoteEnv(self)
        self._python = None
        # Cache for which() lookups, keyed on (program name, PATH value).
        self._program_cache = {}

    def _get_uname(self):
        """Best-effort detection of the remote OS name (``uname``, then a
        Python one-liner, then assume Windows)."""
        rc, out, _ = self._session.run("uname", retcode=None)
        if rc == 0:
            return out.strip()
        rc, out, _ = self._session.run(
            "python3 -c 'import platform;print(platform.uname()[0])'", retcode=None
        )
        if rc == 0:
            return out.strip()
        # all POSIX systems should have uname. make an educated guess it's Windows
        return "Windows"

    def __repr__(self):
        return f"<{self.__class__.__name__} {self}>"

    def __enter__(self):
        return self

    def __exit__(self, t, v, tb):
        self.close()

    def close(self):
        """closes the connection to the remote machine; all paths and programs will
        become defunct"""
        self._session.close()
        # Replace the session with a tombstone so later use raises clearly.
        self._session = ClosedRemote(self)

    def path(self, *parts):
        """A factory for :class:`RemotePaths <plumbum.path.remote.RemotePath>`.
        Usage: ``p = rem.path("/usr", "lib", "python2.7")``
        """
        parts2 = [str(self.cwd)]
        for p in parts:
            if isinstance(p, LocalPath):
                raise TypeError(f"Cannot construct RemotePath from {p!r}")
            parts2.append(self.expanduser(str(p)))
        return RemotePath(self, *parts2)

    def which(self, progname):
        """Looks up a program in the ``PATH``. If the program is not found, raises
        :class:`CommandNotFound <plumbum.commands.CommandNotFound>`

        :param progname: The program's name. Note that if underscores (``_``) are present
                         in the name, and the exact name is not found, they will be replaced
                         in turn by hyphens (``-``) then periods (``.``), and the name will
                         be looked up again for each alternative

        :returns: A :class:`RemotePath <plumbum.path.local.RemotePath>`
        """
        # Cache key includes PATH so changing the env invalidates hits.
        key = (progname, self.env.get("PATH", ""))
        try:
            return self._program_cache[key]
        except KeyError:
            pass
        alternatives = [progname]
        if "_" in progname:
            alternatives.append(progname.replace("_", "-"))
            alternatives.append(progname.replace("_", "."))
        for name in alternatives:
            for p in self.env.path:
                fn = p / name
                if fn.access("x") and not fn.is_dir():
                    self._program_cache[key] = fn
                    return fn
        raise CommandNotFound(progname, self.env.path)

    def __getitem__(self, cmd):
        """Returns a `Command` object representing the given program. ``cmd`` can be a string or
        a :class:`RemotePath <plumbum.path.remote.RemotePath>`; if it is a path, a command
        representing this path will be returned; otherwise, the program name will be looked up in
        the system's ``PATH`` (using ``which``). Usage::

            r_ls = rem["ls"]
        """
        if isinstance(cmd, RemotePath):
            if cmd.remote is self:
                return self.RemoteCommand(self, cmd)
            raise TypeError(
                f"Given path does not belong to this remote machine: {cmd!r}"
            )
        if not isinstance(cmd, LocalPath):
            # A string containing a path separator is treated as a path;
            # otherwise it is resolved via which().
            return self.RemoteCommand(
                self, self.path(cmd) if "/" in cmd or "\\" in cmd else self.which(cmd)
            )
        raise TypeError(f"cmd must not be a LocalPath: {cmd!r}")

    @property
    def python(self):
        """A command that represents the default remote python interpreter"""
        if not self._python:
            self._python = self["python3"]
        return self._python

    def session(self, isatty=False, new_session=False):
        """Creates a new :class:`ShellSession <plumbum.session.ShellSession>` object; this invokes the user's
        shell on the remote machine and executes commands on it over stdin/stdout/stderr"""
        raise NotImplementedError()

    def download(self, src, dst):
        """Downloads a remote file/directory (``src``) to a local destination (``dst``).
        ``src`` must be a string or a :class:`RemotePath <plumbum.path.remote.RemotePath>`
        pointing to this remote machine, and ``dst`` must be a string or a
        :class:`LocalPath <plumbum.machines.local.LocalPath>`"""
        raise NotImplementedError()

    def upload(self, src, dst):
        """Uploads a local file/directory (``src``) to a remote destination (``dst``).
        ``src`` must be a string or a :class:`LocalPath <plumbum.machines.local.LocalPath>`,
        and ``dst`` must be a string or a :class:`RemotePath <plumbum.path.remote.RemotePath>`
        pointing to this remote machine"""
        raise NotImplementedError()

    def popen(self, args, **kwargs):
        """Spawns the given command on the remote machine, returning a ``Popen``-like object;
        do not use this method directly, unless you need "low-level" control on the remote
        process"""
        raise NotImplementedError()

    def list_processes(self):
        """
        Returns information about all running processes (on POSIX systems: using ``ps``)

        .. versionadded:: 1.3
        """
        ps = self["ps"]
        lines = ps("-e", "-o", "pid,uid,stat,args").splitlines()
        lines.pop(0)  # header
        for line in lines:
            parts = line.strip().split()
            yield ProcInfo(int(parts[0]), int(parts[1]), parts[2], " ".join(parts[3:]))

    def pgrep(self, pattern):
        """
        Process grep: return information about all processes whose command-line args match the given regex pattern
        """
        pat = re.compile(pattern)
        for procinfo in self.list_processes():
            if pat.search(procinfo.args):
                yield procinfo

    @contextmanager
    def tempdir(self):
        """A context manager that creates a remote temporary directory, which is removed when
        the context exits"""
        # First form works with GNU mktemp; the fallback covers BSD-style mktemp.
        _, out, _ = self._session.run(
            "mktemp -d 2>/dev/null || mktemp -d tmp.XXXXXXXXXX"
        )
        # NOTE(review): despite the name, this is a *remote* path.
        local_dir = self.path(out.strip())
        try:
            yield local_dir
        finally:
            local_dir.delete()

    #
    # Path implementation
    #
    def _path_listdir(self, fn):
        files = self._session.run(f"ls -a {shquote(fn)}")[1].splitlines()
        files.remove(".")
        files.remove("..")
        return files

    def _path_glob(self, fn, pattern):
        # shquote does not work here due to the way bash loops use space as a separator
        pattern = pattern.replace(" ", r"\ ")
        fn = fn.replace(" ", r"\ ")
        matches = self._session.run(rf"for fn in {fn}/{pattern}; do echo $fn; done")[
            1
        ].splitlines()
        if len(matches) == 1 and not self._path_stat(matches[0]):
            return []  # pattern expansion failed
        return matches

    def _path_getuid(self, fn):
        # GNU stat uses -c; Darwin/FreeBSD use -f with different format codes.
        stat_cmd = (
            "stat -c '%u,%U' "
            if self.uname not in ("Darwin", "FreeBSD")
            else "stat -f '%u,%Su' "
        )
        return self._session.run(stat_cmd + shquote(fn))[1].strip().split(",")

    def _path_getgid(self, fn):
        stat_cmd = (
            "stat -c '%g,%G' "
            if self.uname not in ("Darwin", "FreeBSD")
            else "stat -f '%g,%Sg' "
        )
        return self._session.run(stat_cmd + shquote(fn))[1].strip().split(",")

    def _path_stat(self, fn):
        """Return a StatRes for ``fn``, or None if stat fails (e.g. missing file)."""
        if self.uname not in ("Darwin", "FreeBSD"):
            stat_cmd = "stat -c '%F,%f,%i,%d,%h,%u,%g,%s,%X,%Y,%Z' "
        else:
            stat_cmd = "stat -f '%HT,%Xp,%i,%d,%l,%u,%g,%z,%a,%m,%c' "
        rc, out, _ = self._session.run(stat_cmd + shquote(fn), retcode=None)
        if rc != 0:
            return None
        statres = out.strip().split(",")
        # First field is the human-readable file type; the raw mode is hex.
        text_mode = statres.pop(0).lower()
        res = StatRes((int(statres[0], 16),) + tuple(int(sr) for sr in statres[1:]))
        res.text_mode = text_mode
        return res

    def _path_delete(self, fn):
        self._session.run(f"rm -rf {shquote(fn)}")

    def _path_move(self, src, dst):
        self._session.run(f"mv {shquote(src)} {shquote(dst)}")

    def _path_copy(self, src, dst):
        self._session.run(f"cp -r {shquote(src)} {shquote(dst)}")

    def _path_mkdir(
        self,
        fn,
        mode=None,  # pylint: disable=unused-argument
        minus_p=True,
    ):
        p_str = "-p " if minus_p else ""
        cmd = f"mkdir {p_str}{shquote(fn)}"
        self._session.run(cmd)

    def _path_chmod(self, mode, fn):
        self._session.run(f"chmod {mode:o} {shquote(fn)}")

    def _path_touch(self, path):
        # NOTE(review): path is not shquote()d here, unlike the other _path_*
        # helpers — a path containing spaces/shell metacharacters will break;
        # confirm whether this is intentional.
        self._session.run(f"touch {path}")

    def _path_chown(self, fn, owner, group, recursive):
        args = ["chown"]
        if recursive:
            args.append("-R")
        if owner is not None and group is not None:
            args.append(f"{owner}:{group}")
        elif owner is not None:
            args.append(str(owner))
        elif group is not None:
            args.append(f":{group}")
        args.append(shquote(fn))
        self._session.run(" ".join(args))

    def _path_read(self, fn):
        data = self["cat"](fn)
        if self.custom_encoding and isinstance(data, str):
            data = data.encode(self.custom_encoding)
        return data

    def _path_write(self, fn, data):
        # Written via a local temp file + upload rather than through the shell,
        # so arbitrary binary data survives intact.
        if self.custom_encoding and isinstance(data, str):
            data = data.encode(self.custom_encoding)
        with NamedTemporaryFile() as f:
            f.write(data)
            f.flush()
            f.seek(0)
            self.upload(f.name, fn)

    def _path_link(self, src, dst, symlink):
        symlink_str = "-s " if symlink else ""
        self._session.run(f"ln {symlink_str}{shquote(src)} {shquote(dst)}")

    def expand(self, expr):
        # Let the remote shell perform variable and tilde expansion.
        return self._session.run(f"echo {expr}")[1].strip()

    def expanduser(self, expr):
        if not any(part.startswith("~") for part in expr.split("/")):
            return expr

        # we escape all $ signs to avoid expanding env-vars
        expr_repl = expr.replace("$", "\\$")
        return self._session.run(f"echo {expr_repl}")[1].strip()
| tomerfiliba/plumbum | plumbum/machines/remote.py | Python | mit | 16,021 |
#!/usr/bin/env python2
import os, shutil, glob
from functools import wraps
def print_warning(message, *args, **kwargs):
    """Print *message* to the terminal in red, str.format()-ing it first if
    any positional/keyword arguments are supplied."""
    from . import colortext
    formatted = message.format(*args, **kwargs) if (args or kwargs) else message
    colortext.write(formatted + '\n', color='red')
def print_error_and_die(message, *args, **kwargs):
    """Print *message* as a red warning suffixed with "Aborting..." and exit
    the process with status 1."""
    separator = '' if message.endswith('\n') else ' '
    print_warning(message + separator + "Aborting...", *args, **kwargs)
    raise SystemExit(1)
class catch_and_print_errors:
    """Context manager / decorator that turns expected failures into friendly output.

    A ``KeyboardInterrupt`` is silenced (after emitting a bare newline so the
    next shell prompt is not glued to the ``^C``).  Any exception carrying a
    truthy ``no_stack_trace`` attribute is printed as a red warning instead of
    a traceback.  All other exceptions propagate normally.
    """

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type == KeyboardInterrupt:
            # Bug fix: in this Python 2 module (no ``print_function`` import)
            # ``print()`` printed the literal "()" rather than a blank line.
            # Writing the newline directly behaves the same on Python 2 and 3.
            import sys
            sys.stdout.write('\n')
            return True
        if getattr(exc_value, 'no_stack_trace', False):
            print_warning(str(exc_value))
            return True

    def __call__(self, function):
        # Allows use as a decorator: @catch_and_print_errors()
        @wraps(function)
        def wrapper(*args, **kwargs):
            with self:
                return function(*args, **kwargs)
        return wrapper
def use_path_completion():
    """Enable tab-completion of filesystem paths for input() via readline."""
    import readline
    readline.parse_and_bind("tab: complete")
    readline.set_completer_delims(' \t\n;')
    readline.set_completer(path_completer)
def path_completer(text, state):
    """readline completer: return the *state*-th glob match for *text*.

    Directory matches get a trailing '/' so completion can descend into them.
    Returns ``None`` once matches are exhausted, as the readline completer
    protocol requires.  (The original appended a ``None`` sentinel to the glob
    list and then called ``os.path.isdir(None)``, raising a TypeError when
    readline asked for the entry past the last match.)
    """
    matches = glob.glob(os.path.expanduser(text) + '*')
    if state >= len(matches):
        return None
    match = matches[state]
    return match + '/' if os.path.isdir(match) else match
def clear_directory(directory):
    """Ensure *directory* exists and is empty, deleting any previous contents."""
    if os.path.exists(directory):
        shutil.rmtree(directory)
    os.makedirs(directory)
def relative_symlink(target, link_name):
    """Make a symlink to target using the shortest possible relative path.

    Any existing file or symlink at *link_name* is replaced.  Fixes two
    defects: the unused ``abs_target`` local is gone, and the existence check
    now uses ``os.path.lexists`` so an existing *broken* symlink is also
    removed (``os.path.exists`` follows the link and returns False for a
    dangling one, which made ``os.symlink`` fail with FileExistsError).
    """
    link_name = os.path.abspath(link_name)
    rel_target = os.path.relpath(target, os.path.dirname(link_name))
    if os.path.lexists(link_name):
        os.remove(link_name)
    os.symlink(rel_target, link_name)
# Bread'n'butter shell commands.
def mkdir(newdir):
    """Create *newdir* (including parents) if needed; no-op when it already
    exists, OSError when a regular file occupies that name."""
    if os.path.isdir(newdir):
        return
    if os.path.isfile(newdir):
        raise OSError("a file with the same name as the desired " \
                      "dir, '%s', already exists." % newdir)
    os.makedirs(newdir)
def touch(path):
    """Create *path* as an empty file, truncating any existing content."""
    open(path, 'w').close()
| Kortemme-Lab/klab | klab/scripting.py | Python | mit | 2,249 |
# coding: utf-8
# Reads the china.json administrative-division tree, collects province names
# and (cleaned) city names, and writes the city list to citys.json.
import sys
reload(sys)
# Python 2 only: make the implicit str<->unicode comparisons below (e.g.
# comparing a u'...' JSON value against a '...' literal) decode byte
# strings as UTF-8 instead of the default ASCII.
sys.setdefaultencoding('utf-8')
import json

china = json.loads(open('china.json', 'r').read())  # slow

new_provs = []
new_citys = []
for prov in china['children']:
    new_provs.append(prov['name'])
    for city in prov['children']:
        # Skip the placeholder entries used for municipal districts /
        # directly-administered county-level divisions.
        if city['name'] not in [u'市辖区', u'县', u'省直辖县级行政区划']:
            # Strip a trailing '市' ("city") suffix when present.
            if city['name'][-1] == '市':
                new_citys.append(city['name'][:-1])
            else:
                new_citys.append(city['name'])

print new_citys
with open('citys.json', 'w') as f:
    f.write(json.dumps(new_citys, ensure_ascii=False, indent=4))
# AUTHOR: Kale Miller
# DESCRIPTION: Contains the core classes for the program.
# 50726f6772616d6d696e6720697320627265616b696e67206f66206f6e652062696720696d706f737369626c65207461736b20696e746f20736576
# 6572616c207665727920736d616c6c20706f737369626c65207461736b732e
# DEVELOPMENT LOG:
# 05/12/16: Initialized classes file. Created the parent container class along with children for the basic container,
# the heavy container and the refrigerated container.
# 06/12/16: Fixed some of the public attributes to make them private.
# 07/12/16: Renamed the file to 'containers'. Added 'real' time processing of containers for load/unload. Removed
# some magic numbers and placed them at the top of the script.
# 12/12/16: Fixed some of the methods that failed under unit tests.
# 15/12/16: Added methods to add auxilary labels. Added method to generate information label. Small bug fixes.
# TODO: Somehow make the init call on the base class not hardcoded in.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~IMPORTS/GLOBALS~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from time import sleep
TEMP_LOW = -20.0   # minimum allowed refrigerated-container temperature (degrees C)
TEMP_HIGH = 10.0   # maximum allowed refrigerated-container temperature (degrees C)
UNLOAD_TIME = 0.75            # simulated seconds to unload a container from the ship
LOAD_TIME = 0.75              # simulated seconds to load a container for transport
LABEL_APPLICATION_TIME = 0.1  # simulated seconds to apply a single label
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.:.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~MAIN~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class _BaseContainer:
"""The parent class for which all containers are inherited."""
def __init__(self, id, info, destination):
"""
Initialises the class. id is the id tag on the container while info is the additional information you
wish to include on the label, such as contents or comments.
"""
# TODO: Change the assertion error to a value error.
assert isinstance(id, str) and isinstance(info, str), "Some of the parameters passed aren't strings."
self._id = id
self._info = info
self._destination = destination
self._labels = list()
self._auxilarylabelsalreadyadded = True
self._type = 'N/A'
self._loaded = False
self._onship = True
self._weight = None
self._currentlocation = self.currentlocation()
def currentlocation(self):
"""Find where the container currently is."""
if self._onship:
loc = 'On the ship'
elif self._loaded:
loc = 'Loaded'
elif not self._onship and not self._loaded:
loc = 'Holding bay'
self._currentlocation = loc
return loc
def settype(self, type):
"""Sets the type of the container."""
self._type = type
return None
def id(self):
"""Fetches the container's id."""
return self._id
def information(self):
"""Print the information about this container."""
print "----------------------------------------------------------------------"
print "CONTAINER: %s" % self._id
print "INFORMATION: %s" % self._info
print "DESTINATION: %s" % self._destination
print "LABELS: %s" % str(self._labels)
print "CURRENT LOCATION: %s" % self._currentlocation
print "----------------------------------------------------------------------"
return None
def _informationlabel(self):
"""Generates a label that contains information about the container."""
return "INFORMATION: %s. DESTINATION: %s." % (self._info,self._destination)
def addidtag(self):
"""Adds a id tag to the container."""
self._labels.append(self._id)
return None
def addlabel(self, label2add, debug=False, quiet=False):
"""Add a label to the container (e.g. fragile)."""
self._labels.append(label2add)
if not quiet: print "Added the label %r to container %s." % (label2add, self._id)
if not debug: sleep(LABEL_APPLICATION_TIME)
return None
def defineweight(self, m):
"""Defines the weight of the container."""
self._weight = m
return None
def weight(self):
"""Returns the weight of the container."""
return self._weight
def removelabel(self, label2remove):
"""Removes a label from the container."""
try:
self._labels.index(label2remove)
except ValueError:
print "The label %r is not on container %s." % (label2remove, self._id)
else:
print "Successfully removed the label %r from container %s." % (label2remove, self._id)
def load(self, load_location, debug=False):
"""Loads the container."""
if not self._auxilarylabelsalreadyadded:
print "WARNING: All of the required labels haven't been added to container %s" % self._id
if self._onship:
print "Container %s is still on the ship." % self._id
elif self._loaded:
print "Container %s is already loaded on %s." % (self._id, load_location)
elif not self._loaded:
print "Loading container %s onto %s." % (self._id, load_location)
if not debug: sleep(LOAD_TIME)
self._loaded = True
else:
raise RuntimeError, "There was a problem with container %s while loading." % self._id
self.currentlocation()
return None
def unload(self, debug=False):
"""Unloads the container."""
if self._onship:
print "Unloading container %s." % self._id
if not debug: sleep(UNLOAD_TIME)
self._onship = False
elif not self._onship:
print "Container %s has already been unloaded." % self._id
else:
raise RuntimeError, "There was a problem with container %s while unloading from the ship." % self._id
self.currentlocation()
return None
class BasicContainer(_BaseContainer):
    """A plain general-purpose container with no special handling needs."""

    def __init__(self, id, info, destination):
        _BaseContainer.__init__(self, id, info, destination)  # base-class setup
        self.settype('basic')
        # Only the information label is ever needed, so nothing is
        # considered outstanding before addauxilarylabels() runs.
        self._auxilarylabelsalreadyadded = True

    def addauxilarylabels(self, debug=False):
        """Apply the standard information label to the container."""
        self.addlabel(self._informationlabel(), quiet=True)
        return None
class HeavyContainer(_BaseContainer):
    """A container for heavy freight; carries an extra warning label."""

    def __init__(self, id, info, destination):
        _BaseContainer.__init__(self, id, info, destination)  # base-class setup
        self.settype('heavy')
        self._auxilarylabelsalreadyadded = False  # heavy-load notice still outstanding

    def addauxilarylabels(self, debug=False):
        """Apply the information label plus the heavy-container notice."""
        for text in (self._informationlabel(), 'NOTE: Heavy container.'):
            self.addlabel(text, quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None
class RefrigeratedContainer(_BaseContainer):
    """The refrigerated container; keeps a temperature in degrees celsius."""

    def __init__(self, id, info, destination):
        _BaseContainer.__init__(self, id, info, destination)  # Call the parent class' constructor.
        self.settype('refrigerated')
        self._auxilarylabelsalreadyadded = False
        self._temp = 0.0  # Set in celsius.

    def addauxilarylabels(self, debug=False):
        """Adds the extra labels that are required on the container."""
        self.addlabel(self._informationlabel(), quiet=True)
        self.addlabel('NOTE: Refrigerated. Handle with care.', quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None

    def assertValidTemp(self, T):
        """Asserts that the temperature is a number within the operating range."""
        assert isinstance(T, float) or isinstance(T, int), "Temperature must be a number."
        assert TEMP_LOW <= T <= TEMP_HIGH, "Temperature specified is outside functioning range."

    def settemp(self, newtemp):
        """Sets the temperature of the container.

        NOTE(review): this does not call assertValidTemp - callers are trusted
        to validate first; confirm whether validation belongs here.
        """
        self._temp = newtemp
        print("The temperature of container %s is now %f degrees." % (self._id, newtemp))
        return None

    def checktemp(self):
        """Checks (prints) the temperature of the container."""
        # Bug fix: the format string has two placeholders but was given only
        # self._temp, which raised a TypeError every time this was called.
        print("The current temperature of container %s is %f degrees." % (self._id, self._temp))
class LiquidContainer(HeavyContainer):
    """A heavy-class container holding a (possibly partial) volume of liquid."""

    def __init__(self, id, info, destination, occupiedvolume, totalvolume):
        """Initialise the class."""
        HeavyContainer.__init__(self, id, info, destination)
        self.settype('liquid')
        self._auxilarylabelsalreadyadded = False
        self._occupiedvolume = occupiedvolume  # litres currently in the tank
        self._totalvolume = totalvolume        # tank capacity in litres

    def addauxilarylabels(self, debug=False):
        """Apply the information label plus the liquids notice."""
        for text in (self._informationlabel(), 'Contains liquids.'):
            self.addlabel(text, quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None

    def _validvolumetoadd(self, V):
        """Sanity checks on adding volume; prints the reason on failure."""
        if not isinstance(V, (int, float)):
            print("The volume must be either a float or an integer.")
            return False
        if V > self._totalvolume - self._occupiedvolume:
            print("There isn't enough space in container %s to add an additional %f litres." % (self._id, V))
            return False
        if V < 0:
            print("Volume must be a positive number.")
            return False
        return True

    def _validvolumetoremove(self, V):
        """Sanity checks on removing volume; prints the reason on failure."""
        if not isinstance(V, (int, float)):
            print("The volume must be either a float or an integer.")
            return False
        if V > self._occupiedvolume:
            print("There isn't enough liquid in container %s to remove %f litres." % (self._id, V))
            return False
        if V < 0:
            print("Volume must be a positive number.")
            return False
        return True

    def setoccupiedvolume(self, V):
        """Set the current fill level (silently ignores out-of-range values)."""
        if isinstance(V, (int, float)) and 0 < V < self._totalvolume:
            self._occupiedvolume = V
        return None

    def addvolume(self, V):
        """Fills the tank by the specified volume."""
        if self._validvolumetoadd(V):
            self._occupiedvolume += V
        return None

    def removevolume(self, V):
        """Drains the tank by the specified volume."""
        if self._validvolumetoremove(V):
            self._occupiedvolume -= V
        return None
class ExplosivesContainer(HeavyContainer):
    """A heavy container that carries explosives."""

    def __init__(self, id, info, destination):
        """Initialise the class."""
        HeavyContainer.__init__(self, id, info, destination)
        self.settype('explosives')
        self._auxilarylabelsalreadyadded = False
        self.safe = True  # flipped externally when conditions become unsafe

    def addauxilarylabels(self, debug=False):
        """Apply the information label and the dangerous-goods notices."""
        for text in (self._informationlabel(),
                     'CAUTION: Dangerous goods inside. Handle with care.',
                     'Do not leave container in hot conditions.',
                     'No naked flames.'):
            self.addlabel(text, quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None

    def checkifsafe(self):
        """Report whether the explosives are being kept in safe conditions."""
        if self.safe:
            print("Container %s is operating under safe conditions." % self._id)
        if not self.safe:
            print("!!! DANGER: CONTAINER %s IS OPERATING UNDER UNSAFE CONDITIONS !!!" % self._id)
        return None

    def emergencyprocedures(self):
        """A command to be used only when the explosives are likely to detonate."""
        # Intentionally empty: no emergency behaviour has been implemented yet.
        return None
class ToxicContainer(BasicContainer):
    """A container that holds toxic goods."""

    def __init__(self, id, info, destination):
        BasicContainer.__init__(self, id, info, destination)
        self.settype('toxic')
        self._auxilarylabelsalreadyadded = False
        self.safe = True  # flipped externally when a leak is detected

    def addauxilarylabels(self, debug=False):
        """Apply the information label and the toxic-goods notices."""
        for text in (self._informationlabel(),
                     'CAUTION: Dangerous goods inside. Handle with care.',
                     'Do not ingest goods.',
                     'If leakage, call toxins hotline at 1900 882 301.'):
            self.addlabel(text, quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None

    def checkifsafe(self):
        """Report whether the container is leak-free and safe to be around."""
        if self.safe:
            print("Container %s is operating under safe conditions." % self._id)
        if not self.safe:
            print("!!! DANGER: CONTAINER %s IS OPERATING UNDER UNSAFE CONDITIONS !!!" % self._id)
        return None
class ChemicalContainer(RefrigeratedContainer, ToxicContainer):
    """A chemical container that is both refrigerated and toxic."""
    def __init__(self, id, info, destination):
        # NOTE: both parents' constructors are invoked explicitly (no super()),
        # so _BaseContainer.__init__ effectively runs twice; the second call
        # re-initialises the shared state with the same arguments, which is
        # harmless but worth knowing when adding stateful parent attributes.
        RefrigeratedContainer.__init__(self, id, info, destination)
        ToxicContainer.__init__(self, id, info, destination)
        self.settype('chemical')
        self._auxilarylabelsalreadyadded = False
    def addauxilarylabels(self, debug=False):
        """Adds the extra labels that are required on the container."""
        self.addlabel(self._informationlabel(), quiet=True)
        self.addlabel('CAUTION: Dangerous goods inside. Handle with care.', quiet=True)
        self.addlabel('Do not ingest goods.', quiet=True)
        self.addlabel('CAUTION: Strong chemicals.', quiet=True)
        self.addlabel('If leak call chemicals hotline at 1900 772 900', quiet=True)
        self._auxilarylabelsalreadyadded = True
        return None
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.:.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
| kmiller96/Shipping-Containers-Software | lib/containers.py | Python | mit | 14,805 |
"""
Test for the bandicoot.helper.group module.
"""
import bandicoot as bc
from bandicoot.core import Record, Position
import unittest
import datetime
from bandicoot.tests.generate_user import random_burst
from bandicoot.helper.group import group_records
from bandicoot.helper.tools import std, mean
from datetime import timedelta
import numpy as np
import os
class TestGroup(unittest.TestCase):
    """Tests for bandicoot.helper.group: statistics() and group_records()."""

    @classmethod
    def setUpClass(cls):
        # Tracks whether the working directory was already switched to this
        # test file's directory (done lazily, once, in setUp).
        cls._dir_changed = False

    def setUp(self):
        """Load the sample user and build random fixtures for statistics()."""
        if not TestGroup._dir_changed:
            abspath = os.path.abspath(__file__)
            name = abspath.index(os.path.basename(__file__))
            abspath = abspath[:name]
            os.chdir(abspath)
            TestGroup._dir_changed = True
        self.maxDiff = None
        self.user = bc.io.read_orange("u_test", "samples", describe=False)
        self.random_int_list = np.random.randint(1, 1000, size=9001)
        self.sum_stats_list = [bc.helper.tools.SummaryStats(np.random.rand(), np.random.rand(),
            np.random.rand(), np.random.rand(), np.random.rand(), np.random.rand(), np.random.rand(), []) for _ in range(9001)]

    def test_statistics(self):
        """statistics() returns mean/std for plain numbers and the extended
        set of aggregates for SummaryStats inputs."""
        self.assertDictEqual(bc.helper.group.statistics(self.random_int_list, summary='default'), {
            'mean': mean(self.random_int_list),
            'std': std(self.random_int_list),
        })

        def mean_std(key):
            # Expected mean/std over one attribute of every SummaryStats.
            return {
                'mean': mean([getattr(s, key) for s in self.sum_stats_list]),
                'std': std([getattr(s, key) for s in self.sum_stats_list]),
            }
        self.assertDictEqual(bc.helper.group.statistics(self.sum_stats_list, summary='extended'), {
            'kurtosis': mean_std('kurtosis'),
            'mean': mean_std('mean'),
            'median': mean_std('median'),
            'skewness': mean_std('skewness'),
            'std': mean_std('std'),
            'min': mean_std('min'),
            'max': mean_std('max')
        })
        # Empty input yields None for both aggregates (Python 2: .values()
        # returns a list, so direct comparison works).
        self.assertEqual(bc.helper.group.statistics([]).values(), [None] * 2)

    def test_statistics_bad_aggregated(self):
        """statistics() rejects a non-iterable/invalid aggregate argument."""
        def run_bad_aggregated():
            try:
                bc.helper.group.statistics("bad_aggregated")
            except (TypeError, ValueError):
                return True
            return False
        self.assertTrue(run_bad_aggregated())

    def test_weekly_group(self):
        """Records in different ISO weeks end up in different groups."""
        records = [
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 24), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 9, 4), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 9, 11), 1, Position())
        ]
        user = bc.User()
        user.records = records
        grouping = bc.helper.group.group_records(user, groupby='week')
        groups = [[r for r in l] for l in grouping]
        self.assertEqual(groups, [[records[0]], [records[1]], [records[2]]])

    def test_weekday_group(self):
        """part_of_week='weekday' keeps only Monday-Friday records."""
        records = [
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 25), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 9, 4), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 9, 11), 1, Position())
        ]
        user = bc.User()
        user.records = records
        grouping = bc.helper.group.group_records(user, groupby='week', part_of_week='weekday')
        groups = [[r for r in l] for l in grouping]
        self.assertEqual(groups, [[records[0]], [records[1]], [records[2]]])

    def test_weekend_group(self):
        """part_of_week='weekend' keeps only Saturday/Sunday records."""
        records = [
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 23), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 31), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 10, 18), 1, Position())
        ]
        user = bc.User()
        user.records = records
        grouping = bc.helper.group.group_records(user, groupby='week', part_of_week='weekend')
        groups = [[r for r in l] for l in grouping]
        self.assertEqual(groups, [[records[0]], [records[1]], [records[2]]])

    def test_daily_group(self):
        """part_of_day splits records into 'day' and 'night' buckets."""
        records = [
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 22, 10, 00), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 8, 23, 10, 00), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 9, 7, 11, 00), 1, Position()),
            Record("test_itr", "in", "1", datetime.datetime(2014, 10, 18, 2, 00), 1, Position())
        ]
        user = bc.User()
        user.records = records
        grouping = bc.helper.group.group_records(user, groupby='week', part_of_day='night')
        groups = [[r for r in l] for l in grouping]
        self.assertEqual(groups, [[records[3]]])
        grouping = bc.helper.group.group_records(user, groupby='week', part_of_day='day')
        groups = [[r for r in l] for l in grouping]
        self.assertEqual(groups, [[records[0], records[1]], [records[2]]])

    def test_none_group(self):
        """groupby=None yields all records as a single group."""
        records = [
            Record("call", "in", "1", datetime.datetime(2014, 9, 4), 1, Position()),
            Record("call", "in", "1", datetime.datetime(2014, 9, 5), 1, Position()),
            Record("call", "in", "1", datetime.datetime(2014, 9, 11), 1, Position()),
            Record("call", "in", "1", datetime.datetime(2014, 9, 12), 1, Position())
        ]
        user = bc.User()
        user.records = records
        grouping = bc.helper.group.group_records(user, groupby=None)
        self.assertEqual(records, list(next(grouping)))
        # Python 2 generator API: .next() — exactly one group is produced.
        self.assertRaises(StopIteration, grouping.next)
class ConsistencyTests(unittest.TestCase):
    """Grouping must partition the records: no record lost or duplicated."""

    def setUp(self):
        # Synthetic user with a burst of 100 records spaced two days apart.
        self.user = bc.User()
        self.user.records = random_burst(100, delta=timedelta(days=2))

    def _group_set(self, method, interaction):
        """Return the flattened set of records produced by group_records()."""
        chunks = group_records(self.user, groupby=method,
                               interaction=interaction)
        new_records = set(r for c in chunks for r in c)
        return new_records

    def test_weekly(self):
        """Weekly grouping preserves all records; interaction filters apply."""
        old_records = set(self.user.records)
        new_records = self._group_set('week', None)
        self.assertSetEqual(new_records, old_records)
        new_records = self._group_set('week', 'call')
        self.assertSetEqual(new_records, {r for r in old_records
                                          if r.interaction == 'call'})
class MissingTests(unittest.TestCase):
    """Records with missing fields are counted in user.ignored_records."""

    def setUp(self):
        # Sample CSV containing deliberately incomplete records.
        self.user = bc.read_csv('user_ignored', 'samples')

    def test_amount(self):
        """Per-field counts of why records were ignored match the fixture."""
        result = {
            'all': 4,
            'interaction': 2,
            'direction': 2,
            'correspondent_id': 0,
            'datetime': 0,
            'call_duration': 1,
        }
        self.assertDictEqual(self.user.ignored_records, result)

    def test_total_records(self):
        """Only one of the fixture's records survives the filtering."""
        self.assertEqual(len(self.user.records), 1)
| econandrew/bandicoot | bandicoot/tests/test_group.py | Python | mit | 7,063 |
"""tcprocd client."""
from __future__ import unicode_literals, print_function, absolute_import
from tcprocd.protocol import Protocol
import socket
import select
import sys
# Types accepted as a "string" server address, under both Python 2
# (str/unicode) and Python 3 (str/bytes).
if sys.version_info[0] < 3:
    str_types = (str, unicode) # noqa
else:
    str_types = (str, bytes) # noqa
class SocketShell(object):
    """
    A class to connect to a process's thread.

    Forwards lines between the user's terminal and the server: process
    output arriving on the socket is handled by ``on_socket_ready`` and
    user input on stdin by ``on_stdin_ready``.

    :param client: :class:`tcprocd.client.Client` - The client to use for the connection.
    """
    def __init__(self, client):
        """Initialize shell."""
        self.client = client
        # select() watches both the user's stdin and the server socket.
        self.sockets = [sys.stdin, client.socket]
        self._do_stop = False
    def on_stdin_ready(self):
        """Called when some input is ready.

        Returns True when the user typed ``exit`` and the loop should stop;
        otherwise forwards the line to the server.
        """
        line = sys.stdin.readline().strip()
        if line == 'exit':
            return True
        self.client.protocol.sendline(line + '\n')
    def on_socket_ready(self):
        """Called when receiving some process output.

        Returns True when the server sent ``exit``; otherwise echoes the
        line to stdout.
        """
        line = self.client.protocol.readline()
        if line == 'exit':
            return True
        sys.stdout.write(line + '\n')
        sys.stdout.flush()
    def run(self):
        """Start waiting for input/output."""
        try:
            while not self._do_stop:
                # Block until either stdin or the socket is readable.
                ready = select.select(self.sockets, [], [])[0]
                for s in ready:
                    if s == self.client.socket:  # message by server
                        if self.on_socket_ready():
                            self._do_stop = True
                    else:  # message by user
                        if self.on_stdin_ready():
                            self._do_stop = True
        finally:
            # Always release the socket, even if select/handlers raise.
            self.client.socket.close()
class AuthenticationError(Exception):
    """Raised when the server rejects the supplied credentials."""
class ServerError(Exception):
    """Raised when the server answers a request with an error status."""
class Client(object):
    """
    A class to connect to a tcprocd server.

    :param server_address: tuple of host and port or the path to the socket file
    """
    def __init__(self, server_address):
        """Initialize client."""
        self.server_address = server_address
        # A string address means a unix domain socket path; a tuple means
        # (host, port) over TCP.
        if isinstance(server_address, str_types):
            self.is_unix_domain = True
            sock_type = socket.AF_UNIX
        else:
            self.is_unix_domain = False
            sock_type = socket.AF_INET
        self.socket = socket.socket(sock_type, socket.SOCK_STREAM)
        self.protocol = Protocol(self.socket)
        self.server_version = None
        self.attached_to = None
    def connect(self, username=None, password=None, username_callback=None, password_callback=None):
        """Connect to the server.

        Credentials may be passed directly or supplied lazily via the
        ``*_callback`` parameters when the server requests them.

        :raises AuthenticationError: if the server rejects the credentials
        :raises ServerError: on any other non-OK handshake answer
        """
        if self.is_unix_domain:
            # SO_PASSCRED lets the server read our uid over the socket;
            # fall back to the known Linux constant if the platform's
            # socket module does not expose it.
            try:
                SO_PASSCRED = socket.SO_PASSCRED
            except AttributeError:
                SO_PASSCRED = 16
            self.socket.setsockopt(socket.SOL_SOCKET, SO_PASSCRED, 1)
        self.socket.connect(self.server_address)
        self.server_version = self.protocol.recv_part(3)
        answer = self.protocol.recv_part(2)
        # TCP connections always require username and password.
        # A unix domain socket does not accept an username and
        # only requires a password if the connecting user has one.
        # TODO: return 'authentication required' and let the caller authenticate on its own
        if answer == self.protocol.AUTHENTICATION_REQUIRED:
            if not self.is_unix_domain:
                if username is None:
                    username = username_callback()
                self.protocol.send_part(2, username)
            if password is None:
                password = password_callback()
            self.protocol.send_part(2, password)
            answer = self.protocol.recv_part(2)
            if answer != self.protocol.OK:
                raise AuthenticationError()
        elif answer != self.protocol.OK:
            raise ServerError(answer)
    def close(self):
        """Close the connection."""
        self.socket.close()
    def __enter__(self):
        """Connect when used as context manager."""
        self.connect()
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Disconnect afterwards, when used as context manager."""
        self.close()
    def list(self):
        """List servers.

        :return: tuple of (status answer, list of process names)
        """
        self.protocol.send_part(2, 'list')
        answer = self.protocol.recv_part(2)
        data = []
        if answer == self.protocol.OK:
            data = self.protocol.recv_part(6).split('\n')
        return answer, data
    def cat(self, name, start=0):
        """Get output of given process.

        :param name: :class:`str` - Name of the process.
        :param start: :class:`int` - Start at this line. (Default: ``0``)
        :return: :class:`str` - Multi-line output of the process.
        """
        self.protocol.send_part(2, 'cat')
        self.protocol.send_part(2, name)
        self.protocol.send_part(1, str(start))
        answer = self.protocol.recv_part(2)
        data = []
        if answer == self.protocol.OK:
            data = self.protocol.recv_part(6).split('\n')
        return answer, data
    def start(self, name, command, path=''):
        """Create a new process with the given ``name`` and ``command``.

        :param name: :class:`str` - Name of the process.
        :param command: :class:`str` - The command to run the process.
        :param path: :class:`str` - The (remote) path to execute the
                                    command in. (Default: ``None``)
        :return: :class:`str` - Status message
        """
        self.protocol.send_part(2, 'start')
        self.protocol.send_part(2, name)
        self.protocol.send_part(3, command)
        self.protocol.send_part(3, path)
        return self.protocol.recv_part(2)
    def kill(self, name):
        """Kill the given process.

        :param name: :class:`str` - Name of the process.
        :return: :class:`str` - Status message
        """
        self.protocol.send_part(2, 'kill')
        self.protocol.send_part(2, name)
        return self.protocol.recv_part(2)
    def command(self, name, command):
        """Write the given command to the given process's stdin.

        .. Note: Use ``cat`` to see the process's stdout!

        :param name: :class:`str` - Name of the process.
        :param command: :class:`str` - The command to send to the process.
        :return: :class:`str` - Status message
        """
        self.protocol.send_part(2, 'command')
        self.protocol.send_part(2, name)
        self.protocol.send_part(3, command)
        return self.protocol.recv_part(2)
    def attach(self, name):
        """
        Attach to the given process's shell.

        :param name: :class:`str` - Name of the process.
        :return: :class:`str` - Status message
        """
        self.protocol.send_part(2, 'attach')
        self.protocol.send_part(2, name)
        return self.protocol.recv_part(2)
| knoppo/tcprocd | tcprocd/client.py | Python | mit | 7,172 |
# Semantic version components (major.minor.patch) consumed by the
# punch version-bumping tool.
major = 0
minor = 37
patch = 0
| vovanbo/aiohttp_json_api | punch_version.py | Python | mit | 31 |
"""
Custom management command to rebuild thumbnail images
- May be required after importing a new dataset, for example
"""
import os
import logging
from PIL import UnidentifiedImageError
from django.core.management.base import BaseCommand
from django.conf import settings
from django.db.utils import OperationalError, ProgrammingError
from company.models import Company
from part.models import Part
logger = logging.getLogger('inventree')
class Command(BaseCommand):
    """
    Rebuild all thumbnail images
    """

    def rebuild_thumbnail(self, model):
        """
        Rebuild the thumbnail specified by the "image" field of the provided model
        """
        if not model.image:
            return
        img = model.image
        url = img.thumbnail.name
        loc = os.path.join(settings.MEDIA_ROOT, url)
        # Only regenerate when the thumbnail file is missing on disk.
        if not os.path.exists(loc):
            logger.info(f"Generating thumbnail image for '{img}'")
            try:
                model.image.render_variations(replace=False)
            except FileNotFoundError:
                logger.error(f"ERROR: Image file '{img}' is missing")
            except UnidentifiedImageError:
                logger.error(f"ERROR: Image file '{img}' is not a valid image")

    def handle(self, *args, **kwargs):
        """Regenerate missing thumbnails for all Part and Company images."""
        logger.setLevel(logging.INFO)
        logger.info("Rebuilding Part thumbnails")
        for part in Part.objects.exclude(image=None):
            try:
                self.rebuild_thumbnail(part)
            except (OperationalError, ProgrammingError):
                logger.error("ERROR: Database read error.")
                break
        logger.info("Rebuilding Company thumbnails")
        for company in Company.objects.exclude(image=None):
            try:
                self.rebuild_thumbnail(company)
            except (OperationalError, ProgrammingError):
                # Fix: message previously read "abase read error." — now
                # consistent with the Part loop above.
                logger.error("ERROR: Database read error.")
                break
| SchrodingersGat/InvenTree | InvenTree/InvenTree/management/commands/rebuild_thumbnails.py | Python | mit | 1,950 |
#!/usr/bin/python
#
# linearize.py: Construct a linear, no-fork, best version of the blockchain.
#
#
# Copyright (c) 2013 The Umaro developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
import struct
import re
import base64
import httplib
import sys
# Retry delay in seconds after an RPC error (unused in the visible code).
ERR_SLEEP = 15
# Upper bound for nonce iteration (Python 2 long literal; unused here).
MAX_NONCE = 1000000L
# Populated from the config file named on the command line (key=value pairs).
settings = {}
class UmaroRPC:
    """Minimal JSON-RPC 1.1 client for an umarod node over HTTP (Python 2)."""
    # Request id counter; incremented per call (as an instance attribute
    # shadowing this class attribute after the first rpc() call).
    OBJID = 1

    def __init__(self, host, port, username, password):
        # HTTP basic auth header built from username:password.
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        self.conn = httplib.HTTPConnection(host, port, False, 30)
    def rpc(self, method, params=None):
        """POST one JSON-RPC request; return its 'result' value.

        Returns None on transport/decoding problems. NOTE(review): on an
        RPC-level error this returns the error object instead of raising.
        """
        self.OBJID += 1
        obj = { 'version' : '1.1',
            'method' : method,
            'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })
        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None
        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None
        return resp_obj['result']
    def getblock(self, hash, verbose=True):
        """Fetch a block by hash; verbose=False returns raw hex."""
        return self.rpc('getblock', [hash, verbose])
    def getblockhash(self, index):
        """Fetch the block hash at the given height."""
        return self.rpc('getblockhash', [index])
def getblock(rpc, settings, n):
    """Return the raw serialized bytes of the block at height ``n``.

    Fetches the hash, then the hex-encoded block, and decodes it
    (Python 2 ``str.decode('hex')``).
    """
    hash = rpc.getblockhash(n)
    hexdata = rpc.getblock(hash, False)
    data = hexdata.decode('hex')
    return data
def get_blocks(settings):
    """Write blocks 0..max_height to the output file in bootstrap format.

    Each block is prefixed with the network magic bytes and a little-endian
    32-bit length, matching the bootstrap.dat layout.
    """
    rpc = UmaroRPC(settings['host'], settings['port'],
               settings['rpcuser'], settings['rpcpass'])
    outf = open(settings['output'], 'wb')
    for height in xrange(settings['max_height']+1):
        data = getblock(rpc, settings, height)
        outhdr = settings['netmagic']
        outhdr += struct.pack("<i", len(data))
        outf.write(outhdr)
        outf.write(data)
        # Progress report every 1000 blocks.
        if (height % 1000) == 0:
            sys.stdout.write("Wrote block " + str(height) + "\n")
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage: linearize.py CONFIG-FILE"
        sys.exit(1)

    # Parse the key=value config file into the module-level settings dict.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue
        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Defaults for any keys the config file did not provide.
    if 'netmagic' not in settings:
        settings['netmagic'] = 'f9beb4d9'
    if 'output' not in settings:
        settings['output'] = 'bootstrap.dat'
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 8332
    if 'max_height' not in settings:
        settings['max_height'] = 250000
    if 'rpcuser' not in settings or 'rpcpass' not in settings:
        print "Missing username and/or password in cfg file"
        sys.exit(1)

    # Normalise types: magic bytes from hex, numeric fields to int.
    settings['netmagic'] = settings['netmagic'].decode('hex')
    settings['port'] = int(settings['port'])
    settings['max_height'] = int(settings['max_height'])

    get_blocks(settings)
| randayh/umaro | contrib/misc/linearize.py | Python | mit | 3,196 |
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
import json
from urllib import urlencode
from scrapy import log
from scrapy.http import Request
from scrapy.selector import Selector
from scrapy.contrib.loader import ItemLoader
from scrapy.contrib.loader.processor import Identity, TakeFirst
from torabot.spy.spiders.redis import RedisSpider
from torabot.spy.items import Result
from ..items import (
Bangumi,
User,
Post,
SearchResult,
SearchResultPost,
Recommendation,
QueryResult,
)
class Bilibili(RedisSpider):
    """Spider that polls bilibili for bangumi, user, username and free-text
    queries, dispatching on the JSON query's ``method`` field."""
    name = 'bilibili'

    def __init__(self, life=60, *args, **kargs):
        super(Bilibili, self).__init__(*args, life=life, **kargs)

    def make_requests_from_query(self, query):
        """Decode the JSON query and yield requests from the matching builder."""
        query = json.loads(query)
        for req in {
            'bangumi': self.make_bangumi_requests,
            'user': self.make_user_requests,
            'username': self.make_username_requests,
            'query': self.make_query_requests,
        }[query['method']](query):
            yield req

    def make_username_requests(self, query):
        """Search by uploader name; resolved to a user page in the callback."""
        yield Request(
            make_username_search_uri(query['username']),
            callback=self.parse_username_prepare,
            meta=dict(query=query),
            dont_filter=True,
        )

    def make_query_requests(self, query):
        """Free-text search request."""
        yield Request(
            make_query_uri(query['query']),
            callback=self.parse_query,
            meta=dict(query=query),
            dont_filter=True,
        )

    def make_bangumi_requests(self, query):
        """Request the site-wide bangumi JSON index."""
        yield Request(
            'http://www.bilibili.tv/index/bangumi.json',
            callback=self.parse_bangumi,
            meta=dict(query=query),
            dont_filter=True,
        )

    def make_user_requests(self, query):
        """Request a user's space page by numeric id."""
        yield Request(
            'http://space.bilibili.tv/' + query['user_id'],
            callback=self.parse_user,
            meta=dict(query=query),
            dont_filter=True,
        )

    def parse_bangumi(self, response):
        """Parse the bangumi JSON index into a Bangumi item."""
        query = response.meta['query']
        try:
            return Bangumi(
                query=query,
                content=json.loads(response.body_as_unicode())
            )
        # Fix: narrowed from a bare ``except:`` so KeyboardInterrupt /
        # SystemExit are no longer swallowed.
        except Exception:
            log.msg('parse failed', level=log.ERROR)
            return Result(ok=False, query=query)

    def parse_user(self, response):
        """Parse a user's space page into a User item with its posts."""
        query = response.meta['query']
        try:
            sel = Selector(response)
            return User(
                user_uri=response.url,
                query=query,
                posts=[make_post(sub) for sub in sel.xpath('//div[@class="main_list"]/ul/li')]
            )
        except Exception as e:
            return failed(query, str(e))

    def parse_query(self, response):
        """Parse a search result page into a QueryResult item."""
        query = response.meta['query']
        try:
            sel = Selector(response)
            return QueryResult(
                uri=response.url,
                query=query,
                posts=[make_search_post(sub) for sub in sel.xpath('//ul[@class="result"]/li')]
            )
        except Exception as e:
            return failed(query, str(e))

    def parse_username_prepare(self, response):
        """Resolve a username search: follow the exact uploader match, or
        return an empty SearchResult carrying uploader recommendations."""
        query = response.meta['query']
        try:
            sel = Selector(response)
            posts = []
            for li in sel.xpath('//ul[@class="result"]/li'):
                post = make_search_post(li)
                if query['username'] == post['upper']:
                    # Exact match — fetch that uploader's space page instead.
                    return Request(
                        post['user_uri'],
                        callback=self.parse_user,
                        meta=dict(query=query),
                        dont_filter=True,
                    )
                posts.append(post)
            return SearchResult(
                query=query,
                posts=[],
                recommendations=make_recommendations(posts),
            )
        except Exception as e:
            return failed(query, str(e))
def make_recommendations(posts):
    """Build one Recommendation per distinct uploader, keeping first-seen order."""
    seen = {}
    result = []
    for post in posts:
        rec = make_recommendation(post)
        if rec['username'] not in seen:
            result.append(rec)
            seen[rec['username']] = 1
    return result
def make_recommendation(post):
    """Build a Recommendation item pointing at the post's uploader."""
    return Recommendation(
        user_uri=post['user_uri'],
        username=post['upper'],
    )
def failed(query, message):
    """Log a parse failure and return a not-ok Result for the query."""
    log.msg('parse failed: %s' % message, level=log.ERROR)
    return Result(ok=False, query=query, message=message)
class SearchResultPostLoader(ItemLoader):
    """Item loader for search-result posts; keeps the first extracted value."""
    default_item_class = SearchResultPost
    default_input_processor = Identity()
    default_output_processor = TakeFirst()

    def date_in(self, values):
        # Strip surrounding whitespace from each extracted date string.
        for s in values:
            yield s.strip()
def make_search_post(sel):
    """Extract a SearchResultPost item from one search-result <li> selector."""
    loader = SearchResultPostLoader(selector=sel)
    loader.add_xpath('title', 'string(.//div[@class="t"])')
    loader.add_xpath('upper', 'string(.//a[@class="upper"])')
    loader.add_xpath('kind', 'string(.//div[@class="t"]/span)')
    loader.add_xpath('date', 'string(.//i[@class="date"])')
    loader.add_xpath('intro', 'string(.//i[@class="intro"])')
    # mylist don't have title a, use first a instead
    # loader.add_xpath('uri', './/a[@class="title"]/@href')
    loader.add_xpath('uri', './/a/@href')
    loader.add_xpath('user_uri', './/a[@class="upper"]/@href')
    loader.add_xpath('cover', './/a[@class="title"]//img/@src')
    post = loader.load_item()
    # The title node also contains the kind label as a prefix; strip it.
    # (If 'kind' is missing, startswith('') is True and nothing is removed.)
    if post.get('title', '') and post['title'].startswith(post.get('kind', '')):
        post['title'] = post['title'][len(post.get('kind', '')):]
    return post
class PostLoader(ItemLoader):
    """Item loader for a user-space post; keeps the first extracted value."""
    default_item_class = Post
    default_input_processor = Identity()
    default_output_processor = TakeFirst()

    def ctime_in(self, values):
        # Drop the first 5 characters of the timestamp — presumably a fixed
        # label prefix in the page markup; TODO confirm against live markup.
        for s in values:
            yield s[5:]
def make_post(sel):
    """Extract a Post item from one user-space list <li> selector."""
    loader = PostLoader(selector=sel)
    loader.add_xpath('title', 'string(.//a[@class="title"])')
    loader.add_xpath('uri', './/a[@class="title"]/@href')
    loader.add_xpath('cover', './/img/@src')
    loader.add_xpath('kind', 'string(.//a[@class="l"])')
    loader.add_xpath('ctime', 'string(.//div[@class="c"])')
    loader.add_xpath('desc', 'string(.//div[@class="q"])')
    return loader.load_item()
def make_username_search_uri(username):
    """Build a search URL using bilibili's ``@author`` uploader filter."""
    return make_query_uri(u'@author %s' % username)
def make_query_uri(query):
    """Build a bilibili search URL for *query*, ordered newest-first."""
    params = {
        'keyword': query.encode('utf-8'),
        'orderby': 'senddate',
    }
    return 'http://www.bilibili.tv/search?' + urlencode(params)
| Answeror/torabot | torabot/mods/bilibili/spy/bilibili/spiders/__init__.py | Python | mit | 6,671 |
from django.contrib import admin
from books.models import *
# Register your models here.
class BookInfoAdmin(admin.ModelAdmin):
    # Columns shown in the admin change list for books.
    list_display=('title','isbn','price')
class GoodInfoAdmin(admin.ModelAdmin):
    # Columns shown in the admin change list for goods listings.
    # NOTE(review): class name says "GoodInfo" but it administers the
    # GoodsInfo model (registered below) — consider renaming for clarity.
    list_display=('seller','book','price')
# Expose the models in the Django admin; book and goods entries use the
# customised ModelAdmin classes defined in this module.
admin.site.register(UserInfo)
admin.site.register(BookInfo,BookInfoAdmin)
admin.site.register(GoodsInfo,GoodInfoAdmin)
| zearom32/SmartBooks | books/admin.py | Python | mit | 402 |
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPCs related to blockchainstate.
Test the following RPCs:
- getblockchaininfo
- gettxoutsetinfo
- getdifficulty
- getbestblockhash
- getblockhash
- getblockheader
- getchaintxstats
- getnetworkhashps
- verifychain
Tests correspond to code in rpc/blockchain.cpp.
"""
from decimal import Decimal
import http.client
import subprocess
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises,
assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
)
from test_framework.blocktools import (
create_block,
create_coinbase,
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
msg_block,
)
from test_framework.mininode import (
P2PInterface,
)
class BlockchainTest(SyscoinTestFramework):
    def set_test_params(self):
        """Run one node, starting from a clean chain (no cached blocks)."""
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.supports_cli = False
    def run_test(self):
        """Mine a deterministic 200-block chain, then exercise each RPC."""
        self.mine_chain()
        self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1'])  # Set extra args with pruning after rescan is complete

        self._test_getblockchaininfo()
        self._test_getchaintxstats()
        self._test_gettxoutsetinfo()
        self._test_getblockheader()
        self._test_getdifficulty()
        self._test_getnetworkhashps()
        self._test_stopatheight()
        self._test_waitforblockheight()
        assert self.nodes[0].verifychain(4, 0)
    def mine_chain(self):
        """Mine 200 blocks with mocked timestamps spaced ten minutes apart."""
        self.log.info('Create some old blocks')
        address = self.nodes[0].get_deterministic_priv_key().address
        for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600):
            # ten-minute steps from genesis block time
            self.nodes[0].setmocktime(t)
            self.nodes[0].generatetoaddress(1, address)
        assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200)
    def _test_getblockchaininfo(self):
        """Check getblockchaininfo keys and values under three pruning modes:
        manual prune (-prune=1), no pruning, and automatic prune (-prune=550)."""
        self.log.info("Test getblockchaininfo")

        keys = [
            'bestblockhash',
            'blocks',
            'chain',
            'chainwork',
            'difficulty',
            'headers',
            'initialblockdownload',
            'mediantime',
            'pruned',
            'size_on_disk',
            'softforks',
            'verificationprogress',
            'warnings',
        ]
        res = self.nodes[0].getblockchaininfo()

        # result should have these additional pruning keys if manual pruning is enabled
        assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys))

        # size_on_disk should be > 0
        assert_greater_than(res['size_on_disk'], 0)

        # pruneheight should be greater or equal to 0
        assert_greater_than_or_equal(res['pruneheight'], 0)

        # check other pruning fields given that prune=1
        assert res['pruned']
        assert not res['automatic_pruning']

        self.restart_node(0, ['-stopatheight=207'])
        res = self.nodes[0].getblockchaininfo()
        # should have exact keys
        assert_equal(sorted(res.keys()), keys)

        self.restart_node(0, ['-stopatheight=207', '-prune=550'])
        res = self.nodes[0].getblockchaininfo()
        # result should have these additional pruning keys if prune=550
        assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys))

        # check related fields
        assert res['pruned']
        assert_equal(res['pruneheight'], 0)
        assert res['automatic_pruning']
        assert_equal(res['prune_target_size'], 576716800)
        assert_greater_than(res['size_on_disk'], 0)
        assert_equal(res['softforks'], {
            'bip34': {'type': 'buried', 'active': False, 'height': 500},
            'bip66': {'type': 'buried', 'active': False, 'height': 1251},
            'bip65': {'type': 'buried', 'active': False, 'height': 1351},
            'csv': {'type': 'buried', 'active': False, 'height': 432},
            'segwit': {'type': 'buried', 'active': True, 'height': 0},
            'testdummy': {
                'type': 'bip9',
                'bip9': {
                    'status': 'started',
                    'bit': 28,
                    'start_time': 0,
                    'timeout': 0x7fffffffffffffff,  # testdummy does not have a timeout so is set to the max int64 value
                    'since': 144,
                    'statistics': {
                        'period': 144,
                        'threshold': 108,
                        'elapsed': 57,
                        'count': 57,
                        'possible': True,
                    },
                },
                'active': False}
        })
    def _test_getchaintxstats(self):
        """Check getchaintxstats argument validation and window statistics."""
        self.log.info("Test getchaintxstats")

        # Test `getchaintxstats` invalid extra parameters
        assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)

        # Test `getchaintxstats` invalid `nblocks`
        assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '')
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())

        # Test `getchaintxstats` invalid `blockhash`
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
        assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0')
        assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000')
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000')
        blockhash = self.nodes[0].getblockhash(200)
        self.nodes[0].invalidateblock(blockhash)
        assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
        self.nodes[0].reconsiderblock(blockhash)

        chaintxstats = self.nodes[0].getchaintxstats(nblocks=1)
        # 200 txs plus genesis tx
        assert_equal(chaintxstats['txcount'], 201)
        # tx rate should be 1 per 10 minutes, or 1/600
        # we have to round because of binary math
        assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))

        b1_hash = self.nodes[0].getblockhash(1)
        b1 = self.nodes[0].getblock(b1_hash)
        b200_hash = self.nodes[0].getblockhash(200)
        b200 = self.nodes[0].getblock(b200_hash)
        time_diff = b200['mediantime'] - b1['mediantime']

        # Default window spans the whole chain (block 1 .. tip).
        chaintxstats = self.nodes[0].getchaintxstats()
        assert_equal(chaintxstats['time'], b200['time'])
        assert_equal(chaintxstats['txcount'], 201)
        assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
        assert_equal(chaintxstats['window_final_block_height'], 200)
        assert_equal(chaintxstats['window_block_count'], 199)
        assert_equal(chaintxstats['window_tx_count'], 199)
        assert_equal(chaintxstats['window_interval'], time_diff)
        assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))

        # A zero-length window (ending at block 1) omits the rate fields.
        chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
        assert_equal(chaintxstats['time'], b1['time'])
        assert_equal(chaintxstats['txcount'], 2)
        assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
        assert_equal(chaintxstats['window_final_block_height'], 1)
        assert_equal(chaintxstats['window_block_count'], 0)
        assert 'window_tx_count' not in chaintxstats
        assert 'window_interval' not in chaintxstats
        assert 'txrate' not in chaintxstats
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
assert_equal(res['total_amount'], Decimal('8725.00000000'))
assert_equal(res['transactions'], 200)
assert_equal(res['height'], 200)
assert_equal(res['txouts'], 200)
assert_equal(res['bogosize'], 15000),
assert_equal(res['bestblock'], node.getblockhash(200))
size = res['disk_size']
assert size > 6400
assert size < 64000
assert_equal(len(res['bestblock']), 64)
assert_equal(len(res['hash_serialized_2']), 64)
self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block")
b1hash = node.getblockhash(1)
node.invalidateblock(b1hash)
res2 = node.gettxoutsetinfo()
assert_equal(res2['transactions'], 0)
assert_equal(res2['total_amount'], Decimal('0'))
assert_equal(res2['height'], 0)
assert_equal(res2['txouts'], 0)
assert_equal(res2['bogosize'], 0),
assert_equal(res2['bestblock'], node.getblockhash(0))
assert_equal(len(res2['hash_serialized_2']), 64)
self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block")
node.reconsiderblock(b1hash)
res3 = node.gettxoutsetinfo()
# The field 'disk_size' is non-deterministic and can thus not be
# compared between res and res3. Everything else should be the same.
del res['disk_size'], res3['disk_size']
assert_equal(res, res3)
def _test_getblockheader(self):
    """Exercise getblockheader: argument validation and header field sanity."""
    node = self.nodes[0]

    # Malformed hashes -> RPC_INVALID_PARAMETER (-8); a well-formed but
    # unknown hash -> RPC_INVALID_ADDRESS_OR_KEY (-5).
    assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense")
    assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
    assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")

    besthash = node.getbestblockhash()
    secondbesthash = node.getblockhash(199)
    header = node.getblockheader(blockhash=besthash)

    # Exact values for the tip of the 200-block setup chain.
    assert_equal(header['hash'], besthash)
    assert_equal(header['height'], 200)
    assert_equal(header['confirmations'], 1)
    assert_equal(header['previousblockhash'], secondbesthash)
    assert_is_hex_string(header['chainwork'])
    assert_equal(header['nTx'], 1)
    assert_is_hash_string(header['hash'])
    assert_is_hash_string(header['previousblockhash'])
    assert_is_hash_string(header['merkleroot'])
    assert_is_hash_string(header['bits'], length=None)
    # The remaining fields vary between runs; check types only.
    assert isinstance(header['time'], int)
    assert isinstance(header['mediantime'], int)
    assert isinstance(header['nonce'], int)
    assert isinstance(header['version'], int)
    assert isinstance(int(header['versionHex'], 16), int)
    assert isinstance(header['difficulty'], Decimal)
def _test_getdifficulty(self):
    """Check getdifficulty returns the expected regtest difficulty."""
    difficulty = self.nodes[0].getdifficulty()
    # 1 hash in 2 should be valid, so difficulty should be 1/2**31
    # binary => decimal => binary math is why we do this check
    assert abs(difficulty * 2**31 - 1) < 0.0001
def _test_getnetworkhashps(self):
    """Check getnetworkhashps returns the expected regtest hash rate."""
    hashes_per_second = self.nodes[0].getnetworkhashps()
    # This should be 2 hashes every 10 minutes or 1/300
    assert abs(hashes_per_second * 300 - 1) < 0.0001
def _test_stopatheight(self):
    """Check that -stopatheight shuts the node down at the configured height."""
    assert_equal(self.nodes[0].getblockcount(), 200)
    self.nodes[0].generatetoaddress(6, self.nodes[0].get_deterministic_priv_key().address)
    assert_equal(self.nodes[0].getblockcount(), 206)
    self.log.debug('Node should not stop at this height')
    assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
    try:
        # Mining block 207 triggers the shutdown; the node may exit before
        # it can answer the RPC, so a dropped connection is expected here.
        self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
    except (ConnectionError, http.client.BadStatusLine):
        pass  # The node already shut down before response
    self.log.debug('Node should stop at this height...')
    self.nodes[0].wait_until_stopped()
    self.start_node(0)
    assert_equal(self.nodes[0].getblockcount(), 207)
def _test_waitforblockheight(self):
    """Check waitforblockheight after a fork below the current tip.

    Builds a short fork at height 20, invalidates its tip, and verifies
    that waitforblockheight still reports the active-chain height.
    """
    self.log.info("Test waitforblockheight")
    node = self.nodes[0]
    node.add_p2p_connection(P2PInterface())

    current_height = node.getblock(node.getbestblockhash())['height']

    # Create a fork somewhere below our current height, invalidate the tip
    # of that fork, and then ensure that waitforblockheight still
    # works as expected.
    #
    # (Previously this was broken based on setting
    # `rpc/blockchain.cpp:latestblock` incorrectly.)
    #
    b20hash = node.getblockhash(20)
    b20 = node.getblock(b20hash)

    def solve_and_send_block(prevhash, height, time):
        # Mine one block on top of prevhash and deliver it over P2P.
        b = create_block(prevhash, create_coinbase(height), time)
        b.solve()
        node.p2p.send_message(msg_block(b))
        node.p2p.sync_with_ping()
        return b

    b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
    b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)

    node.invalidateblock(b22f.hash)

    def assert_waitforheight(height, timeout=2):
        # Regardless of the requested height, the RPC must report the
        # height of the active chain, not the invalidated fork.
        assert_equal(
            node.waitforblockheight(height=height, timeout=timeout)['height'],
            current_height)

    assert_waitforheight(0)
    assert_waitforheight(current_height - 1)
    assert_waitforheight(current_height)
    assert_waitforheight(current_height + 1)
# Run the functional test when executed as a script.
if __name__ == '__main__':
    BlockchainTest().main()
| syscoin/syscoin2 | test/functional/rpc_blockchain.py | Python | mit | 14,422 |
# Resources
from lob.resource import (
Address,
BankAccount,
BillingGroup,
BulkUSVerification,
BulkIntlVerification,
Card,
CardOrder,
Check,
Letter,
Postcard,
SelfMailer,
USAutocompletion,
USReverseGeocodeLookup,
USVerification,
USZipLookup,
IntlVerification
)
from lob.version import VERSION
# Module-level client configuration: callers set api_key before issuing
# requests; api_base is the root URL for all v1 API endpoints.
api_key = None
api_base = 'https://api.lob.com/v1'
| lob/lob-python | lob/__init__.py | Python | mit | 410 |
import datetime
from xml.etree import ElementTree
try:
from urllib.parse import urlparse, parse_qs
except ImportError:
# Python 2
from urlparse import urlparse, parse_qs
import html5lib
import requests
from .models import (
Category, SearchSortKey, SearchOrderKey, SearchResultPage, TorrentPage,
Torrent, TorrentStub, User
)
# Error text Nyaa embeds in the page when a torrent ID does not exist;
# used by the client to detect "not found" responses.
TORRENT_NOT_FOUND_TEXT = \
    u'The torrent you are looking for does not appear to be in the database.'
class TorrentNotFoundError(Exception):
    """Raised when the tracker reports that the requested torrent does not exist."""
class NyaaClient(object):
    """The Nyaa client.

    Provides configuration and methods to access Nyaa by scraping its HTML
    pages.  All element lookups assume the page layout of www.nyaa.se; a
    layout change on the tracker will break the parsers below.
    """

    def __init__(self, url='http://www.nyaa.se'):
        """Initialize the :class:`NyaaClient`.

        :param url: the base URL of the Nyaa or Nyaa-like torrent tracker
        """
        self.base_url = url

    def _get_page_content(self, response):
        """Given a :class:`requests.Response`, return the
        :class:`xml.etree.Element` of the content `div`.

        :param response: a :class:`requests.Response` to parse
        :returns: the :class:`Element` of the first content `div` or `None`
        """
        document = html5lib.parse(
            response.content,
            encoding=response.encoding,
            treebuilder='etree',
            namespaceHTMLElements=False
        )
        # etree doesn't fully support XPath, so we can't just search
        # the attribute values for "content"
        divs = document.findall(
            ".//body//div[@class]")
        content_div = None
        for div in divs:
            # The class attribute may hold several space-separated names;
            # match any div whose class list contains "content".
            if "content" in div.attrib['class'].split(' '):
                content_div = div
                break
        # The `Element` object is False-y when there are no subelements,
        # so compare to `None`
        if content_div is None:
            return None
        return content_div

    def view_torrent(self, torrent_id):
        """Retrieves and parses the torrent page for a given `torrent_id`.

        :param torrent_id: the ID of the torrent to view
        :raises TorrentNotFoundError: if the torrent does not exist
        :returns: a :class:`TorrentPage` with a snapshot view of the torrent
            detail page
        """
        params = {
            'page': 'view',
            'tid': torrent_id,
        }
        r = requests.get(self.base_url, params=params)
        content = self._get_page_content(r)
        # Check if the content div has any child elements.
        # NOTE(review): if the content div is missing entirely, `content` is
        # None and len(None) raises TypeError -- confirm that cannot happen.
        if not len(content):
            # The "torrent not found" text in the page has some unicode junk
            # that we can safely ignore.
            text = str(content.text.encode('ascii', 'ignore'))
            if TORRENT_NOT_FOUND_TEXT in text:
                raise TorrentNotFoundError(TORRENT_NOT_FOUND_TEXT)
        # The fixed <td> indices below assume the detail table layout of
        # the tracker's torrent view page.
        cell_td_elems = content.findall('.//td')
        name = cell_td_elems[3].text
        category_href = content\
            .findall(".//td[@class='viewcategory']/a[2]")[0]\
            .attrib['href']
        category_value = category_href.split('cats=')[1]
        category = Category.lookup_category(category_value)
        # parse the submitter details
        submitter_a_elem = cell_td_elems[7].findall('a')[0]
        submitter_id = submitter_a_elem.attrib['href'].split('=')[1]
        submitter_name = submitter_a_elem.findall('span')[0].text
        submitter = User(submitter_id, submitter_name)
        tracker = cell_td_elems[11].text
        date_created = datetime.datetime.strptime(
            cell_td_elems[5].text, '%Y-%m-%d, %H:%M %Z')
        seeders = int(content.findall(".//span[@class='viewsn']")[0].text)
        leechers = int(content.findall(".//span[@class='viewln']")[0].text)
        downloads = int(content.findall(".//span[@class='viewdn']")[0].text)
        file_size = cell_td_elems[21].text
        # note that the tree returned by html5lib might not exactly match the
        # original contents of the description div
        description = ElementTree.tostring(
            content.findall(".//div[@class='viewdescription']")[0],
            encoding='utf8', method='html')
        return TorrentPage(
            torrent_id, name, submitter, category, tracker, date_created,
            seeders, leechers, downloads, file_size, description)

    def get_torrent(self, torrent_id):
        """Gets the `.torrent` data for the given `torrent_id`.

        :param torrent_id: the ID of the torrent to download
        :raises TorrentNotFoundError: if the torrent does not exist
        :returns: :class:`Torrent` of the associated torrent
        """
        params = {
            'page': 'download',
            'tid': torrent_id,
        }
        r = requests.get(self.base_url, params=params)
        # A missing torrent comes back as an HTML page rather than a
        # torrent payload, so the content type is the "not found" signal.
        if r.headers.get('content-type') != 'application/x-bittorrent':
            raise TorrentNotFoundError(TORRENT_NOT_FOUND_TEXT)
        torrent_data = r.content
        return Torrent(torrent_id, torrent_data)

    def search(self, terms, category=Category.all_categories, page=1,
               sort_key=SearchSortKey.date,
               order_key=SearchOrderKey.descending):
        """Get a list of torrents that match the given search term

        :param terms: the `str` needle
        :param category: the desired :class:`Category` of the results
        :param page: the 1-based page to return the result
        :param sort_key: the :class:`SearchSortKey` of the results list
        :param order_key: the :class:`SearchOrderkey` of the results list
        :return: a :class:`SearchPage` of results
        """
        params = {
            'page': 'search',
            'term': terms,
            'cats': category.value,
            'sort': sort_key.value,
            'order': order_key.value,
        }
        r = requests.get(self.base_url, params=params)
        content = self._get_page_content(r)
        # first, get the total number of pages returned. this findall returns
        # two results (one from the top half of the page, one from the bottom),
        # so only take the first element.
        a_to_last_page = content.findall('.//div[@class="rightpages"]/a[2]')
        if not a_to_last_page:
            total_pages = 1
        else:
            last_page_url = a_to_last_page[0].attrib['href']
            offset = extract_url_query_parameter(last_page_url, "offset")[0]
            # NOTE(review): this treats the last page link's "offset" query
            # value as the page count -- verify it is pages, not an item
            # offset, against the live site.
            total_pages = int(offset)
        torrent_stubs = []
        # Result rows are the table rows carrying the "tlistrow" class.
        rows = (x for x in content.findall('.//table//tr')
                if 'tlistrow' in x.attrib.get('class', ''))
        for row in rows:
            cell_td_elems = row.findall('td')
            category_value = extract_url_query_parameter(
                cell_td_elems[0].find('a').attrib['href'],
                'cats')[0]
            category = Category.lookup_category(category_value)
            torrent_id = extract_url_query_parameter(
                cell_td_elems[1].find('a').attrib['href'],
                "tid")[0]
            name = cell_td_elems[1].find('a').text
            file_size = cell_td_elems[3].text
            # Seeder/leecher counts can be non-numeric placeholders; store
            # None in that case.
            if cell_td_elems[4].text.isdigit():
                seeders = int(cell_td_elems[4].text)
            else:
                seeders = None
            if cell_td_elems[5].text.isdigit():
                leechers = int(cell_td_elems[5].text)
            else:
                leechers = None
            downloads = int(cell_td_elems[6].text)
            stub = TorrentStub(torrent_id, name, category, seeders, leechers,
                               file_size, downloads)
            torrent_stubs.append(stub)
        return SearchResultPage(
            terms, category, sort_key, order_key, page, total_pages,
            torrent_stubs)
def extract_url_query_parameter(url, parameter):
    """Given a URL (ex: "http://www.test.com/path?query=3") and a parameter
    (ex: "query"), return its value(s) as a list.

    :param url: a `str` URL
    :param parameter: the name of the URL query parameter to extract
    :return: a `list` of values for the given query name in the given URL
        (one entry per occurrence), or an empty list if the parameter is
        not present in the URL
    """
    # Fix: the previous docstring claimed an empty *string* was returned for
    # a missing parameter; the code has always returned an empty list.
    query_string = urlparse(url).query
    return parse_qs(query_string).get(parameter, [])
| kennydo/nyaalib | nyaalib/__init__.py | Python | mit | 8,198 |
import csv
def model_name(table_name):
    """Map a schema table name to its Django model class name.

    Known dm+d acronym tables become all-caps names (e.g. ``vmpp`` ->
    ``VMPP``); every other table is converted from snake_case to CamelCase.
    """
    acronym_tables = {'vtm', 'vpi', 'vmp', 'vmpp', 'amp', 'ampp', 'gtin'}
    if table_name in acronym_tables:
        return table_name.upper()
    return ''.join(part.title() for part in table_name.split('_'))
def quote(s):
    """Return *s* wrapped in double quotes, for embedding in generated source.

    :raises ValueError: if *s* itself contains a double quote, which the
        naive wrapping below could not represent safely.
    """
    # Explicit check instead of `assert`: assertions are stripped when Python
    # runs with -O, which would silently emit broken generated code.
    if '"' in s:
        raise ValueError('cannot quote string containing a double quote: %r' % s)
    return '"' + s + '"'
# Read the flattened schema description and print Django model definitions
# for each table to stdout (intended to be redirected into a models file).
with open('schema.csv') as f:
    lines = list(csv.DictReader(f))

print('from django.db import models')

table = None
for line in lines:
    # 'ccontent' rows are skipped entirely.
    if line['table'] == 'ccontent':
        continue
    if line['table'] != table:
        # First field of a new table: emit a fresh model class header.
        table = line['table']
        print()
        print()
        print(f'class {model_name(table)}(models.Model):')
        print('# class Meta:')
        print('# verbose_name = "TODO"')
        print()
    if line['type'] == 'retired':
        continue

    # Collect the keyword options for this model field, in emission order.
    options = []
    if line['primary_key'] == 'True':
        options.append(('primary_key', 'True'))
    if line['db_column']:
        options.append(('db_column', quote(line['db_column'])))
    if line['type'] in ['ForeignKey', 'OneToOneField']:
        options.append(('to', quote(model_name(line['to']))))
        options.append(('on_delete', 'models.CASCADE'))
        # History/unit-of-measure FK columns need a related_name to avoid
        # reverse-accessor clashes on the target model.
        if 'prevcd' in line['db_column'] or 'uomcd' in line['db_column']:
            options.append(('related_name', quote('+')))
    elif line['type'] == 'CharField':
        options.append(('max_length', line['max_length']))
    elif line['type'] == 'DecimalField':
        options.append(('max_digits', line['max_digits']))
        options.append(('decimal_places', line['decimal_places']))
    if line['optional'] == 'Y':
        # BooleanField and primary keys never get null=True here.
        if line['type'] != 'BooleanField' and line['primary_key'] != 'True':
            options.append(('null', 'True'))
    options.append(('help_text', quote(line['descr'])))

    print(f' {line["field"]} = models.{line["type"]}(')
    for k, v in options:
        print(f' {k}={v},')
    print(' )')
| annapowellsmith/openpresc | openprescribing/dmd2/gen_models/gen_models.py | Python | mit | 1,981 |
#!/usr/bin/env python
import csv
import json
import requests
def main():
    """Convert the downloaded GovTrack bills JSON into a flat CSV file."""
    # We'll use a local version of this file from now on to save on
    # bandwith.
    with open('bills.json', 'r') as f:
        data = json.load(f)

    objects = data['objects']

    # Create a csv file to output
    with open('bills.csv', 'w') as o:
        # Create a csv writer. This will help us format the file
        # correctly.
        writer = csv.writer(o)

        # Write out the header row
        writer.writerow([
            u'title',
            u'label',
            u'number',
            u'current_status'
        ])

        # Iterate through each dict in the array `objects`
        # NOTE(review): .encode('utf-8') returns bytes on Python 3, which
        # would be written as b'...' reprs; the u'' literals suggest this
        # script targets Python 2 -- confirm before porting.
        for bill in objects:
            writer.writerow([
                bill['title_without_number'].encode('utf-8'),
                bill['bill_type_label'].encode('utf-8'),
                bill['number'],
                bill['current_status'].encode('utf-8')
            ])


if __name__ == '__main__':
    main()
| tommeagher/pycar14 | project4/step_3_complete.py | Python | mit | 1,081 |
# box
# Copyright 2013-2014 Dipen Patel
# See LICENSE for details.
# HTTP status codes returned by the Box API, mapped to short labels.
STATUSCODES = {
    200 : "success",
    201 : "created",
    202 : "accepted",
    204 : "no_content",
    302 : "redirect",
    304 : "not_modified",
    400 : "bad_request",
    401 : "unauthorized",
    403 : "forbidden",
    404 : "not_found",
    405 : "method_not_allowed",
    409 : "conflict",
    412 : "precondition_failed",
    429 : "too_many_requests",
    500 : "internal_server_error",
    507 : "insufficient_storage"
}

# Status codes the client treats as errors (note: includes 204).
ERRORCODES = (204,400,401,403,404,405,409,412,429,500,507)
class BoxError(Exception):
    """Box exception.

    Wraps an error reported by the Box API.

    :param reason: human-readable description of the failure
    :param response: the HTTP response that triggered the error, if any
    """
    def __init__(self, reason, response=None):
        # NOTE: `unicode` is Python 2 only; this module targets Python 2.
        self.reason = unicode(reason)
        self.response = response
        Exception.__init__(self, reason)

    def __str__(self):
        return self.reason
| dipen30/boxapi | box/error.py | Python | mit | 982 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import unittest
from pyowm.airpollutionapi30 import airpollution_client, airpollution_manager, coindex, so2index, ozone, no2index, airstatus
from pyowm.config import DEFAULT_CONFIG
from pyowm.constants import AIRPOLLUTION_API_VERSION
from pyowm.utils import timestamps
from tests.unit.airpollutionapi30.test_ozone import OZONE_JSON
from tests.unit.airpollutionapi30.test_coindex import COINDEX_JSON
from tests.unit.airpollutionapi30.test_no2index import NO2INDEX_JSON
from tests.unit.airpollutionapi30.test_so2index import SO2INDEX_JSON
from tests.unit.airpollutionapi30.test_airstatus import AIRSTATUS_JSON, AIRSTATUS_MULTIPLE_JSON
class TestAirPollutionManager(unittest.TestCase):
    """Unit tests for AirPollutionManager with the HTTP client mocked out.

    Each test monkey-patches the relevant AirPollutionHttpClient method to
    return canned JSON, calls the manager, and then restores the original
    method so that later tests run against an unpatched client.
    """

    __test_instance = airpollution_manager.AirPollutionManager('fakeapikey', DEFAULT_CONFIG)

    # --- canned-JSON stand-ins for the HTTP client methods ---

    def mock_get_coi_returning_coindex_around_coords(self, params_dict):
        return json.loads(COINDEX_JSON)

    def mock_get_o3_returning_ozone_around_coords(self, params_dict):
        return json.loads(OZONE_JSON)

    def mock_get_no2_returning_no2index_around_coords(self, params_dict):
        return json.loads(NO2INDEX_JSON)

    def mock_get_air_pollution(self, params_dict):
        return json.loads(AIRSTATUS_JSON)

    def mock_get_forecast_air_pollution(self, params_dict):
        return json.loads(AIRSTATUS_MULTIPLE_JSON)

    def mock_get_historical_air_pollution(self, params_dict):
        return json.loads(AIRSTATUS_MULTIPLE_JSON)

    def mock_get_so2_returning_so2index_around_coords(self, params_dict):
        return json.loads(SO2INDEX_JSON)

    def test_instantiation_with_wrong_params(self):
        self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, None, dict())
        self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, 'apikey', None)

    def test_get_uvindex_api_version(self):
        result = self.__test_instance.airpollution_api_version()
        self.assertIsInstance(result, tuple)
        self.assertEqual(result, AIRPOLLUTION_API_VERSION)

    def test_coindex_around_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
        airpollution_client.AirPollutionHttpClient.get_coi = \
            self.mock_get_coi_returning_coindex_around_coords
        result = self.__test_instance.coindex_around_coords(45, 9, interval='day')
        # Fix: restore the patched attribute (`get_coi`); the original code
        # assigned to a new `coi` attribute, leaving the mock installed.
        airpollution_client.AirPollutionHttpClient.get_coi = ref_to_original
        self.assertTrue(isinstance(result, coindex.COIndex))
        self.assertIsNotNone(result.reference_time)
        self.assertIsNotNone(result.reception_time())
        loc = result.location
        self.assertIsNotNone(loc)
        self.assertIsNotNone(loc.lat)
        self.assertIsNotNone(loc.lon)
        self.assertIsNotNone(result.co_samples)

        # When no interval is given, the manager defaults it to 'year'.
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
        airpollution_client.AirPollutionHttpClient.get_coi = \
            self.mock_get_coi_returning_coindex_around_coords
        result = self.__test_instance.coindex_around_coords(45, 9, interval=None)
        airpollution_client.AirPollutionHttpClient.get_coi = ref_to_original
        self.assertTrue(isinstance(result, coindex.COIndex))
        self.assertEqual('year', result.interval)

    def test_coindex_around_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
                          self.__test_instance, 200, 2.5)

    def test_ozone_around_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
        airpollution_client.AirPollutionHttpClient.get_o3 = \
            self.mock_get_o3_returning_ozone_around_coords
        result = self.__test_instance.ozone_around_coords(45, 9, interval='day')
        # Fix: restore `get_o3` (was mistakenly assigned to a new `o3`
        # attribute, leaving the mock installed for later tests).
        airpollution_client.AirPollutionHttpClient.get_o3 = ref_to_original
        self.assertTrue(isinstance(result, ozone.Ozone))
        self.assertIsNotNone(result.reference_time)
        self.assertIsNotNone(result.reception_time())
        loc = result.location
        self.assertIsNotNone(loc)
        self.assertIsNotNone(loc.lat)
        self.assertIsNotNone(loc.lon)
        self.assertIsNotNone(result.du_value)

        ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
        airpollution_client.AirPollutionHttpClient.get_o3 = \
            self.mock_get_o3_returning_ozone_around_coords
        result = self.__test_instance.ozone_around_coords(45, 9, interval=None)
        airpollution_client.AirPollutionHttpClient.get_o3 = ref_to_original
        self.assertTrue(isinstance(result, ozone.Ozone))
        self.assertEqual('year', result.interval)

    def test_ozone_around_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
                          self.__test_instance, 200, 2.5)

    def test_no2index_around_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
        airpollution_client.AirPollutionHttpClient.get_no2 = \
            self.mock_get_no2_returning_no2index_around_coords
        result = self.__test_instance.no2index_around_coords(45, 9, interval='day')
        airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
        self.assertTrue(isinstance(result, no2index.NO2Index))
        self.assertIsNotNone(result.reference_time)
        self.assertIsNotNone(result.reception_time())
        loc = result.location
        self.assertIsNotNone(loc)
        self.assertIsNotNone(loc.lat)
        self.assertIsNotNone(loc.lon)
        self.assertIsNotNone(result.no2_samples)

        ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
        airpollution_client.AirPollutionHttpClient.get_no2 = \
            self.mock_get_no2_returning_no2index_around_coords
        result = self.__test_instance.no2index_around_coords(45, 9, interval=None)
        airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
        self.assertTrue(isinstance(result, no2index.NO2Index))
        self.assertEqual('year', result.interval)

    def test_no2index_around_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
                          self.__test_instance, 200, 2.5)

    def test_so2index_around_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
        airpollution_client.AirPollutionHttpClient.get_so2 = \
            self.mock_get_so2_returning_so2index_around_coords
        result = self.__test_instance.so2index_around_coords(45, 9, interval='day')
        airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
        self.assertTrue(isinstance(result, so2index.SO2Index))
        self.assertIsNotNone(result.reference_time())
        self.assertIsNotNone(result.reception_time())
        loc = result.location
        self.assertIsNotNone(loc)
        self.assertIsNotNone(loc.lat)
        self.assertIsNotNone(loc.lon)
        self.assertIsNotNone(result.so2_samples)
        self.assertIsNotNone(result.interval)

        ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
        airpollution_client.AirPollutionHttpClient.get_so2 = \
            self.mock_get_so2_returning_so2index_around_coords
        result = self.__test_instance.so2index_around_coords(45, 9, interval=None)
        airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
        self.assertTrue(isinstance(result, so2index.SO2Index))
        self.assertEqual('year', result.interval)

    def test_so2index_around_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
                          self.__test_instance, 200, 2.5)

    def test_air_quality_at_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_air_pollution
        airpollution_client.AirPollutionHttpClient.get_air_pollution = \
            self.mock_get_air_pollution
        result = self.__test_instance.air_quality_at_coords(45, 9)
        airpollution_client.AirPollutionHttpClient.get_air_pollution = ref_to_original
        self.assertTrue(isinstance(result, airstatus.AirStatus))
        self.assertIsNotNone(result.reference_time)
        self.assertIsNotNone(result.reception_time())
        loc = result.location
        self.assertIsNotNone(loc)
        self.assertIsNotNone(loc.lat)
        self.assertIsNotNone(loc.lon)
        self.assertIsNotNone(result.air_quality_data)

    def test_air_quality_at_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
                          self.__test_instance, 200, 2.5)

    def test_air_quality_forecast_at_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution
        airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = \
            self.mock_get_forecast_air_pollution
        result = self.__test_instance.air_quality_forecast_at_coords(45, 9)
        airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = ref_to_original
        self.assertTrue(isinstance(result, list))
        for item in result:
            self.assertIsInstance(item, airstatus.AirStatus)
            self.assertIsNotNone(item.reference_time)
            self.assertIsNotNone(item.reception_time())
            loc = item.location
            self.assertIsNotNone(loc)
            self.assertIsNotNone(loc.lat)
            self.assertIsNotNone(loc.lon)
            self.assertIsNotNone(item.air_quality_data)

    def test_air_quality_forecast_at_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
                          self.__test_instance, 43.7, -200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
                          self.__test_instance, 43.7, 200.0)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
                          self.__test_instance, -200, 2.5)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
                          self.__test_instance, 200, 2.5)

    def test_air_quality_history_at_coords(self):
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_historical_air_pollution
        airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = \
            self.mock_get_historical_air_pollution
        result = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678)
        airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = ref_to_original
        self.assertTrue(isinstance(result, list))
        for item in result:
            self.assertIsInstance(item, airstatus.AirStatus)
            self.assertIsNotNone(item.reference_time)
            self.assertIsNotNone(item.reception_time())
            loc = item.location
            self.assertIsNotNone(loc)
            self.assertIsNotNone(loc.lat)
            self.assertIsNotNone(loc.lon)
            self.assertIsNotNone(item.air_quality_data)

    def test_air_quality_history_at_coords_fails_with_wrong_parameters(self):
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, 43.7, -200.0, 12345678, 12349999)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, 43.7, 200.0, 12345678, 12349999)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, -200, 2.5, 12345678, 12349999)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, 200, 2.5, 12345678, 12349999)
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, 200, 2.5, 'test')
        self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
                          self.__test_instance, 200, 2.5, 'test', 'test2')

    def test_air_quality_history_at_coords_clips_end_param_to_current_timestamp(self):
        now = timestamps.now(timeformat='unix')
        end = now + 99999999999

        def assert_clipped(obj, params_dict):
            self.assertEqual(params_dict['end'], now)

        # Fix: save and restore the patched method; the original test left
        # `assert_clipped` installed on the client class after running.
        ref_to_original = airpollution_client.AirPollutionHttpClient.get_historical_air_pollution
        airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = assert_clipped
        _ = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678, end=end)
        airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = ref_to_original

    def test_repr(self):
        print(self.__test_instance)
| csparpa/pyowm | tests/unit/airpollutionapi30/test_airpollution_manager.py | Python | mit | 15,769 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import clusters

# Load the feed matrix: row labels (blog names), column words, and counts.
blognames,words,data=clusters.readfile( './../data/feed_list.csv' )

# Run k-means clustering for k = 1..9 and print the blog names grouped
# into each cluster.  (Python 2 print-statement syntax.)
for k in range( 1, 10 ) :
    kclust = None  # reset before each run; reassigned immediately below
    kclust = clusters.kcluster(data, blognames, clusters.pearson, k)
    for k_id in range( len( kclust ) ):
        print [blognames[r] for r in kclust[k_id]]
| oreno4645/clustering | clusters/kcluster_do.py | Python | mit | 323 |
import logging
import rlp
from utils import big_endian_to_int as idec
from utils import int_to_big_endian4 as ienc4
from utils import int_to_big_endian as ienc
from utils import recursive_int_to_big_endian
import dispatch
import sys
import signals
logger = logging.getLogger(__name__)
def lrlp_decode(data):
    """RLP-decode *data*, normalising the result so a list is always returned."""
    decoded = rlp.decode(data)
    if isinstance(decoded, str):
        return [decoded]
    return decoded
def load_packet(packet):
    # Module-level convenience wrapper around Packeter.load_packet.
    return Packeter.load_packet(packet)
class Packeter(object):
"""
Translates between the network and the local data
https://github.com/ethereum/wiki/wiki/%5BEnglish%5D-Wire-Protocol
stateless!
.. note::
#. Can only be used after the `config` method is called
'''
"""
# Wire command IDs -> command names (Ethereum wire protocol).
cmd_map = dict(((0x00, 'Hello'),
                (0x01, 'Disconnect'),
                (0x02, 'Ping'),
                (0x03, 'Pong'),
                (0x10, 'GetPeers'),
                (0x11, 'Peers'),
                (0x12, 'Transactions'),
                (0x13, 'Blocks'),
                (0x14, 'GetChain'),
                (0x15, 'NotInChain'),
                (0x16, 'GetTransactions')))
# Reverse lookup: command name -> wire ID.
cmd_map_by_name = dict((v, k) for k, v in cmd_map.items())

# Disconnect reason strings -> wire codes, and the reverse lookup below.
disconnect_reasons_map = dict((
    ('Disconnect requested', 0x00),
    ('TCP sub-system error', 0x01),
    ('Bad protocol', 0x02),
    ('Useless peer', 0x03),
    ('Too many peers', 0x04),
    ('Already connected', 0x05),
    ('Wrong genesis block', 0x06),
    ('Incompatible network protocols', 0x07),
    ('Client quitting', 0x08)))
disconnect_reasons_map_by_id = \
    dict((v, k) for k, v in disconnect_reasons_map.items())

# Every wire packet starts with this 4-byte magic value.
SYNCHRONIZATION_TOKEN = 0x22400891
PROTOCOL_VERSION = 19
# NETWORK_ID should be 0 per the wire protocol (see dump_Hello).  The
# 512-bit node identifier described by the protocol is NODE_ID, which is
# assigned in configure().
NETWORK_ID = 0
CLIENT_ID = 'Ethereum(py)/0.5.2/%s/Protocol:%d' % (sys.platform,
                                                   PROTOCOL_VERSION)
# 0x01 peers discovery + 0x02 transaction relaying + 0x04 chain querying
# (bit meanings per the Hello message spec in dump_Hello).
CAPABILITIES = 0x01 + 0x02 + 0x04

def __init__(self):
    # Stateless until configure() is called with the client config.
    pass
def configure(self, config):
    """Bind the packeter to a parsed client config.

    Reads optional overrides for the client id and the node id from the
    [network] section; an empty client_id keeps the class default.
    """
    self.config = config
    self.CLIENT_ID = self.config.get('network', 'client_id') \
        or self.CLIENT_ID
    self.NODE_ID = self.config.get('network', 'node_id')
@classmethod
def load_packet(cls, packet):
    '''
    Though TCP provides a connection-oriented medium, Ethereum nodes
    communicate in terms of packets. These packets are formed as a 4-byte
    synchronisation token (0x22400891), a 4-byte "payload size", to be
    interpreted as a big-endian integer and finally an N-byte
    RLP-serialised data structure, where N is the aforementioned
    "payload size". To be clear, the payload size specifies the number of
    bytes in the packet ''following'' the first 8.

    :return: (success, result), where result should be None when fail,
    and (header, payload_len, cmd, data) when success
    '''
    header = idec(packet[:4])
    if header != cls.SYNCHRONIZATION_TOKEN:
        return False, 'check header failed, skipping message,'\
            'sync token was hex: {0:x}'.format(header)

    try:
        payload_len = idec(packet[4:8])
    except Exception as e:
        return False, str(e)

    # Not enough bytes buffered yet for the declared payload.
    if len(packet) < payload_len + 8:
        return False, 'Packet is broken'

    try:
        payload = lrlp_decode(packet[8:8 + payload_len])
    except Exception as e:
        return False, str(e)

    # The first payload item must decode to a known command id.
    if (not len(payload)) or (idec(payload[0]) not in cls.cmd_map):
        return False, 'check cmd failed'

    cmd = Packeter.cmd_map.get(idec(payload[0]))
    # Bytes after the declared payload belong to the next packet.
    remain = packet[8 + payload_len:]
    return True, (header, payload_len, cmd, payload[1:], remain)
def load_cmd(self, packet):
    """Parse *packet* and return (cmd_name, data_items, remaining_bytes).

    :raises Exception: with the parser's error message when the packet is
        malformed (see load_packet for the possible failure reasons).
    """
    success, res = self.load_packet(packet)
    if not success:
        raise Exception(res)
    _, _, cmd, data, remain = res
    return cmd, data, remain
@classmethod
def dump_packet(cls, data):
    """
    Serialise *data* into a wire packet:
    4-byte synchronisation token, (0x22400891),
    a 4-byte "payload size", to be interpreted as a big-endian integer
    an N-byte RLP-serialised data structure
    """
    # Integers anywhere in the structure are converted to big-endian
    # byte strings before RLP encoding.
    payload = rlp.encode(recursive_int_to_big_endian(data))
    packet = ienc4(cls.SYNCHRONIZATION_TOKEN)
    packet += ienc4(len(payload))
    packet += payload
    return packet
    def dump_Hello(self):
        # inconsistency here!
        # spec says CAPABILITIES, LISTEN_PORT but code reverses
        # NOTE(review): deliberately left as-is — changing the field order
        # would change the wire format; confirm against peer implementations
        # before ever reordering.
        """
        [0x00, PROTOCOL_VERSION, NETWORK_ID, CLIENT_ID, CAPABILITIES,
        LISTEN_PORT, NODE_ID]
        First packet sent over the connection, and sent once by both sides.
        No other messages may be sent until a Hello is received.
        PROTOCOL_VERSION is one of:
        0x00 for PoC-1;
        0x01 for PoC-2;
        0x07 for PoC-3.
        0x08 sent by Ethereum(++)/v0.3.11/brew/Darwin/unknown
        NETWORK_ID should be 0.
        CLIENT_ID Specifies the client software identity, as a human-readable
        string (e.g. "Ethereum(++)/1.0.0").
        LISTEN_PORT specifies the port that the client is listening on
        (on the interface that the present connection traverses).
        If 0 it indicates the client is not listening.
        CAPABILITIES specifies the capabilities of the client as a set of
        flags; presently three bits are used:
        0x01 for peers discovery,
        0x02 for transaction relaying,
        0x04 for block-chain querying.
        NODE_ID is optional and specifies a 512-bit hash, (potentially to be
        used as public key) that identifies this node.
        """
        data = [self.cmd_map_by_name['Hello'],
                self.PROTOCOL_VERSION,
                self.NETWORK_ID,
                self.CLIENT_ID,
                self.config.getint('network', 'listen_port'),
                self.CAPABILITIES,
                self.NODE_ID
                ]
        return self.dump_packet(data)
def dump_Ping(self):
data = [self.cmd_map_by_name['Ping']]
return self.dump_packet(data)
def dump_Pong(self):
data = [self.cmd_map_by_name['Pong']]
return self.dump_packet(data)
def dump_Disconnect(self, reason=None):
data = [self.cmd_map_by_name['Disconnect']]
if reason:
data.append(self.disconnect_reasons_map[reason])
return self.dump_packet(data)
def dump_GetPeers(self):
data = [self.cmd_map_by_name['GetPeers']]
return self.dump_packet(data)
def dump_Peers(self, peers):
'''
:param peers: a sequence of (ip, port, pid)
:return: None if no peers
'''
data = [self.cmd_map_by_name['Peers']]
for ip, port, pid in peers:
assert ip.count('.') == 3
ip = ''.join(chr(int(x)) for x in ip.split('.'))
data.append([ip, port, pid])
return self.dump_packet(data)
def dump_Transactions(self, transactions):
data = [self.cmd_map_by_name['Transactions']] + transactions
return self.dump_packet(data)
def dump_GetTransactions(self):
"""
[0x12, [nonce, receiving_address, value, ... ], ... ]
Specify (a) transaction(s) that the peer should make sure is included
on its transaction queue. The items in the list (following the first
item 0x12) are transactions in the format described in the main
Ethereum specification.
"""
data = [self.cmd_map_by_name['GetTransactions']]
return self.dump_packet(data)
def dump_Blocks(self, blocks):
blocks_as_lists = [rlp.decode(b.serialize()) for b in blocks]
# FIXME, can we have a method to append rlp encoded data
data = [self.cmd_map_by_name['Blocks']] + blocks_as_lists
return self.dump_packet(data)
def dump_GetChain(self, parent_hashes=[], count=1):
"""
[0x14, Parent1, Parent2, ..., ParentN, Count]
Request the peer to send Count (to be interpreted as an integer) blocks
in the current canonical block chain that are children of Parent1
(to be interpreted as a SHA3 block hash). If Parent1 is not present in
the block chain, it should instead act as if the request were for
Parent2 &c. through to ParentN. If the designated parent is the present
block chain head, an empty reply should be sent. If none of the parents
are in the current canonical block chain, then NotInChain should be
sent along with ParentN (i.e. the last Parent in the parents list).
If no parents are passed, then a reply need not be made.
"""
data = [self.cmd_map_by_name['GetChain']] + parent_hashes + [count]
return self.dump_packet(data)
def dump_NotInChain(self, block_hash):
data = [self.cmd_map_by_name['NotInChain'], block_hash]
return self.dump_packet(data)
packeter = Packeter()  # module-level singleton shared by the networking code


@dispatch.receiver(signals.config_ready)
def config_packeter(sender, config, **kwargs):
    # Signal handler: push freshly parsed configuration into the singleton.
    packeter.configure(config)
| elkingtowa/azove | azove/packet.py | Python | mit | 9,338 |
from RPi import GPIO as gpio
from robotics.actors.redbot_motor_actor import RedbotMotorActor
from robotics.interfaces.spi.mcp3008_spi_interface import MCP3008SpiInterface
from robotics.robots.aizek_robot import AizekRobot
from robotics.sensors.redbot_wheel_encoder_sensor import RedbotWheelEncoderSensor
from robotics.sensors.sharp_ir_distance_sensor import SharpIrDistanceSensor
class RobotFactory(object):
    """Builds fully wired robot instances from their hardware components."""

    @staticmethod
    def createAizekRobot():
        """Assemble and return an AizekRobot.

        Configures the GPIO board numbering, wires both motors, the wheel
        encoder and the three Sharp IR distance sensors (all sharing one
        MCP3008 SPI ADC), and passes the wheel geometry to the robot.
        """
        gpio.setmode(gpio.BOARD)
        left_motor = RedbotMotorActor(gpio, 8, 10, 12)
        right_motor = RedbotMotorActor(gpio, 11, 13, 15)
        adc = MCP3008SpiInterface(0)
        encoder = RedbotWheelEncoderSensor(adc)
        left_ir = SharpIrDistanceSensor(adc, 5)
        front_ir = SharpIrDistanceSensor(adc, 4)
        right_ir = SharpIrDistanceSensor(adc, 3)
        return AizekRobot(
            left_motor=left_motor,
            right_motor=right_motor,
            wheel_encoder=encoder,
            left_distance_sensor=left_ir,
            front_distance_sensor=front_ir,
            right_distance_sensor=right_ir,
            wheel_radius=0.032,   # meters
            wheel_distance=0.1,   # meters between the wheels
        )
| asydorchuk/robotics | python/robotics/robots/factory.py | Python | mit | 1,253 |
#!/usr/bin/env python
"""Simple tools to query github.com and gather stats about issues.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
import json
import re
import sys
from datetime import datetime, timedelta
from subprocess import check_output
from urllib2 import urlopen
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
PER_PAGE = 100
element_pat = re.compile(r'<(.+?)>')
rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def parse_link_header(headers):
    """Parse an HTTP ``Link`` header into a {rel: url} mapping."""
    raw = headers.get('link', '')
    urls = element_pat.findall(raw)
    rels = rel_pat.findall(raw)
    return dict(zip(rels, urls))
def get_paged_request(url):
    """Fetch *url* and keep following APIv3 'next' links, returning all results."""
    collected = []
    next_url = url
    while next_url:
        print("fetching %s" % next_url, file=sys.stderr)
        response = urlopen(next_url)
        collected.extend(json.load(response))
        next_url = parse_link_header(response.headers).get('next')
    return collected
def get_issues(project="matplotlib/matplotlib", state="closed", pulls=False):
    """Fetch all issues (or pull requests) for *project* via the Github API."""
    kind = 'pulls' if pulls else 'issues'
    url = ("https://api.github.com/repos/%s/%s?state=%s&per_page=%i"
           % (project, kind, state, PER_PAGE))
    return get_paged_request(url)
def _parse_datetime(s):
    """Parse a Github API timestamp; empty/None falls back to the epoch."""
    if not s:
        return datetime.fromtimestamp(0)
    return datetime.strptime(s, ISO8601)
def issues2dict(issues):
    """Index a list of issue dicts by their 'number' field."""
    return {issue['number']: issue for issue in issues}
def is_pull_request(issue):
    """True when the issue dict carries a pull-request URL."""
    return 'pull_request_url' in issue
def issues_closed_since(period=timedelta(days=365), project="matplotlib/matplotlib", pulls=False):
    """Return all issues/PRs closed after *period*.

    *period* is either an absolute datetime or a timedelta measured back
    from now. For pull requests, entries closed without being merged are
    filtered out.
    """
    kind = 'pulls' if pulls else 'issues'
    if isinstance(period, timedelta):
        period = datetime.now() - period
    url = ("https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i"
           % (project, kind, period.strftime(ISO8601), PER_PAGE))
    closed = get_paged_request(url)
    # The API's `since` filters on update time, so re-filter on close time.
    closed = [issue for issue in closed if _parse_datetime(issue['closed_at']) > period]
    if pulls:
        # Drop pull requests that were rejected (closed but never merged).
        closed = [pr for pr in closed if pr['merged_at']]
    return closed
def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return the issues sorted by the given field (default: closing date)."""
    return sorted(issues, key=lambda issue: issue[field], reverse=reverse)
def report(issues, show_urls=False):
    """Print a one-line-per-issue summary, optionally with reST role links."""
    for issue in issues:
        # titles may have unicode in them, so we must encode before printing
        title = issue['title'].encode('utf-8')
        if show_urls:
            role = 'ghpull' if 'merged_at' in issue else 'ghissue'
            print('* :%s:`%d`: %s' % (role, issue['number'], title))
        else:
            print('* %d: %s' % (issue['number'], title))
#-----------------------------------------------------------------------------
# Main script
#-----------------------------------------------------------------------------
if __name__ == "__main__":
    # Whether to add reST urls for all issues in printout.
    show_urls = True
    # By default, search one month back
    tag = None
    if len(sys.argv) > 1:
        try:
            days = int(sys.argv[1])
        except:
            # Non-integer argument is treated as a git tag name.
            # NOTE(review): on this path `days` stays unbound, but it is only
            # read on the no-tag branch below, so the two never collide.
            tag = sys.argv[1]
    else:
        # No argument: start from the most recent tag.
        tag = check_output(['git', 'describe', '--abbrev=0']).strip()
    if tag:
        # Use the tag's commit date as the starting point.
        cmd = ['git', 'log', '-1', '--format=%ai', tag]
        tagday, tz = check_output(cmd).strip().rsplit(' ', 1)
        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
    else:
        since = datetime.now() - timedelta(days=days)
    print("fetching GitHub stats since %s (tag: %s)" % (since, tag), file=sys.stderr)
    # turn off to play interactively without redownloading, use %run -i
    if 1:
        issues = issues_closed_since(since, pulls=False)
        pulls = issues_closed_since(since, pulls=True)
    # For regular reports, it's nice to show them in reverse chronological order
    issues = sorted_by_field(issues, reverse=True)
    pulls = sorted_by_field(pulls, reverse=True)
    n_issues, n_pulls = map(len, (issues, pulls))
    n_total = n_issues + n_pulls
    # Print summary report we can directly include into release notes.
    print()
    since_day = since.strftime("%Y/%m/%d")
    today = datetime.today().strftime("%Y/%m/%d")
    print(".. _github-stats:")
    print('Github stats')
    print('============')
    print()
    print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
    print()
    print("These lists are automatically generated, and may be incomplete or contain duplicates.")
    print()
    if tag:
        # print git info, in addition to GitHub info:
        since_tag = tag+'..'
        cmd = ['git', 'log', '--oneline', since_tag]
        ncommits = len(check_output(cmd).splitlines())
        author_cmd = ['git', 'log', '--format=* %aN', since_tag]
        all_authors = check_output(author_cmd).splitlines()
        unique_authors = sorted(set(all_authors))
        print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
        print()
        print('\n'.join(unique_authors))
        print()
    print()
    print("We closed a total of %d issues, %d pull requests and %d regular issues;\n"
          "this is the full list (generated with the script \n"
          ":file:`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
    print()
    print('Pull Requests (%d):\n' % n_pulls)
    report(pulls, show_urls)
    print()
    print('Issues (%d):\n' % n_issues)
    report(issues, show_urls)
| yavalvas/yav_com | build/matplotlib/tools/github_stats.py | Python | mit | 6,681 |
import termcolor
import sys
class StrUtils:
    """Static helpers for padded, partly colorized console log output."""

    def __init__(self):
        pass

    @staticmethod
    def print_log_row(op_code, url, status_code, documented_reason, body, curlcommand):
        """Print one aligned log row.

        *curlcommand* may be a plain string or an object exposing a ``get()``
        method that yields the command string.
        """
        # isinstance instead of type(...) == str: also accepts str subclasses.
        if isinstance(curlcommand, str):
            curlstr = curlcommand
        else:
            curlstr = curlcommand.get()
        print(termcolor.colored(StrUtils.fill_string_up_with_blanks(op_code, 7), color='red') + ' | ' +
              termcolor.colored(StrUtils.fill_string_up_with_blanks(url, 100), color='green') + ' |-| ' +
              StrUtils.fill_string_up_with_blanks(status_code, 3) + ' | ' +
              StrUtils.fill_string_up_with_blanks(documented_reason, 20) + ' | ' +
              body + ' | ' + curlstr)

    @staticmethod
    def fill_string_up_with_blanks(fillup_string, num_chars):
        """Right-pad the string with spaces to *num_chars*.

        Strings already longer than *num_chars* are returned unchanged.
        NOTE: None is rendered as the fixed string ``'None '`` regardless of
        *num_chars* — preserved for backward compatibility.
        """
        if fillup_string is None:
            return 'None '
        # str.ljust replaces the previous quadratic manual padding loop.
        return fillup_string.ljust(num_chars)
| Teebytes/TnT-Fuzzer | tntfuzzer/utils/strutils.py | Python | mit | 1,142 |
import unittest
from probe import SearchApi, CompanyApi
class TestProbeAPI(unittest.TestCase):
    """Integration tests for the probe SDK.

    NOTE(review): these hit the live Probe API and depend on real data
    (fixed CINs/PANs), so they can fail for external reasons.
    """

    def test_search_company_loanzen(self):
        # A name-prefix search should return at least one company.
        api = SearchApi()
        companies = api.companies_get('1.1', filters='{"nameStartsWith": "loanzen"}')
        #print type(companies.data), companies.data.companies
        self.assertFalse(len(companies.data.companies) == 0)

    def test_search_authorized_signatory(self):
        # A PAN lookup should return at least one authorized signatory.
        api = SearchApi()
        directors = api.authorized_signatories_get('1.1', filters='{"pan": "ANQPK6045G"}')
        #print directors.data.authorized_signatories
        self.assertFalse(len(directors.data.authorized_signatories) == 0)

    def test_get_company_details_empty(self):
        # Fetch by CIN and check the same CIN is echoed back.
        api = CompanyApi()
        company = api.companies_cin_get('1.1', 'U24239DL2002PTC114413')
        #print company.data.company
        self.assertEquals(company.data.company.cin, 'U24239DL2002PTC114413')

    def test_get_company_authorized_signatories(self):
        api = CompanyApi()
        signatories = api.companies_cin_authorized_signatories_get('1.1', 'U24239DL2002PTC114413')
        #print signatories.data.authorized_signatories
        self.assertFalse(len(signatories.data.authorized_signatories) == 0)

    def test_get_company_charges(self):
        api = CompanyApi()
        charges = api.companies_cin_charges_get('1.1', 'U24239DL2002PTC114413')
        #print charges.data.charges
        self.assertFalse(len(charges.data.charges) == 0)

    def test_get_company_financials(self):
        # No assertion here — only prints the payload.
        # NOTE(review): Python 2 print statement; this file is py2-only.
        api = CompanyApi()
        financials = api.companies_cin_financials_get('1.1', 'U24239DL2002PTC114413')
        print financials.data.financials
# Allow running this module directly: python tests.py
if __name__ == '__main__':
    unittest.main()
import pytest
from path import Path
import yaml
from uuid import uuid4
from formica import cli
from tests.unit.constants import (REGION, PROFILE, STACK,
CHANGE_SET_PARAMETERS, CHANGE_SET_STACK_TAGS,
FULL_CONFIG_FILE, CHANGE_SET_CAPABILITIES,
ROLE_ARN, VARS)
def test_loads_config_file(mocker, tmpdir, session):
    """The stacks command picks up every field from a single config file."""
    stacks_mock = mocker.patch('formica.cli.stacks')
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump(FULL_CONFIG_FILE))
        cli.main(['stacks', '-c', config_file])
    args = stacks_mock.call_args[0][0]
    assert args.region == REGION
    assert args.profile == PROFILE
    assert args.stack == STACK
    assert args.parameters == CHANGE_SET_PARAMETERS
    assert args.tags == CHANGE_SET_STACK_TAGS
    assert args.capabilities == CHANGE_SET_CAPABILITIES
    assert args.role_arn == ROLE_ARN
    assert args.vars == VARS
def test_loads_multiple_config_files(mocker, tmpdir, session):
    """Later config files override earlier ones field by field."""
    stacks_mock = mocker.patch('formica.cli.stacks')
    base_file = 'test.config.yaml'
    override_file = 'overwrite.config.yaml'
    with Path(tmpdir):
        with open(base_file, 'w') as handle:
            handle.write(yaml.dump(FULL_CONFIG_FILE))
        with open(override_file, 'w') as handle:
            handle.write(yaml.dump(dict(stack='someotherstacktestvalue', vars=dict(OtherVar=3))))
        cli.main(['stacks', '-c', base_file, override_file])
    args = stacks_mock.call_args[0][0]
    assert args.stack == 'someotherstacktestvalue'
    assert args.stack != STACK
    assert args.vars['OtherVar'] == 3
    assert args.vars['OtherVar'] != VARS['OtherVar']
def test_prioritises_cli_args(mocker, tmpdir, session):
    """-s on the command line beats the stack name from the config file."""
    new_mock = mocker.patch('formica.cli.new')
    cli_stack = str(uuid4())
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump(FULL_CONFIG_FILE))
        cli.main(['new', '-s', cli_stack, '-c', config_file])
    args = new_mock.call_args[0][0]
    assert args.stack == cli_stack
    assert args.stack != STACK
def test_merges_cli_args_on_load(mocker, tmpdir, session):
    """--parameters entries are merged over the config file's parameters."""
    new_mock = mocker.patch('formica.cli.new')
    param_a = str(uuid4())
    param_d = str(uuid4())
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump(FULL_CONFIG_FILE))
        cli.main(['new', '--parameters', 'A=' + param_a, 'D=' + param_d, '-c', config_file])
    args = new_mock.call_args[0][0]
    assert args.parameters == {'A': param_a, 'B': 2, 'C': True, 'D': param_d}
def test_merges_vars(mocker, tmpdir, session):
    """--vars entries override vars loaded from the config file."""
    template_mock = mocker.patch('formica.cli.template')
    var_value = str(uuid4())
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump(FULL_CONFIG_FILE))
        with open('test.template.yaml', 'w') as handle:
            handle.write('{"Description": "{{ OtherVar }}"}')
        cli.main(['template', '--vars', 'OtherVar=' + var_value, '-c', config_file])
    args = template_mock.call_args[0][0]
    assert args.vars['OtherVar'] == var_value
def test_exception_with_wrong_config_type(mocker, tmpdir, session, logger):
    """A scalar config field given a list value aborts with a type error."""
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump({'stack': ['test', 'test2']}))
        with pytest.raises(SystemExit):
            cli.main(['stacks', '-c', config_file])
    logger.error.assert_called_with('Config file parameter stack needs to be of type str')
def test_exception_with_forbiddeng_config_argument(mocker, tmpdir, session, logger):
    """An unknown config key aborts with an 'is not supported' message."""
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write(yaml.dump({'stacks': 'somestack'}))
        with pytest.raises(SystemExit):
            cli.main(['stacks', '-c', config_file])
    logger.error.assert_called_with('Config file parameter stacks is not supported')
def test_exception_with_failed_yaml_syntax(mocker, tmpdir, session, logger):
    """Malformed YAML in the config file aborts and logs an error."""
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write("stacks: somestack\nprofile testprofile")
        with pytest.raises(SystemExit):
            cli.main(['stacks', '-c', config_file])
    logger.error.assert_called()
def test_loads_empty_config_file(mocker, tmpdir, session):
    """An entirely empty config file is tolerated and the command still runs."""
    mocker.patch('formica.cli.stacks')
    config_file = 'test.config.yaml'
    with Path(tmpdir):
        with open(config_file, 'w') as handle:
            handle.write('')
        cli.main(['stacks', '-c', config_file])
| flomotlik/formica | tests/unit/test_config_file.py | Python | mit | 4,899 |
"""
Helper module for Python version 2.6 and below
- Ordered dictionaries
- Encoding/decoding urls
- Unicode
- Exception handling (except Exception, e)
"""
import base64
from urllib import unquote, quote
try:
from ordereddict import OrderedDict
except:
pass
def modulename():
    """Return a short human-readable description of this helper module."""
    return 'Helper module for Python version 2.6 and below'
def url_decode(uri):
    """Percent-decode a URI component."""
    return unquote(uri)
def url_encode(uri):
    """Percent-encode a URI component ('/' is left untouched)."""
    return quote(uri)
def new_dictionary():
    """Return an insertion-ordered mapping.

    Uses OrderedDict when the 'ordereddict' backport was importable at
    module load time, otherwise falls back to a plain dict.
    """
    try:
        return OrderedDict()
    except NameError:
        # Fix: the previous bare `except:` swallowed every exception; the
        # only expected failure is OrderedDict being undefined because the
        # optional top-of-file import failed.
        return {}
def dictionary_keys(dictionary):
    """Return the mapping's keys (a list on py2, a view on py3)."""
    return dictionary.keys()
def dictionary_values(dictionary):
    """Return the mapping's values (a list on py2, a view on py3)."""
    return dictionary.values()
def data_read(data):
    """Identity on Python 2: received data is already a byte string."""
    return data
def data_write(data):
    """Identity on Python 2: outgoing data is sent as a byte string."""
    return data
def base64_decode(data):
    """Decode standard-alphabet base64 data."""
    return base64.standard_b64decode(data)
def base64_encode(data):
    """Encode data with the standard base64 alphabet."""
    return base64.standard_b64encode(data)
def unicode_chr(code):
    # Python 2 only: unichr() builds the unicode character for *code*.
    return unichr(code)  # noqa: F821
def unicode_string(string):
    # Python 2 only: pass unicode through untouched, decode byte strings
    # as UTF-8 with replacement for undecodable bytes.
    if isinstance(string, unicode):  # noqa: F821
        return string
    return string.decode('utf8', 'replace')
def is_digit(string):
    # Check if basestring (str, unicode) is digit
    # (non-string values simply yield False rather than raising).
    return isinstance(string, basestring) and string.isdigit() # noqa: F821
def is_number(value):
    # Python 2 only: integers may be either int or long.
    return isinstance(value, (int, long)) # noqa: F821
| martomo/SublimeTextXdebug | xdebug/helper/helper_26.py | Python | mit | 1,428 |
#!/usr/bin/python
"""
Google AJAX Search Module
http://code.google.com/apis/ajaxsearch/documentation/reference.html
Needs Python 2.6 or later
"""
try:
import json
except ImportError as e:
import simplejson as json
except ImportError as e:
print(e)
exit()
import sys
import urllib.request, urllib.parse, urllib.error
import logging
import argparse
__author__ = "Kiran Bandla"
__version__ = "0.2"
URL = 'http://ajax.googleapis.com/ajax/services/search/web?'
#Web Search Specific Arguments
#http://code.google.com/apis/ajaxsearch/documentation/reference.html#_fonje_web
#SAFE,FILTER
"""
SAFE
This optional argument supplies the search safety level which may be one of:
* safe=active - enables the highest level of safe search filtering
* safe=moderate - enables moderate safe search filtering (default)
* safe=off - disables safe search filtering
"""
SAFE_ACTIVE = "active"
SAFE_MODERATE = "moderate"
SAFE_OFF = "off"
"""
FILTER
This optional argument controls turning on or off the duplicate content filter:
* filter=0 - Turns off the duplicate content filter
* filter=1 - Turns on the duplicate content filter (default)
"""
FILTER_OFF = 0
FILTER_ON = 1
#Standard URL Arguments
#http://code.google.com/apis/ajaxsearch/documentation/reference.html#_fonje_args
"""
RSZ
This optional argument supplies the number of results that the application would like to recieve.
A value of small indicates a small result set size or 4 results.
A value of large indicates a large result set or 8 results. If this argument is not supplied, a value of small is assumed.
"""
RSZ_SMALL = "small"
RSZ_LARGE = "large"
"""
HL
This optional argument supplies the host language of the application making the request.
If this argument is not present then the system will choose a value based on the value of the Accept-Language http header.
If this header is not present, a value of en is assumed.
"""
class pygoogle:
def __init__(self,query,pages=10,hl='en',log_level=logging.INFO):
self.pages = pages #Number of pages. default 10
self.query = query
self.filter = FILTER_ON #Controls turning on or off the duplicate content filter. On = 1.
self.rsz = RSZ_LARGE #Results per page. small = 4 /large = 8
self.safe = SAFE_OFF #SafeBrowsing - active/moderate/off
self.hl = hl #Defaults to English (en)
self.__setup_logging(level=log_level)
def __setup_logging(self, level):
logger = logging.getLogger('pygoogle')
logger.setLevel(level)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(module)s %(levelname)s %(funcName)s| %(message)s'))
logger.addHandler(handler)
self.logger = logger
    def __search__(self,print_results=False):
        '''
        Fetch up to self.pages pages of results from the AJAX search API.

        Returns the list of decoded per-page response dicts; pages that fail
        validation are skipped (and logged) rather than aborting the whole
        search.
        NOTE(review): despite the old docstring, this never returns False —
        a total failure simply yields an empty list.
        '''
        results = []
        for page in range(0,self.pages):
            # Page offset is expressed in results, so scale by page size.
            rsz = 8
            if self.rsz == RSZ_SMALL:
                rsz = 4
            args = {'q' : self.query,
                    'v' : '1.0',
                    'start' : page*rsz,
                    'rsz': self.rsz,
                    'safe' : self.safe,
                    'filter' : self.filter,
                    'hl' : self.hl
                    }
            self.logger.debug('search: "%s" page# : %s'%(self.query, page))
            q = urllib.parse.urlencode(args)
            search_results = urllib.request.urlopen(URL+q)
            data = json.loads(search_results.read().decode("utf-8"))
            # Validate the envelope before trusting the payload.
            if 'responseStatus' not in data:
                self.logger.error('response does not have a responseStatus key')
                continue
            if data.get('responseStatus') != 200:
                self.logger.debug('responseStatus is not 200')
                self.logger.error('responseDetails : %s'%(data.get('responseDetails', None)))
                continue
            if print_results:
                if 'responseData' in data and 'results' in data['responseData']:
                    for result in data['responseData']['results']:
                        if result:
                            # NOTE(review): strip("<b>...</b>") strips a *character set*,
                            # not the literal token — presumably intentional; confirm.
                            print('[%s]'%(urllib.parse.unquote(result['titleNoFormatting'])))
                            print(result['content'].strip("<b>...</b>").replace("<b>",'').replace("</b>",'').replace("&#39;","'").strip())
                            print(urllib.parse.unquote(result['unescapedUrl'])+'\n')
                else:
                    # no responseData key was found in 'data'
                    self.logger.error('no responseData key found in response. very unusal')
            results.append(data)
        return results
def search(self):
"""Returns a dict of Title/URLs"""
results = {}
search_results = self.__search__()
if not search_results:
self.logger.info('No results returned')
return results
for data in search_results:
if 'responseData' in data and 'results' in data['responseData']:
for result in data['responseData']['results']:
if result and 'titleNoFormatting' in result:
title = urllib.parse.unquote(result['titleNoFormatting'])
results[title] = urllib.parse.unquote(result['unescapedUrl'])
else:
self.logger.error('no responseData key found in response')
self.logger.error(data)
return results
    def search_page_wise(self):
        """Returns a dict of page-wise urls, keyed by page index."""
        results = {}
        for page in range(0,self.pages):
            # NOTE(review): `start` is the bare page index here, unlike
            # __search__ which scales by page size — consecutive "pages"
            # therefore overlap heavily; confirm this is intended.
            args = {'q' : self.query,
                    'v' : '1.0',
                    'start' : page,
                    'rsz': RSZ_LARGE,
                    'safe' : SAFE_OFF,
                    'filter' : FILTER_ON,
                    }
            q = urllib.parse.urlencode(args)
            search_results = urllib.request.urlopen(URL+q)
            data = json.loads(search_results.read())
            urls = []
            if 'responseData' in data and 'results' in data['responseData']:
                for result in data['responseData']['results']:
                    if result and 'unescapedUrl' in result:
                        url = urllib.parse.unquote(result['unescapedUrl'])
                        urls.append(url)
            else:
                self.logger.error('no responseData key found in response')
            # A failed page still gets an (empty) entry.
            results[page] = urls
        return results
def get_urls(self):
"""Returns list of result URLs"""
results = []
search_results = self.__search__()
if not search_results:
self.logger.info('No results returned')
return results
for data in search_results:
if data and 'responseData' in data and data['responseData']['results']:
for result in data['responseData']['results']:
if result:
results.append(urllib.parse.unquote(result['unescapedUrl']))
return results
def get_result_count(self):
"""Returns the number of results"""
temp = self.pages
self.pages = 1
result_count = 0
search_results = self.__search__()
if not search_results:
return 0
try:
result_count = search_results[0]
if not isinstance(result_count, dict):
return 0
result_count = result_count.get('responseData', None)
if result_count:
if 'cursor' in result_count and 'estimatedResultCount' in result_count['cursor']:
return result_count['cursor']['estimatedResultCount']
return 0
except Exception as e:
self.logger.error(e)
finally:
self.pages = temp
return result_count
def display_results(self):
"""Prints results (for command line)"""
self.__search__(True)
def main():
    """CLI entry point: parse arguments and print search results."""
    parser = argparse.ArgumentParser(description='A simple Google search module for Python')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Verbose mode')
    # Fix: without type=int, a command-line -p value arrives as a string and
    # breaks range(0, self.pages) inside pygoogle.__search__.
    parser.add_argument('-p', '--pages', dest='pages', action='store', type=int, default=1, help='Number of pages to return. Max 10')
    parser.add_argument('-hl', '--language', dest='language', action='store', default='en', help="language. default is 'en'")
    parser.add_argument('query', nargs='*', default=None)
    args = parser.parse_args()
    query = ' '.join(args.query)
    log_level = logging.DEBUG if args.verbose else logging.INFO
    if not query:
        parser.print_help()
        exit()
    search = pygoogle(log_level=log_level, query=query, pages=args.pages, hl=args.language)
    search.display_results()
| jmdana/py3google | pygoogle.py | Python | mit | 9,074 |
import PyQt5 # this force pyqtgraph to deal with Qt5
# For matplotlib to Qt5 :
# * this avoid tinker problem when not installed
# * work better with GUI
# * trigger a warning on notebook
import matplotlib
import warnings
with warnings.catch_warnings():
    try:
        # Force the Qt5 backend (suppressing the notebook warning this can
        # trigger) so matplotlib cooperates with the PyQt5-based GUI.
        warnings.simplefilter("ignore")
        matplotlib.use('Qt5Agg')
    except:
        # on server without screen this is not possible;
        # keep whatever backend matplotlib picked by default.
        pass
from .myqt import QT,mkQApp
#for catalogue window
from .cataloguecontroller import CatalogueController
from .traceviewer import CatalogueTraceViewer
from .peaklists import PeakList, ClusterPeakList
from .ndscatter import NDScatter
from .waveformviewer import WaveformViewer
from .similarity import SpikeSimilarityView, ClusterSimilarityView, ClusterRatioSimilarityView
from .pairlist import PairList
from .silhouette import Silhouette
from .waveformhistviewer import WaveformHistViewer
from .featuretimeviewer import FeatureTimeViewer
from .cataloguewindow import CatalogueWindow
#for peeler window
from .peelercontroller import PeelerController
from .traceviewer import PeelerTraceViewer
from .spikelists import SpikeList, ClusterSpikeList
from .waveformviewer import PeelerWaveformViewer
from .isiviewer import ISIViewer
from .crosscorrelogramviewer import CrossCorrelogramViewer
from .peelerwindow import PeelerWindow
#main window
from .mainwindow import MainWindow
from .initializedatasetwindow import InitializeDatasetWindow, ChannelGroupWidget
from .probegeometryview import ProbeGeometryView
from .gpuselector import GpuSelector | tridesclous/tridesclous | tridesclous/gui/__init__.py | Python | mit | 2,001 |
from pysimplesoap.client import SoapClient
def inspect(wsdl_url):
    """Print a summary of every operation exposed by a WSDL service.

    For each port we print its location, then each operation's name, docs,
    SOAPAction and its input/output type declarations.
    """
    client = SoapClient(wsdl=wsdl_url, trace=False)
    print("Target Namespace", client.namespace)
    for service in list(client.services.values()):
        for port in list(service['ports'].values()):
            print(port['location'])
            for operation in list(port['operations'].values()):
                print('Name:', operation['name'])
                print('Docs:', operation['documentation'].strip())
                print('SOAPAction:', operation['action'])
                print('Input', operation['input'])    # args type declaration
                print('Output', operation['output'])  # returns type declaration
                print('\n')
from PyQt4.QtCore import *
from PyQt4 import QtGui
class RadioManagement ():
def __init__ (self, radioCol, gridLayout):
self.radioGroup = QtGui.QButtonGroup() # To store radio buttons.
self.radioCol = radioCol
self.gridLayout = gridLayout
def initDelAndMovButtons (self, vLayout):
# Add hbox for edit buttons.
self.editHBox = QtGui.QHBoxLayout()
self.editHBox.addStretch (1)
vLayout.addLayout (self.editHBox)
button = QtGui.QPushButton ("&Delete", self)
self.editHBox.addWidget (button)
self.connect (button, SIGNAL ("clicked()"), self.deleteButtonClicked)
button = QtGui.QPushButton ("Move &Up", self)
self.editHBox.addWidget (button)
self.connect (button, SIGNAL ("clicked()"), self.moveUpButtonClicked)
button = QtGui.QPushButton ("Move Do&wn", self)
self.editHBox.addWidget (button)
self.connect (button, SIGNAL ("clicked()"), self.moveDownButtonClicked)
def buildRow (self, gridLayout, row = 0):
# Create the radio button for editing of the app.
radio = QtGui.QRadioButton (str (row), self)
gridLayout.addWidget (radio, row, self.radioCol)
self.radioGroup.addButton(radio, row)
def setCheckedRadioButtonByRow (self, row):
for radio in self.radioGroup.buttons():
if radio.text() == str (row):
radio.setChecked (True)
return
# Radio button not found.
raise RuntimeError ("Could not find radio at row: " + str (row))
def clearGridLayout (self):
# This function removes all of the automatically generated
# buttons and such from the layout.
# Remove the radio buttons fromt "radioGroup".
for buttonEntry in self.radioGroup.buttons():
self.radioGroup.removeButton (buttonEntry)
for i in range (self.gridLayout.rowCount()):
for j in range (self.gridLayout.columnCount()):
widgetItem = self.gridLayout.itemAtPosition (i, j)
self.deleteWidgetItem (widgetItem)
def getWidgetAtCheckedRow (self, column):
checkedButtonId = self.radioGroup.checkedId()
if checkedButtonId == -1:
# No radio buttons are checked.
return None
# widged at "checkedButtonId"'s row.
return self.gridLayout.itemAtPosition (checkedButtonId, column).widget()
    def deleteRow (self, rowNum):
        # Deletes row `rowNum`: the radio buttons stay in place while every
        # other cell is removed, the remaining rows are shifted up by one,
        # and finally the now-surplus last row (including its radio) goes.
        rowCount = self.getRowCount()
        # What we'll actually do, is leave the radio buttons in tact, but
        # delete all the other widgets in the row. Then we'll move everything
        # else up, and finally delete the last radio button.
        for i in range (self.gridLayout.columnCount()):
            # Skip the radio button's column.
            if i == self.radioCol:
                continue
            widgetItem = self.gridLayout.itemAtPosition (rowNum, i)
            # Delete the widget.
            self.deleteWidgetItem (widgetItem)
        # Next, move everything up row by row. +1 is to offset from the row we just deleted.
        # NOTE(review): the range runs to rowCount + 1 inclusive of the last
        # row — presumably getRowCount() is a highest-index count; confirm.
        for i in range (rowNum + 1, rowCount + 1):
            for j in range (self.gridLayout.columnCount()):
                if j == self.radioCol:
                    continue
                widgetItem = self.gridLayout.itemAtPosition (i, j)
                self.addWidgetItem (widgetItem, i - 1, j)
        # We'll also need to remove the radio button's reference from "radioGroup".
        lastRadioButon = self.gridLayout.itemAtPosition (rowCount, self.radioCol)
        self.radioGroup.removeButton (lastRadioButon.widget())
        # Finally, delete the last row.
        for i in range (self.gridLayout.columnCount()):
            widgetItem = self.gridLayout.itemAtPosition (rowCount, i)
            # Delete the widget.
            self.deleteWidgetItem (widgetItem)
        self.gridLayout.invalidate()
def swapRows (self, row1, row2):
    """Exchange the contents of grid rows *row1* and *row2*.

    Radio buttons are not moved (only the checked state follows the
    swap) so that button-group ids keep matching row indices; every
    other widget in the two rows is physically relocated.
    """
    # Fixes over the original: the unused locals (row1Widgets, a, b)
    # are gone, and a cell that is empty in *either* row is skipped --
    # the original only guarded row1 and would raise AttributeError on
    # a sparse row2.
    for col in range (self.gridLayout.columnCount()):
        item1 = self.gridLayout.itemAtPosition (row1, col)
        item2 = self.gridLayout.itemAtPosition (row2, col)
        if item1 is None or item2 is None:
            continue
        if col == self.radioCol:
            # We don't want to move the radio buttons, but we do want
            # to change which one is checked, if applicable.
            if item1.widget().isChecked():
                item2.widget().setChecked (True)
            elif item2.widget().isChecked():
                item1.widget().setChecked (True)
            continue
        self.addWidgetItem (item2, row1, col)
        self.addWidgetItem (item1, row2, col)
def deleteWidgetItem (self, widgetItem):
    """Recursively delete the widget or layout wrapped by *widgetItem*.

    Widgets are detached by reparenting them to None (Qt then deletes
    them); nested layouts are emptied recursively before being
    detached themselves. A None item is ignored.
    """
    # If the item is actually empty.
    if widgetItem == None:
        return
    # Check if it is a widget (and not a layout).
    widget = widgetItem.widget()
    if widget != None:
        # Delete the widget item.
        widgetItem.widget().setParent (None)
        return
    # No? then it must be a layout.
    layout = widgetItem.layout()
    if type (layout) != QtGui.QGridLayout:
        # NOTE(review): len(layout) assumes the layout supports len();
        # QLayout normally exposes count() instead -- confirm this
        # non-grid branch is ever reached.
        for i in range (len (layout)):
            self.deleteWidgetItem (layout.itemAt (i))
    else:
        # We'll just be assuming a grid layout here, since there
        # isn't any convenient method to do otherwise.
        for i in range (layout.rowCount()):
            for j in range (layout.columnCount()):
                self.deleteWidgetItem (layout.itemAtPosition (i, j))
    # Finally, delete the empty layout.
    layout.setParent (None)
def addWidgetItem (self, widgetItem, posX, posY):
    """Insert the contents of *widgetItem* into the grid at (posX, posY).

    Handles both plain widgets and nested layouts; does nothing for an
    empty item.
    """
    if widgetItem is None:
        return
    target = widgetItem.widget()
    if target is not None:
        # Plain widget: hand it straight to the grid.
        self.gridLayout.addWidget (target, posX, posY)
        return
    # Otherwise the item wraps a layout.
    nested = widgetItem.layout()
    # Bug in PyQt. Causes segfault if I do not do this.
    # I guess I /could/ go and fix it in the source...
    nested.setParent (None)
    self.gridLayout.addLayout (nested, posX, posY)
def getRowCount (self):
    """Return the number of data rows in the grid.

    gridLayout.rowCount() never shrinks after rows are removed, so the
    radio-button group (exactly one button per row) is the reliable
    source of truth.
    """
    return len (self.radioGroup.buttons())
def getRowRange (self):
    """Return the range of valid row indices: 1 through the last row, inclusive."""
    rows = len (self.radioGroup.buttons())
    return range (1, rows + 1)
def deleteButtonClicked (self):
    # NOTE(review): dead code -- this empty stub is shadowed by the
    # second deleteButtonClicked definition later in the class, which
    # actually deletes the selected row. Safe to remove.
    pass
def moveUpButtonClicked (self):
    """Swap the selected row with the one directly above it."""
    selected = self.radioGroup.checkedId()
    if selected == -1:
        print("No item selected!")
        return
    # Row indices start at 1, so anything at or below 1 cannot move up.
    above = selected - 1
    if above <= 0:
        print("Row already at highest position!")
        return
    self.swapRows (selected, above)
def moveDownButtonClicked (self):
    """Swap the selected row with the one directly below it."""
    selected = self.radioGroup.checkedId()
    if selected == -1:
        print("No item selected!")
        return
    # One radio button exists per row, so the button count is the
    # highest valid row index.
    below = selected + 1
    if below > len (self.radioGroup.buttons()):
        print("Row already at lowest position!")
        return
    self.swapRows (selected, below)
def deleteButtonClicked (self):
    """Delete the row whose radio button is currently checked."""
    selected = self.radioGroup.checkedId()
    if selected == -1:
        print("No item selected!")
        return
    self.deleteRow (selected)
| electronics45/pyapplaunch | pyapplaunch/RadioManagement.py | Python | mit | 6,893 |
# coding=utf8
from types import MethodType
def implant_method(obj, func, func_name):
    """Bind *func* to *obj* as an instance method named *func_name*.

    The method is attached to this single instance only; other
    instances of the same class are unaffected. *func* must accept
    the instance as its first argument (i.e. ``self``).
    """
    # Binding directly to the instance works on both Python 2 and 3;
    # the original's three-argument MethodType(func, obj, cls) form is
    # Python-2-only and raises TypeError on Python 3.
    bound = MethodType(func, obj)
    setattr(obj, func_name, bound)
if __name__ == "__main__":
    # Import-only helper module; nothing to run from the command line.
    pass
| cineuse/CNCGToolKit | cgtkLibs/cgtk_py/implant_method.py | Python | mit | 238 |
import sys
import os.path
import lzma
import time
import os
import subprocess
# Upload Breakpad symbols for tagged AppVeyor builds to an S3 symbol store.
# Abort early unless this build was triggered by a git tag.
if "APPVEYOR_REPO_TAG" not in os.environ or os.environ["APPVEYOR_REPO_TAG"] != "true":
    print("Not uploading symbols since build is not tagged")
    sys.exit()

# Path to the Breakpad symbol file is the sole command line argument.
symbol_filename = sys.argv[1]

with open(symbol_filename, "rt") as fh:
    first_line = next(fh).strip()
    tokens = first_line.split()
    # A Breakpad symbol file starts with:
    #   MODULE <os> <arch> <debug-id> <debug-file>
    expected_tokens = ['MODULE', 'windows', 'x86']
    if tokens[0:3] != expected_tokens:
        raise RuntimeError("Expected first tokens to be " + str(expected_tokens) + ", but was: " + str(tokens[0:3]))
    file_hash = tokens[3]
    file_name = tokens[4]

basename = os.path.basename(symbol_filename)
# Symbol-server layout: <debug-file>/<debug-id>/<file>.xz
target_path = "%s/%s/%s.xz" % (file_name, file_hash, basename)

# Compress symbols with LZMA to save bandwidth
print("Compressing symbol file...")
t_start = time.perf_counter()
with open(symbol_filename, "rb") as fh:
    symbol_data = fh.read()
symbol_data_len = len(symbol_data)
compressed_symbols = lzma.compress(symbol_data)
compression_ratio = len(compressed_symbols) * 100 / symbol_data_len
print("Compressed symbol data (ratio %d%%) in %fs" % (compression_ratio, time.perf_counter() - t_start))

print("Uploading symbols to ", target_path)
with open("TemplePlus.sym.xz", "wb") as fh:
    fh.write(compressed_symbols)
# NOTE(review): shell=True with a list argument is Windows-specific --
# presumably needed so the aws.cmd wrapper resolves on AppVeyor; confirm
# before changing to shell=False.
subprocess.run(["aws", "s3", "cp", "TemplePlus.sym.xz", "s3://templeplus-symbols/" + target_path], check=True, shell=True)
print("Uploaded symbols to S3.")
| GrognardsFromHell/TemplePlus | deploy_symbols_s3.py | Python | mit | 1,455 |
# Unit3 - Lessons 3 and 4 folder
| flygeneticist/Thinkful-Data-Science-Examples | Unit2/Lesson3-4/__init__.py | Python | mit | 33 |
import os
import pickle
from parsers_backend import get_tree
from directories import dirs
def get_cached_sentence_image(argv, output_path, img_path):
    """Check whether a sentence image was already rendered.

    Args:
        argv: The command line arguments.
        output_path: The path for the output folder.
        img_path: The path to the image file to be checked.

    Returns:
        True when the image already exists on disk and regeneration was
        not forced via the tetre_force_clean flag, False otherwise.
    """
    if argv.tetre_force_clean:
        # A forced clean run always regenerates, so report a cache miss.
        return False
    return os.path.isfile(output_path + img_path)
def get_cached_tokens(argv):
    """Return parsed sentences containing the searched word, via a disk cache.

    The cache key combines the (lower-cased) search word with the raw
    input folder's modification time, so editing the corpus invalidates
    the cache automatically.

    Args:
        argv: The command line arguments.

    Returns:
        A list of tree.FullSentence objects, the sentences parsed from
        the raw text.
    """
    raw_input_mtime = os.path.getmtime(dirs['raw_input']['path'])
    cache_key = argv.tetre_word.lower() + str(int(raw_input_mtime))
    cache_file = dirs['output_cache']['path'] + cache_key + ".spacy"

    use_cache = os.path.isfile(cache_file) and not argv.tetre_force_clean
    if use_cache:
        with open(cache_file, 'rb') as handle:
            sentences = pickle.load(handle)
    else:
        # Cache miss (or forced clean): parse again and persist the result.
        sentences = get_tree(argv)
        with open(cache_file, "wb") as handle:
            pickle.dump(sentences, handle, protocol=pickle.HIGHEST_PROTOCOL)
    return sentences
| aoldoni/tetre | lib/parsers_cache.py | Python | mit | 1,577 |
import os, sys, argparse
from os import listdir
from os.path import isfile, join
from os import walk
from dd_client import DD
from annoy import AnnoyIndex
import shelve
import cv2
# CLI: --index builds the similarity index from an image folder,
# --search queries it with a single image.
parser = argparse.ArgumentParser()
parser.add_argument("--index",help="repository of images to be indexed")
parser.add_argument("--index-batch-size",type=int,help="size of image batch when indexing",default=1)
parser.add_argument("--search",help="image input file for similarity search")
parser.add_argument("--search-size",help="number of nearest neighbors",type=int,default=10)
args = parser.parse_args()
def batch(iterable, n=1):
    """Yield successive slices of *iterable*, each at most *n* items long.

    The final slice may be shorter when len(iterable) is not a multiple
    of *n*. Requires a sequence (len() and slicing), not a generator.
    """
    total = len(iterable)
    for start in range(0, total, n):
        yield iterable[start:start + n]
def image_resize(imgfile, width):
    """Load *imgfile* and scale it to *width* pixels wide, keeping the aspect ratio.

    NOTE: callers pass *width* as a float (e.g. 224.0); with an int the
    Python 2 division below would truncate the scale factor.
    """
    img = cv2.imread(imgfile)
    scale = width / img.shape[1]
    new_size = (int(width), int(img.shape[0] * scale))
    return cv2.resize(img, new_size)
# --- DeepDetect service setup ---------------------------------------------
# Creates an unsupervised GoogLeNet service whose chosen layer output is
# used as an image embedding for similarity search (Annoy index).
host = 'localhost'
sname = 'imgserv'
description = 'image classification'
mllib = 'caffe'
mltype = 'unsupervised'
extract_layer = 'loss3/classifier'
#extract_layer = 'pool5/7x7_s1'
nclasses = 1000
layer_size = 1000 # default output code size
width = height = 224
binarized = False
dd = DD(host)
dd.set_return_format(dd.RETURN_PYTHON)
ntrees = 100
metric = 'angular' # or 'euclidean'
# creating ML service
model_repo = os.getcwd() + '/model'
model = {'repository':model_repo,'templates':'../templates/caffe/'}
parameters_input = {'connector':'image','width':width,'height':height}
parameters_mllib = {'nclasses':nclasses,'template':'googlenet'}
parameters_output = {}
dd.put_service(sname,model,description,mllib,
               parameters_input,parameters_mllib,parameters_output,mltype)
# reset call params
parameters_input = {}
parameters_mllib = {'gpu':True,'extract_layer':extract_layer}
parameters_output = {'binarized':binarized}
# --- Indexing mode ---------------------------------------------------------
# Embed every image below args.index, store uri names in a shelve DB and
# the vectors in an Annoy index.
if args.index:
    try:
        os.remove('names.bin')
    # NOTE(review): bare except silently ignores any failure here, not
    # just a missing file.
    except:
        pass
    s = shelve.open('names.bin')
    # list files in image repository
    c = 0
    onlyfiles = []
    for (dirpath, dirnames, filenames) in walk(args.index):
        nfilenames = []
        for f in filenames:
            nfilenames.append(dirpath + '/' + f)
        onlyfiles.extend(nfilenames)
    for x in batch(onlyfiles,args.index_batch_size):
        sys.stdout.write('\r'+str(c)+'/'+str(len(onlyfiles)))
        sys.stdout.flush()
        classif = dd.post_predict(sname,x,parameters_input,parameters_mllib,parameters_output)
        for p in classif['body']['predictions']:
            # The Annoy index is created lazily on the very first
            # prediction, once the real embedding size is known.
            if c == 0:
                layer_size = len(p['vals'])
                s['layer_size'] = layer_size
                t = AnnoyIndex(layer_size,metric) # prepare index
            t.add_item(c,p['vals'])
            s[str(c)] = p['uri']
            c = c + 1
        #if c >= 10000:
        #    break
    print 'building index...\n'
    print 'layer_size=',layer_size
    t.build(ntrees)
    t.save('index.ann')
    s.close()
# --- Search mode -----------------------------------------------------------
# Embed the query image and display its nearest neighbours.
if args.search:
    s = shelve.open('names.bin')
    u = AnnoyIndex(s['layer_size'],metric)
    u.load('index.ann')
    data = [args.search]
    classif = dd.post_predict(sname,data,parameters_input,parameters_mllib,parameters_output)
    near = u.get_nns_by_vector(classif['body']['predictions'][0]['vals'],args.search_size,include_distances=True)
    print near
    near_names = []
    for n in near[0]:
        near_names.append(s[str(n)])
    print near_names
    cv2.imshow('query',image_resize(args.search,224.0))
    cv2.waitKey(0)
    for n in near_names:
        cv2.imshow('res',image_resize(n,224.0))
        cv2.waitKey(0)
# Tear the DeepDetect service down again (model repository is kept).
dd.delete_service(sname,clear='')
| roscopecoltran/scraper | shared/docker/templates/deepdetect/demo/imgsearch/imgsearch.py | Python | mit | 3,645 |
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Used to pull the long_description from README.md so the project
    description is maintained in a single place.
    """
    # A with-block guarantees the file handle is closed; the original
    # leaked it to the garbage collector.
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
# Packaging metadata; `read` inlines README.md as the long description
# so it is maintained in a single place.
setup(
    name = "formalarg",
    version = "0.0.1",
    author = "Emmanuel Hadoux",
    author_email = "emmanuel.hadoux@gmail.com",
    description = ("Library for formal argumentation systems."),
    license = "MIT",
    keywords = "argumentation library artificial intelligence",
    url = "http://packages.python.org/formalarg",
    packages=['formalarg', 'tests'],
    install_requires=["graphviz"],
    long_description=read('README.md'),
    # Trove classifiers -- see https://pypi.org/classifiers/
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Topic :: Scientific/Engineering :: Artificial Intelligence"
    ],
) | Amande-WP5/formalarg | setup.py | Python | mit | 1,036 |