Dataset schema (ranges are min/max; ⌀ marks a nullable column):

| column | dtype | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
---
**hexsha:** `1b96ffd78834b48c5e69e7914a7ad5e0db45e6ea` · **size:** 50,266 · **ext:** py · **lang:** Python
**repo:** `abarto/cryptography` @ `5526dca15bded0687e507a065b141694ee5f1530` · **path:** `src/cryptography/x509/extensions.py` · **licenses:** PSF-2.0, Apache-2.0, BSD-3-Clause (identical across the stars/issues/forks column groups)
**stars:** 2 (2021-01-30T13:23:54.000Z – 2021-06-07T21:35:19.000Z) · **issues:** null · **forks:** null

**content:**
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import datetime
import hashlib
import ipaddress
from enum import Enum
import six
from cryptography import utils
from cryptography.hazmat._der import (
BIT_STRING, DERReader, OBJECT_IDENTIFIER, SEQUENCE
)
from cryptography.hazmat.primitives import constant_time, serialization
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from cryptography.x509.certificate_transparency import (
SignedCertificateTimestamp
)
from cryptography.x509.general_name import GeneralName, IPAddress, OtherName
from cryptography.x509.name import RelativeDistinguishedName
from cryptography.x509.oid import (
CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID, ObjectIdentifier,
)
def _key_identifier_from_public_key(public_key):
if isinstance(public_key, RSAPublicKey):
data = public_key.public_bytes(
serialization.Encoding.DER,
serialization.PublicFormat.PKCS1,
)
elif isinstance(public_key, EllipticCurvePublicKey):
data = public_key.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint
)
else:
# This is a very slow way to do this.
serialized = public_key.public_bytes(
serialization.Encoding.DER,
serialization.PublicFormat.SubjectPublicKeyInfo
)
reader = DERReader(serialized)
with reader.read_single_element(SEQUENCE) as public_key_info:
algorithm = public_key_info.read_element(SEQUENCE)
public_key = public_key_info.read_element(BIT_STRING)
# Double-check the algorithm structure.
with algorithm:
algorithm.read_element(OBJECT_IDENTIFIER)
if not algorithm.is_empty():
# Skip the optional parameters field.
algorithm.read_any_element()
# BIT STRING contents begin with the number of padding bytes added. It
# must be zero for SubjectPublicKeyInfo structures.
if public_key.read_byte() != 0:
raise ValueError('Invalid public key encoding')
data = public_key.data
return hashlib.sha1(data).digest()
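# Illustrative usage sketch (not part of the upstream module): the helper
# above implements the RFC 5280 section 4.2.1.2 method (1) key identifier,
# i.e. the SHA-1 hash of the subjectPublicKey BIT STRING. Assuming a freshly
# generated RSA key:
#
#     from cryptography.hazmat.primitives.asymmetric import rsa
#     key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
#     kid = _key_identifier_from_public_key(key.public_key())
#     assert len(kid) == 20  # SHA-1 digests are 20 bytes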
def _make_sequence_methods(field_name):
def len_method(self):
return len(getattr(self, field_name))
def iter_method(self):
return iter(getattr(self, field_name))
def getitem_method(self, idx):
return getattr(self, field_name)[idx]
return len_method, iter_method, getitem_method
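# Hypothetical illustration (not in the upstream module) of the pattern this
# factory enables: delegating the sequence protocol to a private list
# attribute in one line.
#
#     class _Wrapper(object):
#         def __init__(self, items):
#             self._items = list(items)
#         __len__, __iter__, __getitem__ = _make_sequence_methods("_items")
#
#     len(_Wrapper(["a", "b"]))  # -> 2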
class DuplicateExtension(Exception):
def __init__(self, msg, oid):
super(DuplicateExtension, self).__init__(msg)
self.oid = oid
class ExtensionNotFound(Exception):
def __init__(self, msg, oid):
super(ExtensionNotFound, self).__init__(msg)
self.oid = oid
@six.add_metaclass(abc.ABCMeta)
class ExtensionType(object):
@abc.abstractproperty
def oid(self):
"""
Returns the oid associated with the given extension type.
"""
class Extensions(object):
def __init__(self, extensions):
self._extensions = extensions
def get_extension_for_oid(self, oid):
for ext in self:
if ext.oid == oid:
return ext
raise ExtensionNotFound("No {} extension was found".format(oid), oid)
def get_extension_for_class(self, extclass):
if extclass is UnrecognizedExtension:
raise TypeError(
"UnrecognizedExtension can't be used with "
"get_extension_for_class because more than one instance of the"
" class may be present."
)
for ext in self:
if isinstance(ext.value, extclass):
return ext
raise ExtensionNotFound(
"No {} extension was found".format(extclass), extclass.oid
)
__len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
def __repr__(self):
return (
"<Extensions({})>".format(self._extensions)
)
@utils.register_interface(ExtensionType)
class CRLNumber(object):
oid = ExtensionOID.CRL_NUMBER
def __init__(self, crl_number):
if not isinstance(crl_number, six.integer_types):
raise TypeError("crl_number must be an integer")
self._crl_number = crl_number
def __eq__(self, other):
if not isinstance(other, CRLNumber):
return NotImplemented
return self.crl_number == other.crl_number
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.crl_number)
def __repr__(self):
return "<CRLNumber({})>".format(self.crl_number)
crl_number = utils.read_only_property("_crl_number")
@utils.register_interface(ExtensionType)
class AuthorityKeyIdentifier(object):
oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
def __init__(self, key_identifier, authority_cert_issuer,
authority_cert_serial_number):
if (authority_cert_issuer is None) != (
authority_cert_serial_number is None
):
raise ValueError(
"authority_cert_issuer and authority_cert_serial_number "
"must both be present or both None"
)
if authority_cert_issuer is not None:
authority_cert_issuer = list(authority_cert_issuer)
if not all(
isinstance(x, GeneralName) for x in authority_cert_issuer
):
raise TypeError(
"authority_cert_issuer must be a list of GeneralName "
"objects"
)
if authority_cert_serial_number is not None and not isinstance(
authority_cert_serial_number, six.integer_types
):
raise TypeError(
"authority_cert_serial_number must be an integer"
)
self._key_identifier = key_identifier
self._authority_cert_issuer = authority_cert_issuer
self._authority_cert_serial_number = authority_cert_serial_number
@classmethod
def from_issuer_public_key(cls, public_key):
digest = _key_identifier_from_public_key(public_key)
return cls(
key_identifier=digest,
authority_cert_issuer=None,
authority_cert_serial_number=None
)
@classmethod
def from_issuer_subject_key_identifier(cls, ski):
return cls(
key_identifier=ski.digest,
authority_cert_issuer=None,
authority_cert_serial_number=None
)
def __repr__(self):
return (
"<AuthorityKeyIdentifier(key_identifier={0.key_identifier!r}, "
"authority_cert_issuer={0.authority_cert_issuer}, "
"authority_cert_serial_number={0.authority_cert_serial_number}"
")>".format(self)
)
def __eq__(self, other):
if not isinstance(other, AuthorityKeyIdentifier):
return NotImplemented
return (
self.key_identifier == other.key_identifier and
self.authority_cert_issuer == other.authority_cert_issuer and
self.authority_cert_serial_number ==
other.authority_cert_serial_number
)
def __ne__(self, other):
return not self == other
def __hash__(self):
if self.authority_cert_issuer is None:
aci = None
else:
aci = tuple(self.authority_cert_issuer)
return hash((
self.key_identifier, aci, self.authority_cert_serial_number
))
key_identifier = utils.read_only_property("_key_identifier")
authority_cert_issuer = utils.read_only_property("_authority_cert_issuer")
authority_cert_serial_number = utils.read_only_property(
"_authority_cert_serial_number"
)
@utils.register_interface(ExtensionType)
class SubjectKeyIdentifier(object):
oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
def __init__(self, digest):
self._digest = digest
@classmethod
def from_public_key(cls, public_key):
return cls(_key_identifier_from_public_key(public_key))
digest = utils.read_only_property("_digest")
def __repr__(self):
return "<SubjectKeyIdentifier(digest={0!r})>".format(self.digest)
def __eq__(self, other):
if not isinstance(other, SubjectKeyIdentifier):
return NotImplemented
return constant_time.bytes_eq(self.digest, other.digest)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.digest)
@utils.register_interface(ExtensionType)
class AuthorityInformationAccess(object):
oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
def __init__(self, descriptions):
descriptions = list(descriptions)
if not all(isinstance(x, AccessDescription) for x in descriptions):
raise TypeError(
"Every item in the descriptions list must be an "
"AccessDescription"
)
self._descriptions = descriptions
__len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
def __repr__(self):
return "<AuthorityInformationAccess({})>".format(self._descriptions)
def __eq__(self, other):
if not isinstance(other, AuthorityInformationAccess):
return NotImplemented
return self._descriptions == other._descriptions
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._descriptions))
class AccessDescription(object):
def __init__(self, access_method, access_location):
if not isinstance(access_method, ObjectIdentifier):
raise TypeError("access_method must be an ObjectIdentifier")
if not isinstance(access_location, GeneralName):
raise TypeError("access_location must be a GeneralName")
self._access_method = access_method
self._access_location = access_location
def __repr__(self):
return (
"<AccessDescription(access_method={0.access_method}, access_locati"
"on={0.access_location})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, AccessDescription):
return NotImplemented
return (
self.access_method == other.access_method and
self.access_location == other.access_location
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.access_method, self.access_location))
access_method = utils.read_only_property("_access_method")
access_location = utils.read_only_property("_access_location")
@utils.register_interface(ExtensionType)
class BasicConstraints(object):
oid = ExtensionOID.BASIC_CONSTRAINTS
def __init__(self, ca, path_length):
if not isinstance(ca, bool):
raise TypeError("ca must be a boolean value")
if path_length is not None and not ca:
raise ValueError("path_length must be None when ca is False")
if (
path_length is not None and
(not isinstance(path_length, six.integer_types) or path_length < 0)
):
raise TypeError(
"path_length must be a non-negative integer or None"
)
self._ca = ca
self._path_length = path_length
ca = utils.read_only_property("_ca")
path_length = utils.read_only_property("_path_length")
def __repr__(self):
return ("<BasicConstraints(ca={0.ca}, "
"path_length={0.path_length})>").format(self)
def __eq__(self, other):
if not isinstance(other, BasicConstraints):
return NotImplemented
return self.ca == other.ca and self.path_length == other.path_length
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.ca, self.path_length))
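# Usage sketch (illustrative, not part of the upstream module): a CA
# certificate that may only issue end-entity certificates would carry
# BasicConstraints(ca=True, path_length=0), while a leaf certificate uses
# BasicConstraints(ca=False, path_length=None).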
@utils.register_interface(ExtensionType)
class DeltaCRLIndicator(object):
oid = ExtensionOID.DELTA_CRL_INDICATOR
def __init__(self, crl_number):
if not isinstance(crl_number, six.integer_types):
raise TypeError("crl_number must be an integer")
self._crl_number = crl_number
crl_number = utils.read_only_property("_crl_number")
def __eq__(self, other):
if not isinstance(other, DeltaCRLIndicator):
return NotImplemented
return self.crl_number == other.crl_number
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.crl_number)
def __repr__(self):
return "<DeltaCRLIndicator(crl_number={0.crl_number})>".format(self)
@utils.register_interface(ExtensionType)
class CRLDistributionPoints(object):
oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
def __init__(self, distribution_points):
distribution_points = list(distribution_points)
if not all(
isinstance(x, DistributionPoint) for x in distribution_points
):
raise TypeError(
"distribution_points must be a list of DistributionPoint "
"objects"
)
self._distribution_points = distribution_points
__len__, __iter__, __getitem__ = _make_sequence_methods(
"_distribution_points"
)
def __repr__(self):
return "<CRLDistributionPoints({})>".format(self._distribution_points)
def __eq__(self, other):
if not isinstance(other, CRLDistributionPoints):
return NotImplemented
return self._distribution_points == other._distribution_points
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._distribution_points))
@utils.register_interface(ExtensionType)
class FreshestCRL(object):
oid = ExtensionOID.FRESHEST_CRL
def __init__(self, distribution_points):
distribution_points = list(distribution_points)
if not all(
isinstance(x, DistributionPoint) for x in distribution_points
):
raise TypeError(
"distribution_points must be a list of DistributionPoint "
"objects"
)
self._distribution_points = distribution_points
__len__, __iter__, __getitem__ = _make_sequence_methods(
"_distribution_points"
)
def __repr__(self):
return "<FreshestCRL({})>".format(self._distribution_points)
def __eq__(self, other):
if not isinstance(other, FreshestCRL):
return NotImplemented
return self._distribution_points == other._distribution_points
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._distribution_points))
class DistributionPoint(object):
def __init__(self, full_name, relative_name, reasons, crl_issuer):
if full_name and relative_name:
raise ValueError(
"You cannot provide both full_name and relative_name, at "
"least one must be None."
)
if full_name:
full_name = list(full_name)
if not all(isinstance(x, GeneralName) for x in full_name):
raise TypeError(
"full_name must be a list of GeneralName objects"
)
if relative_name:
if not isinstance(relative_name, RelativeDistinguishedName):
raise TypeError(
"relative_name must be a RelativeDistinguishedName"
)
if crl_issuer:
crl_issuer = list(crl_issuer)
if not all(isinstance(x, GeneralName) for x in crl_issuer):
raise TypeError(
"crl_issuer must be None or a list of general names"
)
if reasons and (not isinstance(reasons, frozenset) or not all(
isinstance(x, ReasonFlags) for x in reasons
)):
raise TypeError("reasons must be None or frozenset of ReasonFlags")
if reasons and (
ReasonFlags.unspecified in reasons or
ReasonFlags.remove_from_crl in reasons
):
raise ValueError(
"unspecified and remove_from_crl are not valid reasons in a "
"DistributionPoint"
)
if reasons and not crl_issuer and not (full_name or relative_name):
raise ValueError(
"You must supply crl_issuer, full_name, or relative_name when "
"reasons is not None"
)
self._full_name = full_name
self._relative_name = relative_name
self._reasons = reasons
self._crl_issuer = crl_issuer
def __repr__(self):
return (
"<DistributionPoint(full_name={0.full_name}, relative_name={0.rela"
"tive_name}, reasons={0.reasons}, crl_issuer={0.crl_issuer})>"
.format(self)
)
def __eq__(self, other):
if not isinstance(other, DistributionPoint):
return NotImplemented
return (
self.full_name == other.full_name and
self.relative_name == other.relative_name and
self.reasons == other.reasons and
self.crl_issuer == other.crl_issuer
)
def __ne__(self, other):
return not self == other
def __hash__(self):
if self.full_name is not None:
fn = tuple(self.full_name)
else:
fn = None
if self.crl_issuer is not None:
crl_issuer = tuple(self.crl_issuer)
else:
crl_issuer = None
return hash((fn, self.relative_name, self.reasons, crl_issuer))
full_name = utils.read_only_property("_full_name")
relative_name = utils.read_only_property("_relative_name")
reasons = utils.read_only_property("_reasons")
crl_issuer = utils.read_only_property("_crl_issuer")
class ReasonFlags(Enum):
unspecified = "unspecified"
key_compromise = "keyCompromise"
ca_compromise = "cACompromise"
affiliation_changed = "affiliationChanged"
superseded = "superseded"
cessation_of_operation = "cessationOfOperation"
certificate_hold = "certificateHold"
privilege_withdrawn = "privilegeWithdrawn"
aa_compromise = "aACompromise"
remove_from_crl = "removeFromCRL"
@utils.register_interface(ExtensionType)
class PolicyConstraints(object):
oid = ExtensionOID.POLICY_CONSTRAINTS
def __init__(self, require_explicit_policy, inhibit_policy_mapping):
if require_explicit_policy is not None and not isinstance(
require_explicit_policy, six.integer_types
):
raise TypeError(
"require_explicit_policy must be a non-negative integer or "
"None"
)
if inhibit_policy_mapping is not None and not isinstance(
inhibit_policy_mapping, six.integer_types
):
raise TypeError(
"inhibit_policy_mapping must be a non-negative integer or None"
)
if inhibit_policy_mapping is None and require_explicit_policy is None:
raise ValueError(
"At least one of require_explicit_policy and "
"inhibit_policy_mapping must not be None"
)
self._require_explicit_policy = require_explicit_policy
self._inhibit_policy_mapping = inhibit_policy_mapping
def __repr__(self):
return (
u"<PolicyConstraints(require_explicit_policy={0.require_explicit"
u"_policy}, inhibit_policy_mapping={0.inhibit_policy_"
u"mapping})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, PolicyConstraints):
return NotImplemented
return (
self.require_explicit_policy == other.require_explicit_policy and
self.inhibit_policy_mapping == other.inhibit_policy_mapping
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(
(self.require_explicit_policy, self.inhibit_policy_mapping)
)
require_explicit_policy = utils.read_only_property(
"_require_explicit_policy"
)
inhibit_policy_mapping = utils.read_only_property(
"_inhibit_policy_mapping"
)
@utils.register_interface(ExtensionType)
class CertificatePolicies(object):
oid = ExtensionOID.CERTIFICATE_POLICIES
def __init__(self, policies):
policies = list(policies)
if not all(isinstance(x, PolicyInformation) for x in policies):
raise TypeError(
"Every item in the policies list must be a "
"PolicyInformation"
)
self._policies = policies
__len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
def __repr__(self):
return "<CertificatePolicies({})>".format(self._policies)
def __eq__(self, other):
if not isinstance(other, CertificatePolicies):
return NotImplemented
return self._policies == other._policies
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._policies))
class PolicyInformation(object):
def __init__(self, policy_identifier, policy_qualifiers):
if not isinstance(policy_identifier, ObjectIdentifier):
raise TypeError("policy_identifier must be an ObjectIdentifier")
self._policy_identifier = policy_identifier
if policy_qualifiers:
policy_qualifiers = list(policy_qualifiers)
if not all(
isinstance(x, (six.text_type, UserNotice))
for x in policy_qualifiers
):
raise TypeError(
"policy_qualifiers must be a list of strings and/or "
"UserNotice objects or None"
)
self._policy_qualifiers = policy_qualifiers
def __repr__(self):
return (
"<PolicyInformation(policy_identifier={0.policy_identifier}, polic"
"y_qualifiers={0.policy_qualifiers})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, PolicyInformation):
return NotImplemented
return (
self.policy_identifier == other.policy_identifier and
self.policy_qualifiers == other.policy_qualifiers
)
def __ne__(self, other):
return not self == other
def __hash__(self):
if self.policy_qualifiers is not None:
pq = tuple(self.policy_qualifiers)
else:
pq = None
return hash((self.policy_identifier, pq))
policy_identifier = utils.read_only_property("_policy_identifier")
policy_qualifiers = utils.read_only_property("_policy_qualifiers")
class UserNotice(object):
def __init__(self, notice_reference, explicit_text):
if notice_reference and not isinstance(
notice_reference, NoticeReference
):
raise TypeError(
"notice_reference must be None or a NoticeReference"
)
self._notice_reference = notice_reference
self._explicit_text = explicit_text
def __repr__(self):
return (
"<UserNotice(notice_reference={0.notice_reference}, explicit_text="
"{0.explicit_text!r})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, UserNotice):
return NotImplemented
return (
self.notice_reference == other.notice_reference and
self.explicit_text == other.explicit_text
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.notice_reference, self.explicit_text))
notice_reference = utils.read_only_property("_notice_reference")
explicit_text = utils.read_only_property("_explicit_text")
class NoticeReference(object):
def __init__(self, organization, notice_numbers):
self._organization = organization
notice_numbers = list(notice_numbers)
if not all(isinstance(x, int) for x in notice_numbers):
raise TypeError(
"notice_numbers must be a list of integers"
)
self._notice_numbers = notice_numbers
def __repr__(self):
return (
"<NoticeReference(organization={0.organization!r}, notice_numbers="
"{0.notice_numbers})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, NoticeReference):
return NotImplemented
return (
self.organization == other.organization and
self.notice_numbers == other.notice_numbers
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.organization, tuple(self.notice_numbers)))
organization = utils.read_only_property("_organization")
notice_numbers = utils.read_only_property("_notice_numbers")
@utils.register_interface(ExtensionType)
class ExtendedKeyUsage(object):
oid = ExtensionOID.EXTENDED_KEY_USAGE
def __init__(self, usages):
usages = list(usages)
if not all(isinstance(x, ObjectIdentifier) for x in usages):
raise TypeError(
"Every item in the usages list must be an ObjectIdentifier"
)
self._usages = usages
__len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
def __repr__(self):
return "<ExtendedKeyUsage({})>".format(self._usages)
def __eq__(self, other):
if not isinstance(other, ExtendedKeyUsage):
return NotImplemented
return self._usages == other._usages
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._usages))
@utils.register_interface(ExtensionType)
class OCSPNoCheck(object):
oid = ExtensionOID.OCSP_NO_CHECK
def __eq__(self, other):
if not isinstance(other, OCSPNoCheck):
return NotImplemented
return True
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(OCSPNoCheck)
def __repr__(self):
return "<OCSPNoCheck()>"
@utils.register_interface(ExtensionType)
class PrecertPoison(object):
oid = ExtensionOID.PRECERT_POISON
def __eq__(self, other):
if not isinstance(other, PrecertPoison):
return NotImplemented
return True
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(PrecertPoison)
def __repr__(self):
return "<PrecertPoison()>"
@utils.register_interface(ExtensionType)
class TLSFeature(object):
oid = ExtensionOID.TLS_FEATURE
def __init__(self, features):
features = list(features)
if (
not all(isinstance(x, TLSFeatureType) for x in features) or
len(features) == 0
):
raise TypeError(
"features must be a list of elements from the TLSFeatureType "
"enum"
)
self._features = features
__len__, __iter__, __getitem__ = _make_sequence_methods("_features")
def __repr__(self):
return "<TLSFeature(features={0._features})>".format(self)
def __eq__(self, other):
if not isinstance(other, TLSFeature):
return NotImplemented
return self._features == other._features
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._features))
class TLSFeatureType(Enum):
# status_request is defined in RFC 6066 and is used for what is commonly
# called OCSP Must-Staple when present in the TLS Feature extension in an
# X.509 certificate.
status_request = 5
# status_request_v2 is defined in RFC 6961 and allows multiple OCSP
# responses to be provided. It is not currently in use by clients or
# servers.
status_request_v2 = 17
_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
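# Usage sketch (illustrative, not part of the upstream module): requesting
# OCSP Must-Staple in a certificate amounts to adding this extension with
# the status_request feature:
#
#     TLSFeature(features=[TLSFeatureType.status_request])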
@utils.register_interface(ExtensionType)
class InhibitAnyPolicy(object):
oid = ExtensionOID.INHIBIT_ANY_POLICY
def __init__(self, skip_certs):
if not isinstance(skip_certs, six.integer_types):
raise TypeError("skip_certs must be an integer")
if skip_certs < 0:
raise ValueError("skip_certs must be a non-negative integer")
self._skip_certs = skip_certs
def __repr__(self):
return "<InhibitAnyPolicy(skip_certs={0.skip_certs})>".format(self)
def __eq__(self, other):
if not isinstance(other, InhibitAnyPolicy):
return NotImplemented
return self.skip_certs == other.skip_certs
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.skip_certs)
skip_certs = utils.read_only_property("_skip_certs")
@utils.register_interface(ExtensionType)
class KeyUsage(object):
oid = ExtensionOID.KEY_USAGE
def __init__(self, digital_signature, content_commitment, key_encipherment,
data_encipherment, key_agreement, key_cert_sign, crl_sign,
encipher_only, decipher_only):
if not key_agreement and (encipher_only or decipher_only):
raise ValueError(
"encipher_only and decipher_only can only be true when "
"key_agreement is true"
)
self._digital_signature = digital_signature
self._content_commitment = content_commitment
self._key_encipherment = key_encipherment
self._data_encipherment = data_encipherment
self._key_agreement = key_agreement
self._key_cert_sign = key_cert_sign
self._crl_sign = crl_sign
self._encipher_only = encipher_only
self._decipher_only = decipher_only
digital_signature = utils.read_only_property("_digital_signature")
content_commitment = utils.read_only_property("_content_commitment")
key_encipherment = utils.read_only_property("_key_encipherment")
data_encipherment = utils.read_only_property("_data_encipherment")
key_agreement = utils.read_only_property("_key_agreement")
key_cert_sign = utils.read_only_property("_key_cert_sign")
crl_sign = utils.read_only_property("_crl_sign")
@property
def encipher_only(self):
if not self.key_agreement:
raise ValueError(
"encipher_only is undefined unless key_agreement is true"
)
else:
return self._encipher_only
@property
def decipher_only(self):
if not self.key_agreement:
raise ValueError(
"decipher_only is undefined unless key_agreement is true"
)
else:
return self._decipher_only
def __repr__(self):
try:
encipher_only = self.encipher_only
decipher_only = self.decipher_only
except ValueError:
encipher_only = None
decipher_only = None
return ("<KeyUsage(digital_signature={0.digital_signature}, "
"content_commitment={0.content_commitment}, "
"key_encipherment={0.key_encipherment}, "
"data_encipherment={0.data_encipherment}, "
"key_agreement={0.key_agreement}, "
"key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
"encipher_only={1}, decipher_only={2})>").format(
self, encipher_only, decipher_only)
def __eq__(self, other):
if not isinstance(other, KeyUsage):
return NotImplemented
return (
self.digital_signature == other.digital_signature and
self.content_commitment == other.content_commitment and
self.key_encipherment == other.key_encipherment and
self.data_encipherment == other.data_encipherment and
self.key_agreement == other.key_agreement and
self.key_cert_sign == other.key_cert_sign and
self.crl_sign == other.crl_sign and
self._encipher_only == other._encipher_only and
self._decipher_only == other._decipher_only
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((
self.digital_signature, self.content_commitment,
self.key_encipherment, self.data_encipherment,
self.key_agreement, self.key_cert_sign,
self.crl_sign, self._encipher_only,
self._decipher_only
))
@utils.register_interface(ExtensionType)
class NameConstraints(object):
oid = ExtensionOID.NAME_CONSTRAINTS
def __init__(self, permitted_subtrees, excluded_subtrees):
if permitted_subtrees is not None:
permitted_subtrees = list(permitted_subtrees)
if not all(
isinstance(x, GeneralName) for x in permitted_subtrees
):
raise TypeError(
"permitted_subtrees must be a list of GeneralName objects "
"or None"
)
self._validate_ip_name(permitted_subtrees)
if excluded_subtrees is not None:
excluded_subtrees = list(excluded_subtrees)
if not all(
isinstance(x, GeneralName) for x in excluded_subtrees
):
raise TypeError(
"excluded_subtrees must be a list of GeneralName objects "
"or None"
)
self._validate_ip_name(excluded_subtrees)
if permitted_subtrees is None and excluded_subtrees is None:
raise ValueError(
"At least one of permitted_subtrees and excluded_subtrees "
"must not be None"
)
self._permitted_subtrees = permitted_subtrees
self._excluded_subtrees = excluded_subtrees
def __eq__(self, other):
if not isinstance(other, NameConstraints):
return NotImplemented
return (
self.excluded_subtrees == other.excluded_subtrees and
self.permitted_subtrees == other.permitted_subtrees
)
def __ne__(self, other):
return not self == other
def _validate_ip_name(self, tree):
if any(isinstance(name, IPAddress) and not isinstance(
name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
) for name in tree):
raise TypeError(
"IPAddress name constraints must be an IPv4Network or"
" IPv6Network object"
)
def __repr__(self):
return (
u"<NameConstraints(permitted_subtrees={0.permitted_subtrees}, "
u"excluded_subtrees={0.excluded_subtrees})>".format(self)
)
def __hash__(self):
if self.permitted_subtrees is not None:
ps = tuple(self.permitted_subtrees)
else:
ps = None
if self.excluded_subtrees is not None:
es = tuple(self.excluded_subtrees)
else:
es = None
return hash((ps, es))
permitted_subtrees = utils.read_only_property("_permitted_subtrees")
excluded_subtrees = utils.read_only_property("_excluded_subtrees")
class Extension(object):
def __init__(self, oid, critical, value):
if not isinstance(oid, ObjectIdentifier):
raise TypeError(
"oid argument must be an ObjectIdentifier instance."
)
if not isinstance(critical, bool):
raise TypeError("critical must be a boolean value")
self._oid = oid
self._critical = critical
self._value = value
oid = utils.read_only_property("_oid")
critical = utils.read_only_property("_critical")
value = utils.read_only_property("_value")
def __repr__(self):
return ("<Extension(oid={0.oid}, critical={0.critical}, "
"value={0.value})>").format(self)
def __eq__(self, other):
if not isinstance(other, Extension):
return NotImplemented
return (
self.oid == other.oid and
self.critical == other.critical and
self.value == other.value
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.oid, self.critical, self.value))
class GeneralNames(object):
def __init__(self, general_names):
general_names = list(general_names)
if not all(isinstance(x, GeneralName) for x in general_names):
raise TypeError(
"Every item in the general_names list must be an "
"object conforming to the GeneralName interface"
)
self._general_names = general_names
__len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
# Return the value of each GeneralName, except for OtherName instances
# which we return directly because it has two important properties not
# just one value.
objs = (i for i in self if isinstance(i, type))
if type != OtherName:
objs = (i.value for i in objs)
return list(objs)
def __repr__(self):
return "<GeneralNames({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, GeneralNames):
return NotImplemented
return self._general_names == other._general_names
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self._general_names))
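# Usage sketch (illustrative, not part of the upstream module): because an
# OtherName carries both a type_id and a value, get_values_for_type returns
# OtherName instances whole, but unwraps every other name type:
#
#     from cryptography.x509.general_name import DNSName
#     names = GeneralNames([DNSName(u"example.com")])
#     names.get_values_for_type(DNSName)  # -> [u"example.com"]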
@utils.register_interface(ExtensionType)
class SubjectAlternativeName(object):
oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
__len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
return "<SubjectAlternativeName({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, SubjectAlternativeName):
return NotImplemented
return self._general_names == other._general_names
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self._general_names)
@utils.register_interface(ExtensionType)
class IssuerAlternativeName(object):
oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
__len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
return "<IssuerAlternativeName({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, IssuerAlternativeName):
return NotImplemented
return self._general_names == other._general_names
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self._general_names)
@utils.register_interface(ExtensionType)
class CertificateIssuer(object):
oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
def __init__(self, general_names):
self._general_names = GeneralNames(general_names)
__len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
def get_values_for_type(self, type):
return self._general_names.get_values_for_type(type)
def __repr__(self):
return "<CertificateIssuer({})>".format(self._general_names)
def __eq__(self, other):
if not isinstance(other, CertificateIssuer):
return NotImplemented
return self._general_names == other._general_names
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self._general_names)
@utils.register_interface(ExtensionType)
class CRLReason(object):
oid = CRLEntryExtensionOID.CRL_REASON
def __init__(self, reason):
if not isinstance(reason, ReasonFlags):
raise TypeError("reason must be an element from ReasonFlags")
self._reason = reason
def __repr__(self):
return "<CRLReason(reason={})>".format(self._reason)
def __eq__(self, other):
if not isinstance(other, CRLReason):
return NotImplemented
return self.reason == other.reason
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.reason)
reason = utils.read_only_property("_reason")
@utils.register_interface(ExtensionType)
class InvalidityDate(object):
oid = CRLEntryExtensionOID.INVALIDITY_DATE
def __init__(self, invalidity_date):
if not isinstance(invalidity_date, datetime.datetime):
raise TypeError("invalidity_date must be a datetime.datetime")
self._invalidity_date = invalidity_date
def __repr__(self):
return "<InvalidityDate(invalidity_date={})>".format(
self._invalidity_date
)
def __eq__(self, other):
if not isinstance(other, InvalidityDate):
return NotImplemented
return self.invalidity_date == other.invalidity_date
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.invalidity_date)
invalidity_date = utils.read_only_property("_invalidity_date")
@utils.register_interface(ExtensionType)
class PrecertificateSignedCertificateTimestamps(object):
oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
def __init__(self, signed_certificate_timestamps):
signed_certificate_timestamps = list(signed_certificate_timestamps)
if not all(
isinstance(sct, SignedCertificateTimestamp)
for sct in signed_certificate_timestamps
):
raise TypeError(
"Every item in the signed_certificate_timestamps list must be "
"a SignedCertificateTimestamp"
)
self._signed_certificate_timestamps = signed_certificate_timestamps
__len__, __iter__, __getitem__ = _make_sequence_methods(
"_signed_certificate_timestamps"
)
def __repr__(self):
return (
"<PrecertificateSignedCertificateTimestamps({})>".format(
list(self)
)
)
def __hash__(self):
return hash(tuple(self._signed_certificate_timestamps))
def __eq__(self, other):
if not isinstance(other, PrecertificateSignedCertificateTimestamps):
return NotImplemented
return (
self._signed_certificate_timestamps ==
other._signed_certificate_timestamps
)
def __ne__(self, other):
return not self == other
@utils.register_interface(ExtensionType)
class OCSPNonce(object):
oid = OCSPExtensionOID.NONCE
def __init__(self, nonce):
if not isinstance(nonce, bytes):
raise TypeError("nonce must be bytes")
self._nonce = nonce
def __eq__(self, other):
if not isinstance(other, OCSPNonce):
return NotImplemented
return self.nonce == other.nonce
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.nonce)
def __repr__(self):
return "<OCSPNonce(nonce={0.nonce!r})>".format(self)
nonce = utils.read_only_property("_nonce")
@utils.register_interface(ExtensionType)
class IssuingDistributionPoint(object):
oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
def __init__(self, full_name, relative_name, only_contains_user_certs,
only_contains_ca_certs, only_some_reasons, indirect_crl,
only_contains_attribute_certs):
if (
only_some_reasons and (
not isinstance(only_some_reasons, frozenset) or not all(
isinstance(x, ReasonFlags) for x in only_some_reasons
)
)
):
raise TypeError(
"only_some_reasons must be None or frozenset of ReasonFlags"
)
if only_some_reasons and (
ReasonFlags.unspecified in only_some_reasons or
ReasonFlags.remove_from_crl in only_some_reasons
):
raise ValueError(
"unspecified and remove_from_crl are not valid reasons in an "
"IssuingDistributionPoint"
)
if not (
isinstance(only_contains_user_certs, bool) and
isinstance(only_contains_ca_certs, bool) and
isinstance(indirect_crl, bool) and
isinstance(only_contains_attribute_certs, bool)
):
raise TypeError(
"only_contains_user_certs, only_contains_ca_certs, "
"indirect_crl and only_contains_attribute_certs "
"must all be boolean."
)
crl_constraints = [
only_contains_user_certs, only_contains_ca_certs,
indirect_crl, only_contains_attribute_certs
]
if len([x for x in crl_constraints if x]) > 1:
raise ValueError(
"Only one of the following can be set to True: "
"only_contains_user_certs, only_contains_ca_certs, "
"indirect_crl, only_contains_attribute_certs"
)
if (
not any([
only_contains_user_certs, only_contains_ca_certs,
indirect_crl, only_contains_attribute_certs, full_name,
relative_name, only_some_reasons
])
):
raise ValueError(
"Cannot create empty extension: "
"if only_contains_user_certs, only_contains_ca_certs, "
"indirect_crl, and only_contains_attribute_certs are all False"
", then either full_name, relative_name, or only_some_reasons "
"must have a value."
)
self._only_contains_user_certs = only_contains_user_certs
self._only_contains_ca_certs = only_contains_ca_certs
self._indirect_crl = indirect_crl
self._only_contains_attribute_certs = only_contains_attribute_certs
self._only_some_reasons = only_some_reasons
self._full_name = full_name
self._relative_name = relative_name
def __repr__(self):
return (
"<IssuingDistributionPoint(full_name={0.full_name}, "
"relative_name={0.relative_name}, "
"only_contains_user_certs={0.only_contains_user_certs}, "
"only_contains_ca_certs={0.only_contains_ca_certs}, "
"only_some_reasons={0.only_some_reasons}, "
"indirect_crl={0.indirect_crl}, "
"only_contains_attribute_certs="
"{0.only_contains_attribute_certs})>".format(self)
)
def __eq__(self, other):
if not isinstance(other, IssuingDistributionPoint):
return NotImplemented
return (
self.full_name == other.full_name and
self.relative_name == other.relative_name and
self.only_contains_user_certs == other.only_contains_user_certs and
self.only_contains_ca_certs == other.only_contains_ca_certs and
self.only_some_reasons == other.only_some_reasons and
self.indirect_crl == other.indirect_crl and
self.only_contains_attribute_certs ==
other.only_contains_attribute_certs
)
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((
self.full_name,
self.relative_name,
self.only_contains_user_certs,
self.only_contains_ca_certs,
self.only_some_reasons,
self.indirect_crl,
self.only_contains_attribute_certs,
))
full_name = utils.read_only_property("_full_name")
relative_name = utils.read_only_property("_relative_name")
only_contains_user_certs = utils.read_only_property(
"_only_contains_user_certs"
)
only_contains_ca_certs = utils.read_only_property(
"_only_contains_ca_certs"
)
only_some_reasons = utils.read_only_property("_only_some_reasons")
indirect_crl = utils.read_only_property("_indirect_crl")
only_contains_attribute_certs = utils.read_only_property(
"_only_contains_attribute_certs"
)
@utils.register_interface(ExtensionType)
class UnrecognizedExtension(object):
def __init__(self, oid, value):
if not isinstance(oid, ObjectIdentifier):
raise TypeError("oid must be an ObjectIdentifier")
self._oid = oid
self._value = value
oid = utils.read_only_property("_oid")
value = utils.read_only_property("_value")
def __repr__(self):
return (
"<UnrecognizedExtension(oid={0.oid}, value={0.value!r})>".format(
self
)
)
def __eq__(self, other):
if not isinstance(other, UnrecognizedExtension):
return NotImplemented
return self.oid == other.oid and self.value == other.value
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.oid, self.value))
**avg_line_length:** 31.494987 · **max_line_length:** 79 · **alphanum_fraction:** 0.648072
---
**hexsha:** `6e2219099741bde4aa9c49ba41f08ec7c990462e` · **size:** 3,111 · **ext:** py · **lang:** Python
**repo:** `cheese/packit` @ `790b01a30575b6bfd680fdc991542ba60e40a9f2` · **path:** `packit/utils/repo.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** 1 (2020-12-28T18:00:22.000Z – 2020-12-28T18:00:22.000Z) · **issues:** 7 (2020-12-28T19:57:35.000Z – 2021-04-17T14:43:15.000Z) · **forks:** null

**content:**
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import logging
import tempfile
from pathlib import Path
from typing import Tuple, Optional, Union, List
import git
from ogr.parsing import parse_git_repo
from packit.exceptions import PackitException
logger = logging.getLogger(__name__)
def is_git_repo(directory: Union[Path, str]) -> bool:
    """
    Check whether the directory is a git repo (i.e. contains a .git subdirectory).
    """
    return Path(directory, ".git").is_dir()
def get_repo(url: str, directory: Optional[Union[Path, str]] = None) -> git.Repo:
"""
Use directory as a git repo or clone repo to the tempdir.
"""
directory = str(directory) if directory else tempfile.mkdtemp()
if is_git_repo(directory=directory):
logger.debug(f"Repo already exists in {directory}.")
repo = git.repo.Repo(directory)
else:
logger.debug(f"Cloning repo {url} -> {directory}")
# TODO: optimize cloning: single branch and last n commits?
repo = git.repo.Repo.clone_from(url=url, to_path=directory, tags=True)
return repo
def get_namespace_and_repo_name(url: str) -> Tuple[Optional[str], str]:
parsed_git_repo = parse_git_repo(url)
if parsed_git_repo is None or not parsed_git_repo.repo:
raise PackitException(
f"Invalid URL format, can't obtain namespace and repository name: {url}"
)
return parsed_git_repo.namespace, parsed_git_repo.repo
def is_a_git_ref(repo: git.Repo, ref: str) -> bool:
try:
commit = repo.commit(ref)
return bool(commit)
except git.BadName:
return False
def git_remote_url_to_https_url(inp: str) -> str:
"""
    Turn the provided git remote URL into an HTTPS URL;
    returns an empty string if the input can't be processed.
"""
parsed_repo = parse_git_repo(inp)
if not parsed_repo or not parsed_repo.hostname:
        logger.warning(f"{inp!r} is not a URL we recognize.")
        return ""
    if inp.startswith(("http", "https")):
        logger.debug(f"Provided input {inp!r} is already a URL.")
return inp
optional_suffix = ".git" if inp.endswith(".git") else ""
url_str = "https://{}/{}/{}{}".format(
parsed_repo.hostname, parsed_repo.namespace, parsed_repo.repo, optional_suffix
)
logger.debug(f"URL {inp!r} turned into HTTPS {url_str!r}")
return url_str
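# Illustrative behavior (not part of the original module), assuming ogr's
# parse_git_repo resolves the hostname/namespace/repo as expected:
#
#     git_remote_url_to_https_url("git@github.com:packit/packit.git")
#     # -> "https://github.com/packit/packit.git"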
def get_current_version_command(
glob_pattern: str, refs: Optional[str] = "tags"
) -> List[str]:
"""
    Returns the command that finds the latest git reference matching the given pattern.
:param glob_pattern: pattern that is used to find latest ref
:param refs: specifies what kind of ref is used; \
default is `"tags"` that searches through all tags (including non-annotated), \
pass `None` to search only annotated tags or `"all"` to search through \
all refs (including branches and remote refs)
:return: command to find latest ref
"""
return [
"git",
"describe",
"--abbrev=0",
f"--{refs}" if refs else "",
"--match",
glob_pattern,
]
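# Example of the command the helper above builds (the glob pattern "0.*" is
# hypothetical; with the default refs="tags"):
#
#     get_current_version_command("0.*")
#     # -> ["git", "describe", "--abbrev=0", "--tags", "--match", "0.*"]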
**avg_line_length:** 30.203883 · **max_line_length:** 87 · **alphanum_fraction:** 0.660559
---
**hexsha:** `bfcac2f292a5ae64e20851bbb719fa57aa75698e` · **size:** 3,353 · **ext:** py · **lang:** Python
**repo:** `ab3llini/News2Title` @ `80b5117d5495890425ddeaddd77ef64624df5855` · **path:** `model/embedding/generator.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** null · **issues:** null · **forks:** 1 (2019-11-04T01:17:40.000Z – 2019-11-04T01:17:40.000Z)

**content:**
import ntpath
import os
import pickle
import sys
from sklearn.model_selection import train_test_split
from tqdm import tqdm
import numpy as np
this_path = os.path.dirname(os.path.realpath(__file__))
root_path = os.path.abspath(os.path.join(this_path, os.pardir, os.pardir))
sys.path.append(root_path)
from model import config
from model.embedding.output_generator import get_inputs_outputs
config = config.embedding_cfg
class DataGenerator():
def __init__(self, max_decoder_seq_len, decoder_tokens, embeddings, glove_embedding_len, test_size=0.33):
embedding_prefix = 'EMB_'
tokenized_prefix = 'A'
tokenized_path = os.path.join(root_path, config.preprocess_folder)
self.embeddings = embeddings
self.glove_embedding_len = glove_embedding_len
        filelist = []
        for f in os.listdir(tokenized_path):
            if ntpath.basename(f).startswith(embedding_prefix + tokenized_prefix):
                filelist.append(os.path.join(tokenized_path, f))
train_list, test_list = train_test_split(filelist, test_size=test_size, random_state=42)
self.train_list = train_list
self.test_list = test_list
self.max_decoder_seq_len = max_decoder_seq_len
self.decoder_tokens = decoder_tokens
def get_steps_per_epoch(self):
return len(self.train_list)
def get_steps_validation(self):
return len(self.test_list)
def __len__(self):
'Denotes the number of batches per epoch'
return 100
def load_tokens(self, file):
with open(file, 'rb') as handle:
data = np.array(pickle.load(handle))
headlines = list(data[:, 0])
articles = list(data[:, 1])
return headlines, articles, data.shape[0]
    def generate_train(self):
        while True:
            for file in tqdm(self.train_list):
                headline, articles, file_length = self.load_tokens(file)
                encoder_input_data, decoder_input_data, decoder_target_data = get_inputs_outputs(
                    x=articles,
                    y=headline,
                    max_decoder_seq_len=self.max_decoder_seq_len,
                    glove_embedding_len=self.glove_embedding_len,
                    embeddings=self.embeddings,
                )
                yield [encoder_input_data, decoder_input_data], decoder_target_data

    def generate_test(self):
        while True:
            for file in tqdm(self.test_list):
                headline, articles, file_length = self.load_tokens(file)
                encoder_input_data, decoder_input_data, decoder_target_data = get_inputs_outputs(
                    x=articles,
                    y=headline,
                    max_decoder_seq_len=self.max_decoder_seq_len,
                    glove_embedding_len=self.glove_embedding_len,
                    embeddings=self.embeddings,
                )
                yield [encoder_input_data, decoder_input_data], decoder_target_data
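# Illustrative training loop (not part of the original module); the Keras
# model and its fit_generator call are assumptions based on the generator
# and steps API exposed above:
#
#     gen = DataGenerator(max_decoder_seq_len, decoder_tokens,
#                         embeddings, glove_embedding_len)
#     model.fit_generator(gen.generate_train(),
#                         steps_per_epoch=gen.get_steps_per_epoch(),
#                         validation_data=gen.generate_test(),
#                         validation_steps=gen.get_steps_validation())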
**avg_line_length:** 44.706667 · **max_line_length:** 142 · **alphanum_fraction:** 0.584551
---
**hexsha:** `b575dda7ea0321844b657459f6924f4ee783733e` · **size:** 25 · **ext:** py · **lang:** Python
**repo:** `fhk/lamborghini` @ `e053204a4906c9b78d078359e9d7f6ca916d1690` · **path:** `main.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** 2 (2015-10-13T05:13:55.000Z – 2018-06-22T22:54:59.000Z) · **issues:** 2 (2015-10-13T05:14:27.000Z – 2015-10-13T06:10:13.000Z) · **forks:** null

**content:**
def main():
return 0
**avg_line_length:** 8.333333 · **max_line_length:** 12 · **alphanum_fraction:** 0.56
---
**hexsha:** `b8a93f5f44a1815d9fae147abde1a02d749d0c3a` · **size:** 4,148 · **ext:** py · **lang:** Python
**repo:** `felixnon/foveated-visual-attention` @ `7e7d9a5ef24ec42eb76ba72f783bb2227bdb4851` · **path:** `model.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** null · **issues:** null · **forks:** null

**content:**
import torch
import torch.nn as nn
from modules import baseline_network
from modules import glimpse_network, core_network
from modules import action_network, location_network
class RecurrentAttention(nn.Module):
"""
A Recurrent Model of Visual Attention (RAM) [1].
RAM is a recurrent neural network that processes
inputs sequentially, attending to different locations
within the image one at a time, and incrementally
combining information from these fixations to build
up a dynamic internal representation of the image.
References
----------
    - Mnih et al., https://arxiv.org/abs/1406.6247
"""
def __init__(self,
g,
k,
s,
c,
h_g,
h_l,
std,
hidden_size,
num_classes):
"""
Initialize the recurrent attention model and its
different components.
Args
----
- g: size of the square patches in the glimpses extracted
by the retina.
- k: number of patches to extract per glimpse.
- s: scaling factor that controls the size of successive patches.
- c: number of channels in each image.
- h_g: hidden layer size of the fc layer for `phi`.
- h_l: hidden layer size of the fc layer for `l`.
- std: standard deviation of the Gaussian policy.
- hidden_size: hidden size of the rnn.
- num_classes: number of classes in the dataset.
- num_glimpses: number of glimpses to take per image,
i.e. number of BPTT steps.
"""
super(RecurrentAttention, self).__init__()
self.std = std
self.sensor = glimpse_network(h_g, h_l, g, k, s, c)
self.rnn = core_network(hidden_size, hidden_size)
self.locator = location_network(hidden_size, 2, std)
self.classifier = action_network(hidden_size, num_classes)
self.baseliner = baseline_network(hidden_size, 1)
def forward(self, x, l_t_prev, h_t_prev, last=False):
"""
Run the recurrent attention model for 1 timestep
on the minibatch of images `x`.
Args
----
- x: a 4D Tensor of shape (B, H, W, C). The minibatch
of images.
- l_t_prev: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the previous
timestep `t-1`.
- h_t_prev: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the previous timestep `t-1`.
- last: a bool indicating whether this is the last timestep.
If True, the action network returns an output probability
vector over the classes and the baseline `b_t` for the
current timestep `t`. Else, the core network returns the
hidden state vector for the next timestep `t+1` and the
location vector for the next timestep `t+1`.
Returns
-------
- h_t: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the current timestep `t`.
- mu: a 2D tensor of shape (B, 2). The mean that parametrizes
the Gaussian policy.
- l_t: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the
current timestep `t`.
- b_t: a vector of length (B,). The baseline for the
current time step `t`.
- log_probas: a 2D tensor of shape (B, num_classes). The
output log probability vector over the classes.
- log_pi: a vector of length (B,).
"""
g_t = self.sensor(x, l_t_prev)
h_t = self.rnn(g_t, h_t_prev)
# we assume both dimensions are independent
# 1. pdf of the joint is the product of the pdfs
# 2. log of the product is the sum of the logs
log_pi, l_t = self.locator(h_t)
b_t = self.baseliner(h_t).squeeze()
# return log_probas always for plotting
log_probas = self.classifier(h_t)
return h_t, l_t, b_t, log_probas, log_pi
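# Illustrative single-timestep usage (not part of the original module); all
# hyperparameter values below are hypothetical:
#
#     model = RecurrentAttention(g=8, k=1, s=2, c=1, h_g=128, h_l=128,
#                                std=0.17, hidden_size=256, num_classes=10)
#     h_t, l_t, b_t, log_probas, log_pi = model(x, l_t_prev, h_t_prev)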
**avg_line_length:** 38.407407 · **max_line_length:** 73 · **alphanum_fraction:** 0.606557
---
**hexsha:** `7402b3d8afe25ea2ed099a1f31e179ca7ae793e7` · **size:** 2,566 · **ext:** py · **lang:** Python
**repo:** `DanielShaulov/rpyc` @ `a5f146a36a02da0492db6e3d40c673ccd33a05b4` · **path:** `rpyc/core/channel.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** null · **issues:** null · **forks:** null

**content:**
"""*Channel* is an abstraction layer over streams that works with *packets of data*,
rather than an endless stream of bytes, and adds support for compression.
"""
from rpyc.lib import safe_import
from rpyc.lib.compat import Struct, BYTES_LITERAL
zlib = safe_import("zlib")
# * 64 bit length field?
# * separate \n into a FlushingChannel subclass?
# * add thread safety as a subclass?
class Channel(object):
"""Channel implementation.
Note: In order to avoid problems with all sorts of line-buffered transports,
we deliberately add ``\\n`` at the end of each frame.
"""
COMPRESSION_THRESHOLD = 3000
COMPRESSION_LEVEL = 1
FRAME_HEADER = Struct("!LB")
FLUSHER = BYTES_LITERAL("\n") # cause any line-buffered layers below us to flush
__slots__ = ["stream", "compress"]
def __init__(self, stream, compress=True):
self.stream = stream
if not zlib:
compress = False
self.compress = compress
def close(self):
"""closes the channel and underlying stream"""
self.stream.close()
@property
def closed(self):
"""indicates whether the underlying stream has been closed"""
return self.stream.closed
def fileno(self):
"""returns the file descriptor of the underlying stream"""
return self.stream.fileno()
def poll(self, timeout):
"""polls the underlying steam for data, waiting up to *timeout* seconds"""
return self.stream.poll(timeout)
def recv(self):
"""Receives the next packet (or *frame*) from the underlying stream.
This method will block until the packet has been read completely
:returns: string of data
"""
header = self.stream.read(self.FRAME_HEADER.size)
length, compressed = self.FRAME_HEADER.unpack(header)
data = self.stream.read(length + len(self.FLUSHER))[:-len(self.FLUSHER)]
if compressed:
data = zlib.decompress(data)
return data
def send(self, data):
"""Sends the given string of data as a packet over the underlying
stream. Blocks until the packet has been sent.
:param data: the byte string to send as a packet
"""
if self.compress and len(data) > self.COMPRESSION_THRESHOLD:
compressed = 1
data = zlib.compress(data, self.COMPRESSION_LEVEL)
else:
compressed = 0
self.stream.write(self.FRAME_HEADER.pack(len(data), compressed))
self.stream.write(data)
self.stream.write(self.FLUSHER)
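# Illustrative frame layout (not part of the original module): a 5-byte
# uncompressed payload b"hello" goes on the wire as
#
#     Channel.FRAME_HEADER.pack(5, 0) + b"hello" + Channel.FLUSHER
#     # = b"\x00\x00\x00\x05\x00hello\n"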
**avg_line_length:** 33.763158 · **max_line_length:** 85 · **alphanum_fraction:** 0.650429
---
**hexsha:** `c2f87b9c0efb1d2a3f4c5d938cec653f26fcdb13` · **size:** 2,870 · **ext:** py · **lang:** Python
**repo:** `ray1888/Python-socket-programming` @ `6df052597ab552d1104b160f0c1d33439950fe87` · **path:** `client/client.py` · **licenses:** MIT (identical across the stars/issues/forks column groups)
**stars:** 1 (2017-06-13T17:09:43.000Z – 2017-06-13T17:09:43.000Z) · **issues:** 2 (2017-06-16T16:12:46.000Z – 2017-06-18T10:16:21.000Z) · **forks:** null

**content:**
import socket
import json
import os
import re
from random import randint
class Control():
    def __init__(self):
        self.s = socket.socket()  # s is the control-channel socket
        self.pwd = os.getcwd()
        self.Getconfig()
        self.Connect(self.s)
        self.InputCmd(self.s)
def Getconfig(self):
Path = os.getcwd()
with open(Path+'/clientconfig.json','r') as f:
config = f.read()
config = json.loads(config)
host = config["host"]
port = config["port"]
mode = config["mode"]
self.host = host
self.sport = port
self.mode = mode
    def Connect(self, socket):  # connect to the server
        socket.connect((self.host, self.sport))
        content = socket.recv(1024)
        print(content)
        socket.send(("mode=" + self.mode).encode("utf-8"))
        serport = socket.recv(1024)
        self.serport = serport
    def Createport(self, socport):  # generate the data port for active-mode connections
        dataport = randint(4096, 65535)
        if dataport == socport:
            return self.Createport(socport)
        else:
            return dataport
    def InputCmd(self, socket):  # this runs over the control channel
        state = True
        addr, socport = socket.getsockname()
        dataport = self.Createport(socport)
        socket.send(b"DataPort=" + str(dataport).encode("utf-8"))
        while state:
            cmd = input("Please enter a command: ")
            socket.send(bytes(cmd, encoding="utf-8"))  # send the command to the server
            result = socket.recv(1024)
            if result == b"0":
                state = False
                print("You have quit.")
            else:
                self.DataTranfer(self.host, cmd, self.mode, addr, port1=dataport)
    def DataTranfer(self, host, cmd, mode, lhost, port1=None, sport=None):
        # port1 is the locally generated port (defaults to None);
        # sport is the server's passive-mode data port (defaults to None)
        pwd = self.pwd
        self.ts = socket.socket()  # ts is the data-channel socket
        ts = self.ts
        if mode == "PASV":
            ts.bind((lhost, port1))  # active mode: we listen and the server connects
            ts.listen(5)
            tsc, addr = ts.accept()
            if re.match("upload", cmd):
                cmd_split = cmd.split(" ")
                filepath = cmd_split[1]
                self.Send(tsc, filepath)
            elif re.match("download", cmd):
                cmd_split = cmd.split(" ")
                filename = cmd_split[1]
                self.Receive(tsc, pwd, filename)
            else:
                total_data = b""
                while True:
                    data = tsc.recv(1024)
                    if not data:
                        break
                    total_data += data
                print(total_data)
        else:
            ts.connect((host, sport))  # passive mode: connect to the server's data port
    def Receive(self, datasocket, path, filename):  # used by download: data travels over the data channel
        with open(self.pwd + "/" + filename, "ab") as f:
            while True:
                data = datasocket.recv(1024)
                if data == b"":
                    break
                f.write(data)
        datasocket.close()
        print("Receive complete; the data tunnel has been shut down")
    def Send(self, datasocket, filepath):  # used only by upload: data travels over the data channel
        with open(filepath, "rb") as f:
            while True:
                data = f.read(1024)
                if data == b"":
                    break
                datasocket.send(data)
        datasocket.close()
        print("Send complete; the data tunnel has been shut down")
| 24.741379
| 109
| 0.60453
|
f21ec45ca6b619271ebd69b2bdedff3d1d450db7
| 369
|
py
|
Python
|
event_manager/events/migrations/0009_auto_20200219_0030.py
|
abeeshp/eventSignUpRestApp
|
77e7769628af23feaeea33eed4a4d48a01acdec7
|
[
"MIT"
] | null | null | null |
event_manager/events/migrations/0009_auto_20200219_0030.py
|
abeeshp/eventSignUpRestApp
|
77e7769628af23feaeea33eed4a4d48a01acdec7
|
[
"MIT"
] | 6
|
2021-03-19T00:28:45.000Z
|
2021-09-22T18:44:08.000Z
|
event_manager/events/migrations/0009_auto_20200219_0030.py
|
abeeshp/eventSignUpRestApp
|
77e7769628af23feaeea33eed4a4d48a01acdec7
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-02-19 00:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0008_remove_registration_last_name'),
]
operations = [
migrations.AlterUniqueTogether(
name='registration',
unique_together={('event', 'email')},
),
]
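# Note (added): AlterUniqueTogether above adds a database-level unique
# constraint on the (event, email) pair, so one email address cannot
# register twice for the same event.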
| 20.5
| 57
| 0.615176
|
4b5b84e2f2e98b429ac20f43bfd10f4435528214
| 1,667
|
py
|
Python
|
setup.py
|
hashbrowncipher/spectator-py
|
65590d5367bd58f40f77d02534ceda0f25d2b4e5
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
hashbrowncipher/spectator-py
|
65590d5367bd58f40f77d02534ceda0f25d2b4e5
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
hashbrowncipher/spectator-py
|
65590d5367bd58f40f77d02534ceda0f25d2b4e5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import os
from setuptools import setup
# Utility function to read the README file.
# https://pythonhosted.org/an_example_pypi_project/setuptools.html#setting-up-setup-py
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='netflix-spectator-py',
version='0.1.13',
description='Python library for reporting metrics to Atlas.',
long_description=read('README.md'),
long_description_content_type='text/markdown',
author='Brian Harrington',
author_email='netflix-atlas@googlegroups.com',
license='Apache 2.0',
url='https://github.com/netflix/spectator-py/',
packages=['spectator', 'spectator.histogram'],
install_requires=['future'],
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
| 34.020408
| 86
| 0.647271
|
bf8911c0562f8d283eef3daa1ec3c07fd615e045
| 30,722
|
py
|
Python
|
electrum/plugins/trezor/qt.py
|
p3ngu19z/electrum
|
427b396c24ec1a3cfdca8e1a70c94537b35ad882
|
[
"MIT"
] | 12
|
2020-11-12T08:53:05.000Z
|
2021-07-06T17:30:39.000Z
|
electrum/plugins/trezor/qt.py
|
p3ngu19z/electrum
|
427b396c24ec1a3cfdca8e1a70c94537b35ad882
|
[
"MIT"
] | 209
|
2020-09-23T06:58:18.000Z
|
2021-11-18T11:25:41.000Z
|
electrum/plugins/trezor/qt.py
|
p3ngu19z/electrum
|
427b396c24ec1a3cfdca8e1a70c94537b35ad882
|
[
"MIT"
] | 19
|
2020-10-13T11:42:26.000Z
|
2022-02-06T01:26:34.000Z
|
from functools import partial
import threading
from PyQt5.QtCore import Qt, QEventLoop, pyqtSignal
from PyQt5.QtWidgets import (QVBoxLayout, QLabel, QGridLayout, QPushButton,
QHBoxLayout, QButtonGroup, QGroupBox, QDialog,
QLineEdit, QRadioButton, QCheckBox, QWidget,
QMessageBox, QFileDialog, QSlider, QTabWidget)
from electrum.gui.qt.util import (WindowModalDialog, WWLabel, Buttons, CancelButton,
OkButton, CloseButton, PasswordLineEdit)
from electrum.i18n import _
from electrum.plugin import hook
from electrum.util import bh2u
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
from .trezor import (TrezorPlugin, TIM_NEW, TIM_RECOVER, TrezorInitSettings,
PASSPHRASE_ON_DEVICE, Capability, BackupType, RecoveryDeviceType)
PASSPHRASE_HELP_SHORT = _(
"Passphrases allow you to access new wallets, each "
"hidden behind a particular case-sensitive passphrase.")
PASSPHRASE_HELP = PASSPHRASE_HELP_SHORT + " " + _(
"You need to create a separate Electrum wallet for each passphrase "
"you use as they each generate different addresses. Changing "
"your passphrase does not lose other wallets, each is still "
"accessible behind its own passphrase.")
RECOMMEND_PIN = _(
"You should enable PIN protection. Your PIN is the only protection "
"for your bitcoins if your device is lost or stolen.")
PASSPHRASE_NOT_PIN = _(
"If you forget a passphrase you will be unable to access any "
"bitcoins in the wallet behind it. A passphrase is not a PIN. "
"Only change this if you are sure you understand it.")
MATRIX_RECOVERY = _(
"Enter the recovery words by pressing the buttons according to what "
"the device shows on its display. You can also use your NUMPAD.\n"
"Press BACKSPACE to go back a choice or word.\n")
SEEDLESS_MODE_WARNING = _(
"In seedless mode, the mnemonic seed words are never shown to the user.\n"
"There is no backup, and the user has a proof of this.\n"
"This is an advanced feature, only suggested to be used in redundant multisig setups.")
class MatrixDialog(WindowModalDialog):
def __init__(self, parent):
super(MatrixDialog, self).__init__(parent)
self.setWindowTitle(_("Trezor Matrix Recovery"))
self.num = 9
self.loop = QEventLoop()
vbox = QVBoxLayout(self)
vbox.addWidget(WWLabel(MATRIX_RECOVERY))
grid = QGridLayout()
grid.setSpacing(0)
self.char_buttons = []
for y in range(3):
for x in range(3):
button = QPushButton('?')
button.clicked.connect(partial(self.process_key, ord('1') + y * 3 + x))
grid.addWidget(button, 3 - y, x)
self.char_buttons.append(button)
vbox.addLayout(grid)
self.backspace_button = QPushButton("<=")
self.backspace_button.clicked.connect(partial(self.process_key, Qt.Key_Backspace))
self.cancel_button = QPushButton(_("Cancel"))
self.cancel_button.clicked.connect(partial(self.process_key, Qt.Key_Escape))
buttons = Buttons(self.backspace_button, self.cancel_button)
vbox.addSpacing(40)
vbox.addLayout(buttons)
self.refresh()
self.show()
def refresh(self):
for y in range(3):
self.char_buttons[3 * y + 1].setEnabled(self.num == 9)
def is_valid(self, key):
return key >= ord('1') and key <= ord('9')
def process_key(self, key):
self.data = None
if key == Qt.Key_Backspace:
self.data = '\010'
elif key == Qt.Key_Escape:
self.data = 'x'
elif self.is_valid(key):
self.char_buttons[key - ord('1')].setFocus()
self.data = '%c' % key
if self.data:
self.loop.exit(0)
def keyPressEvent(self, event):
self.process_key(event.key())
if not self.data:
QDialog.keyPressEvent(self, event)
def get_matrix(self, num):
self.num = num
self.refresh()
self.loop.exec_()
class QtHandler(QtHandlerBase):
pin_signal = pyqtSignal(object, object)
matrix_signal = pyqtSignal(object)
close_matrix_dialog_signal = pyqtSignal()
def __init__(self, win, pin_matrix_widget_class, device):
super(QtHandler, self).__init__(win, device)
self.pin_signal.connect(self.pin_dialog)
self.matrix_signal.connect(self.matrix_recovery_dialog)
self.close_matrix_dialog_signal.connect(self._close_matrix_dialog)
self.pin_matrix_widget_class = pin_matrix_widget_class
self.matrix_dialog = None
self.passphrase_on_device = False
def get_pin(self, msg, *, show_strength=True):
self.done.clear()
self.pin_signal.emit(msg, show_strength)
self.done.wait()
return self.response
def get_matrix(self, msg):
self.done.clear()
self.matrix_signal.emit(msg)
self.done.wait()
data = self.matrix_dialog.data
if data == 'x':
self.close_matrix_dialog()
return data
def _close_matrix_dialog(self):
if self.matrix_dialog:
self.matrix_dialog.accept()
self.matrix_dialog = None
def close_matrix_dialog(self):
self.close_matrix_dialog_signal.emit()
def pin_dialog(self, msg, show_strength):
# Needed e.g. when resetting a device
self.clear_dialog()
dialog = WindowModalDialog(self.top_level_window(), _("Enter PIN"))
matrix = self.pin_matrix_widget_class(show_strength)
vbox = QVBoxLayout()
vbox.addWidget(QLabel(msg))
vbox.addWidget(matrix)
vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
self.response = str(matrix.get_value())
self.done.set()
def matrix_recovery_dialog(self, msg):
if not self.matrix_dialog:
self.matrix_dialog = MatrixDialog(self.top_level_window())
self.matrix_dialog.get_matrix(msg)
self.done.set()
def passphrase_dialog(self, msg, confirm):
# If confirm is true, require the user to enter the passphrase twice
parent = self.top_level_window()
d = WindowModalDialog(parent, _('Enter Passphrase'))
OK_button = OkButton(d, _('Enter Passphrase'))
OnDevice_button = QPushButton(_('Enter Passphrase on Device'))
new_pw = PasswordLineEdit()
conf_pw = PasswordLineEdit()
vbox = QVBoxLayout()
label = QLabel(msg + "\n")
label.setWordWrap(True)
grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnMinimumWidth(0, 150)
grid.setColumnMinimumWidth(1, 100)
        grid.setColumnStretch(1, 1)
vbox.addWidget(label)
grid.addWidget(QLabel(_('Passphrase:')), 0, 0)
grid.addWidget(new_pw, 0, 1)
if confirm:
grid.addWidget(QLabel(_('Confirm Passphrase:')), 1, 0)
grid.addWidget(conf_pw, 1, 1)
vbox.addLayout(grid)
def enable_OK():
if not confirm:
ok = True
else:
ok = new_pw.text() == conf_pw.text()
OK_button.setEnabled(ok)
new_pw.textChanged.connect(enable_OK)
conf_pw.textChanged.connect(enable_OK)
vbox.addWidget(OK_button)
if self.passphrase_on_device:
vbox.addWidget(OnDevice_button)
d.setLayout(vbox)
self.passphrase = None
def ok_clicked():
self.passphrase = new_pw.text()
def on_device_clicked():
self.passphrase = PASSPHRASE_ON_DEVICE
OK_button.clicked.connect(ok_clicked)
OnDevice_button.clicked.connect(on_device_clicked)
OnDevice_button.clicked.connect(d.accept)
d.exec_()
self.done.set()
class QtPlugin(QtPluginBase):
# Derived classes must provide the following class-static variables:
# icon_file
# pin_matrix_widget_class
@only_hook_if_libraries_available
@hook
def receive_menu(self, menu, addrs, wallet):
if len(addrs) != 1:
return
for keystore in wallet.get_keystores():
if type(keystore) == self.keystore_class:
def show_address(keystore=keystore):
keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore))
device_name = "{} ({})".format(self.device, keystore.label)
menu.addAction(_("Show on {}").format(device_name), show_address)
def show_settings_dialog(self, window, keystore):
def connect():
device_id = self.choose_device(window, keystore)
return device_id
def show_dialog(device_id):
if device_id:
SettingsDialog(window, self, keystore, device_id).exec_()
keystore.thread.add(connect, on_success=show_dialog)
def request_trezor_init_settings(self, wizard, method, device_id):
vbox = QVBoxLayout()
next_enabled = True
devmgr = self.device_manager()
client = devmgr.client_by_id(device_id)
if not client:
raise Exception(_("The device was disconnected."))
model = client.get_trezor_model()
fw_version = client.client.version
capabilities = client.client.features.capabilities
have_shamir = Capability.Shamir in capabilities
# label
label = QLabel(_("Enter a label to name your device:"))
name = QLineEdit()
hl = QHBoxLayout()
hl.addWidget(label)
hl.addWidget(name)
hl.addStretch(1)
vbox.addLayout(hl)
# Backup type
gb_backuptype = QGroupBox()
hbox_backuptype = QHBoxLayout()
gb_backuptype.setLayout(hbox_backuptype)
vbox.addWidget(gb_backuptype)
gb_backuptype.setTitle(_('Select backup type:'))
bg_backuptype = QButtonGroup()
rb_single = QRadioButton(gb_backuptype)
rb_single.setText(_('Single seed (BIP39)'))
bg_backuptype.addButton(rb_single)
bg_backuptype.setId(rb_single, BackupType.Bip39)
hbox_backuptype.addWidget(rb_single)
rb_single.setChecked(True)
rb_shamir = QRadioButton(gb_backuptype)
rb_shamir.setText(_('Shamir'))
bg_backuptype.addButton(rb_shamir)
bg_backuptype.setId(rb_shamir, BackupType.Slip39_Basic)
hbox_backuptype.addWidget(rb_shamir)
rb_shamir.setEnabled(Capability.Shamir in capabilities)
rb_shamir.setVisible(False) # visible with "expert settings"
rb_shamir_groups = QRadioButton(gb_backuptype)
rb_shamir_groups.setText(_('Super Shamir'))
bg_backuptype.addButton(rb_shamir_groups)
bg_backuptype.setId(rb_shamir_groups, BackupType.Slip39_Advanced)
hbox_backuptype.addWidget(rb_shamir_groups)
rb_shamir_groups.setEnabled(Capability.ShamirGroups in capabilities)
rb_shamir_groups.setVisible(False) # visible with "expert settings"
# word count
word_count_buttons = {}
gb_numwords = QGroupBox()
hbox1 = QHBoxLayout()
gb_numwords.setLayout(hbox1)
vbox.addWidget(gb_numwords)
gb_numwords.setTitle(_("Select seed/share length:"))
bg_numwords = QButtonGroup()
for count in (12, 18, 20, 24, 33):
rb = QRadioButton(gb_numwords)
word_count_buttons[count] = rb
rb.setText(_("{:d} words").format(count))
bg_numwords.addButton(rb)
bg_numwords.setId(rb, count)
hbox1.addWidget(rb)
rb.setChecked(True)
def configure_word_counts():
if model == "1":
checked_wordcount = 24
else:
checked_wordcount = 12
if method == TIM_RECOVER:
if have_shamir:
valid_word_counts = (12, 18, 20, 24, 33)
else:
valid_word_counts = (12, 18, 24)
elif rb_single.isChecked():
valid_word_counts = (12, 18, 24)
gb_numwords.setTitle(_('Select seed length:'))
else:
valid_word_counts = (20, 33)
checked_wordcount = 20
gb_numwords.setTitle(_('Select share length:'))
word_count_buttons[checked_wordcount].setChecked(True)
for c, btn in word_count_buttons.items():
btn.setVisible(c in valid_word_counts)
bg_backuptype.buttonClicked.connect(configure_word_counts)
configure_word_counts()
# set up conditional visibility:
# 1. backup_type is only visible when creating new seed
gb_backuptype.setVisible(method == TIM_NEW)
# 2. word_count is not visible when recovering on TT
if method == TIM_RECOVER and model != "1":
gb_numwords.setVisible(False)
# PIN
cb_pin = QCheckBox(_('Enable PIN protection'))
cb_pin.setChecked(True)
vbox.addWidget(WWLabel(RECOMMEND_PIN))
vbox.addWidget(cb_pin)
# "expert settings" button
expert_vbox = QVBoxLayout()
expert_widget = QWidget()
expert_widget.setLayout(expert_vbox)
expert_widget.setVisible(False)
expert_button = QPushButton(_("Show expert settings"))
def show_expert_settings():
expert_button.setVisible(False)
expert_widget.setVisible(True)
rb_shamir.setVisible(True)
rb_shamir_groups.setVisible(True)
expert_button.clicked.connect(show_expert_settings)
vbox.addWidget(expert_button)
# passphrase
passphrase_msg = WWLabel(PASSPHRASE_HELP_SHORT)
passphrase_warning = WWLabel(PASSPHRASE_NOT_PIN)
passphrase_warning.setStyleSheet("color: red")
cb_phrase = QCheckBox(_('Enable passphrases'))
cb_phrase.setChecked(False)
expert_vbox.addWidget(passphrase_msg)
expert_vbox.addWidget(passphrase_warning)
expert_vbox.addWidget(cb_phrase)
# ask for recovery type (random word order OR matrix)
bg_rectype = None
if method == TIM_RECOVER and model == '1':
gb_rectype = QGroupBox()
hbox_rectype = QHBoxLayout()
gb_rectype.setLayout(hbox_rectype)
expert_vbox.addWidget(gb_rectype)
gb_rectype.setTitle(_("Select recovery type:"))
bg_rectype = QButtonGroup()
rb1 = QRadioButton(gb_rectype)
rb1.setText(_('Scrambled words'))
bg_rectype.addButton(rb1)
bg_rectype.setId(rb1, RecoveryDeviceType.ScrambledWords)
hbox_rectype.addWidget(rb1)
rb1.setChecked(True)
rb2 = QRadioButton(gb_rectype)
rb2.setText(_('Matrix'))
bg_rectype.addButton(rb2)
bg_rectype.setId(rb2, RecoveryDeviceType.Matrix)
hbox_rectype.addWidget(rb2)
# no backup
cb_no_backup = None
if method == TIM_NEW:
cb_no_backup = QCheckBox(f'''{_('Enable seedless mode')}''')
cb_no_backup.setChecked(False)
if (model == '1' and fw_version >= (1, 7, 1)
or model == 'T' and fw_version >= (2, 0, 9)):
cb_no_backup.setToolTip(SEEDLESS_MODE_WARNING)
else:
cb_no_backup.setEnabled(False)
cb_no_backup.setToolTip(_('Firmware version too old.'))
expert_vbox.addWidget(cb_no_backup)
vbox.addWidget(expert_widget)
wizard.exec_layout(vbox, next_enabled=next_enabled)
return TrezorInitSettings(
word_count=bg_numwords.checkedId(),
label=name.text(),
pin_enabled=cb_pin.isChecked(),
passphrase_enabled=cb_phrase.isChecked(),
recovery_type=bg_rectype.checkedId() if bg_rectype else None,
backup_type=bg_backuptype.checkedId(),
no_backup=cb_no_backup.isChecked() if cb_no_backup else False,
)
class Plugin(TrezorPlugin, QtPlugin):
icon_unpaired = "trezor_unpaired.png"
icon_paired = "trezor.png"
def create_handler(self, window):
return QtHandler(window, self.pin_matrix_widget_class(), self.device)
    @classmethod
    def pin_matrix_widget_class(cls):
from trezorlib.qt.pinmatrix import PinMatrixWidget
return PinMatrixWidget
class SettingsDialog(WindowModalDialog):
'''This dialog doesn't require a device be paired with a wallet.
We want users to be able to wipe a device even if they've forgotten
their PIN.'''
def __init__(self, window, plugin, keystore, device_id):
title = _("{} Settings").format(plugin.device)
super(SettingsDialog, self).__init__(window, title)
self.setMaximumWidth(540)
devmgr = plugin.device_manager()
config = devmgr.config
handler = keystore.handler
thread = keystore.thread
hs_cols, hs_rows = (128, 64)
def invoke_client(method, *args, **kw_args):
unpair_after = kw_args.pop('unpair_after', False)
def task():
client = devmgr.client_by_id(device_id)
if not client:
raise RuntimeError("Device not connected")
if method:
getattr(client, method)(*args, **kw_args)
if unpair_after:
devmgr.unpair_id(device_id)
return client.features
thread.add(task, on_success=update)
def update(features):
self.features = features
set_label_enabled()
if features.bootloader_hash:
bl_hash = bh2u(features.bootloader_hash)
bl_hash = "\n".join([bl_hash[:32], bl_hash[32:]])
else:
bl_hash = "N/A"
noyes = [_("No"), _("Yes")]
endis = [_("Enable Passphrases"), _("Disable Passphrases")]
disen = [_("Disabled"), _("Enabled")]
setchange = [_("Set a PIN"), _("Change PIN")]
version = "%d.%d.%d" % (features.major_version,
features.minor_version,
features.patch_version)
device_label.setText(features.label)
pin_set_label.setText(noyes[features.pin_protection])
passphrases_label.setText(disen[features.passphrase_protection])
bl_hash_label.setText(bl_hash)
label_edit.setText(features.label)
device_id_label.setText(features.device_id)
initialized_label.setText(noyes[features.initialized])
version_label.setText(version)
clear_pin_button.setVisible(features.pin_protection)
clear_pin_warning.setVisible(features.pin_protection)
pin_button.setText(setchange[features.pin_protection])
pin_msg.setVisible(not features.pin_protection)
passphrase_button.setText(endis[features.passphrase_protection])
language_label.setText(features.language)
def set_label_enabled():
label_apply.setEnabled(label_edit.text() != self.features.label)
def rename():
invoke_client('change_label', label_edit.text())
def toggle_passphrase():
title = _("Confirm Toggle Passphrase Protection")
currently_enabled = self.features.passphrase_protection
if currently_enabled:
msg = _("After disabling passphrases, you can only pair this "
"Electrum wallet if it had an empty passphrase. "
"If its passphrase was not empty, you will need to "
"create a new wallet with the install wizard. You "
"can use this wallet again at any time by re-enabling "
"passphrases and entering its passphrase.")
else:
msg = _("Your current Electrum wallet can only be used with "
"an empty passphrase. You must create a separate "
"wallet with the install wizard for other passphrases "
"as each one generates a new set of addresses.")
msg += "\n\n" + _("Are you sure you want to proceed?")
if not self.question(msg, title=title):
return
invoke_client('toggle_passphrase', unpair_after=currently_enabled)
def change_homescreen():
dialog = QFileDialog(self, _("Choose Homescreen"))
filename, __ = dialog.getOpenFileName()
if not filename:
return # user cancelled
if filename.endswith('.toif'):
img = open(filename, 'rb').read()
if img[:8] != b'TOIf\x90\x00\x90\x00':
handler.show_error('File is not a TOIF file with size of 144x144')
return
else:
from PIL import Image # FIXME
im = Image.open(filename)
if im.size != (128, 64):
handler.show_error('Image must be 128 x 64 pixels')
return
im = im.convert('1')
pix = im.load()
img = bytearray(1024)
for j in range(64):
for i in range(128):
if pix[i, j]:
o = (i + j * 128)
img[o // 8] |= (1 << (7 - o % 8))
img = bytes(img)
invoke_client('change_homescreen', img)
def clear_homescreen():
invoke_client('change_homescreen', b'\x00')
def set_pin():
invoke_client('set_pin', remove=False)
def clear_pin():
invoke_client('set_pin', remove=True)
def wipe_device():
wallet = window.wallet
if wallet and sum(wallet.get_balance()):
title = _("Confirm Device Wipe")
msg = _("Are you SURE you want to wipe the device?\n"
"Your wallet still has bitcoins in it!")
if not self.question(msg, title=title,
icon=QMessageBox.Critical):
return
invoke_client('wipe_device', unpair_after=True)
def slider_moved():
mins = timeout_slider.sliderPosition()
timeout_minutes.setText(_("{:2d} minutes").format(mins))
def slider_released():
config.set_session_timeout(timeout_slider.sliderPosition() * 60)
# Information tab
info_tab = QWidget()
info_layout = QVBoxLayout(info_tab)
info_glayout = QGridLayout()
info_glayout.setColumnStretch(2, 1)
device_label = QLabel()
pin_set_label = QLabel()
passphrases_label = QLabel()
version_label = QLabel()
device_id_label = QLabel()
bl_hash_label = QLabel()
bl_hash_label.setWordWrap(True)
language_label = QLabel()
initialized_label = QLabel()
rows = [
(_("Device Label"), device_label),
(_("PIN set"), pin_set_label),
(_("Passphrases"), passphrases_label),
(_("Firmware Version"), version_label),
(_("Device ID"), device_id_label),
(_("Bootloader Hash"), bl_hash_label),
(_("Language"), language_label),
(_("Initialized"), initialized_label),
]
for row_num, (label, widget) in enumerate(rows):
info_glayout.addWidget(QLabel(label), row_num, 0)
info_glayout.addWidget(widget, row_num, 1)
info_layout.addLayout(info_glayout)
# Settings tab
settings_tab = QWidget()
settings_layout = QVBoxLayout(settings_tab)
settings_glayout = QGridLayout()
# Settings tab - Label
label_msg = QLabel(_("Name this {}. If you have multiple devices "
"their labels help distinguish them.")
.format(plugin.device))
label_msg.setWordWrap(True)
label_label = QLabel(_("Device Label"))
label_edit = QLineEdit()
label_edit.setMinimumWidth(150)
label_edit.setMaxLength(plugin.MAX_LABEL_LEN)
label_apply = QPushButton(_("Apply"))
label_apply.clicked.connect(rename)
label_edit.textChanged.connect(set_label_enabled)
settings_glayout.addWidget(label_label, 0, 0)
settings_glayout.addWidget(label_edit, 0, 1, 1, 2)
settings_glayout.addWidget(label_apply, 0, 3)
settings_glayout.addWidget(label_msg, 1, 1, 1, -1)
# Settings tab - PIN
pin_label = QLabel(_("PIN Protection"))
pin_button = QPushButton()
pin_button.clicked.connect(set_pin)
settings_glayout.addWidget(pin_label, 2, 0)
settings_glayout.addWidget(pin_button, 2, 1)
pin_msg = QLabel(_("PIN protection is strongly recommended. "
"A PIN is your only protection against someone "
"stealing your bitcoins if they obtain physical "
"access to your {}.").format(plugin.device))
pin_msg.setWordWrap(True)
pin_msg.setStyleSheet("color: red")
settings_glayout.addWidget(pin_msg, 3, 1, 1, -1)
# Settings tab - Homescreen
homescreen_label = QLabel(_("Homescreen"))
homescreen_change_button = QPushButton(_("Change..."))
homescreen_clear_button = QPushButton(_("Reset"))
homescreen_change_button.clicked.connect(change_homescreen)
try:
import PIL
except ImportError:
homescreen_change_button.setDisabled(True)
homescreen_change_button.setToolTip(
_("Required package 'PIL' is not available - Please install it or use the Trezor website instead.")
)
homescreen_clear_button.clicked.connect(clear_homescreen)
homescreen_msg = QLabel(_("You can set the homescreen on your "
"device to personalize it. You must "
"choose a {} x {} monochrome black and "
"white image.").format(hs_cols, hs_rows))
homescreen_msg.setWordWrap(True)
settings_glayout.addWidget(homescreen_label, 4, 0)
settings_glayout.addWidget(homescreen_change_button, 4, 1)
settings_glayout.addWidget(homescreen_clear_button, 4, 2)
settings_glayout.addWidget(homescreen_msg, 5, 1, 1, -1)
# Settings tab - Session Timeout
timeout_label = QLabel(_("Session Timeout"))
timeout_minutes = QLabel()
timeout_slider = QSlider(Qt.Horizontal)
timeout_slider.setRange(1, 60)
timeout_slider.setSingleStep(1)
timeout_slider.setTickInterval(5)
timeout_slider.setTickPosition(QSlider.TicksBelow)
timeout_slider.setTracking(True)
timeout_msg = QLabel(
_("Clear the session after the specified period "
"of inactivity. Once a session has timed out, "
"your PIN and passphrase (if enabled) must be "
"re-entered to use the device."))
timeout_msg.setWordWrap(True)
timeout_slider.setSliderPosition(config.get_session_timeout() // 60)
slider_moved()
timeout_slider.valueChanged.connect(slider_moved)
timeout_slider.sliderReleased.connect(slider_released)
settings_glayout.addWidget(timeout_label, 6, 0)
settings_glayout.addWidget(timeout_slider, 6, 1, 1, 3)
settings_glayout.addWidget(timeout_minutes, 6, 4)
settings_glayout.addWidget(timeout_msg, 7, 1, 1, -1)
settings_layout.addLayout(settings_glayout)
settings_layout.addStretch(1)
# Advanced tab
advanced_tab = QWidget()
advanced_layout = QVBoxLayout(advanced_tab)
advanced_glayout = QGridLayout()
# Advanced tab - clear PIN
clear_pin_button = QPushButton(_("Disable PIN"))
clear_pin_button.clicked.connect(clear_pin)
clear_pin_warning = QLabel(
_("If you disable your PIN, anyone with physical access to your "
"{} device can spend your bitcoins.").format(plugin.device))
clear_pin_warning.setWordWrap(True)
clear_pin_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(clear_pin_button, 0, 2)
advanced_glayout.addWidget(clear_pin_warning, 1, 0, 1, 5)
# Advanced tab - toggle passphrase protection
passphrase_button = QPushButton()
passphrase_button.clicked.connect(toggle_passphrase)
passphrase_msg = WWLabel(PASSPHRASE_HELP)
passphrase_warning = WWLabel(PASSPHRASE_NOT_PIN)
passphrase_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(passphrase_button, 3, 2)
advanced_glayout.addWidget(passphrase_msg, 4, 0, 1, 5)
advanced_glayout.addWidget(passphrase_warning, 5, 0, 1, 5)
# Advanced tab - wipe device
wipe_device_button = QPushButton(_("Wipe Device"))
wipe_device_button.clicked.connect(wipe_device)
wipe_device_msg = QLabel(
_("Wipe the device, removing all data from it. The firmware "
"is left unchanged."))
wipe_device_msg.setWordWrap(True)
wipe_device_warning = QLabel(
_("Only wipe a device if you have the recovery seed written down "
"and the device wallet(s) are empty, otherwise the bitcoins "
"will be lost forever."))
wipe_device_warning.setWordWrap(True)
wipe_device_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(wipe_device_button, 6, 2)
advanced_glayout.addWidget(wipe_device_msg, 7, 0, 1, 5)
advanced_glayout.addWidget(wipe_device_warning, 8, 0, 1, 5)
advanced_layout.addLayout(advanced_glayout)
advanced_layout.addStretch(1)
tabs = QTabWidget(self)
tabs.addTab(info_tab, _("Information"))
tabs.addTab(settings_tab, _("Settings"))
tabs.addTab(advanced_tab, _("Advanced"))
dialog_vbox = QVBoxLayout(self)
dialog_vbox.addWidget(tabs)
dialog_vbox.addLayout(Buttons(CloseButton(self)))
# Update information
invoke_client(None)
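# --- Added sketch (not in the original): the MSB-first 1-bit packing that
# change_homescreen() above applies to a PIL image, restated over a plain 2-D
# list so the bit layout can be tested in isolation. pack_monochrome is a
# hypothetical helper name.
def pack_monochrome(pixels, width=128, height=64):
    """Pack a height x width grid of truthy pixels into width*height/8 bytes."""
    img = bytearray(width * height // 8)
    for j in range(height):
        for i in range(width):
            if pixels[j][i]:
                o = i + j * width
                img[o // 8] |= 1 << (7 - o % 8)
    return bytes(img)

# A single lit pixel at (0, 0) sets the most significant bit of byte 0.
assert pack_monochrome([[1] + [0] * 127] + [[0] * 128] * 63)[0] == 0x80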
| 40.002604
| 115
| 0.617896
|
c60a52a1cdd765368070d806be1644fa520e36d9
| 46
|
py
|
Python
|
tests/factorymind/test_exceptions.py
|
factorymind/factorymind
|
0c9fccac853ea0e0cee27bce18f641ca9436b788
|
[
"MIT"
] | 1
|
2021-11-07T09:39:43.000Z
|
2021-11-07T09:39:43.000Z
|
tests/factorymind/test_exceptions.py
|
factorymind/factorymind
|
0c9fccac853ea0e0cee27bce18f641ca9436b788
|
[
"MIT"
] | null | null | null |
tests/factorymind/test_exceptions.py
|
factorymind/factorymind
|
0c9fccac853ea0e0cee27bce18f641ca9436b788
|
[
"MIT"
] | null | null | null |
"""
Test suite for factorymind.exceptions
"""
| 11.5
| 37
| 0.717391
|
5b528e2199a7c19b8406a551366a8f92afe154b6
| 2,306
|
py
|
Python
|
ingesters/resolve_redirects.py
|
beefoo/sort-media
|
d72beeb81d206347b7f09ca5125c2ee2775bbdfa
|
[
"MIT"
] | 13
|
2019-12-09T07:56:13.000Z
|
2021-08-03T01:45:53.000Z
|
ingesters/resolve_redirects.py
|
beefoo/sort-media
|
d72beeb81d206347b7f09ca5125c2ee2775bbdfa
|
[
"MIT"
] | 1
|
2020-04-29T00:00:14.000Z
|
2021-07-09T14:24:19.000Z
|
ingesters/resolve_redirects.py
|
beefoo/sort-media
|
d72beeb81d206347b7f09ca5125c2ee2775bbdfa
|
[
"MIT"
] | 3
|
2020-04-27T15:36:36.000Z
|
2021-03-29T17:52:35.000Z
|
# -*- coding: utf-8 -*-
import argparse
import inspect
import math
import os
from pprint import pprint
import sys
import time
# add parent directory to sys path to import relative modules
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
from lib.io_utils import *
from lib.processing_utils import *
# input
parser = argparse.ArgumentParser()
parser.add_argument('-in', dest="INPUT_FILE", default="tmp/metadata.csv", help="Path to csv file")
parser.add_argument('-url', dest="URL_KEY", default="record_link", help="Key to retrieve url from")
parser.add_argument('-new', dest="NEW_URL_KEY", default="resolved_url", help="New key to store resolved URL")
parser.add_argument('-out', dest="OUTPUT_FILE", default="", help="Path to csv file; leave blank to update INPUT_FILE")
parser.add_argument('-delay', dest="DELAY", type=float, default=0.25, help="How many seconds to delay requests (to avoid rate limiting)?")
parser.add_argument('-progressive', dest="PROGRESSIVE_DOWNLOAD", action="store_true", help="Save results as you get them?")
parser.add_argument('-overwrite', dest="OVERWRITE", action="store_true", help="Overwrite existing values?")
a = parser.parse_args()
OUTPUT_FILE = a.OUTPUT_FILE if len(a.OUTPUT_FILE) > 0 else a.INPUT_FILE
fieldNames, rows = readCsv(a.INPUT_FILE)
rowCount = len(rows)
if a.NEW_URL_KEY not in fieldNames:
fieldNames.append(a.NEW_URL_KEY)
# Make sure output dirs exist
makeDirectories(OUTPUT_FILE)
for i, row in enumerate(rows):
url = row[a.URL_KEY]
existingValue = row[a.NEW_URL_KEY] if a.NEW_URL_KEY in row else ""
newURL = False
if existingValue != "" and not a.OVERWRITE:
continue
if isinstance(url, str) and url != "":
newURL = resolveRedirect(url)
print(f' {url} -> {newURL}')
    if not newURL:
        print(f' Invalid URL: {url}')
        newURL = ""
rows[i][a.NEW_URL_KEY] = newURL
if a.PROGRESSIVE_DOWNLOAD:
writeCsv(OUTPUT_FILE, rows, headings=fieldNames, verbose=False)
printProgress(i+1, rowCount)
if a.DELAY > 0:
time.sleep(a.DELAY)
if not a.PROGRESSIVE_DOWNLOAD:
print("Writing file...")
writeCsv(OUTPUT_FILE, rows, headings=fieldNames)
print("Done.")
| 35.476923
| 138
| 0.717693
|
5ae5b05aa7dfd113cee6eb500921d2888236b1c2
| 317
|
py
|
Python
|
models/models/__init__.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
models/models/__init__.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
models/models/__init__.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 3/15/2019 1:37 PM
# @Author : chinshin
# @FileName: __init__.py.py
import sys
from os.path import abspath, dirname
ROOT_PATH = dirname(dirname(dirname(abspath(__file__))))
sys.path.insert(0, ROOT_PATH)
from ggnn import GGNN
from nfp import NFP
| 24.384615
| 57
| 0.681388
|
f87796709e1c6853ca0fca7455935b7e726b6a5b
| 4,854
|
py
|
Python
|
src/gdds/app/api/v1/resources/oem_bulk_imeis_download.py
|
munawwaranwar/Genuine-Device-Detection-Subsystem
|
5fa26a869d2155c4b592dfd8de6cf7d3aeaa445f
|
[
"Unlicense"
] | null | null | null |
src/gdds/app/api/v1/resources/oem_bulk_imeis_download.py
|
munawwaranwar/Genuine-Device-Detection-Subsystem
|
5fa26a869d2155c4b592dfd8de6cf7d3aeaa445f
|
[
"Unlicense"
] | null | null | null |
src/gdds/app/api/v1/resources/oem_bulk_imeis_download.py
|
munawwaranwar/Genuine-Device-Detection-Subsystem
|
5fa26a869d2155c4b592dfd8de6cf7d3aeaa445f
|
[
"Unlicense"
] | null | null | null |
"""
Copyright (c) 2018-2021 Qualcomm Technologies, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the
limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Qualcomm Technologies, Inc. nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
* The origin of this software must not be misrepresented; you must not claim that you wrote the original software.
If you use this software in a product, an acknowledgment is required by displaying the trademark/log as per the details
provided here: https://www.qualcomm.com/documents/dirbs-logo-and-brand-guidelines
* Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
* This notice may not be removed or altered from any source distribution.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY
THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os
import tempfile
from flask_babel import _
from time import strftime
from shutil import rmtree
from flask_restful import Resource
from flask_apispec import use_kwargs
from flask import make_response, send_file
from .....app import db, app
from ..models.oem_logins import OemLogins
from ..models.oem_response import OemResponse
from ..schema.input_schema import BulkImeisSchema
from gdds.app.api.common.response import STATUS_CODES, MIME_TYPES
from gdds.app.api.common.error_handlers import custom_json_response
# noinspection PyUnboundLocalVariable,PyComparisonWithNone
class BulkImeiDownload(Resource):
"""Flask resource to download file of bulk IMEIs for OEM."""
@staticmethod
@use_kwargs(BulkImeisSchema().fields_dict, locations=['querystring'])
def get(**kwargs):
"""method to provide bulk-imeis for OEMs to download as a csv file."""
try:
tmp_dir = ''
chk_login_detail = OemLogins.query.filter(OemLogins.oem_name == kwargs['login_name'],
OemLogins.oem_id == kwargs['login_id'],
OemLogins.oem_status != 'deleted').first()
if chk_login_detail:
imeis = [o.oem_imei for o in OemResponse.query.filter(OemResponse.oem_serial_no == None,
OemResponse.oem_id == kwargs['login_id']).all()]
try:
filename = "IMEI-List_" + kwargs['login_name'] + '_' + strftime("%Y-%m-%d_%H-%M-%S") + '.csv'
tmp_dir = tempfile.mkdtemp()
filepath = os.path.join(tmp_dir, filename)
                    with open(filepath, 'w') as file:
                        file.write('IMEI,Serial_no,Color,Brand,Model,RAT,MAC,Other_IMEIs\n')
                        file.write(',\n'.join(imeis))  # the with-block closes the file; no explicit close() needed
response = make_response(send_file(filepath, as_attachment=True))
response.headers['Cache-Control'] = 'no-store'
return response
finally:
rmtree(tmp_dir)
else:
return custom_json_response(_("Login credentials are not matched"),
status=STATUS_CODES.get('NOT_FOUND'),
mimetype=MIME_TYPES.get('JSON'))
except Exception as e:
app.logger.info(_("Error occurred while downloading a file."))
app.logger.exception(e)
db.session.rollback()
finally:
db.session.close()
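# --- Added note (not in the original): shape of the CSV that get() writes.
# Only the IMEI column is populated; the ',\n' join leaves a trailing comma on
# every row except the last. The IMEI values below are placeholders.
#
#   IMEI,Serial_no,Color,Brand,Model,RAT,MAC,Other_IMEIs
#   356938035643809,
#   490154203237518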
| 51.094737
| 120
| 0.678822
|
5d5d353c07956e045e57a54172623ab0a3b1735b
| 7,425
|
py
|
Python
|
tests/test_items/test_bulk.py
|
lmverity/exchangelib
|
15002565dfea30705c74b256b3a601f95e2afb00
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_items/test_bulk.py
|
lmverity/exchangelib
|
15002565dfea30705c74b256b3a601f95e2afb00
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_items/test_bulk.py
|
lmverity/exchangelib
|
15002565dfea30705c74b256b3a601f95e2afb00
|
[
"BSD-2-Clause"
] | null | null | null |
from exchangelib.errors import ErrorItemNotFound, ErrorInvalidChangeKey, ErrorInvalidIdMalformed
from exchangelib.ewsdatetime import EWSDate
from exchangelib.fields import FieldPath
from exchangelib.folders import Inbox, Folder, Calendar
from exchangelib.items import Item, Message, SAVE_ONLY, SEND_ONLY, SEND_AND_SAVE_COPY, CalendarItem
from .test_basics import BaseItemTest
class BulkMethodTest(BaseItemTest):
TEST_FOLDER = 'inbox'
FOLDER_CLASS = Inbox
ITEM_CLASS = Message
def test_fetch(self):
item = self.get_test_item()
self.test_folder.bulk_create(items=[item, item])
ids = self.test_folder.filter(categories__contains=item.categories)
items = list(self.account.fetch(ids=ids))
for item in items:
self.assertIsInstance(item, self.ITEM_CLASS)
self.assertEqual(len(items), 2)
items = list(self.account.fetch(ids=ids, only_fields=['subject']))
self.assertEqual(len(items), 2)
items = list(self.account.fetch(ids=ids, only_fields=[FieldPath.from_string('subject', self.test_folder)]))
self.assertEqual(len(items), 2)
items = list(self.account.fetch(ids=ids, only_fields=['id', 'changekey']))
self.assertEqual(len(items), 2)
def test_no_account(self):
# Test bulk operations on items with no self.account
item = self.get_test_item()
item.account = None
res = self.test_folder.bulk_create(items=[item])[0]
item.id, item.changekey = res.id, res.changekey
item.account = None
self.assertEqual(list(self.account.fetch(ids=[item]))[0].id, item.id)
item.account = None
res = self.account.bulk_update(items=[(item, ('subject',))])[0]
item.id, item.changekey = res
item.account = None
res = self.account.bulk_copy(ids=[item], to_folder=self.account.trash)[0]
item.id, item.changekey = res
item.account = None
res = self.account.bulk_move(ids=[item], to_folder=self.test_folder)[0]
item.id, item.changekey = res
item.account = None
self.assertEqual(self.account.bulk_delete(ids=[item]), [True])
item = self.get_test_item().save()
item.account = None
self.assertEqual(self.account.bulk_send(ids=[item]), [True])
def test_empty_args(self):
# We allow empty sequences for these methods
self.assertEqual(self.test_folder.bulk_create(items=[]), [])
self.assertEqual(list(self.account.fetch(ids=[])), [])
self.assertEqual(self.account.bulk_create(folder=self.test_folder, items=[]), [])
self.assertEqual(self.account.bulk_update(items=[]), [])
self.assertEqual(self.account.bulk_delete(ids=[]), [])
self.assertEqual(self.account.bulk_send(ids=[]), [])
self.assertEqual(self.account.bulk_copy(ids=[], to_folder=self.account.trash), [])
self.assertEqual(self.account.bulk_move(ids=[], to_folder=self.account.trash), [])
self.assertEqual(self.account.upload(data=[]), [])
self.assertEqual(self.account.export(items=[]), [])
def test_qs_args(self):
# We allow querysets for these methods
qs = self.test_folder.none()
self.assertEqual(list(self.account.fetch(ids=qs)), [])
with self.assertRaises(ValueError):
# bulk_create() does not allow queryset input
self.account.bulk_create(folder=self.test_folder, items=qs)
with self.assertRaises(ValueError):
# bulk_update() does not allow queryset input
self.account.bulk_update(items=qs)
self.assertEqual(self.account.bulk_delete(ids=qs), [])
self.assertEqual(self.account.bulk_send(ids=qs), [])
self.assertEqual(self.account.bulk_copy(ids=qs, to_folder=self.account.trash), [])
self.assertEqual(self.account.bulk_move(ids=qs, to_folder=self.account.trash), [])
self.assertEqual(self.account.upload(data=qs), [])
self.assertEqual(self.account.export(items=qs), [])
def test_no_kwargs(self):
self.assertEqual(self.test_folder.bulk_create([]), [])
self.assertEqual(list(self.account.fetch([])), [])
self.assertEqual(self.account.bulk_create(self.test_folder, []), [])
self.assertEqual(self.account.bulk_update([]), [])
self.assertEqual(self.account.bulk_delete([]), [])
self.assertEqual(self.account.bulk_send([]), [])
self.assertEqual(self.account.bulk_copy([], to_folder=self.account.trash), [])
self.assertEqual(self.account.bulk_move([], to_folder=self.account.trash), [])
self.assertEqual(self.account.upload([]), [])
self.assertEqual(self.account.export([]), [])
def test_invalid_bulk_args(self):
# Test bulk_create
with self.assertRaises(ValueError):
# Folder must belong to account
self.account.bulk_create(folder=Folder(root=None), items=[1])
with self.assertRaises(AttributeError):
# Must have folder on save
self.account.bulk_create(folder=None, items=[1], message_disposition=SAVE_ONLY)
# Test that we can send_and_save with a default folder
self.account.bulk_create(folder=None, items=[], message_disposition=SEND_AND_SAVE_COPY)
with self.assertRaises(AttributeError):
# Must not have folder on send-only
self.account.bulk_create(folder=self.test_folder, items=[1], message_disposition=SEND_ONLY)
# Test bulk_update
with self.assertRaises(ValueError):
# Cannot update in send-only mode
self.account.bulk_update(items=[1], message_disposition=SEND_ONLY)
def test_bulk_failure(self):
# Test that bulk_* can handle EWS errors and return the errors in order without losing non-failure results
items1 = [self.get_test_item().save() for _ in range(3)]
items1[1].changekey = 'XXX'
for i, res in enumerate(self.account.bulk_delete(items1)):
if i == 1:
self.assertIsInstance(res, ErrorInvalidChangeKey)
else:
self.assertEqual(res, True)
items2 = [self.get_test_item().save() for _ in range(3)]
items2[1].id = 'AAAA=='
for i, res in enumerate(self.account.bulk_delete(items2)):
if i == 1:
self.assertIsInstance(res, ErrorInvalidIdMalformed)
else:
self.assertEqual(res, True)
items3 = [self.get_test_item().save() for _ in range(3)]
items3[1].id = items1[0].id
for i, res in enumerate(self.account.fetch(items3)):
if i == 1:
self.assertIsInstance(res, ErrorItemNotFound)
else:
self.assertIsInstance(res, Item)
class CalendarBulkMethodTest(BaseItemTest):
TEST_FOLDER = 'calendar'
FOLDER_CLASS = Calendar
ITEM_CLASS = CalendarItem
def test_no_account(self):
# Test corner cases with bulk operations on items with no self.account
item = self.get_test_item()
item.recurrence = None
item.is_all_day = True
item.start, item.end = EWSDate(2020, 1, 1), EWSDate(2020, 1, 2)
item.account = None
res = self.test_folder.bulk_create(items=[item])[0]
item.id, item.changekey = res.id, res.changekey
item.account = None
self.account.bulk_update(items=[(item, ('start',))])
| 46.993671
| 115
| 0.652256
|
16a0d2859942991906299a5ab5f7fdfb99883ffc
| 1,640
|
py
|
Python
|
mmdet2trt/converters/DeformPool.py
|
DableUTeeF/mmdetection-to-tensorrt
|
370afa902534b17e94aa692cc30f87f128688b98
|
[
"Apache-2.0"
] | null | null | null |
mmdet2trt/converters/DeformPool.py
|
DableUTeeF/mmdetection-to-tensorrt
|
370afa902534b17e94aa692cc30f87f128688b98
|
[
"Apache-2.0"
] | null | null | null |
mmdet2trt/converters/DeformPool.py
|
DableUTeeF/mmdetection-to-tensorrt
|
370afa902534b17e94aa692cc30f87f128688b98
|
[
"Apache-2.0"
] | null | null | null |
from torch2trt_dynamic.torch2trt_dynamic import (get_arg, tensorrt_converter,
trt_)
from .plugins import create_deformable_pool_plugin
@tensorrt_converter('mmdet2trt.models.roi_heads.roi_extractors.pooling_layers'
'.deform_roi_pool_extractor.deformable_roi_pool_wrap')
# @tensorrt_converter('mmcv.ops.deform_roi_pool')
def convert_DeformPool(ctx):
input = get_arg(ctx, 'input', pos=0, default=None)
rois = get_arg(ctx, 'rois', pos=1, default=None)
offset = get_arg(ctx, 'offset', pos=2, default=None)
out_size = get_arg(ctx, 'output_size', pos=3, default=(7, 7))
spatial_scale = get_arg(ctx, 'spatial_scale', pos=4, default=1.)
sampling_ratio = get_arg(ctx, 'sampling_ratio', pos=5, default=0)
gamma = get_arg(ctx, 'gamma', pos=6, default=0.1)
output = ctx.method_return
input_trt = trt_(ctx.network, input)
rois_trt = trt_(ctx.network, rois)
offset_trt = None
if offset is not None and len(offset.shape) > 1:
offset_trt = trt_(ctx.network, offset)
plugin = create_deformable_pool_plugin('deform_roi_pool_' + str(id(input)),
out_size, spatial_scale,
sampling_ratio, gamma)
if offset_trt is None:
custom_layer = ctx.network.add_plugin_v2(inputs=[input_trt, rois_trt],
plugin=plugin)
else:
custom_layer = ctx.network.add_plugin_v2(
inputs=[input_trt, rois_trt, offset_trt], plugin=plugin)
output._trt = custom_layer.get_output(0)
| 42.051282
| 79
| 0.634756
|
423d33bf36bb6753ceb63417c1d8e58533610f37
| 4,958
|
py
|
Python
|
src/tensorrt/python/host2device.py
|
LaudateCorpus1/dlcookbook-dlbs
|
998a44940eb80f534ed682f8ef010e35df6b89a2
|
[
"Apache-2.0"
] | 123
|
2017-11-28T20:21:24.000Z
|
2022-03-22T11:21:04.000Z
|
src/tensorrt/python/host2device.py
|
LaudateCorpus1/dlcookbook-dlbs
|
998a44940eb80f534ed682f8ef010e35df6b89a2
|
[
"Apache-2.0"
] | 17
|
2018-01-05T00:05:13.000Z
|
2020-09-18T05:12:45.000Z
|
src/tensorrt/python/host2device.py
|
LaudateCorpus1/dlcookbook-dlbs
|
998a44940eb80f534ed682f8ef010e35df6b89a2
|
[
"Apache-2.0"
] | 48
|
2018-01-04T20:52:51.000Z
|
2022-03-06T00:47:17.000Z
|
import json
import shlex
import argparse
import itertools
import subprocess
import typing as t
from dlbs.utils import LogEvent
"""
Example configuration file (JSON). GPU format: [CPU_CORES:]GPU (cpu cores are optional, is used for numactl).
```json
{
"gpu": ["0-9:0", "10-10:1"],
"size_mb": [19, 38, 75, 151, 302, 604, 1208],
"pinned_mem": true,
"num_warmup_iterations": 10,
"num_iterations": 500,
"docker": "nvidia-docker",
"image": "dlbs/tensorrt:21.08"
}
```
"""
def parse_arguments() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
'--config', type=str, required=False, default=None,
help="Path to a JSON configuration file. If given, all other parameters are ignored."
)
parser.add_argument(
'--docker', type=str, required=False, default='nvidia-docker',
help="Docker executable."
)
parser.add_argument(
'--image', type=str, required=False, default='dlbs/tensorrt:21.08',
help="Docker image."
)
parser.add_argument(
'--gpu', type=str, required=False, default="0",
help="GPU index to use. The format is '[cpu_affinity:]GPU_ID', where cpu_affinity is the range of cores. If "
"present, pin process to these range of CPUs with numactl and enforce local memory allocation policy. "
"For instance, on a two socket NUMA machine with 18 cores per CPU, setting --gpu = '0-17:0' will "
"effectively pin process to socket #0."
)
parser.add_argument(
'--size_mb', type=float, required=False, default=10.0,
help="Size of a data chunk in MegaBytes. During inference benchmarks, data is transferred as arrays of shape"
"[BatchSize, 3, Wight, Height] of 'float' data type. These are typical sizes for AlexNetOWT where\n"
"Width = Height = 227:\n"
"\tBatch size (images): 32 64 128 256 512 1024\n"
"\tBatch size (MB): 19 38 75 151 302 604\n"
)
parser.add_argument(
'--pinned_mem', '--pinned-mem', required=False, default=False, action='store_true',
help="Allocate buffer in host pinned memory."
)
parser.add_argument(
'--num_warmup_iterations', '--num-warmup-iterations', type=int, required=False, default=10,
help="Number of warmup iterations."
)
parser.add_argument(
'--num_iterations', '--num-iterations', type=int, required=False, default=100,
help="Number of benchmark iterations."
)
return parser.parse_args()
def run(docker: t.Text, image: t.Text, pinned_mem: bool, cpus: t.Optional[t.Text], gpu: t.Text,
num_warmup_iterations: int, num_iterations: int, size_mb: float) -> t.Optional[float]:
docker_cmd: t.Text = f"{docker} run -ti --rm"
benchmark_cmd: t.Text = ""
if cpus:
docker_cmd += " --privileged"
benchmark_cmd += f" numactl --localalloc --physcpubind={cpus}"
benchmark_cmd += f" benchmark_host2device_copy --gpu={gpu} --size={size_mb} --num_batches={num_iterations}"\
f" --num_warmup_batches={num_warmup_iterations}"
if pinned_mem:
benchmark_cmd += " --pinned"
throughput_mb_s: t.Optional[float] = None
docker_cmd += f" {image} /bin/bash -c '{benchmark_cmd}'"
with subprocess.Popen(shlex.split(docker_cmd), universal_newlines=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, encoding='utf-8') as p:
while True:
output = p.stdout.readline()
if output == '' and p.poll() is not None:
break
if output:
dlbs_event = LogEvent.from_string(output)
if dlbs_event is not None and 'host2device' in dlbs_event.labels:
throughput_mb_s = dlbs_event.content['throughput_mb_s']
else:
# sys.stdout.write("LINE '" + output.strip(' \t\n') + "'\n")
# sys.stdout.flush()
...
return throughput_mb_s
def main():
args: argparse.Namespace = parse_arguments()
if args.config:
with open(args.config) as f:
global_config = json.load(f)
else:
global_config = vars(args)
_ = global_config.pop('config')
for param in global_config:
if not isinstance(global_config[param], list):
global_config[param] = [global_config[param]]
params, values = zip(*global_config.items())
configs = (dict(zip(params, v)) for v in itertools.product(*values))
for config in configs:
config['cpus'] = None
cpus_gpu: t.List[t.Text] = config['gpu'].split(':')
if len(cpus_gpu) == 2:
config['cpus'] = cpus_gpu[0]
config['gpu'] = cpus_gpu[1]
config['throughput_mb_s'] = run(**config)
LogEvent(config, labels=['host2device']).log()
if __name__ == '__main__':
main()
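# --- Added sketch (not in the original, kept as comments so the script's
# behavior is unchanged): how main() above expands a config whose values are
# lists into one run per combination via itertools.product. The cfg values
# here are invented placeholders.
#
#   import itertools
#   cfg = {"gpu": ["0", "1"], "size_mb": [19.0, 38.0]}
#   params, values = zip(*cfg.items())
#   runs = [dict(zip(params, v)) for v in itertools.product(*values)]
#   assert len(runs) == 4  # 2 GPUs x 2 chunk sizes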
| 38.138462
| 117
| 0.612142
|
a51d9e14b7297debb32ff46fd8878137462e9ae1
| 3,860
|
py
|
Python
|
numba_dpcomp/numba_dpcomp/mlir/builtin/funcs.py
|
nbpatel/mlir-extensions
|
1270a2550694a53a0c70fd5b17d518eef133802b
|
[
"Apache-2.0"
] | null | null | null |
numba_dpcomp/numba_dpcomp/mlir/builtin/funcs.py
|
nbpatel/mlir-extensions
|
1270a2550694a53a0c70fd5b17d518eef133802b
|
[
"Apache-2.0"
] | null | null | null |
numba_dpcomp/numba_dpcomp/mlir/builtin/funcs.py
|
nbpatel/mlir-extensions
|
1270a2550694a53a0c70fd5b17d518eef133802b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..linalg_builder import FuncRegistry, is_int, is_float, broadcast_type
from ..func_registry import add_func
import math
add_func(slice, "slice")
add_func(range, "range")
registry = FuncRegistry()
def register_func(name, orig_func=None):
global registry
return registry.register_func(name, orig_func)
@register_func("bool", bool)
def bool_cast_impl(builder, arg):
return builder.cast(arg, builder.bool)
@register_func("int", int)
def int_cast_impl(builder, arg):
return builder.cast(arg, builder.int64)
@register_func("float", float)
def float_cast_impl(builder, arg):
return builder.cast(arg, builder.float64)
@register_func("len", len)
def len_impl(builder, arg):
return builder.cast(len(arg), builder.int64)
def _get_type(builder, v):
if isinstance(v, float):
return builder.float64
elif isinstance(v, int):
return builder.int64
return v.type
@register_func("min", min)
def min_impl(builder, *args):
if len(args) > 2:
rhs = min_impl(builder, *args[1:])
else:
rhs = args[1]
lhs = args[0]
res_type = broadcast_type(
builder, (_get_type(builder, lhs), _get_type(builder, rhs))
)
lhs = builder.cast(lhs, res_type)
rhs = builder.cast(rhs, res_type)
cond = lhs < rhs
return builder.select(cond, lhs, rhs)
@register_func("max", max)
def max_impl(builder, *args):
if len(args) > 2:
rhs = max_impl(builder, *args[1:])
else:
rhs = args[1]
lhs = args[0]
res_type = broadcast_type(
builder, (_get_type(builder, lhs), _get_type(builder, rhs))
)
lhs = builder.cast(lhs, res_type)
rhs = builder.cast(rhs, res_type)
cond = lhs > rhs
return builder.select(cond, lhs, rhs)
def _gen_math_funcs():
def get_func(name, N):
def func(builder, *args):
if len(args) != N:
return None
t = args[0].type
if not is_int(t, builder) and not is_float(t, builder):
return None
for a in args[1:]:
if a.type != t:
return None
fname = name
if t == builder.float32:
fname = "f" + fname
elif t != builder.float64:
t = builder.float64
args = tuple(builder.cast(arg, builder.float64) for arg in args)
res = builder.cast(0, t)
return builder.external_call(fname, args, res, decorate=False)
return func
math_funcs = [
("log", 1),
("sqrt", 1),
("exp", 1),
("erf", 1),
("sin", 1),
("cos", 1),
("tanh", 1),
("atan2", 2),
]
for func, N in math_funcs:
fname = "math." + func
py_func = eval(fname)
register_func(fname, py_func)(get_func(func, N))
_gen_math_funcs()
del _gen_math_funcs
@register_func("abs", abs)
def abs_impl(builder, arg):
t = arg.type
if is_int(t, builder):
c = arg < 0
return builder.select(c, -arg, arg)
if is_float(t, builder):
fname = "fabs"
if t == builder.float32:
fname = fname + "f"
res = builder.cast(0, t)
return builder.external_call(fname, arg, res, decorate=False)
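# --- Added sketch (not in the original): the pairwise reduction pattern that
# min_impl/max_impl above use to fold an n-ary call into nested binary selects,
# restated over plain Python values (no builder involved; needs >= 2 args).
def pairwise_min(*args):
    rhs = pairwise_min(*args[1:]) if len(args) > 2 else args[1]
    lhs = args[0]
    return lhs if lhs < rhs else rhs

assert pairwise_min(3, 1, 2) == 1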
| 25.228758
| 80
| 0.610104
|
cbbec47e477c17d5f328e20f1dc860f9b810850d
| 6,364
|
py
|
Python
|
src/spn/algorithms/LearningWrappers.py
|
minimrbanana/SPFlow
|
32233bf29d107c62f0f727b0e64aaa74b37cfe1e
|
[
"Apache-2.0"
] | 2
|
2021-04-22T09:20:20.000Z
|
2021-05-17T12:26:59.000Z
|
src/spn/algorithms/LearningWrappers.py
|
minimrbanana/SPFlow
|
32233bf29d107c62f0f727b0e64aaa74b37cfe1e
|
[
"Apache-2.0"
] | null | null | null |
src/spn/algorithms/LearningWrappers.py
|
minimrbanana/SPFlow
|
32233bf29d107c62f0f727b0e64aaa74b37cfe1e
|
[
"Apache-2.0"
] | null | null | null |
"""
Created on March 30, 2018
@author: Alejandro Molina
"""
import numpy as np
from spn.algorithms.StructureLearning import get_next_operation, learn_structure
from spn.algorithms.CnetStructureLearning import get_next_operation_cnet, learn_structure_cnet
from spn.algorithms.Validity import is_valid
from spn.structure.Base import Sum, assign_ids
from spn.structure.leaves.histogram.Histograms import create_histogram_leaf
from spn.structure.leaves.parametric.Parametric import create_parametric_leaf
from spn.structure.leaves.piecewise.PiecewiseLinear import create_piecewise_leaf
from spn.structure.leaves.cltree.CLTree import create_cltree_leaf
from spn.algorithms.splitting.Conditioning import (
get_split_rows_naive_mle_conditioning,
get_split_rows_random_conditioning,
)
import logging
logger = logging.getLogger(__name__)
def learn_classifier(data, ds_context, spn_learn_wrapper, label_idx, **kwargs):
spn = Sum()
for label, count in zip(*np.unique(data[:, label_idx], return_counts=True)):
branch = spn_learn_wrapper(data[data[:, label_idx] == label, :], ds_context, **kwargs)
spn.children.append(branch)
spn.weights.append(count / data.shape[0])
spn.scope.extend(branch.scope)
assign_ids(spn)
valid, err = is_valid(spn)
assert valid, "invalid spn: " + err
return spn
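# Note (added): learn_classifier above returns a Sum (mixture) node whose
# children are per-label SPNs and whose weights are the empirical class
# priors count / data.shape[0].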
def get_splitting_functions(cols, rows, ohe, threshold, rand_gen, n_jobs):
from spn.algorithms.splitting.Clustering import get_split_rows_KMeans, get_split_rows_TSNE, get_split_rows_GMM
from spn.algorithms.splitting.PoissonStabilityTest import get_split_cols_poisson_py
from spn.algorithms.splitting.RDC import get_split_cols_RDC_py, get_split_rows_RDC_py
if isinstance(cols, str):
if cols == "rdc":
split_cols = get_split_cols_RDC_py(threshold, rand_gen=rand_gen, ohe=ohe, n_jobs=n_jobs)
elif cols == "poisson":
split_cols = get_split_cols_poisson_py(threshold, n_jobs=n_jobs)
else:
raise AssertionError("unknown columns splitting strategy type %s" % str(cols))
else:
split_cols = cols
if isinstance(rows, str):
if rows == "rdc":
split_rows = get_split_rows_RDC_py(rand_gen=rand_gen, ohe=ohe, n_jobs=n_jobs)
elif rows == "kmeans":
split_rows = get_split_rows_KMeans()
elif rows == "tsne":
split_rows = get_split_rows_TSNE()
elif rows == "gmm":
split_rows = get_split_rows_GMM()
else:
raise AssertionError("unknown rows splitting strategy type %s" % str(rows))
else:
split_rows = rows
return split_cols, split_rows
def learn_mspn_with_missing(
data,
ds_context,
cols="rdc",
rows="kmeans",
min_instances_slice=200,
threshold=0.3,
linear=False,
ohe=False,
leaves=None,
memory=None,
rand_gen=None,
cpus=-1,
):
if leaves is None:
# leaves = create_histogram_leaf
leaves = create_piecewise_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe):
split_cols, split_rows = get_splitting_functions(cols, rows, ohe, threshold, rand_gen, cpus)
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn_missing = memory.cache(l_mspn_missing)
return l_mspn_missing(data, ds_context, cols, rows, min_instances_slice, threshold, linear, ohe)
def learn_mspn(
data,
ds_context,
cols="rdc",
rows="kmeans",
min_instances_slice=200,
threshold=0.3,
ohe=False,
leaves=None,
memory=None,
rand_gen=None,
cpus=-1,
):
if leaves is None:
leaves = create_histogram_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe):
split_cols, split_rows = get_splitting_functions(cols, rows, ohe, threshold, rand_gen, cpus)
nextop = get_next_operation(min_instances_slice)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop)
if memory:
l_mspn = memory.cache(l_mspn)
return l_mspn(data, ds_context, cols, rows, min_instances_slice, threshold, ohe)
def learn_parametric(
data,
ds_context,
cols="rdc",
rows="kmeans",
min_instances_slice=200,
min_features_slice=1,
multivariate_leaf=False,
threshold=0.3,
ohe=False,
leaves=None,
memory=None,
rand_gen=None,
initial_scope=None,
cpus=-1,
l_rfft=None,
is_2d=False,
):
if leaves is None:
leaves = create_parametric_leaf
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe, initial_scope, l_rfft, is_2d):
split_cols, split_rows = get_splitting_functions(cols, rows, ohe, threshold, rand_gen, cpus)
nextop = get_next_operation(min_instances_slice, min_features_slice, multivariate_leaf)
return learn_structure(data, ds_context, split_rows, split_cols, leaves, nextop, initial_scope, l_rfft=l_rfft, is_2d=is_2d)
if memory:
learn_param = memory.cache(learn_param)
return learn_param(data, ds_context, cols, rows, min_instances_slice, threshold, ohe, initial_scope, l_rfft, is_2d)
def learn_cnet(
data,
ds_context,
cond="naive_mle",
min_instances_slice=200,
min_features_slice=1,
memory=None,
rand_gen=None,
cpus=-1,
):
leaves = create_cltree_leaf
if cond == "naive_mle":
conditioning = get_split_rows_naive_mle_conditioning()
elif cond == "random":
conditioning = get_split_rows_random_conditioning()
if rand_gen is None:
rand_gen = np.random.RandomState(17)
def learn_param(data, ds_context, conditioning, min_instances_slice):
nextop = get_next_operation_cnet(min_instances_slice, min_features_slice)
return learn_structure_cnet(data, ds_context, conditioning, leaves, nextop)
if memory:
learn_param = memory.cache(learn_param)
return learn_param(data, ds_context, conditioning, min_instances_slice)
# === polling_stations/apps/pollingstations/migrations/0015_delete_residential_address.py | smsmith97/UK-Polling-Stations | BSD-3-Clause ===
# Generated by Django 2.2.16 on 2020-11-03 21:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("pollingstations", "0014_auto_20180314_1100"),
]
operations = [
migrations.DeleteModel(
name="ResidentialAddress",
),
]
# === webapp/api/gps.py | zorglub42/marv2plage | Apache-2.0 ===
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Zorglub42 {contact(at)zorglub42.fr}.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
"""FFBC8 weatherstation receive GPS data."""
import logging
from flask import request
from flask_restx import Resource
import requests
from api.restx import API
import settings
NS = API.namespace(
'GPS',
description='receive GPS data'
)
@NS.route('/')
class GPS(Resource):
"""Receive GPS data API Class."""
logger = None
# pylint: disable=keyword-arg-before-vararg
    def __init__(self, api=None, *args, **kwargs):
        # Forward the arguments instead of passing the kwargs dict as a
        # single positional argument, as the original did.
        Resource.__init__(self, api, *args, **kwargs)
        self.logger = logging.getLogger(__name__)
def _send_influx(self, data):
if data == "":
return False
if settings.conf["INFLUX"]["user"] is not None:
auth = (
settings.conf["INFLUX"]["user"],
settings.conf["INFLUX"]["pass"])
else:
auth = None
influx_url = settings.conf["INFLUX"]["host"]+"/write?db="
influx_url += settings.conf["INFLUX"]["db"]
response = requests.post(
influx_url,
data=data.encode("utf-8"),
auth=auth,
headers={
"Content-Type": "application/x-www-form-urlencoded; " +
"charset=UTF-8"
},
verify=False)
if response.status_code != 204:
log_msg = "Error while storing measurment: {}"
log_msg = log_msg.format(response.text)
self.logger.error(log_msg)
def post(self):
"""Return list of last values for all sensors."""
data = request.json
self.logger.debug("\t%s", data)
spd = data["speed"]
if spd is None:
spd = "0"
else:
spd = str(spd)
influx_data = "GPS_S value=" + spd + " " + str(data["timestamp"]*1000000)
self.logger.debug(influx_data)
self._send_influx(influx_data)
return "OK"
# === docs/conftest.py | FHTMitchell/scrapy | BSD-3-Clause ===
from doctest import ELLIPSIS
from sybil import Sybil
from sybil.parsers.codeblock import CodeBlockParser
from sybil.parsers.doctest import DocTestParser
from sybil.parsers.skip import skip
pytest_collect_file = Sybil(
parsers=[
DocTestParser(optionflags=ELLIPSIS),
CodeBlockParser(future_imports=['print_function']),
skip,
],
pattern='*.rst',
).pytest()
# === leet/greedy/productExceptSelf.py | peterlamar/python-cp-cheatsheet | Apache-2.0 ===
"""
time: 5 min
errors: forgot range needed len to process list!
Product of Array Except Self
"""
class Solution:
def productExceptSelf(self, nums: List[int]) -> List[int]:
rtn = [0] * len(nums)
if len(nums) <= 1:
return nums
cur = 1
for i in range(len(nums)):
rtn[i] = cur
cur *= nums[i]
cur = 1
for i in reversed(range(len(nums))):
rtn[i] *= cur
cur *= nums[i]
return rtn
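if __name__ == "__main__":
    # Worked example (editor's sketch): the first pass stores prefix
    # products, rtn = [1, 1, 2, 6]; the reverse pass multiplies in suffix
    # products (applied right to left: 1, 4, 12, 24), giving [24, 12, 8, 6].
    assert Solution().productExceptSelf([1, 2, 3, 4]) == [24, 12, 8, 6]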
# === tests/utils.py | yzernik/squeaknode | MIT ===
# MIT License
#
# Copyright (c) 2020 Jonathan Zernik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import hashlib
import os
import random
import uuid
from bitcoin.core import CBlockHeader
from squeak.core.signing import CSigningKey
from squeak.core.signing import CSqueakAddress
from squeaknode.core.peer_address import Network
from squeaknode.core.peer_address import PeerAddress
from squeaknode.core.peers import create_saved_peer
from squeaknode.core.profiles import create_contact_profile
from squeaknode.core.profiles import create_signing_profile
from squeaknode.core.squeaks import HASH_LENGTH
from squeaknode.core.squeaks import make_squeak_with_block
def gen_signing_key():
return CSigningKey.generate()
def gen_random_hash():
return os.urandom(HASH_LENGTH)
def sha256(data):
return hashlib.sha256(data).digest()
def address_from_signing_key(signing_key):
verifying_key = signing_key.get_verifying_key()
return CSqueakAddress.from_verifying_key(verifying_key)
def gen_address():
signing_key = gen_signing_key()
return address_from_signing_key(signing_key)
def gen_squeak_addresses(n):
return [gen_address() for i in range(n)]
def gen_squeak(signing_key, block_height, replyto_hash=None):
random_content = "random_content_{}".format(uuid.uuid1())
random_hash = gen_random_hash()
squeak, secret_key = make_squeak_with_block(
signing_key,
random_content,
block_height,
random_hash,
replyto_hash=replyto_hash,
)
return squeak
def gen_block_header(block_height):
return CBlockHeader(
nTime=block_height * 10, # So that block times are increasing.
)
def gen_squeak_with_block_header(signing_key, block_height, replyto_hash=None):
""" Return a tuple with a CSqueak and a CBlockHeader.
"""
squeak = gen_squeak(
signing_key=signing_key,
block_height=block_height,
replyto_hash=replyto_hash,
)
block_info = gen_block_header(
block_height=block_height,
)
return squeak, block_info
def gen_signing_profile(profile_name, signing_key):
return create_signing_profile(
profile_name,
signing_key,
)
def gen_contact_profile(profile_name, address):
return create_contact_profile(
profile_name,
address,
)
def gen_squeak_peer(peer_name):
host = "random_host_{}".format(uuid.uuid1())
port = random.randint(1, 10000)
peer_address = PeerAddress(
network=Network.IPV4,
host=host,
port=port,
)
return create_saved_peer(
peer_name,
peer_address,
)
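# Hedged usage sketch of the helpers above, as they might appear in a test:
#     key = gen_signing_key()
#     squeak, header = gen_squeak_with_block_header(key, block_height=10)
# Here header.nTime would be 100, since gen_block_header derives block
# times as block_height * 10.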
# === tests/testapp/tests/test_cache_to_use.py | starexpress/django-admin-caching | BSD-3-Clause ===
from django.core import cache
from django.core.cache.backends.dummy import DummyCache
from django.core.cache.backends.base import InvalidCacheBackendError
from django.test.utils import override_settings
from django_admin_caching.caching import CacheConfig
import pytest
from testapp.sixmock import patch
class TestAdminClassCanSpecifyWhichCacheToUse(object):
def test_nothing_specified_default_cache_is_used(self):
class NoAttributeAdmin(object):
pass
cfg = CacheConfig(model_admin=NoAttributeAdmin())
assert cfg.cache_to_use_name() == 'default'
assert cfg.cache == cache.caches['default']
def test_specified_cache_is_used(self):
class AttributeSpecifiesCacheToUse(object):
admin_caching_cache_name = 'foo'
with self.caches('foo'):
cfg = CacheConfig(model_admin=AttributeSpecifiesCacheToUse())
assert cfg.cache_to_use_name() == 'foo'
assert cfg.cache == cache.caches['foo']
def test_if_wrong_cache_is_specified_there_is_an_error(self):
class AttributeSpecifiesCacheToUse(object):
admin_caching_cache_name = 'bar'
with self.caches('default'):
cfg = CacheConfig(model_admin=AttributeSpecifiesCacheToUse())
with pytest.raises(InvalidCacheBackendError):
cfg.cache # accessing the cache
def test_allows_other_apps_to_wrap_the_cache(self, django_caches):
manual_cache = DummyCache('dummy', {})
to_patch = 'django.core.cache.CacheHandler.__getitem__'
with patch(to_patch, return_value=manual_cache) as mock:
cfg = CacheConfig(model_admin=None)
assert cache.caches['default'] == manual_cache
assert cfg.cache == manual_cache
assert mock.called
class caches(override_settings):
def __init__(self, *names):
self.names = names
self.caches_dict = dict(
(name, {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': name,
})
for name in names
)
super(
TestAdminClassCanSpecifyWhichCacheToUse.caches, self).__init__(
CACHES=self.caches_dict)
# === scripts/pklhisto2root.py | umd-lhcb/lhcb-ntuples-gen | BSD-2-Clause ===
#!/usr/bin/env python3
#
# Stolen almost verbatim from:
# https://gitlab.cern.ch/lhcb-rta/pidcalib2/-/blob/master/src/pidcalib2/pklhisto2root.py
###############################################################################
# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration #
# #
# This software is distributed under the terms of the GNU General Public #
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
"""Convert pickled PIDCalib2 histograms to TH*D & save them in a ROOT file.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
"""
import itertools
import math
import pathlib
import pickle
import sys
import boost_histogram as bh
import ROOT
def convert_to_root_histo(name: str, bh_histo: bh.Histogram):
"""Convert boost_histogram histogram to a ROOT histogram.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
Furthermore, the boost histogram must have a storage type that stores
variance, e.g., Weight.
Args:
name: Name of the new ROOT histogram.
bh_histo: The histogram to convert.
Returns:
The converted ROOT histogram. Type depends on dimensionality.
"""
histo = None
if len(bh_histo.axes) == 1:
histo = ROOT.TH1D(name, name, 3, 0, 1)
histo.SetBins(bh_histo.axes[0].size, bh_histo.axes[0].edges)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
elif len(bh_histo.axes) == 2:
histo = ROOT.TH2D(name, name, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
elif len(bh_histo.axes) == 3:
histo = ROOT.TH3D(name, name, 3, 0, 1, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
bh_histo.axes[2].size,
bh_histo.axes[2].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
histo.GetZaxis().SetTitle(bh_histo.axes[2].metadata["name"])
else:
raise Exception(f"{len(bh_histo.axes)}D histograms not supported by ROOT")
indices_ranges = [list(range(n)) for n in bh_histo.axes.size]
for indices_tuple in itertools.product(*indices_ranges):
root_indices = [index + 1 for index in indices_tuple]
histo.SetBinContent(
histo.GetBin(*root_indices), bh_histo[indices_tuple].value # type: ignore
)
histo.SetBinError(
histo.GetBin(*root_indices), math.sqrt(bh_histo[indices_tuple].variance) # type: ignore # noqa
)
return histo
def convert_pklfile_to_rootfile(path: str, output_path: str):
pkl_path = pathlib.Path(path)
root_path = pathlib.Path(output_path)
eff_histos = {}
with open(pkl_path, "rb") as f:
eff_histos["eff"] = pickle.load(f)
eff_histos["passing"] = pickle.load(f)
eff_histos["total"] = pickle.load(f)
for item in eff_histos.values():
assert isinstance(item, bh.Histogram)
root_file = ROOT.TFile(str(root_path), "RECREATE")
eff_histo = convert_to_root_histo("eff", eff_histos["eff"])
eff_histo.Write()
passing_histo = convert_to_root_histo("passing", eff_histos["passing"])
passing_histo.Write()
total_histo = convert_to_root_histo("total", eff_histos["total"])
total_histo.Write()
root_file.Close()
def main():
file_in = sys.argv[1]
try:
file_out = sys.argv[2]
except IndexError:
file_out = pathlib.Path(sys.argv[1]).with_suffix('.root')
convert_pklfile_to_rootfile(file_in, file_out)
if __name__ == "__main__":
main()
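# Usage (editor's note): `pklhisto2root.py input.pkl [output.root]`; when the
# output path is omitted, the input's suffix is swapped for .root. The same
# conversion is available programmatically, with illustrative file names:
#     convert_pklfile_to_rootfile("calib.pkl", "calib.root")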
# === lib/processing.py | Aziroshin/blockchaintools | MIT ===
#-*- coding: utf-8 -*-
#=======================================================================================
# Imports
#=======================================================================================
# Python.
from collections import namedtuple, UserList, UserDict
from subprocess import Popen, PIPE
from pathlib import Path
from pwd import getpwuid
from grp import getgrgid
import os
import sys
# Debug
from lib.debugging import dprint #NOTE: DEBUG
#=======================================================================================
# Datatypes
#=======================================================================================
# collections.namedtuple changed from Python version 3.6 to 3.7:
# In 3.6, "verbose" can be directly passed to the constructor, in 3.7, that has
# to be done by means of the "defaults" argument. This is a hack-around to ensure
# compatibility with 3.6 and beyond.
if sys.version_info.minor <= 6:
_dataTypesConfig = {"verbose": False, "rename": False}
else:
_dataTypesConfig = {"defaults": {"verbose": False}, "rename": False}
TestTuple = namedtuple("TestTuple", "a b")
ProcessOutput = namedtuple("ProcessOutput", "stdout stderr")
# SplitPair
SplitPair = namedtuple("SplitPair", "key value")
UnsplitPair = namedtuple("UnsplitPair", "key")
# SplitVar
SplitVar = namedtuple("SplitVar", "var value")
UnsplitVar = namedtuple("UnsplitVar", "var value")
# SplitArg
SplitArg = namedtuple("SplitArg", "param value")
UnsplitArg = namedtuple("UnsplitArg", "param")
# Proc status
ProcStatus = namedtuple("ProcStatus", "name data")
# Proc status: UID & GUID
ProcStatusPerms = namedtuple("ProcStatusPerms", "real effective savedSet filesystem")
#=======================================================================================
# Library
#=======================================================================================
#=========================================================
# Errors
#=========================================================
class NoSuchProcessError(Exception): pass
#=========================================================
# Internal Processes
#=============================
# processes that we start
#=========================================================
#=========================================================
class Process(object):
#=============================
"""Represents a system process started by this script.
Note: Refrain from calling .communicate() directly on the process from outside of this object."""
#=============================
	def __init__(self, commandLine, run=True, env=None):
		self.commandLine = commandLine
		self._communicated = False
		self._stdout = None
		self._stderr = None
		if env is not None:
			self.env = os.environ.copy()
			self.env.update(env)
		else:
			self.env = os.environ.copy()
		# Run last: .run() reads self.env, which the original only set up
		# after .run() had already been called.
		if run == True:
			self.run()
def run(self):
self.process = Popen(self.commandLine, env=self.env, stdout=PIPE, stderr=PIPE)
return self.process
def waitAndGetOutput(self, timeout=None):
if not self._communicated:
self._stdout, self._stderr = self.process.communicate(timeout=timeout)
self._communicated = True
return ProcessOutput(self._stdout, self._stderr)
def waitAndGetStdout(self, timeout=None):
return self.waitAndGetOutput(timeout).stdout
def waitAndGetStderr(self, timeout=None):
return self.waitAndGetOutput(timeout).stderr
#=========================================================
# Internal Processes
#=============================
# Processes started by things outside our scope.
#=========================================================
#=========================================================
class LinuxProcessList(UserList):
#=============================
"""A handle to listing the processes running on a machine."""
#=============================
def __init__(self, processes=[], initWithAll=True, raw=False, splitArgs=False):
# If you want to change raw and splitArgs defaults, you might also want to
# change them in self.getAll and ExternalLinuxProcess.__init__.
super().__init__(processes)
self.initWithAll = initWithAll
if not self and initWithAll:
self.data = self.data+self.getAll(raw=raw, splitArgs=splitArgs)
def getAllPids(self):
"""Return the PIDs of all running processes as integers."""
return [path.name for path in Path("/proc").iterdir() if str(path.name).isdigit()]
def getAll(self, raw=False, splitArgs=False):
"""Get a list of ExternalLinuxProcess objects for all running processes."""
processes = []
for pid in self.getAllPids():
try:
processes.append(ExternalLinuxProcess(pid, raw=raw, splitArgs=splitArgs))
except NoSuchProcessError:
pass
return processes
def byPid(self, pid):
"""Returns the process matching the specified PID."""
return type(self)(processes=\
[p for p in self if p.pid == pid],\
initWithAll=False)
def byName(self, name, raw=None):
"""Return type(self) object of all processes matching the specified name."""
return type(self)(processes=\
[p for p in self if p.getName(raw=raw) == name],\
initWithAll=False)
def byPath(self, path, raw=None):
"""Return type(self) object of all processes matching the specified path.
Path in this case refers to the path of the executable, represented
as the first element of the processes' argv."""
return type(self)(processes=\
[p for p in self if p.getPath(raw=raw) == path],\
initWithAll=False)
def byArg(self, arg, raw=None, splitArgs=None):
"""Return type(self) object of all processes having the specified argument."""
return type(self)(processes=\
[p for p in self if p.hasArg(arg, raw=raw, splitArgs=splitArgs)],\
initWithAll=False)
def byArgPart(self, argPart, raw=None):
"""Return type(self) object of all processes with this substring in one of their args."""
return type(self)(processes=\
[p for p in self if p.inArg(argPart, raw=raw, splitArgs=True)],\
initWithAll=False)
def byArgvPart(self, argvPart, raw=None, splitArgs=None):
"""Return type(self) object of all processes with the specified argv subset."""
return type(self)(processes=\
[p for p in self if p.inArgv(argvPart, raw=raw, splitArgs=splitArgs)],\
initWithAll=False)
def byHome(self, home):
"""Return type(self) object of all processes with the specified home dir path."""
return type(self)(processes=\
[p for p in self if p.home == home],\
initWithAll=False)
#TODO #NOTE: The list isn't live, but the processes are. This needs to change.
#=========================================================
class LinuxProcessInfo(object):
#=============================
"""A static representation of /proc process information for a process.
Represents the state of the process found upon instantiation (not live).
If the process is found to be dead during init, raises NoSuchProcessError.
If permission issues are encountered during init, None for the fields
in question.
"""
#=============================
def __init__(self, pid):
self.pid = pid
# Get info from /proc.
try:
self.cmdline = self._readProc(["cmdline"])
self.comm = self._readProc(["comm"])
self.cwd = self._resolveSymlink(["cwd"])
self.environ = self._readProc(["environ"])
self.status = self._readProc(["status"])
except FileNotFoundError:
raise NoSuchProcessError("A process with the PID {0} doesn't exist (anymore?)."\
.format(self.pid))
def _readProc(self, pathElements):
"""Read a /proc/<self.pid> process file by its name."""
try:
with Path("/proc", self.pid, *pathElements).open("rb") as procFile:
return procFile.read()
except PermissionError:
return None
def _resolveSymlink(self, pathElements):
try:
return os.path.realpath(Path("/proc", self.pid, *pathElements))
except PermissionError:
return None
#=========================================================
class Pair(object):
"""Equal-sign separated key/value pair in its split and unsplit form.
Takes:
- value: Bytestring or string potentially containing an equal sign separated
key/value pair.
- raw: True value is a bytestring, False for string.
- unsplitFiller (None): If not None, .split uses the value specified here for
a substitute if value is either not an equal sign separated key/value pair
or doesn't contain an rvalue.
A KeyValueDataError is raised if the UnsplitType for this class
doesn't take at least two items."""
#NOTE: Consider merging Pair into KeyValueData. Too much redundancy.
def __init__(self, value, raw, SplitType, UnsplitType, unsplitFiller=None):
self.value = value
self.raw = raw
self.SplitType = SplitType
self.UnsplitType = UnsplitType
self.unsplitFiller = unsplitFiller
if self.raw:
self.equalSign = b"="
else:
self.equalSign = "="
@property
def split(self):
"""If applicable, splits the pair by equal sign and returns the specified SplitType.
Otherwise, returns UnsplitType with the entire argument as its one value."""
key, value = self.value.partition(self.equalSign)[0::2]
if value:
return self.SplitType(key, value)
else:
if self.unsplitFiller is None:
return self.UnsplitType(self.value)
else:
try:
return self.UnsplitType(self.value, self.unsplitFiller)
except TypeError:
raise KeyValueDataError(\
"When unsplitFiller is specified, UnsplitType needs to have a size of 2."
"Type that caused the error: {Type}".format(Type=self.UnsplitType.__name__))
#=========================================================
class KeyValueDataError(Exception): pass
#=========================================================
class KeyValueData(object):
"""A set of equal-sign separated key/value pairs in their split and unsplit forms.
.split returns a list of SplitPair(key, value) and UnsplitPair(key).
There are two important class attributes you might want to consider when subclassing:
- SplitType (SplitPair): A 2-tuple returned by .split for elements in the specified
data list that could be split.
- UnsplitType (UnsplitPair): A tuple of either size 1 or 2. Size 2 is required when
unsplitFiller is specified. Is used for the return value of .split for
elements of the specified data list that couldn't be split.
This class is designed with namedtuples in mind.
Takes:
- data: List potentally containing equal-sign separated key-value pairs.
- raw: True if data is in bytestrings, False for strings.
- unsplitFiller (None): If not None, substitutes the missing value of data elements
that either have no equal sign or no rvalue with what's specified for this
parameter. A KeyValueDataError is raised if the UnsplitType for this class
doesn't take at least two items."""
SplitType = SplitPair
UnsplitType = UnsplitPair
def __init__(self, data, raw, unsplitFiller=None):
self.data = data
self.raw = raw
self.unsplitFiller = unsplitFiller
@property
def unsplit(self):
"""Get the unsplit data this object was initialized with."""
return self.data
@property
def split(self):
"""Returns a list with all key/value pairs split by equal sign, where applicable.
Doesn't split if the left hand side includes non-alphanumeric characters, except
dashes, in order not to mess with quoted strings, shell variables and subshells, etc."""
data = []
for pair in self.data:
for element in\
Pair(pair, self.raw, type(self).SplitType, type(self).UnsplitType,\
unsplitFiller=self.unsplitFiller).split:
data.append(element)
return data
#=========================================================
class Argv(KeyValueData):
"""The argv of a process, optionally with equal sign seperated args split.
.split returns a list of splitArg(param, value) and UnsplitArg(param)."""
SplitType = SplitArg
UnsplitType = UnsplitArg
def __init__(self, data, raw, withComm=True):
if not withComm: data.pop(0)
super().__init__(data, raw)
#=========================================================
class Env(KeyValueData):
"""The environment vars of a process, optionally with equal sign separated vars split.
.split returns a list of SplitVar(var, value) and UnsplitVar(var)."""
SplitType = SplitVar
UnsplitType = UnsplitVar
def __init__(self, data, raw):
super().__init__(data, raw, unsplitFiller="")
#=========================================================
class LinuxProcessStatus(UserDict):
def __init__(self, procData, raw, multisAreLists=True):
		super().__init__()  # the original passed self as the initial dict
self.procData = procData
self.raw = raw
if multisAreLists:
self.data = self.stringDictMultisAreLists
else:
if self.raw is True:
self.data = self.rawDict
elif self.raw is False:
self.data = self.stringDict
@property
def dataLines(self):
"""List of every line found in the status output."""
return self.procData.split(b"\n")
@property
def dataPairs(self):
"""List of key/value pairs per line."""
return [ProcStatus(*line.partition(b":\t")[0::2]) for line in self.dataLines]
@property
def dataPairsAllAreLists(self):
"""List of key/value pairs per line where lines with multiple values have them listed."""
return [ProcStatus(pair[0], pair[1].strip(b"\x00").split(b"\t")) for pair in self.dataPairs]
@property
def dataPairsMultisAreLists(self):
"""Like dataPairsAllTuples, but only status values with more than one item are listed."""
pairs = []
for pair in self.dataPairsAllAreLists:
if len(pair[1]) > 1:
pairs.append(pair)
else:
pairs.append(ProcStatus(pair[0], pair[1][0]))
return pairs
@property
def rawDict(self):
"""Pairs sorted into a dict in their raw bytes() form."""
return {pair.name: pair.data for pair in self.dataPairsMultisAreLists}
@property
def stringDict(self):
"""Pairs sorted into a dict in string form."""
return {pair.name.decode(): b"\t".join(pair.data).decode() for pair in self.dataPairsAllAreLists}
@property
def stringDictMultisAreLists(self):
"""Dict with str() keys, the values being str(), except multi-field ones, they're lists."""
return {pair.name.decode(): self.stringify(pair.data) for pair in self.dataPairsMultisAreLists}
def stringify(self, statusData):
"""If value is ProcStatus(), returns stringified ProcStatus(). Else, str() (former bytes()).
This is a "magic" method to produce a dict with string keys, and string values if they're
single column values in /proc/*/status, or list values for multi column ones."""
if type(statusData) is list:
return [s.decode() for s in statusData]
else:
return statusData.decode()
#=========================================================
class ExternalLinuxProcess(object):
#=============================
"""A Linux process that may or may not have been started by this program.
This is implemented as a live wrapper around /proc.
The basic assumption is that the process was originally started by something
else, thus some functionality one could expect from the Process class will
not be available.
Takes:
- pid: PID of the process.
- raw (False): True for bytestring, False for string.
- splitArgs (False): True to split args by equal sign by default when applicable.
- splitVars (False): True to split env. vars. by equal sign by default when applicable."""
#=============================
def __init__(self, pid, raw=False, splitArgs=False, splitVars=False):
self.info = LinuxProcessInfo(pid)
self.rawDefaultSetting = raw
self.splitArgsDefaultSetting = splitArgs
self.splitVarsDefaultSetting = splitVars
#=============================
# BEGIN: COMMON
# Convenience properties which wrap around getter functions, returning their value
# in whatever is the resulting default considering our configuration (say, for 'raw').
@property
def pid(self):
return self.getPid()
@property
def name(self):
return self.getName()
@property
def path(self):
return self.getPath()
@property
def env(self):
return self.getEnvDict()
@property
def status(self):
return LinuxProcessStatus(self.info.status, raw=self.raw())
@property
def uids(self):
"""Named tuple of Uids: (real, effective, savedSet, filesystem)."""
return ProcStatusPerms(*self.status["Uid"])
@property
def gids(self):
"""Named tuple of Gids: (real, effective, savedSet, filesystem)."""
return ProcStatusPerms(*self.status["Gid"])
@property
def users(self):
"""Named tuple of user names: (real, effective, savedSet, filesystem)."""
		return ProcStatusPerms(*[getpwuid(int(p)).pw_name for p in self.uids])  # status fields are strings
@property
def groups(self):
"""Named tuple of group names: (real, effective, savedSet, filesystem)."""
		return ProcStatusPerms(*[getgrgid(int(p)).gr_name for p in self.gids])  # status fields are strings
@property
def home(self):
"""If set, returns $HOME. Otherwise, returns home dir path of the effective UID."""
		try:
			return self.getEnvDict()["HOME"]
		except KeyError:
			# The property is named uids (self.uid did not exist), and the
			# status-derived fields are strings.
			return getpwuid(int(self.uids.effective)).pw_dir
# END: COMMON
#=============================
#=============================
# Default overriders
def raw(self, override=None):
"""Returns "raw" as set for the constructor if override is None, otherwise returns override."""
if override is None:
return self.rawDefaultSetting
else:
return override
def splitArgs(self, override):
if override is None:
return self.splitArgsDefaultSetting
else:
return override
def splitVars(self, override):
if override is None:
return self.splitVarsDefaultSetting
else:
return override
#=============================
def _typeString(self, byteString, raw=None):
"""Make sure the specified string is either bytes or str as specified.
Takes bytes(), determines whether we're currently configured to cast
bytes() into str() and returns accordingly.
If instructed to return non-raw, we will strip the trailing newline."""
if self.raw(raw):
return byteString
else:
return byteString.decode().rstrip("\n")
def _typeList(self, byteStringList, raw=None):
"""Make sure the specified list is either made up of bytes or str as specified.
Takes a list of bytes(), determines whether we're currently configured to cast
bytes() into str() and return the list with its elements changed accordingly."""
if self.raw(raw):
return byteStringList
else:
return [byteString.decode() for byteString in byteStringList]
	def getStatus(self, raw=None):
		"""Return the lines of /proc/<pid>/status as a set."""
		# The original split the bytes with a str pattern (a TypeError) and
		# had an empty docstring; split on the bytes newline instead.
		return set(self._typeList(self.info.status.split(b"\n"), raw=raw))
def getName(self, raw=None):
"""Name of the process without arguments."""
return Path(self.getPath(raw=raw)).name
def getPath(self, raw=None):
"""Executable path as it's found in the first element of the argv."""
return self.getArgv(raw=self.raw(raw))[0]
def getPid(self):
"""PID of the process. Returns as int."""
return int(self.info.pid)
def getArgvSplitByNul(self, raw=None):
"""List of arguments used to start the process, starting with the command name.
Args are split into a list by NUL."""
return self._typeList(self.info.cmdline.strip(b"\x00").split(b"\x00"), raw=self.raw(raw))
def getEnvSplitByNul(self, raw=None):
return self._typeList(self.info.environ.strip(b"\x00").split(b"\x00"), raw=self.raw(raw))
def getArgv(self, raw=None, splitArgs=None, withComm=True):
"""List of arguments used to start the process, optionally starting with the command name.
Args are split into a list by NUL, and, optionally, by equal sign."""
argv = Argv(self.getArgvSplitByNul(raw=self.raw(raw)), raw=self.raw(raw), withComm=withComm)
if self.splitArgs(splitArgs):
return argv.split
else:
return argv.unsplit
def getEnv(self, raw=None, splitVars=None):
"""List of environment variables.
Args are split into a list by NUL, and, optionally, by equal sign."""
env = Env(self.getEnvSplitByNul(raw=self.raw(raw)), raw=self.raw(raw))
if self.splitVars(splitVars):
return env.split
else:
return env.unsplit
def getEnvDict(self, raw=None):
"""Dict of environment variables in key/value pairs."""
# We assume that every environment variable is a key value pair.
# This explodes otherwise.
envDict = {}
env = self.getEnv(raw=self.raw(raw), splitVars=True)
envIndex = 0
while envIndex < len(env):
envDict[env[envIndex]] = env[envIndex+1]
envIndex += 2
return envDict
def hasArg(self, arg, raw=None, splitArgs=None):
"""Is the specified arg in the processes argv?"""
return arg in self.getArgv(raw=self.raw(raw), splitArgs=self.splitArgs(splitArgs), withComm=False)
def hasEnvVar(self, varName):
"""Is the specfied env var in the env of the process?"""
return varName in self.env.keys()
def hasEnvPair(self, varName, varValue, raw=None):
"""Is the given varName/varValue pair in env?"""
envDict = self.getEnvDict(raw=None)
if varName in envDict and envDict[varName] == varValue:
return True
else:
return False
def inArg(self, string, raw=None, splitArgs=None):
"""Is the specified substring in one of the args in argv?
Returns True if it is, False if it's not."""
return any([arg for arg in self.getArgv(raw=self.raw(raw), splitArgs=self.splitArgs(splitArgs)) if string in arg])
def inArgv(self, matchArgs, raw=None, splitArgs=None):
"""Is matchArgs a subset of argv?"""
argv = self.getArgv(raw=raw, splitArgs=splitArgs)
argvIndex = 0
for arg in argv:
matchIndex = 0
for matchArg in matchArgs:
try:
if not argv[argvIndex+matchIndex] == matchArg:
break
else:
matchIndex += 1
except IndexError:
return False # We've looped through all of argv without a match and overstepped.
			# matchIndex counts successful comparisons, so a full match equals
			# len(matchArgs); the original's len(matchArgs)-1 returned True on
			# near-misses and False on exact matches.
			if matchIndex == len(matchArgs):
				return True # matchArgs is a subset of argv.
argvIndex += 1
return False # We've looped through all of argv without a match and didn't overstep.
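# Example (editor's sketch): with argv ["python", "-m", "http.server", "8000"],
# inArgv(["-m", "http.server"]) is True (a consecutive run of argv), while
# inArgv(["python", "http.server"]) is False -- the subset must be contiguous.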
# TODO: Windows/MacOS X support, if that should ever
# be required.
# Towards that end, these variables should later become factories/metaclassed
# classes that determine the platform and return the appropriate class.
ProcessList = LinuxProcessList
ExternalProcess = ExternalLinuxProcess
# === dev_scripts/git/gsp.py | ajmal017/amp | BSD-3-Clause ===
#!/usr/bin/env python
"""
Stash the changes in a Git client without changing the client, besides a reset
of the index.
"""
import argparse
import logging
import helpers.dbg as dbg
import helpers.git as git
import helpers.parser as prsr
import helpers.printing as pri
import helpers.system_interaction as si
_LOG = logging.getLogger(__name__)
# #############################################################################
def _system(cmd, *args, **kwargs):
si.system(cmd, log_level=logging.INFO, *args, **kwargs)
def _print(msg):
msg = pri.color_highlight(msg, "blue")
print("\n" + msg)
def _main(parser: argparse.ArgumentParser) -> None:
args = parser.parse_args()
dbg.init_logger(verbosity=args.log_level)
#
_print("# Saving local changes...")
tag, was_stashed = git.git_stash_push(
"gsp", msg=args.message, log_level=logging.INFO
)
print("tag='%s'" % tag)
if not was_stashed:
# raise RuntimeError(msg)
pass
else:
_print("# Restoring local changes...")
git.git_stash_apply(mode="apply", log_level=logging.INFO)
#
_print("# Stash state ...")
cmd = r"git stash list"
_system(cmd, suppress_output=False)
def _parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument(
"-m", default=None, dest="message", help="Add message to commit"
)
prsr.add_verbosity_arg(parser)
return parser
if __name__ == "__main__":
_main(_parser())
# === src/platform.py | Tas-Kit/services | Apache-2.0 ===
"""Summary
"""
import requests
from .generic import APISERVICE
class PLATFORM(APISERVICE):
SCHEME = 'http'
HOST = 'platform'
PORT = 8000
API_VERSION = 'v1'
def get_platform_root_key(self, uid):
resp = requests.get(self.get_full_url('/internal/'), cookies={
'uid': str(uid)
})
return resp.json()
# === TESTS/crear_objetos_2.py | thiago1623/Tempo_Back-End | Unlicense ===
#!/usr/bin/python3
"""
SCRIPT QUE INCIALIZA CON FAKE DATA NUESTRA DB DE TEMPO
"""
import datetime
from models import storage
from models.city import City
from models.venue import Venue
from models.show import Show, ShowArtist
from models.artist import Artist
from models.social_organizer import SocialOrganizer
from models.social_artist import SocialArtist
from models.organizer import Organizer
from werkzeug.security import generate_password_hash, check_password_hash
from pprint import pprint
bogota = storage.session.query(City).filter_by(city_name="Bogotá").first()
print(bogota)
medellin = storage.session.query(City).filter_by(city_name="Medellín").first()
# Organizer
organizer = storage.session.query(Organizer).first()
# Venues
venue = {
"city_id": bogota.id,
"venue_name": "LOS OCARROS",
"address": "Calle del cartucho",
"phone": "23555",
"capacity": "150",
"latitude": "7.89",
"longitude": "0.98",
"description": "LOS OCARROS ES UN BAR QUE TE OFRECE MULTIPLES SERVICIOS ESTAMOS UBICADOS A DOS CUADRAS DEL PARQUE DE LOS NOVIOS :)"
}
venue_objeto = organizer.create_venue(venue)
venue2 = {
"city_id": bogota.id,
"venue_name": "LOS TINTEROS",
"address": "Comuna 13",
"phone": "376737326",
"capacity": "200",
"latitude": "8.90",
"longitude": "0.100",
"description": "LOS TINTEROS ES UN BAR QUE TE OFRECE MULTIPLES SERVICIOS ESTAMOS UBICADOS A DOS CUADRAS DEL PARQUE DE LAS FLORES :)"
}
venue_objeto2 = organizer.create_venue(venue2)
# Artists
artista = {
"artist_name": "tigres del norte",
"genre_artist": "Norteña"
}
artista_objeto = organizer.create_artist(artista)
artista2 = {
"artist_name": "tigres del norte",
"genre_artist": "Norteña"
}
artista_objeto2 = organizer.create_artist(artista2)
artista3 = {
"artist_name": "Olivia Rodrigo",
"genre_artist": "Rock"
}
artista_objeto3 = organizer.create_artist(artista3)
# Artist social media links
social = SocialArtist(
artist_id=artista_objeto.id,
link="facebook.com/los-tigres-del-norte",
description="Facebook"
)
social.save()
social2 = SocialArtist(
artist_id=artista_objeto2.id,
link="facebook.com/olivia",
description="Facebook"
)
social2.save()
social3 = SocialArtist(
artist_id=artista_objeto3.id,
link="facebook.com/olivia",
description="Facebook"
)
social3.save()
# Shows!
date_str = "2021-04-12"
year = int(date_str[0:4])
month = int(date_str[5:7])
day = int(date_str[8:10])
date = datetime.datetime(year, month, day, 0, 0, 0)
show = {
"name_show": "script 2 - show3 2021-04-12 en un mes Norteña bogotá",
"status_show": "en curso",
"price_ticket": "$250.000",
"date": date,
"hour": "8:00 pm",
"venue_id": venue_objeto.id,
}
show_objeto = organizer.create_show(show)
date_str2 = "2021-03-08"
year = int(date_str2[0:4])
month = int(date_str2[5:7])
day = int(date_str2[8:10])
date2 = datetime.datetime(year, month, day, 0, 0, 0)
show2 = {
"name_show": "script 2 - show3 2021-03-08 hoy Norteña Medellín",
"status_show": "cancelado",
"price_ticket": "$400.000",
"date": date2,
"hour": "11:00 pm",
"venue_id": venue_objeto.id,
}
show_objeto2 = organizer.create_show(show2)
date_str3 = "2021-03-12"
year = int(date_str3[0:4])
month = int(date_str3[5:7])
day = int(date_str3[8:10])
date3 = datetime.datetime(year, month, day, 0, 0, 0)
show3 = {
"name_show": "script 2 - show3 2021-03-12 en 4 dias Rock Bogotá",
"status_show": "en proceso",
"price_ticket": "$100.000",
"date": date3,
"hour": "12:00 am",
"venue_id": venue_objeto.id,
}
show_objeto3 = organizer.create_show(show3)
# ShowArtist: association between artists and shows
show_artist = ShowArtist(
artist_id=artista_objeto.id,
show_id=show_objeto.id
)
# not sure this class actually has this method
show_artist.save()
show_artist2 = ShowArtist(
artist_id=artista_objeto2.id,
show_id=show_objeto2.id
)
# not sure this class actually has this method
show_artist2.save()
show_artist3 = ShowArtist(
artist_id=artista_objeto3.id,
show_id=show_objeto3.id
)
# not sure this class actually has this method
show_artist3.save()
# filtering shows and artists :D
# print(show_artist2.artists)  # artist olivia
# print(show_artist.artists)  # artist tigres del norte
# print('----->')
# pprint(show_objeto.artists())
# should return the artist tigres del norte
# === openGaussBase/testcase/AI/AI_INDEXADV/Opengauss_Function_Ai_Indexadv_Ustore_Case0001.py | opengauss-mirror/Yat | MulanPSL-1.0 ===
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : 智能运维ai_indexadv模块
Case Name : 创建ustore表,组合使用where条件,比较运算符,inner join连接,order by
排序,逻辑运算符与模糊查询like进行查询索引推荐
Description :
1.修改参数enable_default_ustore_table为on,重启数据库
2.查看参数enable_default_ustore_table
3.建表1
4.建表2
5.建存储过程1
6.建存储过程2
7.向表1中插入100000条数据,向表2中插入数据并统计数据的数量
8.使用where条件,比较运算符,inner join连接,order by排序进行查询索引推荐
9.使用where条件,比较运算符,inner join连接,order by排序,逻辑运算符与
模糊查询like进行查询索引推荐
10.清理环境
Expect :
1.修改参数enable_default_ustore_table为on,重启数据库成功
2.返回参数enable_default_ustore_table为on
3.建表1成功
4.建表2成功
5.建存储过程1成功
6.建存储过程2成功
7.向表1中插入100000条数据,向表2中插入数据成功,返回表1,表2数据的数量
8.返回推荐的索引列
9.返回推荐的索引列
10.清理环境成功
History :
"""
import os
import unittest
from yat.test import Node
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
class SQL(unittest.TestCase):
def setUp(self):
self.logger = Logger()
self.dbuserNode = Node('PrimaryDbUser')
self.primary_sh = CommonSH('PrimaryDbUser')
self.common = Common()
self.Con = Constant()
self.table1 = 't_table_ai_indexadv_ustore_0001'
self.table2 = 't_table_ai_indexadv1_ustore_0001'
self.procedure1 = 'p_procedure_ustore_insert_0001'
self.procedure2 = 'p_procedure_ustore1_insert_0001'
self.index_advise = 'gs_index_advise'
def test_ai(self):
self.logger.info(f'-----{os.path.basename(__file__)} start-----')
        step = 'step1: set enable_default_ustore_table to on and restart ' \
               'the database; expect: parameter set to on and the ' \
               'database restarted successfully'
self.logger.info(step)
self.init_param = self.common.show_param(
'enable_default_ustore_table')
if 'on' not in self.init_param:
modify_para1 = self.primary_sh.execute_gsguc("reload",
f'''{self.Con.
GSGUC_SUCCESS_MSG}''',
f"enable_default_"
f"ustore_"
f"table=on",
single=True)
self.logger.info(modify_para1)
            self.assertTrue(modify_para1, 'execution failed ' + step)
msg = self.primary_sh.restart_db_cluster()
self.logger.info(msg)
            self.assertTrue(msg, 'execution failed: ' + step)
        step = 'step2: check the enable_default_ustore_table parameter;' \
               ' expect: parameter reported as on'
self.logger.info(step)
show_para = self.common.show_param(
'enable_default_ustore_table')
self.logger.info(show_para)
        self.assertIn('on', show_para,
                      "checking enable_default_ustore_table failed " + step)
        step = 'step3: create table 1; expect: table 1 created successfully'
self.logger.info(step)
create_table1 = self.primary_sh.execut_db_sql(f'''
drop table if exists {self.table1};
create table {self.table1}(col_int int,col_numeric numeric,
col_float float,col_char char(10),col_text text,col_time time
);''')
self.logger.info(create_table1)
        self.assertIn(self.Con.TABLE_CREATE_SUCCESS, create_table1,
                      "creating table 1 failed " + step)
        step = 'step4: create table 2; expect: table 2 created successfully'
self.logger.info(step)
create_table2 = self.primary_sh.execut_db_sql(f'''
drop table if exists {self.table2};
create table {self.table2}(col_int int,col_dec dec,
col_money money,col_boolean boolean,col_char char(10),
col_clob clob);''')
self.logger.info(create_table2)
        self.assertIn(self.Con.TABLE_CREATE_SUCCESS, create_table2,
                      "creating table 2 failed " + step)
        step = 'step5: create stored procedure 1; expect: created successfully'
self.logger.info(step)
create_procedure1 = self.primary_sh.execut_db_sql(f'''
create or replace procedure {self.procedure1}(a int) is
V_int int;
V_numeric numeric;
V_float float;
V_char char(10);
V_text text;
V_time time;
begin
for i in 1..a
loop
V_int :=i;
V_numeric :=i+1.11;
V_float :=i*5.55;
V_char :='x_'|| i;
V_text :='V_text_'|| i;
V_time :='19:41:20';
execute immediate 'insert into {self.table1} values
(:p1,:p2,:p3,:p4,:p5,:p6)
'using V_int,V_numeric,V_float,V_char,V_text,V_time;
end loop;
end;''')
self.logger.info(create_procedure1)
        self.assertIn(self.Con.CREATE_PROCEDURE_SUCCESS_MSG,
                      create_procedure1, "creating stored procedure 1 failed " + step)
        step = 'step6: create stored procedure 2; expect: created successfully'
self.logger.info(step)
create_procedure2 = self.primary_sh.execut_db_sql(f'''
create or replace procedure {self.procedure2}() is
V_int int;
V_dec dec;
V_money money;
V_boolean boolean;
V_char char(10);
V_clob clob;
begin
for i in 2000..100000
loop
V_int :=i;
V_dec :=i*8.88;
V_money :=i*2;
V_boolean :='t';
V_char :=i ||'_x';
V_clob :='V_clob_'|| i;
execute immediate 'insert into {self.table2} values
(:p1,:p2,:p3,:p4,:p5,:p6)
'using V_int,V_dec,V_money,V_boolean,V_char,V_clob;
end loop;
end;''')
self.logger.info(create_procedure2)
        self.assertIn(self.Con.CREATE_PROCEDURE_SUCCESS_MSG,
                      create_procedure2, "creating stored procedure 2 failed " + step)
        step = 'step7: insert 100000 rows into table 1, insert rows into ' \
               'table 2 and count the rows; expect: inserts succeed and ' \
               'the row counts of tables 1 and 2 are returned'
self.logger.info(step)
insert_data = self.primary_sh.execut_db_sql(f'''
call {self.procedure1}(100000);
select count(*) from {self.table1} ;
call {self.procedure2}();
select count(*) from {self.table2} ;''')
self.logger.info(insert_data)
        text = 'inserting into tables 1 and 2 and counting the rows failed '
self.assertIn('p_procedure_ustore_insert_0001', insert_data,
text + step)
self.assertIn('p_procedure_ustore1_insert_0001', insert_data,
text + step)
self.assertIn('100000', insert_data, text + step)
self.assertIn('98001', insert_data, text + step)
        step = 'step8: query index recommendations using a where clause, ' \
               'comparison operators, an inner join and order by sorting; ' \
               'expect: recommended index columns returned'
self.logger.info(step)
index_advise = self.primary_sh.execut_db_sql(f'''
select * from {self.index_advise}('select {self.table1}.col_int,
{self.table1}.col_numeric,{self.table2}.col_money
from {self.table1} inner join {self.table2}
on {self.table1}.col_int = {self.table2}.col_int
where {self.table1}.col_time=''19:41:20''
order by {self.table1}.col_int desc limit 20;');
select * from {self.index_advise}('select {self.table1}.col_int,
{self.table1}.col_numeric,{self.table2}.col_money
from {self.table1} inner join {self.table2}
on {self.table1}.col_int = {self.table2}.col_int
where {self.table1}.col_int>20000
order by {self.table1}.col_int desc limit 20;');''')
self.logger.info(index_advise)
str1 = f'{self.table1} | col_int'
str2 = f'{self.table2} | col_int'
        self.assertEqual(index_advise.count(str1), 2, "execution failed " + step)
        self.assertEqual(index_advise.count(str2), 2, "execution failed " + step)
        step = 'step9: query index recommendations using a where clause, ' \
               'comparison operators, a left join, order by sorting, ' \
               'logical operators and fuzzy like matching; ' \
               'expect: recommended index columns returned'
self.logger.info(step)
index_advise = self.primary_sh.execut_db_sql(f'''
select * from {self.index_advise}('select {self.table1}.col_int,
{self.table1}.col_numeric,{self.table2}.col_money
from {self.table1} left join {self.table2}
on {self.table1}.col_int = {self.table2}.col_int
where {self.table1}.col_text like ''V_text_2999%''
order by {self.table1}.col_int desc limit 20;');
select * from {self.index_advise}('select {self.table1}.col_int,
{self.table1}.col_numeric,{self.table2}.col_money
from {self.table1} left join {self.table2}
on {self.table1}.col_int = {self.table2}.col_int
where {self.table2}.col_dec like ''%.88''
order by {self.table1}.col_int desc limit 20;');''')
self.logger.info(index_advise)
str1 = f'{self.table1} | col_int'
str2 = f'{self.table2} | col_int'
        self.assertEqual(index_advise.count(str1), 2, "Execution failed" + step)
        self.assertEqual(index_advise.count(str2), 2, "Execution failed" + step)
def tearDown(self):
        step = 'step10: clean up the environment expect: cleanup succeeds'
self.logger.info(step)
de_table = self.primary_sh.execut_db_sql(f'''
drop table {self.table1};
drop table {self.table2};''')
self.logger.info(de_table)
de_procedure = self.primary_sh.execut_db_sql(f'''
drop procedure {self.procedure1};
drop procedure {self.procedure2};''')
self.logger.info(de_procedure)
para = self.common.show_param(
'enable_default_ustore_table')
self.logger.info(para)
init_param = self.init_param
if init_param not in para:
            modify_para = self.primary_sh.execute_gsguc(
                "reload",
                f'{self.Con.GSGUC_SUCCESS_MSG}',
                f'enable_default_ustore_table={init_param}',
                single=True)
            self.logger.info(modify_para)
            self.assertTrue(modify_para, 'Execution failed' + step)
        self.assertTrue(de_table.count('DROP TABLE') == 2
                        and de_procedure.count('DROP PROCEDURE') == 2,
                        "Execution failed" + step)
self.logger.info(f'-----{os.path.basename(__file__)} end-----')
| 41.577206 | 84 | 0.568927 |
ef31b882ed3d467d88a2f39dad29ea856f02d1f4 | 708 | py | Python | weather/managers/measure_manager.py | Arthuchaut/weather-monitor | 736dfcd51c5c14a923e410a374dc08121eb0811c | ["MIT"] | null | null | null | weather/managers/measure_manager.py | Arthuchaut/weather-monitor | 736dfcd51c5c14a923e410a374dc08121eb0811c | ["MIT"] | null | null | null | weather/managers/measure_manager.py | Arthuchaut/weather-monitor | 736dfcd51c5c14a923e410a374dc08121eb0811c | ["MIT"] | null | null | null |
from django.db import models
class MeasureManager(models.Manager):
'''The measure management class.'''
def latest_measure_num(self) -> int:
'''Returns the latest measure_num.
Returns:
int: The latest measure num. None if no measure exists yet.
'''
measure: dict[str, int] = self.aggregate(models.Max('measure_num'))
return measure['measure_num__max']
def next_measure_num(self) -> int:
'''Returns the next measure_num.
Returns:
int: The next measure num. 1 if no measure is registered yet.
'''
measure_num: int = self.latest_measure_num()
return measure_num + 1 if measure_num else 1
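# A minimal wiring sketch, not part of this module: in an app's models.py the
# manager would be attached to a model roughly like this. The Measure model
# and its fields are assumptions made for illustration only.
#
#     class Measure(models.Model):
#         measure_num = models.PositiveIntegerField()
#         value = models.FloatField()
#
#         objects = MeasureManager()
#
#     # Number a new reading before saving it:
#     Measure.objects.create(
#         measure_num=Measure.objects.next_measure_num(), value=21.5)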
| 27.230769 | 75 | 0.628531 |
848b9d1cf5c2f1548c3fb6d7b7abbd0ee97c0edc | 1,821 | py | Python | tests/optimization_test.py | Paethon/pytorch-pretrained-BERT | 2152bfeae82439600dc5b5deab057a3c4331c62d | ["Apache-2.0"] | 244 | 2019-09-06T07:53:57.000Z | 2022-03-28T19:32:15.000Z | tests/optimization_test.py | Paethon/pytorch-pretrained-BERT | 2152bfeae82439600dc5b5deab057a3c4331c62d | ["Apache-2.0"] | 13 | 2019-10-19T06:02:16.000Z | 2022-03-10T12:46:08.000Z | tests/optimization_test.py | Paethon/pytorch-pretrained-BERT | 2152bfeae82439600dc5b5deab057a3c4331c62d | ["Apache-2.0"] | 61 | 2019-09-14T07:06:57.000Z | 2022-03-16T07:02:52.000Z |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import torch
from pytorch_pretrained_bert import BertAdam
class OptimizationTest(unittest.TestCase):
def assertListAlmostEqual(self, list1, list2, tol):
self.assertEqual(len(list1), len(list2))
for a, b in zip(list1, list2):
self.assertAlmostEqual(a, b, delta=tol)
def test_adam(self):
w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True)
target = torch.tensor([0.4, 0.2, -0.5])
criterion = torch.nn.MSELoss()
# No warmup, constant schedule, no gradient clipping
optimizer = BertAdam(params=[w], lr=2e-1,
weight_decay=0.0,
max_grad_norm=-1)
for _ in range(100):
loss = criterion(w, target)
loss.backward()
optimizer.step()
w.grad.detach_() # No zero_grad() function on simple tensors. we do it ourselves.
w.grad.zero_()
self.assertListAlmostEqual(w.tolist(), [0.4, 0.2, -0.5], tol=1e-2)
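    def test_adam_with_warmup(self):
        # A sketch added for illustration, not from the original file: the
        # same toy problem with linear warmup over the first 10% of 100
        # steps and BertAdam's default gradient clipping. Only a loss
        # decrease is asserted, since the decaying schedule need not reach
        # the tight tolerance used above.
        w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True)
        target = torch.tensor([0.4, 0.2, -0.5])
        criterion = torch.nn.MSELoss()
        initial_loss = criterion(w, target).item()
        optimizer = BertAdam(params=[w], lr=2e-1,
                             warmup=0.1, t_total=100,
                             schedule='warmup_linear')
        for _ in range(100):
            loss = criterion(w, target)
            loss.backward()
            optimizer.step()
            w.grad.detach_()
            w.grad.zero_()
        self.assertLess(criterion(w, target).item(), initial_loss)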
if __name__ == "__main__":
unittest.main()
| 35.705882 | 93 | 0.654585 |
ec9cd194207764029fc656705bde74f82ce83cb4 | 13,125 | py | Python | trainers/DepthTrainer.py | anglixjtu/MSG_CHN_WACV20 | 6910894cf3caed2ffde27586f96b132b0c1d1a98 | ["Apache-2.0"] | 61 | 2020-06-16T07:20:23.000Z | 2022-02-14T22:33:13.000Z | trainers/DepthTrainer.py | anglixjtu/MSG_CHN_WACV20 | 6910894cf3caed2ffde27586f96b132b0c1d1a98 | ["Apache-2.0"] | 13 | 2020-06-19T02:31:18.000Z | 2022-01-11T12:42:21.000Z | trainers/DepthTrainer.py | anglixjtu/MSG_CHN_WACV20 | 6910894cf3caed2ffde27586f96b132b0c1d1a98 | ["Apache-2.0"] | 10 | 2020-07-11T04:08:59.000Z | 2021-11-15T13:25:31.000Z |
"""
This script is modified from the work of Abdelrahman Eldesokey.
Find more details from https://github.com/abdo-eldesokey/nconv
"""
########################################
__author__ = "Abdelrahman Eldesokey"
__license__ = "GNU GPLv3"
__version__ = "0.1"
__maintainer__ = "Abdelrahman Eldesokey"
__email__ = "abdo.eldesokey@gmail.com"
########################################
from trainers.trainer import Trainer # from CVLPyDL repo
import torch
import matplotlib.pyplot as plt
import os.path
from utils.AverageMeter import AverageMeter
from utils.saveTensorToImage import *
from utils.ErrorMetrics import *
import time
from modules.losses import *
import cv2
err_metrics = ['MAE()', 'RMSE()', 'iMAE()', 'iRMSE()']
class KittiDepthTrainer(Trainer):
def __init__(self, net, params, optimizer, objective, lr_scheduler, dataloaders, dataset_sizes,
workspace_dir, sets=['train', 'val'], use_load_checkpoint=None, K= None):
# Call the constructor of the parent class (trainer)
super(KittiDepthTrainer, self).__init__(net, optimizer, lr_scheduler, objective, use_gpu=params['use_gpu'],
workspace_dir=workspace_dir)
self.lr_scheduler = lr_scheduler
self.dataloaders = dataloaders
self.dataset_sizes = dataset_sizes
self.use_load_checkpoint = use_load_checkpoint
self.params = params
self.save_chkpt_each = params['save_chkpt_each']
self.sets = sets
self.save_images = params['save_out_imgs']
self.load_rgb = params['load_rgb'] if 'load_rgb' in params else False
self.exp_name = params['exp_name']
for s in self.sets: self.stats[s + '_loss'] = []
####### Training Function #######
def train(self, max_epochs):
print('#############################\n### Experiment Parameters ###\n#############################')
for k, v in self.params.items(): print('{0:<22s} : {1:}'.format(k, v))
# Load last save checkpoint
if self.use_load_checkpoint != None:
if isinstance(self.use_load_checkpoint, int):
if self.use_load_checkpoint > 0:
print('=> Loading checkpoint {} ...'.format(self.use_load_checkpoint))
if self.load_checkpoint(self.use_load_checkpoint):
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
elif self.use_load_checkpoint == -1:
print('=> Loading last checkpoint ...')
if self.load_checkpoint():
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
elif isinstance(self.use_load_checkpoint, str):
print('loading checkpoint from : ' + self.use_load_checkpoint)
if self.load_checkpoint(self.use_load_checkpoint):
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
start_full_time = time.time()
        print('Starting epoch %d at ' % self.epoch)
print(time.strftime('%m.%d.%H:%M:%S', time.localtime(time.time())))
        for epoch in range(self.epoch, max_epochs + 1):  # range excludes its stop value, so +1 makes max_epochs the final epoch
start_epoch_time = time.time()
self.epoch = epoch
# Decay Learning Rate
self.lr_scheduler.step() # LR decay
print('\nTraining Epoch {}: (lr={}) '.format(epoch, self.optimizer.param_groups[0]['lr'])) # , end=' '
# Train the epoch
loss_meter = self.train_epoch()
# Add the average loss for this epoch to stats
for s in self.sets: self.stats[s + '_loss'].append(loss_meter[s].avg)
# Save checkpoint
if self.use_save_checkpoint and (self.epoch) % self.save_chkpt_each == 0:
self.save_checkpoint()
print('\n => Checkpoint was saved successfully!\n')
end_epoch_time = time.time()
            print('Finished epoch %d at ' % self.epoch)
print(time.strftime('%m.%d.%H:%M:%S\n', time.localtime(time.time())))
epoch_duration = end_epoch_time - start_epoch_time
self.training_time += epoch_duration
if self.params['print_time_each_epoch']:
print(
'Have trained %.2f HRs, and %.2f HRs per epoch, [%s]\n' % (
self.training_time / 3600, epoch_duration / 3600, self.exp_name))
# Save the final model
torch.save(self.net, self.workspace_dir + '/final_model.pth')
print("Training [%s] Finished using %.2f HRs." % (self.exp_name, self.training_time / 3600))
return self.net
def train_epoch(self):
device = torch.device("cuda:" + str(self.params['gpu_id']) if torch.cuda.is_available() else "cpu")
loss_meter = {}
for s in self.sets: loss_meter[s] = AverageMeter()
for s in self.sets:
# Iterate over data.
for data in self.dataloaders[s]:
start_iter_time = time.time()
inputs_d, C, labels, item_idxs, inputs_rgb = data
inputs_d = inputs_d.to(device)
C = C.to(device)
labels = labels.to(device)
inputs_rgb = inputs_rgb.to(device)
outputs = self.net(inputs_d, inputs_rgb)
# Calculate loss for valid pixel in the ground truth
loss11 = self.objective(outputs[0], labels)
loss12 = self.objective(outputs[1], labels)
loss14 = self.objective(outputs[2], labels)
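                # Multi-scale supervision: the suffixes suggest outputs at
                # full, 1/2 and 1/4 scale; the coarse scales are down-weighted
                # after epoch 5 and dropped entirely after epoch 10.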
if self.epoch < 6:
loss = loss14 + loss12 + loss11
elif self.epoch < 11:
loss = 0.1 * loss14 + 0.1 * loss12 + loss11
else:
loss = loss11
# backward + optimize only if in training phase
loss.backward()
self.optimizer.step()
self.optimizer.zero_grad()
# statistics
loss_meter[s].update(loss11.item(), inputs_d.size(0))
end_iter_time = time.time()
iter_duration = end_iter_time - start_iter_time
if self.params['print_time_each_iter']:
                    print('Finished the iteration in %.2f s.\n' % iter_duration)
                    print('Loss within the current iter: {:.8f}\n'.format(loss_meter[s].avg))
print('[{}] Loss: {:.8f}'.format(s, loss_meter[s].avg))
torch.cuda.empty_cache()
return loss_meter
####### Evaluation Function #######
def evaluate(self):
print('< Evaluate mode ! >')
# Load last save checkpoint
if self.use_load_checkpoint != None:
if isinstance(self.use_load_checkpoint, int):
if self.use_load_checkpoint > 0:
print('=> Loading checkpoint {} ...'.format(self.use_load_checkpoint))
if self.load_checkpoint(self.use_load_checkpoint):
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
elif self.use_load_checkpoint == -1:
print('=> Loading last checkpoint ...')
if self.load_checkpoint():
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
elif isinstance(self.use_load_checkpoint, str):
print('loading checkpoint from : ' + self.use_load_checkpoint)
if self.load_checkpoint(self.use_load_checkpoint):
print('Checkpoint was loaded successfully!\n')
else:
print('Evaluating using initial parameters')
self.net.train(False)
# AverageMeters for Loss
loss_meter = {}
for s in self.sets: loss_meter[s] = AverageMeter()
# AverageMeters for error metrics
err = {}
for m in err_metrics: err[m] = AverageMeter()
# AverageMeters for time
times = AverageMeter()
device = torch.device("cuda:" + str(self.params['gpu_id']) if torch.cuda.is_available() else "cpu")
with torch.no_grad():
for s in self.sets:
print('Evaluating on [{}] set, Epoch [{}] ! \n'.format(s, str(self.epoch - 1)))
# Iterate over data.
Start_time = time.time()
for data in self.dataloaders[s]:
torch.cuda.synchronize()
start_time = time.time()
inputs_d, C, labels, item_idxs, inputs_rgb = data
inputs_d = inputs_d.to(device)
C = C.to(device)
labels = labels.to(device)
inputs_rgb = inputs_rgb.to(device)
outputs = self.net(inputs_d, inputs_rgb)
if len(outputs) > 1:
outputs = outputs[0]
torch.cuda.synchronize()
duration = time.time() - start_time
times.update(duration / inputs_d.size(0), inputs_d.size(0))
if s == 'selval' or s == 'val' or s == 'test':
# Calculate loss for valid pixel in the ground truth
loss = self.objective(outputs, labels, self.epoch)
# statistics
loss_meter[s].update(loss.item(), inputs_d.size(0))
# Convert data to depth in meters before error metrics
outputs[outputs == 0] = -1
if not self.load_rgb:
outputs[outputs == outputs[0, 0, 0, 0]] = -1
labels[labels == 0] = -1
if self.params['invert_depth']:
outputs = 1 / outputs
labels = 1 / labels
outputs[outputs == -1] = 0
labels[labels == -1] = 0
outputs *= self.params['data_normalize_factor'] / 256
labels *= self.params['data_normalize_factor'] / 256
# Calculate error metrics
for m in err_metrics:
if m.find('Delta') >= 0:
fn = globals()['Deltas']()
error = fn(outputs, labels)
err['Delta1'].update(error[0], inputs_d.size(0))
err['Delta2'].update(error[1], inputs_d.size(0))
err['Delta3'].update(error[2], inputs_d.size(0))
break
else:
fn = eval(m) # globals()[m]()
error = fn(outputs, labels)
err[m].update(error.item(), inputs_d.size(0))
# Save output images (optional)
if s in ['test']:
outputs = outputs.data
outputs *= 256
saveTensorToImage(outputs, item_idxs, os.path.join(self.workspace_dir,
s + '_output_' + 'epoch_' + str(
self.epoch)))
average_time = (time.time() - Start_time) / len(self.dataloaders[s].dataset)
print('Evaluation results on [{}]:\n============================='.format(s))
print('[{}]: {:.8f}'.format('Loss', loss_meter[s].avg))
for m in err_metrics: print('[{}]: {:.8f}'.format(m, err[m].avg))
print('[{}]: {:.4f}'.format('Time', times.avg))
print('[{}]: {:.4f}'.format('Time_av', average_time))
# Save evaluation metric to text file
fname = 'error_' + s + '_epoch_' + str(self.epoch - 1) + '.txt'
with open(os.path.join(self.workspace_dir, fname), 'w') as text_file:
text_file.write(
'Evaluation results on [{}], Epoch [{}]:\n==========================================\n'.format(
s, str(self.epoch - 1)))
text_file.write('[{}]: {:.8f}\n'.format('Loss', loss_meter[s].avg))
for m in err_metrics: text_file.write('[{}]: {:.8f}\n'.format(m, err[m].avg))
text_file.write('[{}]: {:.4f}\n'.format('Time', times.avg))
torch.cuda.empty_cache()
| 41.273585 | 119 | 0.507352 |
5dcc63ff27c2064e7afd9208da56ff7c0a63cb86 | 1,745 | py | Python | discouple/broker.py | Gelbpunkt/discouple | d753fe88ca46ba29d8204fab775e662c1156e0bc | ["MIT"] | null | null | null | discouple/broker.py | Gelbpunkt/discouple | d753fe88ca46ba29d8204fab775e662c1156e0bc | ["MIT"] | null | null | null | discouple/broker.py | Gelbpunkt/discouple | d753fe88ca46ba29d8204fab775e662c1156e0bc | ["MIT"] | null | null | null |
import asyncio
import traceback
from abc import ABC
import aio_pika
import orjson
class Broker(ABC):
def __init__(self, callback=None):
async def _default_callback(*_, **__):
pass
self.callback = callback or _default_callback
async def connect(self, group, *args, **kwargs):
pass
async def subscribe(self, queue, *events):
pass
async def send(self, data):
pass
class AMQPBroker(Broker):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.connection = None
self.channel = None
self.exchange = None
async def connect(self, group, *args, **kwargs):
self.connection = await aio_pika.connect_robust(*args, **kwargs)
self.channel = await self.connection.channel()
self.exchange = await self.channel.declare_exchange(
group, type="direct", durable=True
)
async def subscribe(self, queue, *events):
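        # auto_delete=not queue: an unnamed queue is removed once its
        # consumer disconnects, while an explicitly named queue is kept.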
queue = await self.channel.declare_queue(queue, auto_delete=not queue)
for event in events:
await queue.bind(self.exchange, event.upper())
async with queue.iterator() as queue_iter:
async for msg in queue_iter:
try:
async with msg.process():
data = orjson.loads(msg.body)
await self.callback(msg.routing_key.upper(), data)
except asyncio.CancelledError:
raise
except Exception:
traceback.print_exc()
async def send(self, data):
await self.exchange.publish(
aio_pika.Message(body=orjson.dumps(data)), routing_key="SEND"
)
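if __name__ == "__main__":
    # Usage sketch appended for illustration; it assumes a local RabbitMQ
    # server, and the group, queue and event names are made up.
    async def _print_event(event, data):
        print(event, data)

    async def _main():
        broker = AMQPBroker(callback=_print_event)
        await broker.connect("gateway", "amqp://guest:guest@localhost/")
        await broker.subscribe("worker", "message_create")  # consumes forever

    asyncio.run(_main())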
| 27.698413 | 78 | 0.590831 |
de8f2b21183a21c17e9a0152eb080795619c7923 | 6,763 | py | Python | algo.py | hashlib/Prokudin-Gorsky | 4d95b36a1913d217d9d2e288667e824c4e59d5df | ["MIT"] | 1 | 2019-03-08T19:58:29.000Z | 2019-03-08T19:58:29.000Z | algo.py | hashlib/Prokudin-Gorsky | 4d95b36a1913d217d9d2e288667e824c4e59d5df | ["MIT"] | 1 | 2019-10-23T19:03:48.000Z | 2019-10-23T19:03:48.000Z | algo.py | hashlib/Prokudin-Gorsky | 4d95b36a1913d217d9d2e288667e824c4e59d5df | ["MIT"] | 1 | 2021-02-22T22:16:43.000Z | 2021-02-22T22:16:43.000Z |
import threading
import sys
import os
import skimage
import skimage.io
import skimage.color
import skimage.transform
import numpy
import warnings
from time import time
class ThreadWithReturnValue(threading.Thread):
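    """threading.Thread discards the target's return value; this subclass
    captures it in run() and hands it back from join()."""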
def __init__(self, group=None, target=None, name=None,
args=(), kwargs={}, Verbose=None):
threading.Thread.__init__(self, group, target, name, args, kwargs)
self._return = None
def run(self):
if self._target is not None:
self._return = self._target(*self._args,
**self._kwargs)
def join(self, *args):
threading.Thread.join(self, *args)
return self._return
def get_the_best_shifts(channel_a, channel_b, row_shift_search_range,
col_shift_search_range):
"""The function brute forcing row and column shifts for channel_a and
finding such a shift that gives the maximal correlation coefficient."""
min_row_shift, max_row_shift = row_shift_search_range
min_col_shift, max_col_shift = col_shift_search_range
max_correlation = None
the_best_row_shift = min_row_shift - 1
the_best_col_shift = min_col_shift - 1
channel_a_shifted = numpy.roll(
channel_a, (min_row_shift - 1, min_col_shift - 1), axis=(0, 1))
direction = -1
"""Brute force search to find the best shifts"""
for row_shift in range(min_row_shift, max_row_shift + 1):
channel_a_shifted = numpy.roll(channel_a_shifted, 1, axis=0)
"""Entering rolling direction helps to speed up algorithm.
We making numpy roll only for one column every iteration
instead rolling the whole channel on min_col_shift."""
direction = -direction
if direction == -1:
min_col_shift, max_col_shift = max_col_shift, min_col_shift
for col_shift in range(min_col_shift, max_col_shift + direction,
direction):
channel_a_shifted = numpy.roll(
channel_a_shifted, direction, axis=1)
cur_correlation = (channel_a_shifted * channel_b).sum()
if max_correlation is None or cur_correlation > max_correlation:
max_correlation = cur_correlation
the_best_row_shift = row_shift
the_best_col_shift = col_shift
if direction == -1:
min_col_shift, max_col_shift = max_col_shift, min_col_shift
return (the_best_row_shift, the_best_col_shift)
def pyramid_shifts_search(channel_a, channel_b):
"""Searching the best shift for channel_a to rich the maximal
correlation coefficient with channel_b.
The function uses image pyramid to solve the problem."""
"""Setting image pyramid's depth"""
depth = 3
if channel_a.shape[0] > 1000 and channel_a.shape[1] > 1000:
depth = 5
"""Creating image pyramids"""
channel_a_pyramid = tuple(skimage.transform.pyramid_gaussian(
channel_a, max_layer=depth, downscale=2, multichannel=False))
channel_b_pyramid = tuple(skimage.transform.pyramid_gaussian(
channel_b, max_layer=depth, downscale=2, multichannel=False))
row_shift_search_range = (-7, 7)
col_shift_search_range = (-7, 7)
"""Calculating the best shifts from the smallest to the largest image"""
for cur_a, cur_b in reversed(tuple(zip(channel_a_pyramid,
channel_b_pyramid))):
the_best_shifts = get_the_best_shifts(cur_a, cur_b,
row_shift_search_range,
col_shift_search_range)
"""Transition to larger image"""
row_shift_search_range = (the_best_shifts[0] * 2 - 2,
the_best_shifts[0] * 2 + 2)
col_shift_search_range = (the_best_shifts[1] * 2 - 2,
the_best_shifts[1] * 2 + 2)
return the_best_shifts
def main(path):
img = skimage.img_as_float(skimage.io.imread(path))
img_r, img_c = img.shape[:2]
img_r -= img_r % 3
cut = 0.1
channel_row_cut = int(img_r // 3 * cut)
channel_col_cut = int(img_c * cut)
channel_size = img_r // 3
"""Splitting image into 3 channels (Blue, Green, Red) and getting rid
of borders."""
b = img[channel_row_cut: channel_size - channel_row_cut,
channel_col_cut: -channel_col_cut]
g = img[channel_size + channel_row_cut: 2 * channel_size - channel_row_cut,
channel_col_cut: -channel_col_cut]
r = img[2 * channel_size + channel_row_cut: img_r - channel_row_cut,
channel_col_cut: -channel_col_cut]
"""Setting up two threads to calculate the best shifts using image
pyramid"""
find_the_best_b_shifts_thread = ThreadWithReturnValue(
target=pyramid_shifts_search, args=(b, g))
find_the_best_r_shifts_thread = ThreadWithReturnValue(
target=pyramid_shifts_search, args=(r, g))
find_the_best_b_shifts_thread.start()
find_the_best_r_shifts_thread.start()
b_shifts = find_the_best_b_shifts_thread.join()
r_shifts = find_the_best_r_shifts_thread.join()
b = numpy.roll(b, b_shifts, axis=(0, 1))
r = numpy.roll(r, r_shifts, axis=(0, 1))
"""Calculating final image size"""
total_cut = (max(abs(b_shifts[0]), abs(r_shifts[0])), max(
abs(b_shifts[1]), abs(r_shifts[1])))
"""Substraction in slices needs for cases
when total_cut[0] or total_cut[1] == 0"""
b = b[total_cut[0]: b.shape[0] - total_cut[0],
total_cut[1]: b.shape[1] - total_cut[1]]
g = g[total_cut[0]: g.shape[0] - total_cut[0],
total_cut[1]: g.shape[1] - total_cut[1]]
r = r[total_cut[0]: r.shape[0] - total_cut[0],
total_cut[1]: r.shape[1] - total_cut[1]]
"""Saving final image"""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
skimage.io.imsave(
"result/" + path[path.rfind('/') + 1:],
skimage.img_as_uint(numpy.dstack((r, g, b))))
if __name__ == "__main__":
if "result" in os.listdir("./"):
already_colored = os.listdir("./result")
else:
os.mkdir("./result")
already_colored = []
for path in sys.argv[1:]:
try:
print(f"Started to work with {path}!", end=' ', flush=True)
if path[path.rfind('/') + 1:] in already_colored:
print(f"Skipping {path}, because already colored")
continue
start_time = time()
main(path)
print(f"Time spent: {round(time() - start_time, 3)} sec.",
flush=True)
except FileNotFoundError:
print(f'File with path "{path}" not found!')
| 34.860825 | 79 | 0.630046 |
e1ea871e9cc01bace108f0d7f3a662f14fb05155 | 28,531 | py | Python | jsonschema/validators.py | e2forks/jsonschema | 055c7f6e70d2a463a8fc9a9bc50e4cb98900a5de | ["MIT"] | null | null | null | jsonschema/validators.py | e2forks/jsonschema | 055c7f6e70d2a463a8fc9a9bc50e4cb98900a5de | ["MIT"] | null | null | null | jsonschema/validators.py | e2forks/jsonschema | 055c7f6e70d2a463a8fc9a9bc50e4cb98900a5de | ["MIT"] | 1 | 2019-08-29T00:57:45.000Z | 2019-08-29T00:57:45.000Z |
from __future__ import division
from warnings import warn
import contextlib
import json
import numbers
from six import add_metaclass
from jsonschema import (
_legacy_validators,
_types,
_utils,
_validators,
exceptions,
)
from jsonschema.compat import (
Sequence,
int_types,
iteritems,
lru_cache,
str_types,
unquote,
urldefrag,
urljoin,
urlopen,
urlsplit,
)
# Sigh. https://gitlab.com/pycqa/flake8/issues/280
# https://github.com/pyga/ebb-lint/issues/7
# Imported for backwards compatibility.
from jsonschema.exceptions import ErrorTree
ErrorTree
class _DontDoThat(Exception):
"""
Raised when a Validators with non-default type checker is misused.
Asking one for DEFAULT_TYPES doesn't make sense, since type checkers exist
for the unrepresentable cases where DEFAULT_TYPES can't represent the type
relationship.
"""
def __str__(self):
return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers"
validators = {}
meta_schemas = _utils.URIDict()
def _generate_legacy_type_checks(types=()):
"""
Generate newer-style type checks out of JSON-type-name-to-type mappings.
Arguments:
types (dict):
A mapping of type names to their Python types
Returns:
A dictionary of definitions to pass to `TypeChecker`
"""
types = dict(types)
def gen_type_check(pytypes):
pytypes = _utils.flatten(pytypes)
def type_check(checker, instance):
if isinstance(instance, bool):
if bool not in pytypes:
return False
return isinstance(instance, pytypes)
return type_check
definitions = {}
for typename, pytypes in iteritems(types):
definitions[typename] = gen_type_check(pytypes)
return definitions
_DEPRECATED_DEFAULT_TYPES = {
u"array": list,
u"boolean": bool,
u"integer": int_types,
u"null": type(None),
u"number": numbers.Number,
u"object": dict,
u"string": str_types,
}
_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker(
type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES),
)
def validates(version):
"""
Register the decorated validator for a ``version`` of the specification.
Registered validators and their meta schemas will be considered when
parsing ``$schema`` properties' URIs.
Arguments:
version (str):
An identifier to use as the version's name
Returns:
collections.Callable:
a class decorator to decorate the validator with the version
"""
def _validates(cls):
validators[version] = cls
meta_schema_id = cls.ID_OF(cls.META_SCHEMA)
if meta_schema_id:
meta_schemas[meta_schema_id] = cls
return cls
return _validates
def _DEFAULT_TYPES(self):
if self._CREATED_WITH_DEFAULT_TYPES is None:
raise _DontDoThat()
warn(
(
"The DEFAULT_TYPES attribute is deprecated. "
"See the type checker attached to this validator instead."
),
DeprecationWarning,
stacklevel=2,
)
return self._DEFAULT_TYPES
class _DefaultTypesDeprecatingMetaClass(type):
DEFAULT_TYPES = property(_DEFAULT_TYPES)
def _id_of(schema):
if schema is True or schema is False:
return u""
return schema.get(u"$id", u"")
def create(
meta_schema,
validators=(),
version=None,
default_types=None,
type_checker=None,
id_of=_id_of,
):
"""
Create a new validator class.
Arguments:
meta_schema (collections.Mapping):
the meta schema for the new validator class
validators (collections.Mapping):
a mapping from names to callables, where each callable will
validate the schema property with the given name.
Each callable should take 4 arguments:
1. a validator instance,
2. the value of the property being validated within the
instance
3. the instance
4. the schema
version (str):
an identifier for the version that this validator class will
validate. If provided, the returned validator class will have its
``__name__`` set to include the version, and also will have
`jsonschema.validators.validates` automatically called for the
given version.
type_checker (jsonschema.TypeChecker):
a type checker, used when applying the :validator:`type` validator.
If unprovided, a `jsonschema.TypeChecker` will be created with
a set of default types typical of JSON Schema drafts.
default_types (collections.Mapping):
.. deprecated:: 3.0.0
Please use the type_checker argument instead.
If set, it provides mappings of JSON types to Python types that
will be converted to functions and redefined in this object's
`jsonschema.TypeChecker`.
id_of (collections.Callable):
A function that given a schema, returns its ID.
Returns:
a new `jsonschema.IValidator` class
"""
if default_types is not None:
if type_checker is not None:
raise TypeError(
"Do not specify default_types when providing a type checker.",
)
_created_with_default_types = True
warn(
(
"The default_types argument is deprecated. "
"Use the type_checker argument instead."
),
DeprecationWarning,
stacklevel=2,
)
type_checker = _types.TypeChecker(
type_checkers=_generate_legacy_type_checks(default_types),
)
else:
default_types = _DEPRECATED_DEFAULT_TYPES
if type_checker is None:
_created_with_default_types = False
type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES
elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES:
_created_with_default_types = False
else:
_created_with_default_types = None
@add_metaclass(_DefaultTypesDeprecatingMetaClass)
class Validator(object):
VALIDATORS = dict(validators)
META_SCHEMA = dict(meta_schema)
TYPE_CHECKER = type_checker
ID_OF = staticmethod(id_of)
DEFAULT_TYPES = property(_DEFAULT_TYPES)
_DEFAULT_TYPES = dict(default_types)
_CREATED_WITH_DEFAULT_TYPES = _created_with_default_types
def __init__(
self,
schema,
types=(),
resolver=None,
format_checker=None,
):
if types:
warn(
(
"The types argument is deprecated. Provide "
"a type_checker to jsonschema.validators.extend "
"instead."
),
DeprecationWarning,
stacklevel=2,
)
self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many(
_generate_legacy_type_checks(types),
)
if resolver is None:
resolver = RefResolver.from_schema(schema, id_of=id_of)
self.resolver = resolver
self.format_checker = format_checker
self.schema = schema
@classmethod
def check_schema(cls, schema):
for error in cls(cls.META_SCHEMA).iter_errors(schema):
raise exceptions.SchemaError.create_from(error)
def iter_errors(self, instance, _schema=None):
if _schema is None:
_schema = self.schema
if _schema is True:
return
elif _schema is False:
yield exceptions.ValidationError(
"False schema does not allow %r" % (instance,),
validator=None,
validator_value=None,
instance=instance,
schema=_schema,
)
return
scope = id_of(_schema)
if scope:
self.resolver.push_scope(scope)
try:
ref = _schema.get(u"$ref")
if ref is not None:
validators = [(u"$ref", ref)]
else:
validators = iteritems(_schema)
for k, v in validators:
validator = self.VALIDATORS.get(k)
if validator is None:
continue
errors = validator(self, v, instance, _schema) or ()
for error in errors:
# set details if not already set by the called fn
error._set(
validator=k,
validator_value=v,
instance=instance,
schema=_schema,
)
if k != u"$ref":
error.schema_path.appendleft(k)
yield error
finally:
if scope:
self.resolver.pop_scope()
def descend(self, instance, schema, path=None, schema_path=None):
for error in self.iter_errors(instance, schema):
if path is not None:
error.path.appendleft(path)
if schema_path is not None:
error.schema_path.appendleft(schema_path)
yield error
def validate(self, *args, **kwargs):
for error in self.iter_errors(*args, **kwargs):
raise error
def is_type(self, instance, type):
try:
return self.TYPE_CHECKER.is_type(instance, type)
except exceptions.UndefinedTypeCheck:
raise exceptions.UnknownType(type, instance, self.schema)
def is_valid(self, instance, _schema=None):
error = next(self.iter_errors(instance, _schema), None)
return error is None
if version is not None:
Validator = validates(version)(Validator)
Validator.__name__ = version.title().replace(" ", "") + "Validator"
return Validator
def extend(validator, validators=(), version=None, type_checker=None):
"""
Create a new validator class by extending an existing one.
Arguments:
validator (jsonschema.IValidator):
an existing validator class
validators (collections.Mapping):
a mapping of new validator callables to extend with, whose
structure is as in `create`.
.. note::
Any validator callables with the same name as an existing one
will (silently) replace the old validator callable entirely,
effectively overriding any validation done in the "parent"
validator class.
If you wish to instead extend the behavior of a parent's
validator callable, delegate and call it directly in the new
validator function by retrieving it using
``OldValidator.VALIDATORS["validator_name"]``.
version (str):
a version for the new validator class
type_checker (jsonschema.TypeChecker):
a type checker, used when applying the :validator:`type` validator.
If unprovided, the type checker of the extended
        `jsonschema.IValidator` will be carried along.
Returns:
a new `jsonschema.IValidator` class extending the one provided
.. note:: Meta Schemas
The new validator class will have its parent's meta schema.
If you wish to change or extend the meta schema in the new
validator class, modify ``META_SCHEMA`` directly on the returned
class. Note that no implicit copying is done, so a copy should
likely be made before modifying it, in order to not affect the
old validator.
"""
all_validators = dict(validator.VALIDATORS)
all_validators.update(validators)
if type_checker is None:
type_checker = validator.TYPE_CHECKER
elif validator._CREATED_WITH_DEFAULT_TYPES:
raise TypeError(
"Cannot extend a validator created with default_types "
"with a type_checker. Update the validator to use a "
"type_checker when created."
)
return create(
meta_schema=validator.META_SCHEMA,
validators=all_validators,
version=version,
type_checker=type_checker,
id_of=validator.ID_OF,
)
Draft3Validator = create(
meta_schema=_utils.load_schema("draft3"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"dependencies": _legacy_validators.dependencies_draft3,
u"disallow": _legacy_validators.disallow_draft3,
u"divisibleBy": _validators.multipleOf,
u"enum": _validators.enum,
u"extends": _legacy_validators.extends_draft3,
u"format": _validators.format,
u"items": _legacy_validators.items_draft3_draft4,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maximum": _legacy_validators.maximum_draft3_draft4,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minimum": _legacy_validators.minimum_draft3_draft4,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _legacy_validators.properties_draft3,
u"type": _legacy_validators.type_draft3,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft3_type_checker,
version="draft3",
id_of=lambda schema: schema.get(u"id", ""),
)
Draft4Validator = create(
meta_schema=_utils.load_schema("draft4"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"format": _validators.format,
u"items": _legacy_validators.items_draft3_draft4,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _legacy_validators.maximum_draft3_draft4,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _legacy_validators.minimum_draft3_draft4,
u"multipleOf": _validators.multipleOf,
u"not": _validators.not_,
u"oneOf": _validators.oneOf,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft4_type_checker,
version="draft4",
id_of=lambda schema: schema.get(u"id", ""),
)
Draft6Validator = create(
meta_schema=_utils.load_schema("draft6"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"const": _validators.const,
u"contains": _validators.contains,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"exclusiveMaximum": _validators.exclusiveMaximum,
u"exclusiveMinimum": _validators.exclusiveMinimum,
u"format": _validators.format,
u"items": _validators.items,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _validators.maximum,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _validators.minimum,
u"multipleOf": _validators.multipleOf,
u"not": _validators.not_,
u"oneOf": _validators.oneOf,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"propertyNames": _validators.propertyNames,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft6_type_checker,
version="draft6",
)
Draft7Validator = create(
meta_schema=_utils.load_schema("draft7"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"const": _validators.const,
u"contains": _validators.contains,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"exclusiveMaximum": _validators.exclusiveMaximum,
u"exclusiveMinimum": _validators.exclusiveMinimum,
u"format": _validators.format,
u"if": _validators.if_,
u"items": _validators.items,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _validators.maximum,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _validators.minimum,
u"multipleOf": _validators.multipleOf,
u"oneOf": _validators.oneOf,
u"not": _validators.not_,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"propertyNames": _validators.propertyNames,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft7_type_checker,
version="draft7",
)
_LATEST_VERSION = Draft7Validator
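# Illustration of the delegation pattern documented in `extend` above: wrap a
# draft 7 validator callable rather than replace it. The callable name and
# the print call are purely for demonstration, not part of the upstream module.
def _type_and_log(validator, types, instance, schema):
    print("type-checking %r" % (instance,))
    for error in Draft7Validator.VALIDATORS[u"type"](
            validator, types, instance, schema):
        yield error


LoggingValidator = extend(Draft7Validator, validators={u"type": _type_and_log})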
class RefResolver(object):
"""
Resolve JSON References.
Arguments:
base_uri (str):
The URI of the referring document
referrer:
The actual referring document
store (dict):
A mapping from URIs to documents to cache
cache_remote (bool):
Whether remote refs should be cached after first resolution
handlers (dict):
A mapping from URI schemes to functions that should be used
to retrieve them
urljoin_cache (:func:`functools.lru_cache`):
A cache that will be used for caching the results of joining
the resolution scope to subscopes.
remote_cache (:func:`functools.lru_cache`):
A cache that will be used for caching the results of
resolved remote URLs.
Attributes:
cache_remote (bool):
Whether remote refs should be cached after first resolution
"""
def __init__(
self,
base_uri,
referrer,
store=(),
cache_remote=True,
handlers=(),
urljoin_cache=None,
remote_cache=None,
):
if urljoin_cache is None:
urljoin_cache = lru_cache(1024)(urljoin)
if remote_cache is None:
remote_cache = lru_cache(1024)(self.resolve_from_url)
self.referrer = referrer
self.cache_remote = cache_remote
self.handlers = dict(handlers)
self._scopes_stack = [base_uri]
self.store = _utils.URIDict(
(id, validator.META_SCHEMA)
for id, validator in iteritems(meta_schemas)
)
self.store.update(store)
self.store[base_uri] = referrer
self._urljoin_cache = urljoin_cache
self._remote_cache = remote_cache
@classmethod
def from_schema(cls, schema, id_of=_id_of, *args, **kwargs):
"""
Construct a resolver from a JSON schema object.
Arguments:
schema:
the referring schema
Returns:
`RefResolver`
"""
return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs)
def push_scope(self, scope):
self._scopes_stack.append(
self._urljoin_cache(self.resolution_scope, scope),
)
def pop_scope(self):
try:
self._scopes_stack.pop()
except IndexError:
raise exceptions.RefResolutionError(
"Failed to pop the scope from an empty stack. "
"`pop_scope()` should only be called once for every "
"`push_scope()`"
)
@property
def resolution_scope(self):
return self._scopes_stack[-1]
@property
def base_uri(self):
uri, _ = urldefrag(self.resolution_scope)
return uri
@contextlib.contextmanager
def in_scope(self, scope):
self.push_scope(scope)
try:
yield
finally:
self.pop_scope()
@contextlib.contextmanager
def resolving(self, ref):
"""
Resolve the given ``ref`` and enter its resolution scope.
Exits the scope on exit of this context manager.
Arguments:
ref (str):
The reference to resolve
"""
url, resolved = self.resolve(ref)
self.push_scope(url)
try:
yield resolved
finally:
self.pop_scope()
def resolve(self, ref):
url = self._urljoin_cache(self.resolution_scope, ref)
return url, self._remote_cache(url)
def resolve_from_url(self, url):
url, fragment = urldefrag(url)
try:
document = self.store[url]
except KeyError:
try:
document = self.resolve_remote(url)
except Exception as exc:
raise exceptions.RefResolutionError(exc)
return self.resolve_fragment(document, fragment)
def resolve_fragment(self, document, fragment):
"""
Resolve a ``fragment`` within the referenced ``document``.
Arguments:
document:
The referent document
fragment (str):
a URI fragment to resolve within it
"""
fragment = fragment.lstrip(u"/")
parts = unquote(fragment).split(u"/") if fragment else []
for part in parts:
part = part.replace(u"~1", u"/").replace(u"~0", u"~")
if isinstance(document, Sequence):
# Array indexes should be turned into integers
try:
part = int(part)
except ValueError:
pass
try:
document = document[part]
except (TypeError, LookupError):
raise exceptions.RefResolutionError(
"Unresolvable JSON pointer: %r" % fragment
)
return document
def resolve_remote(self, uri):
"""
Resolve a remote ``uri``.
If called directly, does not check the store first, but after
retrieving the document at the specified URI it will be saved in
the store if :attr:`cache_remote` is True.
.. note::
If the requests_ library is present, ``jsonschema`` will use it to
request the remote ``uri``, so that the correct encoding is
detected and used.
If it isn't, or if the scheme of the ``uri`` is not ``http`` or
``https``, UTF-8 is assumed.
Arguments:
uri (str):
The URI to resolve
Returns:
The retrieved document
.. _requests: https://pypi.org/project/requests/
"""
try:
import requests
except ImportError:
requests = None
scheme = urlsplit(uri).scheme
if scheme in self.handlers:
result = self.handlers[scheme](uri)
elif scheme in [u"http", u"https"] and requests:
# Requests has support for detecting the correct encoding of
# json over http
result = requests.get(uri).json()
else:
# Otherwise, pass off to urllib and assume utf-8
with urlopen(uri) as url:
result = json.loads(url.read().decode("utf-8"))
if self.cache_remote:
self.store[uri] = result
return result
def validate(instance, schema, cls=None, *args, **kwargs):
"""
Validate an instance under the given schema.
>>> validate([2, 3, 4], {"maxItems": 2})
Traceback (most recent call last):
...
ValidationError: [2, 3, 4] is too long
:func:`validate` will first verify that the provided schema is itself
    valid, since not doing so can lead to less obvious error messages and
    can make validation fail in less obvious or consistent ways.
If you know you have a valid schema already, especially if you
intend to validate multiple instances with the same schema, you
likely would prefer using the `IValidator.validate` method directly
on a specific validator (e.g. ``Draft7Validator.validate``).
Arguments:
instance:
The instance to validate
schema:
The schema to validate with
cls (IValidator):
The class that will be used to validate the instance.
If the ``cls`` argument is not provided, two things will happen in
accordance with the specification. First, if the schema has a
:validator:`$schema` property containing a known meta-schema [#]_ then the
proper validator will be used. The specification recommends that all
schemas contain :validator:`$schema` properties for this reason. If no
:validator:`$schema` property is found, the default validator class is
the latest released draft.
Any other provided positional and keyword arguments will be passed on when
instantiating the ``cls``.
Raises:
`jsonschema.exceptions.ValidationError` if the instance
is invalid
`jsonschema.exceptions.SchemaError` if the schema itself
is invalid
.. rubric:: Footnotes
.. [#] known by a validator registered with
`jsonschema.validators.validates`
"""
if cls is None:
cls = validator_for(schema)
cls.check_schema(schema)
validator = cls(schema, *args, **kwargs)
error = exceptions.best_match(validator.iter_errors(instance))
if error is not None:
raise error
def validator_for(schema, default=_LATEST_VERSION):
"""
Retrieve the validator class appropriate for validating the given schema.
Uses the :validator:`$schema` property that should be present in the given
schema to look up the appropriate validator class.
Arguments:
schema (collections.Mapping or bool):
the schema to look at
default:
the default to return if the appropriate validator class cannot be
determined.
If unprovided, the default is to return
the latest supported draft.
"""
if schema is True or schema is False or u"$schema" not in schema:
return default
if schema[u"$schema"] not in meta_schemas:
warn(
(
"The metaschema specified by $schema was not found. "
"Using the latest draft to validate, but this will raise "
"an error in the future."
),
DeprecationWarning,
stacklevel=2,
)
return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION)
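if __name__ == "__main__":
    # A demonstration appended for illustration, not part of the upstream
    # module: the $schema URI below selects Draft4Validator, and validating
    # an over-long array then fails as described above.
    demo_schema = {
        u"$schema": u"http://json-schema.org/draft-04/schema#",
        u"maxItems": 2,
    }
    assert validator_for(demo_schema) is Draft4Validator
    try:
        validate([1, 2, 3], demo_schema)
    except exceptions.ValidationError as error:
        print(error.message)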
| 30.416844 | 79 | 0.609898 |
c24ed9aeb948611bc0cf3dca59c1303d9cd6002d | 161 | py | Python | newsapi/__init__.py | TomFaulkner/newsapi-python | 457197d3e2f82bf650345cec5238a9a89f783448 | ["MIT"] | null | null | null | newsapi/__init__.py | TomFaulkner/newsapi-python | 457197d3e2f82bf650345cec5238a9a89f783448 | ["MIT"] | null | null | null | newsapi/__init__.py | TomFaulkner/newsapi-python | 457197d3e2f82bf650345cec5238a9a89f783448 | ["MIT"] | null | null | null |
from newsapi.client import NewsApi
from newsapi.constants import COUNTRIES, CATEGORIES, LANGUAGES
__all__ = ['NewsApi', 'COUNTRIES', 'CATEGORIES', 'LANGUAGES']
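# With the re-exports above, client code can pull everything from the package
# root, e.g.:
#
#     from newsapi import NewsApi, COUNTRIES, CATEGORIES, LANGUAGES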
| 32.2 | 62 | 0.782609 |
0d247c47315a7c20d08d806f30b320394638b2e3 | 3,719 | py | Python | data/datasets/dukemtmcreid.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | ["MIT"] | 6 | 2020-05-25T15:36:06.000Z | 2021-03-12T07:53:11.000Z | data/datasets/dukemtmcreid.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | ["MIT"] | null | null | null | data/datasets/dukemtmcreid.py | NIRVANALAN/magnifiernet_reid | 2d2dfa331fe55d4d6e83be0b8f03f06a79adb3d1 | ["MIT"] | 4 | 2020-10-04T01:00:12.000Z | 2021-05-02T06:25:09.000Z |
# encoding: utf-8
"""
@author: liaoxingyu
@contact: liaoxingyu2@jd.com
"""
import glob
import re
import urllib
import zipfile
import os.path as osp
from utils.iotools import mkdir_if_missing
from .bases import BaseImageDataset
class DukeMTMCreID(BaseImageDataset):
"""
DukeMTMC-reID
Reference:
1. Ristani et al. Performance Measures and a Data Set for Multi-Target, Multi-Camera Tracking. ECCVW 2016.
2. Zheng et al. Unlabeled Samples Generated by GAN Improve the Person Re-identification Baseline in vitro. ICCV 2017.
URL: https://github.com/layumi/DukeMTMC-reID_evaluation
Dataset statistics:
# identities: 1404 (train + query)
    # images: 16522 (train) + 2228 (query) + 17661 (gallery)
# cameras: 8
"""
__name__ = 'dukemtmc'
dataset_dir = 'dukemtmc-reid'
def __init__(self, root='/home/haoluo/data', verbose=True, **kwargs):
super(DukeMTMCreID, self).__init__()
self.dataset_dir = osp.join(root, self.dataset_dir)
self.dataset_url = 'http://vision.cs.duke.edu/DukeMTMC/data/misc/DukeMTMC-reID.zip'
self.train_dir = osp.join(self.dataset_dir, 'DukeMTMC-reID/bounding_box_train')
self.query_dir = osp.join(self.dataset_dir, 'DukeMTMC-reID/query')
self.gallery_dir = osp.join(self.dataset_dir, 'DukeMTMC-reID/bounding_box_test')
self._download_data()
self._check_before_run()
train = self._process_dir(self.train_dir, relabel=True)
query = self._process_dir(self.query_dir, relabel=False)
gallery = self._process_dir(self.gallery_dir, relabel=False)
if verbose:
print("=> DukeMTMC-reID loaded")
self.print_dataset_statistics(train, query, gallery)
self.train = train
self.query = query
self.gallery = gallery
self.num_train_pids, self.num_train_imgs, self.num_train_cams = self.get_imagedata_info(self.train)
self.num_query_pids, self.num_query_imgs, self.num_query_cams = self.get_imagedata_info(self.query)
self.num_gallery_pids, self.num_gallery_imgs, self.num_gallery_cams = self.get_imagedata_info(self.gallery)
def _download_data(self):
if osp.exists(self.dataset_dir):
print("This dataset has been downloaded.")
return
print("Creating directory {}".format(self.dataset_dir))
mkdir_if_missing(self.dataset_dir)
fpath = osp.join(self.dataset_dir, osp.basename(self.dataset_url))
print("Downloading DukeMTMC-reID dataset")
urllib.request.urlretrieve(self.dataset_url, fpath)
print("Extracting files")
zip_ref = zipfile.ZipFile(fpath, 'r')
zip_ref.extractall(self.dataset_dir)
zip_ref.close()
def _check_before_run(self):
"""Check if all files are available before going deeper"""
if not osp.exists(self.dataset_dir):
raise RuntimeError("'{}' is not available".format(self.dataset_dir))
if not osp.exists(self.train_dir):
raise RuntimeError("'{}' is not available".format(self.train_dir))
if not osp.exists(self.query_dir):
raise RuntimeError("'{}' is not available".format(self.query_dir))
if not osp.exists(self.gallery_dir):
raise RuntimeError("'{}' is not available".format(self.gallery_dir))
def _process_dir(self, dir_path, relabel=False):
img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
pattern = re.compile(r'([-\d]+)_c(\d)')
pid_container = set()
for img_path in img_paths:
pid, _ = map(int, pattern.search(img_path).groups())
pid_container.add(pid)
pid2label = {pid: label for label, pid in enumerate(pid_container)}
label2pid = {label: pid for label, pid in enumerate(pid_container)}
dataset = []
for img_path in img_paths:
pid, camid = map(int, pattern.search(img_path).groups())
assert 1 <= camid <= 8
camid -= 1 # index starts from 0
if relabel: pid = pid2label[pid]
dataset.append((img_path, pid, camid))
return dataset, label2pid
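if __name__ == '__main__':
    # Smoke-test sketch appended for illustration: the root path is an
    # assumption, the constructor downloads the dataset on first use, and
    # note that _process_dir in this fork returns a (samples, label2pid)
    # tuple rather than a bare list.
    duke = DukeMTMCreID(root='/data', verbose=False)
    samples, label2pid = duke.train
    print('%d training images' % len(samples))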
| 33.809091 | 118 | 0.741597 |
79455086adf5afe58c64a3d5edb453f8ea09b87b | 4,742 | py | Python | pygluu/kubernetes/yamlparser.py | scottwedge/cloud-native-edition | 75f714210ec564dcef68c7b6a8c936ec615d0540 | ["Apache-2.0"] | null | null | null | pygluu/kubernetes/yamlparser.py | scottwedge/cloud-native-edition | 75f714210ec564dcef68c7b6a8c936ec615d0540 | ["Apache-2.0"] | null | null | null | pygluu/kubernetes/yamlparser.py | scottwedge/cloud-native-edition | 75f714210ec564dcef68c7b6a8c936ec615d0540 | ["Apache-2.0"] | null | null | null |
"""
License terms and conditions for Gluu Cloud Native Edition:
https://www.apache.org/licenses/LICENSE-2.0
"""
from pathlib import Path
import contextlib
import os
import json
import logging
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
from collections import OrderedDict
from collections.abc import Mapping  # Mapping lives in collections.abc (removed from collections in Python 3.10)
import subprocess
import shlex
def update_settings_json_file(settings):
"""Write settings out to a json file
"""
with open(Path('./settings.json'), 'w+') as file:
json.dump(settings, file, indent=2)
def exec_cmd(cmd):
args = shlex.split(cmd)
popen = subprocess.Popen(args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = popen.communicate()
retcode = popen.returncode
if retcode != 0:
logger.error(str(stderr, "utf-8"))
logger.info(str(stdout, "utf-8"))
return stdout, stderr, retcode
def get_logger(name):
log_format = '%(asctime)s - %(name)8s - %(levelname)5s - %(message)s'
logging.basicConfig(level=logging.INFO,
format=log_format,
filename='setup.log',
filemode='w')
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter(log_format))
logging.getLogger(name).addHandler(console)
return logging.getLogger(name)
logger = get_logger("gluu-yaml-parser ")
class Parser(dict):
def __init__(self, filename, check_value=None, check_value_name=None, check_key='kind'):
super().__init__()
self.filename = Path(filename)
self.yaml = YAML()
self.yaml.preserve_quotes = True
self.manifests_dict_list = []
self.modify_dict = dict
self.tmp_yaml_file = Path("./tmp.yaml")
if check_value:
if self.filename.exists():
with open(filename) as file:
manifests_dicts = self.yaml.load_all(file)
for manifest in manifests_dicts:
try:
if manifest[check_key] == check_value:
if check_value_name:
if manifest['metadata']['name'] == check_value_name:
self.modify_dict = manifest
else:
self.manifests_dict_list.append(manifest)
else:
self.modify_dict = manifest
else:
self.manifests_dict_list.append(manifest)
except KeyError:
                        # The 'kind' key is missing, so this is a helm values.yaml, which holds a single document
self.modify_dict = manifest
with open(self.tmp_yaml_file, 'w') as file:
self.yaml.dump(self.modify_dict, file)
with open(self.tmp_yaml_file) as f:
super(Parser, self).update(self.yaml.load(f) or {})
@property
def return_manifests_dict(self):
if self.filename.exists():
with open(self.filename) as file:
manifests_dicts = self.yaml.load_all(file)
for manifest in manifests_dicts:
self.manifests_dict_list.append(manifest)
return self.manifests_dict_list
def __setitem__(self, key, value):
super(Parser, self).__setitem__(key, value)
def dump_it(self):
d = self.analyze_ordered_dict_object(self)
final_manifest_dict_list = self.manifests_dict_list + [d]
with open(self.filename, "w+") as f:
self.yaml.dump_all(final_manifest_dict_list, f)
with contextlib.suppress(FileNotFoundError):
os.remove(self.tmp_yaml_file)
def analyze_ordered_dict_object(self, data):
if isinstance(data, OrderedDict) or isinstance(data, dict):
commented_map = CommentedMap()
for k, v in data.items():
commented_map[k] = self.analyze_ordered_dict_object(v)
return commented_map
return data
def __delitem__(self, key):
try:
super(Parser, self).__delitem__(key)
except KeyError as e:
logger.error(e)
def update(self, other=None, **kwargs):
if other is not None:
for k, v in other.items() if isinstance(other, Mapping) else other:
self[k] = v
for k, v in kwargs.items():
self[k] = v
super(Parser, self).update(self)
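if __name__ == '__main__':
    # Usage sketch appended for illustration; the manifest file name, the
    # Deployment kind and the metadata name are assumptions, and the file is
    # assumed to exist. It loads one document out of a multi-document
    # manifest, patches it, and writes all documents back via dump_it().
    parser = Parser('gluu.yaml', check_value='Deployment',
                    check_value_name='oxauth')
    parser['spec']['replicas'] = 3
    parser.dump_it()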
| 35.38806 | 112 | 0.570434 |
45cbf655577993b75f756dd74dd6d97358e665ac | 9,491 | py | Python | vmware_nsx/shell/admin/plugins/nsxv/resources/metadata.py | mail2nsrajesh/vmware-nsx | 63154b510b9fd95c10fffae86bfc49073cafeb40 | ["Apache-2.0"] | null | null | null | vmware_nsx/shell/admin/plugins/nsxv/resources/metadata.py | mail2nsrajesh/vmware-nsx | 63154b510b9fd95c10fffae86bfc49073cafeb40 | ["Apache-2.0"] | null | null | null | vmware_nsx/shell/admin/plugins/nsxv/resources/metadata.py | mail2nsrajesh/vmware-nsx | 63154b510b9fd95c10fffae86bfc49073cafeb40 | ["Apache-2.0"] | null | null | null |
# Copyright 2016 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import hmac
from neutron.db import models_v2
from neutron_lib.callbacks import registry
from oslo_config import cfg
from oslo_log import log as logging
from vmware_nsx.common import config
from vmware_nsx.common import locking
from vmware_nsx.common import nsxv_constants
from vmware_nsx.db import nsxv_db
from vmware_nsx.plugins.nsx_v import availability_zones as nsx_az
from vmware_nsx.plugins.nsx_v import md_proxy
from vmware_nsx.plugins.nsx_v.vshield.common import constants as vcns_constants
from vmware_nsx.plugins.nsx_v.vshield import nsxv_loadbalancer as nsxv_lb
from vmware_nsx.shell.admin.plugins.common import constants
from vmware_nsx.shell.admin.plugins.common import formatters
from vmware_nsx.shell.admin.plugins.common import utils as admin_utils
from vmware_nsx.shell.admin.plugins.nsxv.resources import utils as utils
from vmware_nsx.shell import resources as shell
LOG = logging.getLogger(__name__)
nsxv = utils.get_nsxv_client()
@admin_utils.output_header
def nsx_redo_metadata_cfg(resource, event, trigger, **kwargs):
edgeapi = utils.NeutronDbClient()
config.register_nsxv_azs(cfg.CONF, cfg.CONF.nsxv.availability_zones)
conf_az = nsx_az.NsxVAvailabilityZones()
az_list = conf_az.list_availability_zones_objects()
for az in az_list:
if az.supports_metadata():
nsx_redo_metadata_cfg_for_az(az, edgeapi)
else:
LOG.info("Skipping availability zone: %s - no metadata "
"configuration", az.name)
def nsx_redo_metadata_cfg_for_az(az, edgeapi):
LOG.info("Updating MetaData for availability zone: %s", az.name)
# Get the list of internal networks for this AZ
db_net = nsxv_db.get_nsxv_internal_network(
edgeapi.context.session,
vcns_constants.InternalEdgePurposes.INTER_EDGE_PURPOSE,
az.name)
internal_net = None
internal_subnet = None
if db_net:
internal_net = db_net['network_id']
internal_subnet = edgeapi.context.session.query(
models_v2.Subnet).filter_by(
network_id=internal_net).first().get('id')
# Get the list of internal edges for this AZ
edge_list = nsxv_db.get_nsxv_internal_edges_by_purpose(
edgeapi.context.session,
vcns_constants.InternalEdgePurposes.INTER_EDGE_PURPOSE)
edge_az_list = [edge for edge in edge_list if
nsxv_db.get_router_availability_zone(
edgeapi.context.session, edge['router_id']) == az.name]
md_rtr_ids = [edge['router_id'] for edge in edge_az_list]
edge_internal_ips = []
for edge in edge_az_list:
edge_internal_port = edgeapi.context.session.query(
models_v2.Port).filter_by(network_id=internal_net,
device_id=edge['router_id']).first()
if edge_internal_port:
edge_internal_ip = edgeapi.context.session.query(
models_v2.IPAllocation).filter_by(
port_id=edge_internal_port['id']).first()
edge_internal_ips.append(edge_internal_ip['ip_address'])
if not internal_net or not internal_subnet or not edge_internal_ips:
LOG.error("Metadata infrastructure is missing or broken. "
"It is recommended to restart neutron service before "
"proceeding with configuration restoration")
return
router_bindings = nsxv_db.get_nsxv_router_bindings(
edgeapi.context.session,
filters={'edge_type': [nsxv_constants.SERVICE_EDGE],
'availability_zones': az.name})
edge_ids = list(set([binding['edge_id'] for binding in router_bindings
if (binding['router_id'] not in set(md_rtr_ids)
and not binding['router_id'].startswith(
vcns_constants.BACKUP_ROUTER_PREFIX)
and not binding['router_id'].startswith(
vcns_constants.PLR_EDGE_PREFIX))]))
for edge_id in edge_ids:
with locking.LockManager.get_lock(edge_id):
lb = nsxv_lb.NsxvLoadbalancer.get_loadbalancer(nsxv, edge_id)
virt = lb.virtual_servers.get(md_proxy.METADATA_VSE_NAME)
if virt:
pool = virt.default_pool
pool.members = {}
i = 0
s_port = cfg.CONF.nsxv.nova_metadata_port
for member_ip in edge_internal_ips:
i += 1
member = nsxv_lb.NsxvLBPoolMember(
name='Member-%d' % i,
ip_address=member_ip,
port=s_port,
monitor_port=s_port)
pool.add_member(member)
lb.submit_to_backend(nsxv, edge_id)
@admin_utils.output_header
def update_shared_secret(resource, event, trigger, **kwargs):
edgeapi = utils.NeutronDbClient()
edge_list = nsxv_db.get_nsxv_internal_edges_by_purpose(
edgeapi.context.session,
vcns_constants.InternalEdgePurposes.INTER_EDGE_PURPOSE)
md_rtr_ids = [edge['router_id'] for edge in edge_list]
router_bindings = nsxv_db.get_nsxv_router_bindings(
edgeapi.context.session,
filters={'edge_type': [nsxv_constants.SERVICE_EDGE]})
edge_ids = list(set([binding['edge_id'] for binding in router_bindings
if (binding['router_id'] not in set(md_rtr_ids)
and not binding['router_id'].startswith(
vcns_constants.BACKUP_ROUTER_PREFIX)
and not binding['router_id'].startswith(
vcns_constants.PLR_EDGE_PREFIX))]))
for edge_id in edge_ids:
with locking.LockManager.get_lock(edge_id):
lb = nsxv_lb.NsxvLoadbalancer.get_loadbalancer(nsxv, edge_id)
virt = lb.virtual_servers.get(md_proxy.METADATA_VSE_NAME)
if not virt:
LOG.error("Virtual server not found for edge: %s", edge_id)
continue
virt.del_app_rule('insert-auth')
if cfg.CONF.nsxv.metadata_shared_secret:
signature = hmac.new(cfg.CONF.nsxv.metadata_shared_secret,
edge_id,
hashlib.sha256).hexdigest()
sign = 'reqadd X-Metadata-Provider-Signature:' + signature
sign_app_rule = nsxv_lb.NsxvLBAppRule('insert-auth', sign)
virt.add_app_rule(sign_app_rule)
lb.submit_to_backend(nsxv, edge_id)
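def _signature_example(shared_secret, edge_id):
    # Hedged sketch (added for illustration, not part of the admin plugin):
    # the app rule installed above makes the edge send
    # hex(HMAC-SHA256(shared_secret, edge_id)) in the
    # X-Metadata-Provider-Signature header; a receiver can recompute the
    # digest the same way and compare it to authenticate the provider.
    return hmac.new(shared_secret, edge_id, hashlib.sha256).hexdigest()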
def _md_member_status(title, edge_ids):
for edge_id in edge_ids:
lb_stats = nsxv.get_loadbalancer_statistics(
edge_id)
pools_stats = lb_stats[1].get('pool', [])
members = []
for pool_stats in pools_stats:
if pool_stats['name'] == md_proxy.METADATA_POOL_NAME:
for member in pool_stats.get('member', []):
members.append({'member_ip': member['ipAddress'],
'member_status': member['status']})
LOG.info(formatters.output_formatter(
title % edge_id,
members, ['member_ip', 'member_status']))
@admin_utils.output_header
def get_metadata_status(resource, event, trigger, **kwargs):
if kwargs.get('property'):
properties = admin_utils.parse_multi_keyval_opt(kwargs['property'])
net_id = properties.get('network_id')
else:
net_id = None
edgeapi = utils.NeutronDbClient()
edge_list = nsxv_db.get_nsxv_internal_edges_by_purpose(
edgeapi.context.session,
vcns_constants.InternalEdgePurposes.INTER_EDGE_PURPOSE)
md_rtr_ids = [edge['router_id'] for edge in edge_list]
router_bindings = nsxv_db.get_nsxv_router_bindings(
edgeapi.context.session,
filters={'router_id': md_rtr_ids})
edge_ids = [b['edge_id'] for b in router_bindings]
_md_member_status('Metadata edge appliance: %s members', edge_ids)
if net_id:
as_provider_data = nsxv_db.get_edge_vnic_bindings_by_int_lswitch(
edgeapi.context.session, net_id)
providers = [asp['edge_id'] for asp in as_provider_data]
if providers:
LOG.info('Metadata providers for network %s', net_id)
_md_member_status('Edge %s', providers)
else:
LOG.info('No providers found for network %s', net_id)
registry.subscribe(nsx_redo_metadata_cfg,
constants.METADATA,
shell.Operations.NSX_UPDATE.value)
registry.subscribe(update_shared_secret,
constants.METADATA,
shell.Operations.NSX_UPDATE_SECRET.value)
registry.subscribe(get_metadata_status, constants.METADATA,
shell.Operations.STATUS.value)
| 41.445415
| 79
| 0.65462
|
76bad01a2df6ff66d58c6f883e0bf83a5e6b124b
| 12,456
|
py
|
Python
|
python/common/UtilImage.py
|
SystemCorps/CNN-2D-X-Ray-Catheter-Detection
|
6e4d6e3473c4032e7db4cc9f7f7897a80185fee6
|
[
"BSD-3-Clause"
] | 25
|
2017-10-02T09:42:42.000Z
|
2022-02-01T08:25:55.000Z
|
python/common/UtilImage.py
|
SystemCorps/CNN-2D-X-Ray-Catheter-Detection
|
6e4d6e3473c4032e7db4cc9f7f7897a80185fee6
|
[
"BSD-3-Clause"
] | 1
|
2019-09-25T09:01:23.000Z
|
2019-09-25T09:01:23.000Z
|
python/common/UtilImage.py
|
SystemCorps/CNN-2D-X-Ray-Catheter-Detection
|
6e4d6e3473c4032e7db4cc9f7f7897a80185fee6
|
[
"BSD-3-Clause"
] | 23
|
2017-09-05T03:13:57.000Z
|
2021-11-17T08:55:09.000Z
|
from __future__ import division
import os
import sys
import array
import struct
import io
import math
import numpy as np
import colorsys
import pydicom
from pydicom.dataset import Dataset, FileDataset
import pydicom.uid
import skimage as ski
import skimage.io
import skimage.transform
import skimage.draw
import skimage.morphology
import skimage.exposure  # used via ski.exposure in NormalizeFrame
import skimage.util  # used via ski.util.pad in PadImage
import warnings
from Util import *
from File import *
NORMALIZE_NO = 0
NORMALIZE_SIMPLE = 1
NORMALIZE_CONTRAST_STRETCHING = 2
# NORMALIZE_ADAPTATIVE_EQUALIZATION = 3
# _image type should be np.float32
def NormalizeFrame(_image, _normalize = NORMALIZE_NO):
if _normalize == NORMALIZE_SIMPLE:
min = np.min(_image)
max = np.max(_image)
return (_image - min)/(max - min)
elif _normalize == NORMALIZE_CONTRAST_STRETCHING:
p2, p98 = np.percentile(_image, (2, 98))
return ski.exposure.rescale_intensity(_image, in_range=(p2, p98), out_range=(0,1))
	# elif _normalize == NORMALIZE_ADAPTATIVE_EQUALIZATION:
	# 	return ski.exposure.equalize_adapthist(_image, clip_limit=0.03)
return _image
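def _normalize_frame_example():
	# Hedged sketch (added for illustration, not part of the original API):
	# NORMALIZE_SIMPLE rescales min..max to [0, 1], while
	# NORMALIZE_CONTRAST_STRETCHING first clips to the 2nd/98th percentiles,
	# which keeps a single outlier from crushing the rest of the range.
	demo = np.array([[0., 50.], [100., 1000.]], dtype=np.float32)
	simple = NormalizeFrame(demo, NORMALIZE_SIMPLE)
	stretched = NormalizeFrame(demo, NORMALIZE_CONTRAST_STRETCHING)
	return simple, stretched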
def GetMaxValue(_pixelSize):
if _pixelSize == 8:
return 255
elif _pixelSize == 16:
return 65535
elif _pixelSize == 10:
return 1023
else:
print("GetMaxValue():: WARNING NOT IMPLEMENTED _pixelSize = " + str(_pixelSize))
return 65535
def GetFloat32NormalizedFrame(_image, _pixelSize, _normalize = NORMALIZE_NO):
_image = _image.astype(np.float32)
if _normalize != NORMALIZE_NO:
return NormalizeFrame(_image, _normalize)
return _image/GetMaxValue(_pixelSize)
def ReadOnlyDicomInfo(_filename):
dcmInfo = pydicom.read_file(_filename, stop_before_pixels = True, defer_size = 16)
# print(dcmInfo.Columns)
# print(dcmInfo.Rows)
# print(dcmInfo.NumberOfFrames)
# print(dcmInfo.BitsStored)
return dcmInfo
def ReadDicomFrame(_filename, _frameId):
# print(_filename + " " + str(_frameId))
file = open(_filename, "rb") # TODO use OpenFile here?
dcmInfo = pydicom.read_file(file, stop_before_pixels = True, defer_size = 16)
	if _frameId < 0 or _frameId >= dcmInfo.NumberOfFrames:
		print("ReadDicomFrame():: ERROR _frameId must be in [0, dcmInfo.NumberOfFrames)")
# print(dcmInfo.BitsStored)
if dcmInfo.BitsStored == 16 or dcmInfo.BitsStored == 10:
pixelType = "H"
pixelSize = 2 # dcmInfo.BitsStored//8
elif dcmInfo.BitsStored == 8:
pixelType = "B"
pixelSize = 1
else:
print("ReadDicomFrame():: WARNING NOT IMPLEMENTED dcmInfo.BitsStored = " + str(dcmInfo.BitsStored))
sizeImageInByte = dcmInfo.Columns*dcmInfo.Rows*pixelSize
# print(sizeImageInByte)
# print(file.tell())
# skip the dicom tag (0x7fe0, 0x0010) 4 bytes,
# then the VR info if we have "explicit VR" (if not, nothing is there in "implicit VR") 4 bytes (if not 0 byte): (VR_OW = 0x574f for example)
# finally the length of the sequence 4 bytes
# u16 = struct.unpack('H', file.read(2))[0]
# print(hex(u16) + " " + str(u16))
# u16 = struct.unpack('H', file.read(2))[0]
# print(hex(u16) + " " + str(u16))
# if dcmInfo.is_implicit_VR == False:
# s32 = struct.unpack('i', file.read(4))[0]
# print(hex(s32) + " " + str(s32))
# s32 = struct.unpack('i', file.read(4))[0]
# print(hex(s32) + " " + str(s32))
if dcmInfo.is_implicit_VR == True:
file.seek(8, io.SEEK_CUR)
else:
file.seek(12, io.SEEK_CUR)
file.seek(_frameId*sizeImageInByte, io.SEEK_CUR)
package = file.read(sizeImageInByte)
# print(len(package))
# seems faster than...
image = array.array(pixelType)
if sys.version_info < (3,0):
image.fromstring(package) # DEPRECATED
else:
image.frombytes(package)
# ...this
# n = dcmInfo.Columns*dcmInfo.Rows
# image = struct.unpack(str(n)+pixelType, package)
# image = np.array(image).reshape(dcmInfo.Columns, dcmInfo.Rows)
# print(sizeImageInByte)
# print(np.array(image).shape)
# print(np.array(image).dtype)
image = np.array(image).reshape(dcmInfo.Rows, dcmInfo.Columns)
file.close() # TODO use CloseFile here?
return image, dcmInfo
def GetFloat32DicomFrame(_filename, _frameId, _normalize = NORMALIZE_NO):
image, dcmInfo = ReadDicomFrame(_filename, _frameId)
return GetFloat32NormalizedFrame(image, dcmInfo.BitsStored, _normalize)
# save a X-ray sequence into dicom format, _sequence is numpy array with the following shape (NumberOfFrames, Rows, Columns)
def SaveDicomSequence(_filename, _sequence):
file_meta = Dataset()
# file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2' # CT Image Storage
file_meta.MediaStorageSOPClassUID = '1.2.3.4.5.1.4.1.1.2' # need valid UID here for real work
file_meta.MediaStorageSOPInstanceUID = "1.2.3" # need valid UID here for real work
file_meta.ImplementationClassUID = "1.2.3.4" # need valid UIDs here
# Create the FileDataset instance (initially no data elements, but file_meta supplied)
ds = FileDataset(_filename, {}, file_meta=file_meta, preamble=b"\0" * 128)
# Add the data elements -- not trying to set all required here. Check DICOM standard
# ds.PatientName = "Test^Firstname"
# ds.PatientID = "123456"
# Set the transfer syntax
ds.is_little_endian = True
ds.is_implicit_VR = True # implicit VR (0002,0010) TransferSyntaxUID: 1.2.840.10008.1.2
# ds.is_implicit_VR = False # explicit VR (0002,0010) TransferSyntaxUID: 1.2.840.10008.1.2.1
# Set creation date/time
# dt = datetime.datetime.now()
# ds.ContentDate = dt.strftime('%Y%m%d')
# timeStr = dt.strftime('%H%M%S.%f') # long format with micro seconds
# ds.ContentTime = timeStr
ds.SamplesPerPixel = 1
ds.PhotometricInterpretation = "MONOCHROME2"
ds.PixelRepresentation = 0
ds.HighBit = 15
ds.BitsStored = 16
ds.BitsAllocated = 16
if sys.version_info < (3,0):
ds.SmallestImagePixelValue = '\\x00\\x00'
ds.LargestImagePixelValue = '\\xff\\xff'
else:
ds.SmallestImagePixelValue = (0).to_bytes(2, byteorder='little')
ds.LargestImagePixelValue = (65535).to_bytes(2, byteorder='little')
ds.Columns = _sequence.shape[2]
ds.Rows = _sequence.shape[1]
ds.NumberOfFrames = _sequence.shape[0]
if _sequence.dtype != np.uint16:
print("warning _sequence.dtype != np.uint16")
_sequence = _sequence.astype(np.uint16)
ds.PixelData = _sequence.tostring()
ds.save_as(_filename)
# Write as a different transfer syntax
# ds.file_meta.TransferSyntaxUID = pydicom.uid.ExplicitVRBigEndian # XXX shouldn't need this but pydicom 0.9.5 bug not recognizing transfer syntax
# ds.is_little_endian = False
# ds.is_implicit_VR = False
# print("Writing test file as Big Endian Explicit VR", filename2)
# ds.save_as(filename2)
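def _dicom_roundtrip_example(filename):
	# Hedged sketch (added for illustration): write a tiny synthetic 16-bit
	# sequence, then read the second frame back as a normalized float32
	# image, assuming the implicit-VR layout written by SaveDicomSequence
	# matches what ReadDicomFrame skips over.
	sequence = np.zeros((2, 4, 4), dtype=np.uint16)
	sequence[1] = 65535  # second frame fully white
	SaveDicomSequence(filename, sequence)
	return GetFloat32DicomFrame(filename, 1)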
def LoadImage(_path):
return ski.io.imread(_path)
def SaveImage(_path, _buffer):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
ski.io.imsave(_path, _buffer)
# ski.io.imsave(_path, _buffer)
def StackImagesMultiChan(_imgs, _columns, _rows):
Xid = 2
Yid = 3
bigImage = np.zeros((_imgs.shape[1], _rows*_imgs.shape[Yid], _columns*_imgs.shape[Xid]), dtype=_imgs.dtype)
# for index, img in enumerate(_imgs):
for index in range(_columns*_rows):
if index >= len(_imgs):
break
i = int(index/_columns)
j = index%_columns
for chan in range(_imgs.shape[1]):
bigImage[chan, i*_imgs.shape[Yid]:(i+1)*_imgs.shape[Yid], j*_imgs.shape[Xid]:(j+1)*_imgs.shape[Xid]] = _imgs[index][chan][...]
return bigImage
def SaveSetImagesMultiChan(_path, _imgs, _columns, _rows):
image = StackImagesMultiChan(_imgs, _columns, _rows)
image = np.moveaxis(image, 0, -1)
image = image*255
image = image.astype(np.uint8)
SaveImage(_path, image)
def ConcatImagesAndSave(_imageNameList, _concatImageName, _sizeX, _sizeY, _columns, _rows):
imageList = np.zeros((len(_imageNameList), _sizeY, _sizeX, 3), dtype = np.uint8)
for i in range(len(_imageNameList)):
if IsFileExist(_imageNameList[i]) == True:
imageList[i][...] = LoadImage(_imageNameList[i])
imageList = np.rollaxis(imageList, 3, 1)
concatImage = StackImagesMultiChan(imageList, _columns, _rows)
imageList = None
concatImage = np.moveaxis(concatImage, 0, -1)
SaveImage(_concatImageName, concatImage)
def GrayToRGB(_image):
image = np.empty((3, _image.shape[0], _image.shape[1]), dtype = _image.dtype)
image[0][...] = _image[...]
image[1][...] = _image[...]
image[2][...] = _image[...]
return np.moveaxis(image, 0, -1)
def GrayToRGBSet(_imageSet):
imageSet = np.empty((3, _imageSet.shape[0], _imageSet.shape[1], _imageSet.shape[2]), dtype = _imageSet.dtype)
for i in range(len(_imageSet)):
imageSet[0][i] = _imageSet[i][...]
imageSet[1][i] = _imageSet[i][...]
imageSet[2][i] = _imageSet[i][...]
return np.moveaxis(imageSet, 0, -1)
# _image has to be smaller than (_newImageSizeX, _newImageSizeY)
# _image.shape must have even dimensions
# (_newImageSizeX, _newImageSizeY) must both be even
def PadImage(_image, _newImageSizeX, _newImageSizeY):
padX = (_newImageSizeX - _image.shape[1])//2
padY = (_newImageSizeY - _image.shape[0])//2
padImage = ski.util.pad(_image, ((padY, padY),(padX, padX)), 'constant', constant_values=0)
return padImage, padX, padY
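def _pad_image_example():
	# Hedged sketch (added for illustration): with even sizes on both sides
	# the padding is symmetric; a (4, 6) image padded to (8, 8) gives
	# padX = 1 and padY = 2.
	image = np.ones((4, 6), dtype=np.float32)
	return PadImage(image, 8, 8)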
def ResizeImage(_image, _factor):
return ski.transform.resize(_image, (_image.shape[0]*_factor, _image.shape[1]*_factor), order = 3, preserve_range=True)
def ResizeImageMultiChan(_image, _factor):
# print(_image.shape)
_image = np.rollaxis(_image, 2, 0)
newImage = np.empty([_image.shape[0], (int)(_image.shape[1]*_factor), (int)(_image.shape[2]*_factor)], dtype = _image.dtype)
for i in range(_image.shape[0]):
image = ski.transform.resize(_image[i], (_image.shape[1]*_factor, _image.shape[2]*_factor), order = 3, preserve_range=True)
# newImage[i] = image.astype(np.float32)
newImage[i] = image
newImage = np.rollaxis(newImage, 0, 3)
print(newImage.shape)
print(newImage.dtype)
return newImage
def DrawLine(_image, _x1, _y1, _x2, _y2, _color):
# vertical line
if _x1 == _x2:
for i in range(_y1, _y2 + 1):
_image[0][i][_x1] = _color[0]
_image[1][i][_x1] = _color[1]
_image[2][i][_x1] = _color[2]
# horizontal line
elif _y1 == _y2:
for i in range(_x1, _x2 + 1):
_image[0][_y1][i] = _color[0]
_image[1][_y1][i] = _color[1]
_image[2][_y1][i] = _color[2]
else:
# TODO
assert(False)
def DrawRect(_image, _x1, _y1, _x2, _y2, _color):
DrawLine(_image, _x1, _y1, _x1, _y2, _color)
DrawLine(_image, _x2, _y1, _x2, _y2, _color)
DrawLine(_image, _x1, _y1, _x2, _y1, _color)
DrawLine(_image, _x1, _y2, _x2, _y2, _color)
def PtsListToMask(_imageSizeX, _imageSizeY, _ptsList, _dilationStructure = (2,2)):
coordinates = np.swapaxes(_ptsList, 0, 1)
# print(coordinates.shape)
# coordinates = coordinates - 0.5
# coordinates = np.round(coordinates)
coordinates = np.floor(coordinates)
coordinates = coordinates.astype(int)
ids = np.where(np.logical_and(coordinates[0] < _imageSizeX, coordinates[0] >= 0))
coordinates = coordinates[:,ids[0]]
# coordinates[0][coordinates[0] > _imageSizeX - 1] = _imageSizeX - 1
ids = np.where(np.logical_and(coordinates[1] < _imageSizeY, coordinates[1] >= 0))
coordinates = coordinates[:,ids[0]]
# coordinates[1][coordinates[1] > _imageSizeY - 1] = _imageSizeY - 1
# mask = np.zeros((_imageSizeX, _imageSizeY), dtype=np.uint8)
mask = np.zeros((_imageSizeY, _imageSizeX), dtype=bool)
mask[coordinates[1], coordinates[0]] = True
structure = np.ones(_dilationStructure)
mask = ski.morphology.binary_dilation(mask, structure)
# return mask
return mask.astype(np.uint8)
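def _pts_list_to_mask_example():
	# Hedged sketch (added for illustration): rasterise (x, y) points into
	# an 8x8 uint8 mask; out-of-bounds points are dropped and the mask is
	# dilated with the default (2, 2) structuring element.
	pts = np.array([[1.2, 6.7], [2.5, 3.0], [12.0, 1.0]])  # last one is discarded
	return PtsListToMask(8, 8, pts)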
def DrawCenterline(_outImage, _centerline, _color = [0., 1., 0.], _deltaColor = [0., -1., 1.], _size = 3., _hls = False):
# _outImage[...] = MarkerListToMask(SIZE_Y, SIZE_X, centerline, (1,1))[...]
delta = 1./len(_centerline)
for coord in _centerline:
if _hls == True:
colorRGB = colorsys.hls_to_rgb(_color[0], _color[1], _color[2])
else:
colorRGB = _color
if True:
# if False:
rr, cc = ski.draw.circle(int(coord[1]), int(coord[0]), _size)
			ids = np.where(np.logical_and(rr < _outImage.shape[1], rr >= 0))  # rows bounded by height
rr = rr[ids]
cc = cc[ids]
			ids = np.where(np.logical_and(cc < _outImage.shape[2], cc >= 0))  # columns bounded by width
rr = rr[ids]
cc = cc[ids]
_outImage[0][rr, cc] = colorRGB[0]
_outImage[1][rr, cc] = colorRGB[1]
_outImage[2][rr, cc] = colorRGB[2]
else:
_outImage[0, int(coord[1]), int(coord[0])] = colorRGB[0]
_outImage[1, int(coord[1]), int(coord[0])] = colorRGB[1]
_outImage[2, int(coord[1]), int(coord[0])] = colorRGB[2]
_color[0] = Clamp(_color[0] + delta*_deltaColor[0], 0., 1.)
_color[1] = Clamp(_color[1] + delta*_deltaColor[1], 0., 1.)
_color[2] = Clamp(_color[2] + delta*_deltaColor[2], 0., 1.)
# print("_color " + str(_color))
| 35.896254
| 148
| 0.711304
|
50ff9ccf7940dd637725692efefe867d4003ac8e
| 4,201
|
py
|
Python
|
font.py
|
IsacSvensson/Ascii-cam
|
ef81d4f25045e6b4e743458c6f3940fdb6abfa72
|
[
"MIT"
] | null | null | null |
font.py
|
IsacSvensson/Ascii-cam
|
ef81d4f25045e6b4e743458c6f3940fdb6abfa72
|
[
"MIT"
] | null | null | null |
font.py
|
IsacSvensson/Ascii-cam
|
ef81d4f25045e6b4e743458c6f3940fdb6abfa72
|
[
"MIT"
] | null | null | null |
"""
Module for weighting characters and drawing the ASCII picture
"""
import string
from PIL import Image, ImageDraw, ImageFont
def distributeWeights(chars, minInterval=0, maxInterval=255):
"""
    Distributes the weighted characters over the given interval
    Returns a list of tuples containing the weighted characters
    Params:
        List: chars - Containing tuples with undistributed weight and associated character
        Int: minInterval - minimum value in distributed system
        Int: maxInterval - maximum value in distributed system
    Returns:
        List: tuples containing distributed weight and character
"""
minValue = chars[0][0]
maxValue = chars[0][0]
for char in chars:
if minValue > char[0]:
minValue = char[0]
if maxValue < char[0]:
maxValue = char[0]
toRet = []
# Distribute values between minInterval and maxInterval
    # minIntv + (unDistWeight - minVal)*(maxIntv - minIntv)/(maxVal - minVal)
for char in chars:
weight = minInterval + (char[0] - minValue)*(maxInterval-minInterval)
weight = weight / (maxValue-minValue)
toRet.append((weight, char[1]))
return toRet
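def _distribute_weights_example():
    # Hedged sketch (added for illustration): with raw weights 10, 20 and 40,
    # the rescale above maps 10 -> 0.0, 20 -> 85.0 and 40 -> 255.0
    # (0 + (w - 10)*(255 - 0)/(40 - 10)).
    return distributeWeights([(10, 'a'), (20, 'b'), (40, 'c')])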
def getGeneralSize(chars, font):
"""
Calculates the width and height from the largest characters
Params:
List: chars - Containing single characters
Font: font - Object of the class Font in PIL.ImageFont
Returns:
int: width
int: height
"""
generalWidth = 0
generalHeight = 0
for char in chars:
# get the size of the biggest char
textWidth, textHeight = font.getsize(char)
if generalWidth < textWidth:
generalWidth = textWidth
if generalHeight < textHeight:
generalHeight = textHeight
return generalWidth, generalHeight
def getWeightedChars():
"""
Creates a list of all printable characters, calculates the "blackness" of
    the characters and then creates and returns a distributed list of the characters
Returns:
List: Containing tuples with weight and the character
"""
printables = string.printable
font = ImageFont.truetype("consola.ttf", 28, encoding="unic")
generalWidth, generalHeight = getGeneralSize(printables, font)
chars = []
for count, char in enumerate(printables):
# calculate darkness of the img
canvas = Image.new('RGB', (generalWidth, generalHeight), "white")
draw = ImageDraw.Draw(canvas)
draw.text((0, 0), char, 'black', font)
pixels = canvas.load()
totalSum = int()
for i in range(generalWidth):
for j in range(generalHeight):
totalSum = totalSum + sum(pixels[i, j])
totalSum = totalSum / (generalHeight*generalWidth*3)
if count == 95:
break
chars.append((abs(totalSum-255), char))
chars.sort()
chars = distributeWeights(chars)
return chars
def getChar(val, chars):
"""
    Gets the character whose "blackness" best matches the given value
    Params:
        Numeric value: val - value to match to characters
        List: chars - weighted characters list
    Returns:
        String: Containing one single character
"""
minDif = None
for i, char in enumerate(chars):
if minDif is None:
minDif = (abs(val-char[0]), i)
elif minDif[0] > abs(val-char[0]):
minDif = (abs(val-char[0]), i)
return chars[minDif[1]][1]
def generateAsciiImage(imgObj, chars):
"""
    Function for generating the ASCII picture
    Params:
        Img: imgObj - Object of class Img
        List: chars - weighted characters list
Returns:
String: Containing the picture as ASCII-art
"""
img = imgObj.img
size = img.size
pix = img.load()
val = []
for i in range(0, size[1]):
val.append([])
for j in range(0, size[0]):
val[i].append((pix[j, i][0]*0.299 + pix[j, i][1]*0.587 + pix[j, i][2]*0.114))
toPrint = str()
for i in range(0, size[1]):
row = str()
for j in range(0, size[0]):
row = row + getChar(val[i][j], chars)*3
toPrint = toPrint + '\n' + row
return toPrint
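def _ascii_pipeline_example(imgObj):
    # Hedged end-to-end sketch (added for illustration): weight the printable
    # characters once, then render any object exposing a PIL image as .img.
    # The 0.299/0.587/0.114 factors in generateAsciiImage are the Rec.601
    # luma coefficients, so each value approximates perceived brightness.
    chars = getWeightedChars()
    return generateAsciiImage(imgObj, chars)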
| 30.889706
| 89
| 0.620567
|
774d88d7d11a4d83018e19841afd9c142641d30b
| 5,822
|
py
|
Python
|
test/test_parser/test_tokeniser.py
|
weakit/Mathics
|
6f7f836d41b27d3ccf078b5da21a7e70a93da679
|
[
"Apache-2.0"
] | 2
|
2019-06-01T12:51:00.000Z
|
2019-06-02T05:17:15.000Z
|
test/test_parser/test_tokeniser.py
|
weakit/Mathics
|
6f7f836d41b27d3ccf078b5da21a7e70a93da679
|
[
"Apache-2.0"
] | 3
|
2018-09-06T17:50:01.000Z
|
2018-09-14T14:05:34.000Z
|
test/test_parser/test_tokeniser.py
|
suhr/Mathics
|
dcdf81a70d617ce460f45cbdcddcb16846ef50dc
|
[
"Apache-2.0"
] | 1
|
2018-09-05T05:09:47.000Z
|
2018-09-05T05:09:47.000Z
|
import unittest
import random
import sys
from mathics.core.parser.tokeniser import Tokeniser, Token
from mathics.core.parser.errors import ScanError, IncompleteSyntaxError, InvalidSyntaxError
from mathics.core.parser.feed import SingleLineFeeder
class TokeniserTest(unittest.TestCase):
def tokens(self, code):
tokeniser = Tokeniser(SingleLineFeeder(code))
tokens = []
while True:
token = tokeniser.next()
if token.tag == 'END':
break
else:
tokens.append(token)
return tokens
def tags(self, code):
return [token.tag for token in self.tokens(code)]
def single_token(self, code):
tokens = self.tokens(code)
self.assertEqual(len(tokens), 1)
token = tokens[0]
return token
def check_number(self, code):
token = self.single_token(code)
self.assertEqual(token, Token('Number', code, 0))
def check_symbol(self, code):
token = self.single_token(code)
self.assertEqual(token, Token('Symbol', code, 0))
def check_string(self, code):
token = self.single_token(code)
self.assertEqual(token, Token('String', code, 0))
def test_number(self):
self.assertEqual(self.tags('1.5'), ['Number'])
self.assertEqual(self.tags('1.5*^10'), ['Number'])
def scan_error(self, string):
self.assertRaises(ScanError, self.tokens, string)
def incomplete_error(self, string):
self.assertRaises(IncompleteSyntaxError, self.tokens, string)
def invalid_error(self, string):
self.assertRaises(InvalidSyntaxError, self.tokens, string)
def testSymbol(self):
self.check_symbol('xX')
self.check_symbol('context`name')
self.check_symbol('`name')
self.check_symbol('`context`name')
def testNumber(self):
self.check_number('0')
def testNumberBase(self):
self.check_number('8^^23')
self.check_number('10*^3')
self.check_number('10*^-3')
self.check_number('8^^23*^2')
def testNumberBig(self):
for _ in range(10):
self.check_number(str(random.randint(0, sys.maxsize)))
self.check_number(str(random.randint(sys.maxsize, sys.maxsize * sys.maxsize)))
def testNumberReal(self):
self.check_number('1.5')
self.check_number('1.5`')
self.check_number('0.0')
def testString(self):
self.check_string(r'"abc"')
self.incomplete_error(r'"abc')
self.check_string(r'"abc(*def*)"')
self.check_string(r'"a\"b\\c"')
self.incomplete_error(r'"\"')
def testPrecision(self):
self.check_number('1.5`-5')
self.check_number('1.5`0')
self.check_number('1.5`10')
def testAccuracy(self):
self.scan_error('1.5``')
self.check_number('1.0``20')
self.check_number('1.0``0')
self.check_number('1.4``-20')
def testSet(self):
self.assertEqual(self.tokens('x = y'), [Token('Symbol', 'x', 0), Token('Set', '=', 2), Token('Symbol', 'y', 4)])
self.assertEqual(self.tokens('x /: y = z'), [Token('Symbol', 'x', 0), Token('TagSet', '/:', 2), Token('Symbol', 'y', 5), Token('Set', '=', 7), Token('Symbol', 'z', 9)])
def testUnset(self):
self.assertEqual(self.tokens('=.'), [Token('Unset', '=.', 0)])
self.assertEqual(self.tokens('= .'), [Token('Unset', '= .', 0)])
self.assertEqual(self.tokens('=.5'), [Token('Set', '=', 0), Token('Number', '.5', 1)])
self.assertEqual(self.tokens('= ..'), [Token('Set', '=', 0), Token('Repeated', '..', 2)])
def testIntRepeated(self):
self.assertEqual(self.tokens('1..'), [Token('Number', '1', 0), Token('Repeated', '..', 1)])
self.assertEqual(self.tokens('1. .'), [Token('Number', '1.', 0), Token('Dot', '.', 3)])
    def testIntegral(self):
self.assertEqual(self.tokens('\u222B x \uF74C y'), [Token('Integral', '\u222B', 0), Token('Symbol', 'x', 2), Token('DifferentialD', '\uF74C', 4), Token('Symbol', 'y', 6)])
def testPre(self):
self.assertEqual(self.tokens('++x++'), [Token('Increment', '++', 0), Token('Symbol', 'x', 2), Token('Increment', '++', 3)])
def testFunction(self):
self.assertEqual(self.tokens('x&'), [Token('Symbol', 'x', 0), Token('Function', '&', 1)])
self.assertEqual(self.tokens('x\uf4a1'), [Token('Symbol', 'x', 0), Token('Function', '\uf4a1', 1)])
def testApply(self):
self.assertEqual(self.tokens('f // x'), [Token('Symbol', 'f', 0), Token('Postfix', '//', 2), Token('Symbol', 'x', 5)])
self.assertEqual(self.tokens('f @ x'), [Token('Symbol', 'f', 0), Token('Prefix', '@', 2), Token('Symbol', 'x', 4)])
self.assertEqual(self.tokens('f ~ x'), [Token('Symbol', 'f', 0), Token('Infix', '~', 2), Token('Symbol', 'x', 4)])
def testBackslash(self):
self.assertEqual(self.tokens('\\[Backslash]'), [Token('Backslash', '\u2216', 0)])
self.assertEqual(self.tokens('\\ a'), [Token('RawBackslash', '\\', 0), Token('Symbol', 'a', 2)])
self.incomplete_error('\\')
def testBoxes(self):
self.assertEqual(self.tokens('\\(1\\)'), [Token('LeftRowBox', '\\(', 0), Token('Number', "1", 2), Token('RightRowBox', '\\)', 3)])
def testInformation(self):
self.assertEqual(self.tokens('??Sin'), [Token('Information', '??', 0), Token('Symbol', 'Sin', 2)])
self.assertEqual(self.tokens('? ?Sin'), [Token('PatternTest', '?', 0), Token('PatternTest', '?', 2), Token('Symbol', 'Sin', 3)])
def testAssociation(self):
self.assertEqual(self.tokens('<|x -> m|>'), [Token('RawLeftAssociation', '<|', 0), Token('Symbol', "x", 2), Token('Rule', '->', 4), Token('Symbol', "m", 7), Token('RawRightAssociation', '|>', 8)])
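if __name__ == '__main__':
    # Added for convenience (standard unittest idiom, not in the original
    # file): lets this module be run directly.
    unittest.main()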
| 40.430556
| 204
| 0.583133
|
237d46946583e7d498ac3638133cb605d30acb93
| 4,267
|
py
|
Python
|
escriptcore/py_src/symbolic/pretty.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
escriptcore/py_src/symbolic/pretty.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | 1
|
2019-01-14T03:07:43.000Z
|
2019-01-14T03:07:43.000Z
|
escriptcore/py_src/symbolic/pretty.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
import numpy
import sympy
from sympy.printing.pretty.pretty import PrettyPrinter,prettyForm,pretty_symbol
from .symbol import Symbol
__author__="Cihan Altinay"
class ValueMatrix(object):
def __init__(self, content):
self._items=numpy.array(content)
if self._items.ndim>2:
raise TypeError("ValueMatrix only supports 1-D and 2-D arrays")
elif self._items.ndim==1:
self._items=self._items.reshape((1,)+self._items.shape)
self.rows,self.cols=self._items.shape
def __getitem__(self, key):
return self._items[key]
class EscriptPrettyPrinter(PrettyPrinter):
"""
"""
def __init__(self, profile=None):
PrettyPrinter.__init__(self, profile)
try:
self.__ppMatrix = self._print_Matrix
except AttributeError:
# renamed in 0.7.2
self.__ppMatrix = self._print_MatrixBase
def _print_Symbol(self, e):
# handle escript symbols
if isinstance(e, Symbol):
if e.getRank()<=4:
return self._print(e.__array__())
return PrettyPrinter._print_Symbol(self,e)
# e is a sympy Symbol. Remove any brackets from the name in case e is
# a component
n,c=Symbol._symComp(e)
if len(c)==0:
return PrettyPrinter._print_Symbol(self,e)
s=sympy.Symbol(n+'_'.join([str(i) for i in c]))
return PrettyPrinter._print_Symbol(self, s)
def _print_ndarray(self, e):
if e.ndim==0:
return self._print(e.item())
elif e.ndim==1:
m=sympy.Matrix(1,e.shape[0],lambda i,j:e[j])
return self.__ppMatrix(m)
elif e.ndim==2:
i,j=e.shape
m=sympy.Matrix(i,j,lambda i,j:e[i,j])
return self.__ppMatrix(m)
else: #ndim==3 or 4:
arr=numpy.empty(e.shape[2:],dtype=object)
for idx in numpy.ndindex(e.shape[2:]):
arr[idx]=Symbol(e[idx])
m=ValueMatrix(arr)
return self.__ppMatrix(m)
def _print_grad_n(self, e):
s=prettyForm(*self._print(e.args[0]).parens())
i=pretty_symbol(",_"+str(e.args[1]))
return prettyForm(*s.right(i))
def pretty(expr, profile=None, **kargs):
"""
Returns a string containing the prettified form of expr.
Supported arguments:
``expr``
the expression to print
``wrap_line``
line wrapping enabled/disabled, should be a boolean value
(default to True)
``use_unicode``
use unicode characters, such as the Greek letter pi instead of
the string pi. Values should be boolean or None
``full_prec``
use full precision. Default to "auto"
"""
from sympy.printing.pretty.pretty import pretty_use_unicode
if profile is not None:
profile.update(kargs)
else:
profile = kargs
uflag = pretty_use_unicode(kargs.get("use_unicode", None))
try:
pp = EscriptPrettyPrinter(profile)
return pp.doprint(expr)
finally:
pretty_use_unicode(uflag)
def pretty_print(expr, use_unicode=None):
"""
Prints expr in pretty form.
pprint is just a shortcut for this function
"""
print(pretty(expr, use_unicode = use_unicode))
pprint = pretty_print
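def _pretty_example():
    # Hedged sketch (added for illustration, assuming the usual escript
    # Symbol constructor taking a name and a shape): rank <= 4 symbols are
    # rendered component-wise through _print_ndarray above.
    x = Symbol('x', (2, 2))
    return pretty(x, use_unicode=False)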
| 32.572519
| 79
| 0.615421
|
cf5179a38015388cef2f41097b3f626be7b63784
| 1,037
|
py
|
Python
|
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/xdata/client/XdataClient.py
|
Ureimu/weather-robot
|
7634195af388538a566ccea9f8a8534c5fb0f4b6
|
[
"MIT"
] | 14
|
2018-04-19T09:53:56.000Z
|
2022-01-27T06:05:48.000Z
|
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/xdata/client/XdataClient.py
|
Ureimu/weather-robot
|
7634195af388538a566ccea9f8a8534c5fb0f4b6
|
[
"MIT"
] | 15
|
2018-09-11T05:39:54.000Z
|
2021-07-02T12:38:02.000Z
|
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/xdata/client/XdataClient.py
|
Ureimu/weather-robot
|
7634195af388538a566ccea9f8a8534c5fb0f4b6
|
[
"MIT"
] | 33
|
2018-04-20T05:29:16.000Z
|
2022-02-17T09:10:05.000Z
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudclient import JDCloudClient
from jdcloud_sdk.core.config import Config
class XdataClient(JDCloudClient):
def __init__(self, credential, config=None, logger=None):
if config is None:
config = Config('xdata.jdcloud-api.com')
super(XdataClient, self).__init__(credential, config, 'xdata', '1.1.0', logger)
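# Hedged usage sketch (added for illustration; the Credential import follows
# the usual jdcloud_sdk pattern and the keys are placeholders):
#
#     from jdcloud_sdk.core.credential import Credential
#     client = XdataClient(Credential('<accessKey>', '<secretKey>'))
#
# With config=None the client talks to xdata.jdcloud-api.com as set above.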
| 34.566667
| 87
| 0.745419
|
d3f5e38bdcc1e19f754b5488cd583ad447f059f1
| 28,909
|
py
|
Python
|
silx/gui/plot3d/items/mesh.py
|
vincefn/silx
|
4b239abfc90d2fa7d6ab61425f8bfc7b83c0f444
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
silx/gui/plot3d/items/mesh.py
|
vincefn/silx
|
4b239abfc90d2fa7d6ab61425f8bfc7b83c0f444
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
silx/gui/plot3d/items/mesh.py
|
vincefn/silx
|
4b239abfc90d2fa7d6ab61425f8bfc7b83c0f444
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
# coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2017-2019 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
"""This module provides regular mesh item class.
"""
from __future__ import absolute_import
__authors__ = ["T. Vincent"]
__license__ = "MIT"
__date__ = "17/07/2018"
import logging
import numpy
from ..scene import primitives, utils, function
from ..scene.transform import Rotate
from .core import DataItem3D, ItemChangedType
from .mixins import ColormapMixIn
from ._pick import PickingResult
_logger = logging.getLogger(__name__)
class _MeshBase(DataItem3D):
"""Base class for :class:`Mesh' and :class:`ColormapMesh`.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
DataItem3D.__init__(self, parent=parent)
self._mesh = None
def _setMesh(self, mesh):
"""Set mesh primitive
:param Union[None,Geometry] mesh: The scene primitive
"""
self._getScenePrimitive().children = [] # Remove any previous mesh
self._mesh = mesh
if self._mesh is not None:
self._getScenePrimitive().children.append(self._mesh)
self._updated(ItemChangedType.DATA)
def _getMesh(self):
"""Returns the underlying Mesh scene primitive"""
return self._mesh
def getPositionData(self, copy=True):
"""Get the mesh vertex positions.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The (x, y, z) positions as a (N, 3) array
:rtype: numpy.ndarray
"""
if self._getMesh() is None:
return numpy.empty((0, 3), dtype=numpy.float32)
else:
return self._getMesh().getAttribute('position', copy=copy)
def getNormalData(self, copy=True):
"""Get the mesh vertex normals.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The normals as a (N, 3) array, a single normal or None
:rtype: Union[numpy.ndarray,None]
"""
if self._getMesh() is None:
return None
else:
return self._getMesh().getAttribute('normal', copy=copy)
def getIndices(self, copy=True):
"""Get the vertex indices.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The vertex indices as an array or None.
:rtype: Union[numpy.ndarray,None]
"""
if self._getMesh() is None:
return None
else:
return self._getMesh().getIndices(copy=copy)
def getDrawMode(self):
"""Get mesh rendering mode.
:return: The drawing mode of this primitive
:rtype: str
"""
return self._getMesh().drawMode
def _pickFull(self, context):
"""Perform precise picking in this item at given widget position.
:param PickContext context: Current picking context
:return: Object holding the results or None
:rtype: Union[None,PickingResult]
"""
rayObject = context.getPickingSegment(frame=self._getScenePrimitive())
if rayObject is None: # No picking outside viewport
return None
rayObject = rayObject[:, :3]
positions = self.getPositionData(copy=False)
if positions.size == 0:
return None
mode = self.getDrawMode()
vertexIndices = self.getIndices(copy=False)
if vertexIndices is not None: # Expand indices
positions = utils.unindexArrays(mode, vertexIndices, positions)[0]
triangles = positions.reshape(-1, 3, 3)
else:
if mode == 'triangles':
triangles = positions.reshape(-1, 3, 3)
elif mode == 'triangle_strip':
# Expand strip
triangles = numpy.empty((len(positions) - 2, 3, 3),
dtype=positions.dtype)
triangles[:, 0] = positions[:-2]
triangles[:, 1] = positions[1:-1]
triangles[:, 2] = positions[2:]
elif mode == 'fan':
# Expand fan
triangles = numpy.empty((len(positions) - 2, 3, 3),
dtype=positions.dtype)
triangles[:, 0] = positions[0]
triangles[:, 1] = positions[1:-1]
triangles[:, 2] = positions[2:]
else:
_logger.warning("Unsupported draw mode: %s" % mode)
return None
trianglesIndices, t, barycentric = utils.segmentTrianglesIntersection(
rayObject, triangles)
if len(trianglesIndices) == 0:
return None
points = t.reshape(-1, 1) * (rayObject[1] - rayObject[0]) + rayObject[0]
# Get vertex index from triangle index and closest point in triangle
closest = numpy.argmax(barycentric, axis=1)
if mode == 'triangles':
indices = trianglesIndices * 3 + closest
elif mode == 'triangle_strip':
indices = trianglesIndices + closest
elif mode == 'fan':
indices = trianglesIndices + closest # For corners 1 and 2
indices[closest == 0] = 0 # For first corner (common)
if vertexIndices is not None:
# Convert from indices in expanded triangles to input vertices
indices = vertexIndices[indices]
return PickingResult(self,
positions=points,
indices=indices,
fetchdata=self.getPositionData)
class Mesh(_MeshBase):
"""Description of mesh.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
_MeshBase.__init__(self, parent=parent)
def setData(self,
position,
color,
normal=None,
mode='triangles',
indices=None,
copy=True):
"""Set mesh geometry data.
Supported drawing modes are: 'triangles', 'triangle_strip', 'fan'
:param numpy.ndarray position:
Position (x, y, z) of each vertex as a (N, 3) array
:param numpy.ndarray color: Colors for each point or a single color
:param Union[numpy.ndarray,None] normal: Normals for each point or None (default)
:param str mode: The drawing mode.
:param Union[List[int],None] indices:
Array of vertex indices or None to use arrays directly.
:param bool copy: True (default) to copy the data,
False to use as is (do not modify!).
"""
assert mode in ('triangles', 'triangle_strip', 'fan')
if position is None or len(position) == 0:
mesh = None
else:
mesh = primitives.Mesh3D(
position, color, normal, mode=mode, indices=indices, copy=copy)
self._setMesh(mesh)
def getData(self, copy=True):
"""Get the mesh geometry.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The positions, colors, normals and mode
:rtype: tuple of numpy.ndarray
"""
return (self.getPositionData(copy=copy),
self.getColorData(copy=copy),
self.getNormalData(copy=copy),
self.getDrawMode())
def getColorData(self, copy=True):
"""Get the mesh vertex colors.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The RGBA colors as a (N, 4) array or a single color
:rtype: numpy.ndarray
"""
if self._getMesh() is None:
return numpy.empty((0, 4), dtype=numpy.float32)
else:
return self._getMesh().getAttribute('color', copy=copy)
class ColormapMesh(_MeshBase, ColormapMixIn):
"""Description of mesh which color is defined by scalar and a colormap.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
_MeshBase.__init__(self, parent=parent)
ColormapMixIn.__init__(self, function.Colormap())
def setData(self,
position,
value,
normal=None,
mode='triangles',
indices=None,
copy=True):
"""Set mesh geometry data.
Supported drawing modes are: 'triangles', 'triangle_strip', 'fan'
:param numpy.ndarray position:
Position (x, y, z) of each vertex as a (N, 3) array
:param numpy.ndarray value: Data value for each vertex.
:param Union[numpy.ndarray,None] normal: Normals for each point or None (default)
:param str mode: The drawing mode.
:param Union[List[int],None] indices:
Array of vertex indices or None to use arrays directly.
:param bool copy: True (default) to copy the data,
False to use as is (do not modify!).
"""
assert mode in ('triangles', 'triangle_strip', 'fan')
if position is None or len(position) == 0:
mesh = None
else:
mesh = primitives.ColormapMesh3D(
position=position,
value=numpy.array(value, copy=False).reshape(-1, 1), # Make it a 2D array
colormap=self._getSceneColormap(),
normal=normal,
mode=mode,
indices=indices,
copy=copy)
self._setMesh(mesh)
# Store data range info
ColormapMixIn._setRangeFromData(self, self.getValueData(copy=False))
def getData(self, copy=True):
"""Get the mesh geometry.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: The positions, values, normals and mode
:rtype: tuple of numpy.ndarray
"""
return (self.getPositionData(copy=copy),
self.getValueData(copy=copy),
self.getNormalData(copy=copy),
self.getDrawMode())
def getValueData(self, copy=True):
"""Get the mesh vertex values.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: Array of data values
:rtype: numpy.ndarray
"""
if self._getMesh() is None:
return numpy.empty((0,), dtype=numpy.float32)
else:
return self._getMesh().getAttribute('value', copy=copy)
class _CylindricalVolume(DataItem3D):
"""Class that represents a volume with a rotational symmetry along z
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
DataItem3D.__init__(self, parent=parent)
self._mesh = None
self._nbFaces = 0
def getPosition(self, copy=True):
"""Get primitive positions.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: Position of the primitives as a (N, 3) array.
:rtype: numpy.ndarray
"""
raise NotImplementedError("Must be implemented in subclass")
def _setData(self, position, radius, height, angles, color, flatFaces,
rotation):
"""Set volume geometry data.
:param numpy.ndarray position:
Center position (x, y, z) of each volume as (N, 3) array.
        :param float radius: External radius of the volume.
:param float height: Height of the volume(s).
:param numpy.ndarray angles: Angles of the edges.
:param numpy.array color: RGB color of the volume(s).
        :param bool flatFaces:
            Whether the volume has flat faces or not. Used for normals calculation.
        :param Rotate rotation: Rotation applied to the generated vertices.
        """
self._getScenePrimitive().children = [] # Remove any previous mesh
if position is None or len(position) == 0:
self._mesh = None
self._nbFaces = 0
else:
self._nbFaces = len(angles) - 1
volume = numpy.empty(shape=(len(angles) - 1, 12, 3),
dtype=numpy.float32)
normal = numpy.empty(shape=(len(angles) - 1, 12, 3),
dtype=numpy.float32)
for i in range(0, len(angles) - 1):
# c6
# /\
# / \
# / \
# c4|------|c5
# | \ |
# | \ |
# | \ |
# | \ |
# c2|------|c3
# \ /
# \ /
# \/
# c1
c1 = numpy.array([0, 0, -height/2])
c1 = rotation.transformPoint(c1)
c2 = numpy.array([radius * numpy.cos(angles[i]),
radius * numpy.sin(angles[i]),
-height/2])
c2 = rotation.transformPoint(c2)
c3 = numpy.array([radius * numpy.cos(angles[i+1]),
radius * numpy.sin(angles[i+1]),
-height/2])
c3 = rotation.transformPoint(c3)
c4 = numpy.array([radius * numpy.cos(angles[i]),
radius * numpy.sin(angles[i]),
height/2])
c4 = rotation.transformPoint(c4)
c5 = numpy.array([radius * numpy.cos(angles[i+1]),
radius * numpy.sin(angles[i+1]),
height/2])
c5 = rotation.transformPoint(c5)
c6 = numpy.array([0, 0, height/2])
c6 = rotation.transformPoint(c6)
volume[i] = numpy.array([c1, c3, c2,
c2, c3, c4,
c3, c5, c4,
c4, c5, c6])
if flatFaces:
normal[i] = numpy.array([numpy.cross(c3-c1, c2-c1), # c1
numpy.cross(c2-c3, c1-c3), # c3
numpy.cross(c1-c2, c3-c2), # c2
numpy.cross(c3-c2, c4-c2), # c2
numpy.cross(c4-c3, c2-c3), # c3
numpy.cross(c2-c4, c3-c4), # c4
numpy.cross(c5-c3, c4-c3), # c3
numpy.cross(c4-c5, c3-c5), # c5
numpy.cross(c3-c4, c5-c4), # c4
numpy.cross(c5-c4, c6-c4), # c4
                                         numpy.cross(c6-c5, c4-c5),  # c5 (c5-c5 was a zero vector)
numpy.cross(c4-c6, c5-c6)]) # c6
else:
normal[i] = numpy.array([numpy.cross(c3-c1, c2-c1),
numpy.cross(c2-c3, c1-c3),
numpy.cross(c1-c2, c3-c2),
c2-c1, c3-c1, c4-c6, # c2 c2 c4
c3-c1, c5-c6, c4-c6, # c3 c5 c4
numpy.cross(c5-c4, c6-c4),
                                         numpy.cross(c6-c5, c4-c5),  # c5 (c5-c5 was a zero vector)
numpy.cross(c4-c6, c5-c6)])
# Multiplication according to the number of positions
vertices = numpy.tile(volume.reshape(-1, 3), (len(position), 1))\
.reshape((-1, 3))
normals = numpy.tile(normal.reshape(-1, 3), (len(position), 1))\
.reshape((-1, 3))
# Translations
numpy.add(vertices, numpy.tile(position, (1, (len(angles)-1) * 12))
.reshape((-1, 3)), out=vertices)
# Colors
if numpy.ndim(color) == 2:
color = numpy.tile(color, (1, 12 * (len(angles) - 1)))\
.reshape(-1, 3)
self._mesh = primitives.Mesh3D(
vertices, color, normals, mode='triangles', copy=False)
self._getScenePrimitive().children.append(self._mesh)
self._updated(ItemChangedType.DATA)
def _pickFull(self, context):
"""Perform precise picking in this item at given widget position.
:param PickContext context: Current picking context
:return: Object holding the results or None
:rtype: Union[None,PickingResult]
"""
if self._mesh is None or self._nbFaces == 0:
return None
rayObject = context.getPickingSegment(frame=self._getScenePrimitive())
if rayObject is None: # No picking outside viewport
return None
rayObject = rayObject[:, :3]
positions = self._mesh.getAttribute('position', copy=False)
triangles = positions.reshape(-1, 3, 3) # 'triangle' draw mode
trianglesIndices, t = utils.segmentTrianglesIntersection(
rayObject, triangles)[:2]
if len(trianglesIndices) == 0:
return None
# Get object index from triangle index
indices = trianglesIndices // (4 * self._nbFaces)
# Select closest intersection point for each primitive
indices, firstIndices = numpy.unique(indices, return_index=True)
t = t[firstIndices]
# Resort along t as result of numpy.unique is not sorted by t
sortedIndices = numpy.argsort(t)
t = t[sortedIndices]
indices = indices[sortedIndices]
points = t.reshape(-1, 1) * (rayObject[1] - rayObject[0]) + rayObject[0]
return PickingResult(self,
positions=points,
indices=indices,
fetchdata=self.getPosition)
class Box(_CylindricalVolume):
"""Description of a box.
Can be used to draw one box or many similar boxes.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
super(Box, self).__init__(parent)
self.position = None
self.size = None
self.color = None
self.rotation = None
self.setData()
def setData(self, size=(1, 1, 1), color=(1, 1, 1),
position=(0, 0, 0), rotation=(0, (0, 0, 0))):
"""
Set Box geometry data.
:param numpy.array size: Size (dx, dy, dz) of the box(es).
:param numpy.array color: RGB color of the box(es).
:param numpy.ndarray position:
Center position (x, y, z) of each box as a (N, 3) array.
:param tuple(float, array) rotation:
Angle (in degrees) and axis of rotation.
            If (0, (0, 0, 0)) (default), the top and bottom faces lie in the
            xy plane and a side face is aligned with the x axis.
"""
self.position = numpy.atleast_2d(numpy.array(position, copy=True))
self.size = numpy.array(size, copy=True)
self.color = numpy.array(color, copy=True)
self.rotation = Rotate(rotation[0],
rotation[1][0], rotation[1][1], rotation[1][2])
assert (numpy.ndim(self.color) == 1 or
len(self.color) == len(self.position))
diagonal = numpy.sqrt(self.size[0]**2 + self.size[1]**2)
alpha = 2 * numpy.arcsin(self.size[1] / diagonal)
beta = 2 * numpy.arcsin(self.size[0] / diagonal)
angles = numpy.array([0,
alpha,
alpha + beta,
alpha + beta + alpha,
2 * numpy.pi])
numpy.subtract(angles, 0.5 * alpha, out=angles)
self._setData(self.position,
numpy.sqrt(self.size[0]**2 + self.size[1]**2)/2,
self.size[2],
angles,
self.color,
True,
self.rotation)
def getPosition(self, copy=True):
"""Get box(es) position(s).
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: Position of the box(es) as a (N, 3) array.
:rtype: numpy.ndarray
"""
return numpy.array(self.position, copy=copy)
def getSize(self):
"""Get box(es) size.
:return: Size (dx, dy, dz) of the box(es).
:rtype: numpy.ndarray
"""
return numpy.array(self.size, copy=True)
def getColor(self, copy=True):
"""Get box(es) color.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: RGB color of the box(es).
:rtype: numpy.ndarray
"""
return numpy.array(self.color, copy=copy)
class Cylinder(_CylindricalVolume):
"""Description of a cylinder.
Can be used to draw one cylinder or many similar cylinders.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
super(Cylinder, self).__init__(parent)
self.position = None
self.radius = None
self.height = None
self.color = None
self.nbFaces = 0
self.rotation = None
self.setData()
def setData(self, radius=1, height=1, color=(1, 1, 1), nbFaces=20,
position=(0, 0, 0), rotation=(0, (0, 0, 0))):
"""
Set the cylinder geometry data
:param float radius: Radius of the cylinder(s).
:param float height: Height of the cylinder(s).
:param numpy.array color: RGB color of the cylinder(s).
:param int nbFaces:
Number of faces for cylinder approximation (default 20).
:param numpy.ndarray position:
Center position (x, y, z) of each cylinder as a (N, 3) array.
:param tuple(float, array) rotation:
Angle (in degrees) and axis of rotation.
            If (0, (0, 0, 0)) (default), the circular faces are on the
            xy plane and a side face is aligned with the x axis.
"""
self.position = numpy.atleast_2d(numpy.array(position, copy=True))
self.radius = float(radius)
self.height = float(height)
self.color = numpy.array(color, copy=True)
self.nbFaces = int(nbFaces)
self.rotation = Rotate(rotation[0],
rotation[1][0], rotation[1][1], rotation[1][2])
assert (numpy.ndim(self.color) == 1 or
len(self.color) == len(self.position))
angles = numpy.linspace(0, 2*numpy.pi, self.nbFaces + 1)
self._setData(self.position,
self.radius,
self.height,
angles,
self.color,
False,
self.rotation)
def getPosition(self, copy=True):
"""Get cylinder(s) position(s).
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: Position(s) of the cylinder(s) as a (N, 3) array.
:rtype: numpy.ndarray
"""
return numpy.array(self.position, copy=copy)
def getRadius(self):
"""Get cylinder(s) radius.
:return: Radius of the cylinder(s).
:rtype: float
"""
return self.radius
def getHeight(self):
"""Get cylinder(s) height.
:return: Height of the cylinder(s).
:rtype: float
"""
return self.height
def getColor(self, copy=True):
"""Get cylinder(s) color.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: RGB color of the cylinder(s).
:rtype: numpy.ndarray
"""
return numpy.array(self.color, copy=copy)
class Hexagon(_CylindricalVolume):
"""Description of a uniform hexagonal prism.
    Can be used to draw one hexagonal prism or many similar hexagonal
prisms.
:param parent: The View widget this item belongs to.
"""
def __init__(self, parent=None):
super(Hexagon, self).__init__(parent)
self.position = None
self.radius = 0
self.height = 0
self.color = None
self.rotation = None
self.setData()
def setData(self, radius=1, height=1, color=(1, 1, 1),
position=(0, 0, 0), rotation=(0, (0, 0, 0))):
"""
Set the uniform hexagonal prism geometry data
:param float radius: External radius of the hexagonal prism
:param float height: Height of the hexagonal prism
:param numpy.array color: RGB color of the prism(s)
:param numpy.ndarray position:
Center position (x, y, z) of each prism as a (N, 3) array
:param tuple(float, array) rotation:
Angle (in degrees) and axis of rotation.
If (0, (0, 0, 0)) (default), the hexagonal faces are on
xy plane and a side face is aligned with x axis.
"""
self.position = numpy.atleast_2d(numpy.array(position, copy=True))
self.radius = float(radius)
self.height = float(height)
self.color = numpy.array(color, copy=True)
self.rotation = Rotate(rotation[0], rotation[1][0], rotation[1][1],
rotation[1][2])
assert (numpy.ndim(self.color) == 1 or
len(self.color) == len(self.position))
angles = numpy.linspace(0, 2*numpy.pi, 7)
self._setData(self.position,
self.radius,
self.height,
angles,
self.color,
True,
self.rotation)
def getPosition(self, copy=True):
"""Get hexagonal prim(s) position(s).
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: Position(s) of hexagonal prism(s) as a (N, 3) array.
:rtype: numpy.ndarray
"""
return numpy.array(self.position, copy=copy)
def getRadius(self):
"""Get hexagonal prism(s) radius.
:return: Radius of hexagon(s).
:rtype: float
"""
return self.radius
def getHeight(self):
"""Get hexagonal prism(s) height.
:return: Height of hexagonal prism(s).
:rtype: float
"""
return self.height
def getColor(self, copy=True):
"""Get hexagonal prism(s) color.
:param bool copy:
True (default) to get a copy,
False to get internal representation (do not modify!).
:return: RGB color of the hexagonal prism(s).
:rtype: numpy.ndarray
"""
return numpy.array(self.color, copy=copy)
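# Hedged usage sketch (added for illustration; like any plot3d item these
# need a running Qt application and a SceneWidget to become visible):
#
#     item = Box()
#     item.setData(size=(1, 2, 3), color=(1, 0, 0),
#                  position=[(0, 0, 0), (2, 0, 0)])
#
# Two identical red boxes are then described, one at the origin and one at x=2.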
| 36.455233
| 90
| 0.535819
|
f48b05dd90b6f528524a74c2606421662ed54d95
| 638
|
py
|
Python
|
tests/helper/__init__.py
|
fsantini/gtagora-connector-py
|
e97edf0da8adbdfb6d238caf1add42b70d2482f2
|
[
"MIT"
] | 3
|
2020-06-30T14:26:46.000Z
|
2022-01-12T19:44:26.000Z
|
tests/helper/__init__.py
|
fsantini/gtagora-connector-py
|
e97edf0da8adbdfb6d238caf1add42b70d2482f2
|
[
"MIT"
] | null | null | null |
tests/helper/__init__.py
|
fsantini/gtagora-connector-py
|
e97edf0da8adbdfb6d238caf1add42b70d2482f2
|
[
"MIT"
] | 2
|
2019-02-27T13:54:41.000Z
|
2019-10-07T13:55:27.000Z
|
from gtagora.http.client import Client
class FakeResponse:
def __init__(self, status_code=200, data=None):
self.status_code = status_code
self.data = data
self.text = ''
def json(self):
return self.data
class FakeClient(Client):
def __init__(self, connection, response=FakeResponse()):
super().__init__(connection=connection)
self.response = response
def check_connection(self):
return True
def set_next_response(self, response):
self.response = response
def get(self, url, timeout=None, params=None, **kwargs):
return self.response
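def _fake_client_example(connection):
    # Hedged sketch (added for illustration; the URL is a placeholder):
    # queue a canned JSON payload so a GET can be exercised with no server.
    client = FakeClient(connection)
    client.set_next_response(FakeResponse(200, {'id': 1}))
    return client.get('/api/v1/example/').json()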
| 22.785714
| 60
| 0.658307
|
cdf8d6dd82c07f048a779f824ab0b273ea62c1d7
| 5,624
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/effective_network_security_rule.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/effective_network_security_rule.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/effective_network_security_rule.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EffectiveNetworkSecurityRule(Model):
"""Effective network security rules.
:param name: The name of the security rule specified by the user (if
created by the user).
:type name: str
:param protocol: The network protocol this rule applies to. Possible
values are: 'Tcp', 'Udp', and 'All'. Possible values include: 'Tcp',
'Udp', 'All'
:type protocol: str or
~azure.mgmt.network.v2018_12_01.models.EffectiveSecurityRuleProtocol
:param source_port_range: The source port or range.
:type source_port_range: str
:param destination_port_range: The destination port or range.
:type destination_port_range: str
:param source_port_ranges: The source port ranges. Expected values include
a single integer between 0 and 65535, a range using '-' as separator (e.g.
100-400), or an asterisk (*)
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges. Expected
values include a single integer between 0 and 65535, a range using '-' as
separator (e.g. 100-400), or an asterisk (*)
:type destination_port_ranges: list[str]
:param source_address_prefix: The source address prefix.
:type source_address_prefix: str
:param destination_address_prefix: The destination address prefix.
:type destination_address_prefix: str
:param source_address_prefixes: The source address prefixes. Expected
values include CIDR IP ranges, Default Tags (VirtualNetwork,
AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
:type source_address_prefixes: list[str]
:param destination_address_prefixes: The destination address prefixes.
Expected values include CIDR IP ranges, Default Tags (VirtualNetwork,
AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
:type destination_address_prefixes: list[str]
:param expanded_source_address_prefix: The expanded source address prefix.
:type expanded_source_address_prefix: list[str]
:param expanded_destination_address_prefix: Expanded destination address
prefix.
:type expanded_destination_address_prefix: list[str]
:param access: Whether network traffic is allowed or denied. Possible
values are: 'Allow' and 'Deny'. Possible values include: 'Allow', 'Deny'
:type access: str or
~azure.mgmt.network.v2018_12_01.models.SecurityRuleAccess
:param priority: The priority of the rule.
:type priority: int
    :param direction: The direction of the rule. Possible values are:
    'Inbound' and 'Outbound'. Possible values include: 'Inbound', 'Outbound'
:type direction: str or
~azure.mgmt.network.v2018_12_01.models.SecurityRuleDirection
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
'access': {'key': 'access', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'direction': {'key': 'direction', 'type': 'str'},
}
def __init__(self, **kwargs):
super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
| 55.137255
| 108
| 0.689189
|
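An illustrative construction of the model above; all field values are made up. Since __init__ stores plain kwargs, any subset of the documented parameters can be passed.

rule = EffectiveNetworkSecurityRule(
    name='allow-https-inbound',
    protocol='Tcp',
    source_address_prefix='Internet',
    destination_port_range='443',
    access='Allow',
    priority=100,
    direction='Inbound',
)
print(rule.name, rule.priority)  # allow-https-inbound 100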
c1e7949980d85bac59d8ace6613af6908ca2f5eb
| 1,068
|
py
|
Python
|
mistral/tests/unit/actions/test_javascript_action.py
|
soda-research/mistral
|
550a3de9c2defc7ce26336cb705d9c8d87bbaddd
|
[
"Apache-2.0"
] | 3
|
2015-08-28T04:57:56.000Z
|
2017-03-27T10:59:56.000Z
|
mistral/tests/unit/actions/test_javascript_action.py
|
soda-research/mistral
|
550a3de9c2defc7ce26336cb705d9c8d87bbaddd
|
[
"Apache-2.0"
] | 21
|
2015-04-14T22:41:53.000Z
|
2019-02-20T09:30:10.000Z
|
mistral/tests/unit/actions/test_javascript_action.py
|
soda-research/mistral
|
550a3de9c2defc7ce26336cb705d9c8d87bbaddd
|
[
"Apache-2.0"
] | 12
|
2015-08-14T02:27:37.000Z
|
2020-12-31T10:09:21.000Z
|
# Copyright 2015 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from mistral.actions import std_actions as std
from mistral.tests.unit import base
from mistral.utils import javascript
class JavascriptActionTest(base.BaseTest):
@mock.patch.object(
javascript, 'evaluate', mock.Mock(return_value="3")
)
def test_js_action(self):
mock_ctx = mock.Mock()
script = "return 1 + 2"
action = std.JavaScriptAction(script)
self.assertEqual("3", action.run(mock_ctx))
| 33.375
| 77
| 0.709738
|
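The test above relies on mock.patch.object to replace the evaluator for the duration of the call; a minimal standalone illustration of that pattern (the Evaluator class here is invented for the demo):

from unittest import mock

class Evaluator:
    def evaluate(self, script):
        raise RuntimeError('real JS engine not available in tests')

with mock.patch.object(Evaluator, 'evaluate', mock.Mock(return_value='3')):
    # inside the patch, the stub answers instead of the real engine
    assert Evaluator().evaluate('return 1 + 2') == '3'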
5760a5c1cec0f0ca07e932da334ae817bf844834
| 9,567
|
py
|
Python
|
tests/test_integration.py
|
marten-de-vries/Flask-WebSub
|
1853582a7f60e79c9ac1bd03b0bf30b36dab77f6
|
[
"0BSD"
] | 19
|
2017-08-27T09:52:46.000Z
|
2021-11-06T14:53:06.000Z
|
tests/test_integration.py
|
marten-de-vries/Flask-WebSub
|
1853582a7f60e79c9ac1bd03b0bf30b36dab77f6
|
[
"0BSD"
] | 7
|
2018-11-11T17:19:49.000Z
|
2021-04-06T10:54:48.000Z
|
tests/test_integration.py
|
marten-de-vries/Flask-WebSub
|
1853582a7f60e79c9ac1bd03b0bf30b36dab77f6
|
[
"0BSD"
] | 4
|
2018-01-26T13:18:55.000Z
|
2021-11-06T14:51:53.000Z
|
from flask import Flask
import pytest
import requests
from cachelib import SimpleCache
import os
import base64
from unittest.mock import Mock, call
from flask_websub.errors import SubscriberError, NotificationError
from flask_websub.subscriber import Subscriber, SQLite3TempSubscriberStorage, \
SQLite3SubscriberStorage, \
WerkzeugCacheTempSubscriberStorage
from flask_websub.hub import Hub, SQLite3HubStorage
from .utils import serve_app
def run_hub_app(celery, worker, https):
app = Flask(__name__)
app.config['PUBLISH_SUPPORTED'] = True
hub = Hub(SQLite3HubStorage('hub.db'), celery, **app.config)
worker.reload()
app.register_blueprint(hub.build_blueprint(url_prefix='/hub'))
with serve_app(app, port=5001, https=https):
yield hub
os.remove('hub.db')
@pytest.fixture
def https_hub(celery_session_app, celery_session_worker):
# monkey-patch requests
def new(*args, **kwargs):
kwargs['verify'] = False
return old(*args, **kwargs)
old, requests.request = requests.request, new
# suppress warning
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
yield from run_hub_app(celery_session_app, celery_session_worker,
https=True)
# de-monkey patch
requests.request = old
@pytest.fixture
def hub(celery_session_app, celery_session_worker):
yield from run_hub_app(celery_session_app, celery_session_worker,
https=False)
def subscriber_app(subscriber):
app = Flask(__name__)
app.register_blueprint(subscriber.build_blueprint(url_prefix='/callbacks'))
with serve_app(app, port=5002):
yield subscriber
os.remove('subscriber.db')
@pytest.fixture
def subscriber():
subscriber = Subscriber(SQLite3SubscriberStorage('subscriber.db'),
SQLite3TempSubscriberStorage('subscriber.db'))
yield from subscriber_app(subscriber)
@pytest.fixture
def werkzeug_subscriber():
subscriber = Subscriber(SQLite3SubscriberStorage('subscriber.db'),
WerkzeugCacheTempSubscriberStorage(SimpleCache()))
yield from subscriber_app(subscriber)
def test_subscriber_error(subscriber):
with pytest.raises(SubscriberError):
subscriber.subscribe(topic_url='http://example.com',
# hub is not active, but the checks mean we don't
# get there anyway
hub_url='http://localhost:5001/hub',
# impossible
lease_seconds=-1)
# nonexisting URL
with pytest.raises(SubscriberError):
subscriber.subscribe(topic_url='http://example.com',
hub_url='http://unexisting')
# page exists, but is not a hub
with pytest.raises(SubscriberError):
subscriber.subscribe(topic_url='http://example.com',
hub_url='http://localhost:5002/ping')
def test_subscription_werkzeug(hub, werkzeug_subscriber):
on_success = Mock()
werkzeug_subscriber.add_success_handler(on_success)
topic = 'http://example.com'
id = werkzeug_subscriber.subscribe(topic_url=topic,
hub_url='http://localhost:5001/hub')
while not on_success.called:
pass
on_success.assert_called_with(topic, id, 'subscribe')
def test_unexisting_werkzeug(werkzeug_subscriber):
resp = requests.get('http://localhost:5002/callbacks/unexisting', params={
'hub.mode': 'subscribe',
})
assert resp.status_code == 404
def test_sub_notify_unsub(https_hub, subscriber):
# subscribe
on_success = Mock()
subscriber.add_success_handler(on_success)
topic = 'http://example.com'
id = subscriber.subscribe(topic_url=topic,
hub_url='https://localhost:5001/hub')
while not on_success.called:
pass # wait for the worker to finish
on_success.assert_called_with(topic, id, 'subscribe')
# send notification
on_topic_change = Mock()
subscriber.add_listener(on_topic_change)
content = {
'content': base64.b64encode(b'Hello World!').decode('ascii'),
'headers': {
'Link': ', '.join([
'<http://example.com>; rel="self"',
'<https://localhost:5001/hub>; rel="hub"',
])
},
}
https_hub.send_change_notification.delay(topic, content).get()
while not on_topic_change.called: # pragma: no cover
pass
on_topic_change.assert_called_with(topic, id, b'Hello World!')
# unsubscribe
on_success = Mock()
subscriber.add_success_handler(on_success)
subscriber.unsubscribe(id)
while not on_success.called:
pass
on_success.assert_called_with(topic, id, 'unsubscribe')
def test_validator(hub, subscriber):
on_error = Mock()
subscriber.add_error_handler(on_error)
error = 'invalid URL'
@hub.register_validator
def validate(callback_url, topic_url, *args):
if not topic_url.startswith('http://example.com'):
return error
topic = 'http://invalid.com'
id = subscriber.subscribe(topic_url=topic,
hub_url='http://localhost:5001/hub')
while not on_error.called:
pass
on_error.assert_called_with(topic, id, error)
topic = 'http://example.com/abc'
on_success = Mock()
subscriber.add_success_handler(on_success)
id = subscriber.subscribe(topic_url=topic,
hub_url='http://localhost:5001/hub')
while not on_success.called:
pass
on_success.assert_called_with(topic, id, 'subscribe')
def test_renew(hub, subscriber):
topic = 'http://example.com/def'
on_success = Mock()
subscriber.add_success_handler(on_success)
id = subscriber.subscribe(topic_url=topic,
hub_url='http://localhost:5001/hub')
while not on_success.called:
pass
subscriber.renew(id)
while on_success.call_count != 2:
pass
# renew everything (because of the huge margin, and everything here means
# our single subscription)
subscriber.renew_close_to_expiration(margin_in_seconds=10000000000000)
while on_success.call_count != 3:
pass
on_success.assert_has_calls([call(topic, id, 'subscribe'),
call(topic, id, 'subscribe'),
call(topic, id, 'subscribe')])
def test_renew_unexisting_id(subscriber):
with pytest.raises(SubscriberError):
subscriber.renew('unexisting')
def test_schedule_cleanup(hub):
# long-term scheduling (does nothing in the time frame of this test)
hub.schedule_cleanup(every_x_seconds=60 * 60 * 24) # once a day
def test_hub_cleanup(hub):
# cleanup (does nothing, but tests the path)
hub.cleanup_expired_subscriptions.delay().get()
def test_hub_invalid_input(hub):
with pytest.raises(NotificationError):
hub.send_change_notification.delay('http://unexisting').get()
with pytest.raises(NotificationError):
# URL exists, but it does not have the right Link headers so sending
# out a notification for it will (rightfully) fail.
hub.send_change_notification.delay('http://localhost:5001/ping').get()
def test_subscriber_cleanup(subscriber):
subscriber.cleanup()
def test_hub_manually(hub):
resp = requests.post('http://localhost:5001/hub')
assert resp.status_code == 400
resp2 = requests.post('http://localhost:5001/hub', data={
'hub.mode': 'unknown',
'hub.topic': 'http://example.com',
'hub.callback': 'http://unimportant/',
})
assert resp2.status_code == 400
resp3 = requests.post('http://localhost:5001/hub', data={
'hub.mode': 'subscribe',
'hub.topic': 'http://example.com',
'hub.callback': 'http://unimportant/',
'hub.lease_seconds': 10000000000000000000 # out of bounds
})
assert resp3.status_code == 202
resp4 = requests.post('http://localhost:5001/hub', data={
'hub.mode': 'subscribe',
'hub.topic': 'http://example.com',
'hub.callback': 'http://unimportant/',
'hub.lease_seconds': -10 # impossible
})
assert resp4.status_code == 400
resp5 = requests.post('http://localhost:5001/hub', data={
'hub.mode': 'subscribe',
'hub.topic': 'http://example.com',
'hub.callback': 'http://unimportant/',
'hub.secret': 'X' * 1024, # secret too big
})
assert resp5.status_code == 400
resp6 = requests.post('http://localhost:5001/hub', data={
'hub.mode': 'publish',
# this page does not contain proper Links, so the publish action will
# (eventually) fail.
'hub.topic': 'http://localhost:5001/ping',
})
assert resp6.status_code == 202
def test_subscriber_manually(subscriber):
resp = requests.get('http://localhost:5002/callbacks/unexisting')
assert resp.status_code == 400
resp2 = requests.get('http://localhost:5002/callbacks/unexisting', params={
'hub.mode': 'subscribe',
})
assert resp2.status_code == 404
resp3 = requests.post('http://localhost:5002/callbacks/unexisting')
assert resp3.status_code == 404
resp4 = requests.get('http://localhost:5002/callbacks/unexisting', {
'hub.mode': 'denied',
})
assert resp4.status_code == 404
| 32.763699
| 79
| 0.648793
|
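The tests above busy-wait with `while not on_success.called: pass`, which spins forever if a callback never fires. A bounded variant one could substitute (a sketch, not part of the suite):

import time

def wait_until(predicate, timeout=10.0, interval=0.05):
    """Poll predicate until it returns True or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if predicate():
            return True
        time.sleep(interval)
    raise TimeoutError('condition not met within %.1fs' % timeout)

# e.g. wait_until(lambda: on_success.called) instead of a bare spin loop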
ce7d3d32cd0c08c085e22e1cdb4f2aa8c7db4cd0
| 8,781
|
py
|
Python
|
deep_privacy/engine/hooks/log_hooks.py
|
skoskjei/DP-ATT
|
eb7380099f5c7e533fd0d247456b4a418529d62b
|
[
"MIT"
] | 1,128
|
2019-09-11T01:38:09.000Z
|
2022-03-31T17:06:56.000Z
|
deep_privacy/engine/hooks/log_hooks.py
|
skoskjei/DP-ATT
|
eb7380099f5c7e533fd0d247456b4a418529d62b
|
[
"MIT"
] | 45
|
2019-09-11T05:39:53.000Z
|
2021-12-05T17:52:07.000Z
|
deep_privacy/engine/hooks/log_hooks.py
|
skoskjei/DP-ATT
|
eb7380099f5c7e533fd0d247456b4a418529d62b
|
[
"MIT"
] | 185
|
2019-09-11T02:15:56.000Z
|
2022-03-23T16:12:41.000Z
|
import torch
import logging
import time
from deep_privacy import torch_utils, logger
from deep_privacy.metrics import metric_api
from .base import HookBase, HOOK_REGISTRY
from deep_privacy.inference import infer
try:
from apex import amp
except ImportError:
pass
@HOOK_REGISTRY.register_module
class ImageSaveHook(HookBase):
def __init__(self, ims_per_save: int, n_diverse_samples: int):
self.ims_per_save = ims_per_save
self.next_save_point = self.ims_per_save
self.before_images = None
self._n_diverse_samples = n_diverse_samples
def state_dict(self):
return {
"next_save_point": self.next_save_point,
"before_images": self.before_images}
def load_state_dict(self, state_dict: dict):
self.next_save_point = state_dict["next_save_point"]
self.before_images = state_dict["before_images"]
def after_step(self):
if self.global_step() >= self.next_save_point:
self.next_save_point += self.ims_per_save
self.save_fake_images(True)
self.save_fake_images(False)
def save_fake_images(self, validation: bool):
g = self.trainer.generator
if validation:
g = self.trainer.RA_generator
fake_data, real_data, condition = self.get_images(g)
fake_data = fake_data[:64]
logger.save_images(
"fakes", fake_data, denormalize=True, nrow=8,
log_to_validation=validation)
logger.save_images(
"reals", real_data[:64], denormalize=True, log_to_writer=False,
nrow=8,
log_to_validation=validation)
condition = condition[:64]
logger.save_images(
"condition", condition, log_to_writer=False, denormalize=True,
nrow=8,
log_to_validation=validation)
self.save_images_diverse()
def get_images(self, g):
g.eval()
batch = next(iter(self.trainer.dataloader_val))
z = g.generate_latent_variable(batch["img"]).zero_()
with torch.no_grad():
fake_data_sample = g(**batch,
z=z)
g.train()
return fake_data_sample, batch["img"], batch["condition"]
@torch.no_grad()
def save_images_diverse(self):
"""
Generates images with several latent variables
"""
g = self.trainer.RA_generator
g.eval()
batch = next(iter(self.trainer.dataloader_val))
batch = {k: v[:8] for k, v in batch.items()}
fakes = [batch["condition"].cpu()]
for i in range(self._n_diverse_samples):
z = g.generate_latent_variable(batch["img"])
fake = g(**batch, z=z)
fakes.append(fake.cpu())
fakes = torch.cat(fakes)
logger.save_images(
"diverse", fakes, log_to_validation=True, nrow=8, denormalize=True)
g.train()
def before_extend(self):
transition_value = 1
self.trainer.RA_generator.update_transition_value(
transition_value
)
fake_data, real_data, condition = self.get_images(
self.trainer.RA_generator
)
before_images = [
torch_utils.denormalize_img(x[:8])
for x in [real_data, fake_data, condition]
]
before_images = torch.cat((before_images), dim=0)
self.before_images = before_images.cpu()
def after_extend(self):
transition_value = 0
self.trainer.RA_generator.update_transition_value(
transition_value
)
fake_data, real_data, condition = self.get_images(
self.trainer.RA_generator
)
after_images = [
torch_utils.denormalize_img(x[:8])
for x in [real_data, fake_data, condition]
]
after_images = torch.cat((after_images), dim=0)
after_images = torch.nn.functional.avg_pool2d(after_images, 2)
after_images = after_images.cpu()
assert after_images.shape == self.before_images.shape
diff = self.before_images - after_images
to_save = torch.cat(
(self.before_images, after_images, diff), dim=2)
imsize = after_images.shape[-1]
imname = f"transition/from_{imsize}"
logger.save_images(imname, to_save,
log_to_writer=True, nrow=8 * 3)
self.before_images = None
@HOOK_REGISTRY.register_module
class MetricHook(HookBase):
def __init__(
self,
ims_per_log: int,
fid_batch_size: int,
lpips_batch_size: int,
min_imsize_to_calculate: int):
self.next_check = ims_per_log
self.num_ims_per_fid = ims_per_log
self.lpips_batch_size = lpips_batch_size
self.fid_batch_size = fid_batch_size
self.min_imsize_to_calculate = min_imsize_to_calculate
def state_dict(self):
return {"next_check": self.next_check}
def load_state_dict(self, state_dict: dict):
self.next_check = state_dict["next_check"]
def after_step(self):
if self.global_step() >= self.next_check:
self.next_check += self.num_ims_per_fid
if self.current_imsize() >= self.min_imsize_to_calculate:
self.calculate_fid()
def calculate_fid(self):
logger.info("Starting calculation of FID value")
generator = self.trainer.RA_generator
real_images, fake_images = infer.infer_images(
self.trainer.dataloader_val, generator,
truncation_level=0
)
"""
        # FID calculation removed here because it is prohibitively expensive.
cfg = self.trainer.cfg
identifier = f"{cfg.dataset_type}_{cfg.data_val.dataset.percentage}_{self.current_imsize()}"
transition_value = self.trainer.RA_generator.transition_value
fid_val = metric_api.fid(
real_images, fake_images,
batch_size=self.fid_batch_size)
logger.log_variable("stats/fid", np.mean(fid_val),
log_level=logging.INFO)
"""
l1 = metric_api.l1(real_images, fake_images)
        # NOTE: the original called metric_api.l1 here too (an apparent
        # copy-paste slip); assuming metric_api exposes a matching l2 metric.
        l2 = metric_api.l2(real_images, fake_images)
psnr = metric_api.psnr(real_images, fake_images)
lpips = metric_api.lpips(
real_images, fake_images, self.lpips_batch_size)
logger.log_variable("stats/l1", l1, log_level=logging.INFO)
logger.log_variable("stats/l2", l2, log_level=logging.INFO)
logger.log_variable("stats/psnr", psnr, log_level=logging.INFO)
logger.log_variable("stats/lpips", lpips, log_level=logging.INFO)
@HOOK_REGISTRY.register_module
class StatsLogger(HookBase):
def __init__(
self,
num_ims_per_log: int):
self.num_ims_per_log = num_ims_per_log
self.next_log_point = self.num_ims_per_log
self.start_time = time.time()
self.num_skipped_steps = 0
def state_dict(self):
return {
"total_time": (time.time() - self.start_time),
"num_skipped_steps": self.num_skipped_steps
}
def load_state_dict(self, state_dict: dict):
self.start_time = time.time() - state_dict["total_time"]
self.num_skipped_steps = state_dict["num_skipped_steps"]
def before_train(self):
self.batch_start_time = time.time()
self.log_dictionary({"stats/batch_size": self.trainer.batch_size()})
def log_dictionary(self, to_log: dict):
logger.log_dictionary(to_log)
def after_step(self):
has_gradient_penalty = "loss/gradient_penalty" in self.to_log
if has_gradient_penalty or self.global_step() >= self.next_log_point:
self.log_stats()
self.log_dictionary(self.to_log)
self.log_loss_scales()
self.next_log_point = self.global_step() + self.num_ims_per_log
def log_stats(self):
time_spent = time.time() - self.batch_start_time
num_steps = self.global_step() - self.next_log_point + self.num_ims_per_log
num_steps = max(num_steps, 1)
nsec_per_img = time_spent / num_steps
total_time = (time.time() - self.start_time) / 60
to_log = {
"stats/nsec_per_img": nsec_per_img,
"stats/batch_size": self.trainer.batch_size(),
"stats/training_time_minutes": total_time,
}
self.batch_start_time = time.time()
self.log_dictionary(to_log)
def log_loss_scales(self):
to_log = {f'amp/loss_scale_{loss_idx}': loss_scaler._loss_scale
for loss_idx, loss_scaler in enumerate(amp._amp_state.loss_scalers)}
to_log['amp/num_skipped_gradients'] = self.num_skipped_steps
self.log_dictionary(to_log)
| 36.435685
| 100
| 0.636602
|
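The hooks above persist their scheduling state through state_dict/load_state_dict so training can resume mid-run; a minimal standalone rendering of that pattern:

class CounterHook:
    """Toy hook with the same checkpointing contract as the hooks above."""
    def __init__(self, every):
        self.every = every
        self.next_point = every

    def state_dict(self):
        return {'next_point': self.next_point}

    def load_state_dict(self, state):
        self.next_point = state['next_point']

hook = CounterHook(every=1000)
hook.next_point = 5000                 # pretend some steps have elapsed
resumed = CounterHook(every=1000)
resumed.load_state_dict(hook.state_dict())
assert resumed.next_point == 5000      # the schedule survives a restart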
8b465ae186e56ccbeea5de2e6293c729d2cfcd69
| 918
|
py
|
Python
|
examples/cookie_eater.py
|
gitter-badger/zproc
|
b26d39d9c564886c48c84a1e93e616f21f3bece4
|
[
"MIT"
] | null | null | null |
examples/cookie_eater.py
|
gitter-badger/zproc
|
b26d39d9c564886c48c84a1e93e616f21f3bece4
|
[
"MIT"
] | null | null | null |
examples/cookie_eater.py
|
gitter-badger/zproc
|
b26d39d9c564886c48c84a1e93e616f21f3bece4
|
[
"MIT"
] | null | null | null |
"""
Expected output:
<Process pid: 2555 target: <function cookie_eater at 0x7f5b4542c9d8> uuid: e74521ae-76ca-11e8-bd1f-7c7a912e12b5>
<Process pid: 2556 target: <function cookie_baker at 0x7f5b4542c950> uuid: e74521ae-76ca-11e8-bd1f-7c7a912e12b5>
Here's a cookie!
Here's a cookie!
Here's a cookie!
nom nom nom
Here's a cookie!
nom nom nom
Here's a cookie!
nom nom nom
nom nom nom
nom nom nom
"""
import zproc
ctx = zproc.Context(wait=True) # background waits for all processes to finish
ctx.state["cookies"] = 0
@zproc.atomic
def eat_cookie(state):
state["cookies"] -= 1
print("nom nom nom")
@zproc.atomic
def bake_cookie(state):
state["cookies"] += 1
print("Here's a cookie!")
@ctx.call_when_change("cookies")
def cookie_eater(_, state):
eat_cookie(state)
@ctx.process
def cookie_baker(state):
for i in range(5):
bake_cookie(state)
print(cookie_eater)
print(cookie_baker)
| 19.125
| 112
| 0.720044
|
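zproc's @zproc.atomic guarantees each state mutation is applied without interleaving; a rough analogue with the standard library, using a managed dict guarded by a lock (a sketch of the idea, not how zproc works internally):

from multiprocessing import Lock, Manager, Process

def bake(state, lock):
    with lock:                      # serialize the read-modify-write
        state['cookies'] += 1

if __name__ == '__main__':
    with Manager() as m:
        state = m.dict(cookies=0)
        lock = Lock()
        procs = [Process(target=bake, args=(state, lock)) for _ in range(5)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
        assert state['cookies'] == 5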
ab7a808e02732afca740c661f84d1e6a4e437435
| 3,966
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/acs/_completers.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 3,287
|
2016-07-26T17:34:33.000Z
|
2022-03-31T09:52:13.000Z
|
src/azure-cli/azure/cli/command_modules/acs/_completers.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 19,206
|
2016-07-26T07:04:42.000Z
|
2022-03-31T23:57:09.000Z
|
src/azure-cli/azure/cli/command_modules/acs/_completers.py
|
YuanyuanNi/azure-cli
|
63844964374858bfacd209bfe1b69eb456bd64ca
|
[
"MIT"
] | 2,575
|
2016-07-26T06:44:40.000Z
|
2022-03-31T22:56:06.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.commands.parameters import get_one_of_subscription_locations
from azure.cli.core.decorators import Completer
@Completer
def get_k8s_upgrades_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument
"""Return Kubernetes versions available for upgrading an existing cluster."""
resource_group = getattr(namespace, 'resource_group_name', None)
name = getattr(namespace, 'name', None)
return get_k8s_upgrades(cmd.cli_ctx, resource_group, name) if resource_group and name else None
def get_k8s_upgrades(cli_ctx, resource_group, name):
from ._client_factory import cf_managed_clusters
results = cf_managed_clusters(cli_ctx).get_upgrade_profile(resource_group, name).as_dict()
return results['control_plane_profile']['upgrades']
@Completer
def get_k8s_versions_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument
"""Return Kubernetes versions available for provisioning a new cluster."""
location = _get_location(cmd.cli_ctx, namespace)
return get_k8s_versions(cmd.cli_ctx, location) if location else None
def get_k8s_versions(cli_ctx, location):
"""Return a list of Kubernetes versions available for a new cluster."""
from ._client_factory import cf_container_services
from jmespath import search
results = cf_container_services(cli_ctx).list_orchestrators(location, resource_type='managedClusters').as_dict()
# Flatten all the "orchestrator_version" fields into one array
return search('orchestrators[*].orchestrator_version', results)
@Completer
def get_vm_size_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument
"""Return the intersection of the VM sizes allowed by the ACS SDK with those returned by the Compute Service."""
from azure.mgmt.containerservice.models import ContainerServiceVMSizeTypes
location = _get_location(cmd.cli_ctx, namespace)
result = get_vm_sizes(cmd.cli_ctx, location)
return set(r.name for r in result) & set(c.value for c in ContainerServiceVMSizeTypes)
def get_vm_sizes(cli_ctx, location):
from ._client_factory import cf_compute_service
return cf_compute_service(cli_ctx).virtual_machine_sizes.list(location)
@Completer
def get_ossku_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument
"""Return the list of allowed os-sku values"""
return ["Ubuntu", "CBLMariner"]
def _get_location(cli_ctx, namespace):
"""
Return an Azure location by using an explicit `--location` argument, then by `--resource-group`, and
finally by the subscription if neither argument was provided.
"""
location = None
if getattr(namespace, 'location', None):
location = namespace.location
elif getattr(namespace, 'resource_group_name', None):
location = _get_location_from_resource_group(cli_ctx, namespace.resource_group_name)
if not location:
location = get_one_of_subscription_locations(cli_ctx)
return location
def _get_location_from_resource_group(cli_ctx, resource_group_name):
from ._client_factory import cf_resource_groups
from msrestazure.azure_exceptions import CloudError
try:
rg = cf_resource_groups(cli_ctx).get(resource_group_name)
return rg.location
except CloudError as err:
# Print a warning if the user hit [TAB] but the `--resource-group` argument was incorrect.
# For example: "Warning: Resource group 'bogus' could not be found."
from argcomplete import warn
warn('Warning: {}'.format(err.message))
| 43.108696
| 116
| 0.724155
|
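The jmespath expression used in get_k8s_versions above, shown against a toy payload so the flattening step is concrete:

from jmespath import search

results = {
    'orchestrators': [
        {'orchestrator_version': '1.22.6'},
        {'orchestrator_version': '1.23.3'},
    ]
}
# Pulls every orchestrator_version into one flat list
assert search('orchestrators[*].orchestrator_version', results) == ['1.22.6', '1.23.3']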
1679bec9f858eed342b9d8427df5229c85a70385
| 7,196
|
py
|
Python
|
tuiuiu/tuiuiucore/rich_text.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | 3
|
2019-08-08T09:09:35.000Z
|
2020-12-15T18:04:17.000Z
|
tuiuiu/tuiuiucore/rich_text.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | null | null | null |
tuiuiu/tuiuiucore/rich_text.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | 1
|
2017-09-09T20:10:40.000Z
|
2017-09-09T20:10:40.000Z
|
from __future__ import absolute_import, unicode_literals
import re # parsing HTML with regexes LIKE A BOSS.
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import escape
from django.utils.safestring import mark_safe
from tuiuiu.tuiuiucore import hooks
from tuiuiu.tuiuiucore.models import Page
from tuiuiu.tuiuiucore.whitelist import Whitelister
# Define a set of 'embed handlers' and 'link handlers'. These handle the translation
# of 'special' HTML elements in rich text - ones which we do not want to include
# verbatim in the DB representation because they embed information which is stored
# elsewhere in the database and is liable to change - from real HTML representation
# to DB representation and back again.
class PageLinkHandler(object):
"""
PageLinkHandler will be invoked whenever we encounter an <a> element in HTML content
with an attribute of data-linktype="page". The resulting element in the database
representation will be:
<a linktype="page" id="42">hello world</a>
"""
@staticmethod
def get_db_attributes(tag):
"""
Given an <a> tag that we've identified as a page link embed (because it has a
data-linktype="page" attribute), return a dict of the attributes we should
have on the resulting <a linktype="page"> element.
"""
return {'id': tag['data-id']}
@staticmethod
def expand_db_attributes(attrs, for_editor):
try:
page = Page.objects.get(id=attrs['id'])
if for_editor:
editor_attrs = 'data-linktype="page" data-id="%d" ' % page.id
parent_page = page.get_parent()
if parent_page:
editor_attrs += 'data-parent-id="%d" ' % parent_page.id
else:
editor_attrs = ''
return '<a %shref="%s">' % (editor_attrs, escape(page.specific.url))
except Page.DoesNotExist:
return "<a>"
EMBED_HANDLERS = {}
LINK_HANDLERS = {
'page': PageLinkHandler,
}
has_loaded_embed_handlers = False
has_loaded_link_handlers = False
def get_embed_handler(embed_type):
global EMBED_HANDLERS, has_loaded_embed_handlers
if not has_loaded_embed_handlers:
for hook in hooks.get_hooks('register_rich_text_embed_handler'):
handler_name, handler = hook()
EMBED_HANDLERS[handler_name] = handler
has_loaded_embed_handlers = True
return EMBED_HANDLERS[embed_type]
def get_link_handler(link_type):
global LINK_HANDLERS, has_loaded_link_handlers
if not has_loaded_link_handlers:
for hook in hooks.get_hooks('register_rich_text_link_handler'):
handler_name, handler = hook()
LINK_HANDLERS[handler_name] = handler
has_loaded_link_handlers = True
return LINK_HANDLERS[link_type]
class DbWhitelister(Whitelister):
"""
A custom whitelisting engine to convert the HTML as returned by the rich text editor
into the pseudo-HTML format stored in the database (in which images, documents and other
linked objects are identified by ID rather than URL):
* implements a 'construct_whitelister_element_rules' hook so that other apps can modify
the whitelist ruleset (e.g. to permit additional HTML elements beyond those in the base
Whitelister module);
* replaces any element with a 'data-embedtype' attribute with an <embed> element, with
attributes supplied by the handler for that type as defined in EMBED_HANDLERS;
* rewrites the attributes of any <a> element with a 'data-linktype' attribute, as
determined by the handler for that type defined in LINK_HANDLERS, while keeping the
element content intact.
"""
has_loaded_custom_whitelist_rules = False
@classmethod
def clean(cls, html):
if not cls.has_loaded_custom_whitelist_rules:
for fn in hooks.get_hooks('construct_whitelister_element_rules'):
cls.element_rules = cls.element_rules.copy()
cls.element_rules.update(fn())
cls.has_loaded_custom_whitelist_rules = True
return super(DbWhitelister, cls).clean(html)
@classmethod
def clean_tag_node(cls, doc, tag):
if 'data-embedtype' in tag.attrs:
embed_type = tag['data-embedtype']
# fetch the appropriate embed handler for this embedtype
embed_handler = get_embed_handler(embed_type)
embed_attrs = embed_handler.get_db_attributes(tag)
embed_attrs['embedtype'] = embed_type
embed_tag = doc.new_tag('embed', **embed_attrs)
embed_tag.can_be_empty_element = True
tag.replace_with(embed_tag)
elif tag.name == 'a' and 'data-linktype' in tag.attrs:
# first, whitelist the contents of this tag
for child in tag.contents:
cls.clean_node(doc, child)
link_type = tag['data-linktype']
link_handler = get_link_handler(link_type)
link_attrs = link_handler.get_db_attributes(tag)
link_attrs['linktype'] = link_type
tag.attrs.clear()
tag.attrs.update(**link_attrs)
else:
if tag.name == 'div':
tag.name = 'p'
super(DbWhitelister, cls).clean_tag_node(doc, tag)
FIND_A_TAG = re.compile(r'<a(\b[^>]*)>')
FIND_EMBED_TAG = re.compile(r'<embed(\b[^>]*)/>')
FIND_ATTRS = re.compile(r'([\w-]+)="([^"]*)"')
def extract_attrs(attr_string):
"""
helper method to extract tag attributes as a dict. Does not escape HTML entities!
"""
attributes = {}
for name, val in FIND_ATTRS.findall(attr_string):
attributes[name] = val
return attributes
def expand_db_html(html, for_editor=False):
"""
Expand database-representation HTML into proper HTML usable in either
templates or the rich text editor
"""
def replace_a_tag(m):
attrs = extract_attrs(m.group(1))
if 'linktype' not in attrs:
# return unchanged
return m.group(0)
handler = get_link_handler(attrs['linktype'])
return handler.expand_db_attributes(attrs, for_editor)
def replace_embed_tag(m):
attrs = extract_attrs(m.group(1))
handler = get_embed_handler(attrs['embedtype'])
return handler.expand_db_attributes(attrs, for_editor)
html = FIND_A_TAG.sub(replace_a_tag, html)
html = FIND_EMBED_TAG.sub(replace_embed_tag, html)
return html
@python_2_unicode_compatible
class RichText(object):
"""
A custom object used to represent a renderable rich text value.
Provides a 'source' property to access the original source code,
and renders to the front-end HTML rendering.
Used as the native value of a tuiuiucore.blocks.field_block.RichTextBlock.
"""
def __init__(self, source):
self.source = (source or '')
def __html__(self):
return '<div class="rich-text">' + expand_db_html(self.source) + '</div>'
def __str__(self):
return mark_safe(self.__html__())
def __bool__(self):
return bool(self.source)
__nonzero__ = __bool__
| 35.102439
| 93
| 0.672318
|
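What extract_attrs above yields for a stored page link, with the regex re-declared so the snippet runs standalone (the tag text is an illustrative example):

import re

FIND_ATTRS = re.compile(r'([\w-]+)="([^"]*)"')
attrs = dict(FIND_ATTRS.findall('linktype="page" id="42"'))
assert attrs == {'linktype': 'page', 'id': '42'}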
d4fe9338bad1561eb46b2348c731e892650acf88
| 2,074
|
py
|
Python
|
pysnmp/LIVINGSTON-ROOT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/LIVINGSTON-ROOT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/LIVINGSTON-ROOT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module LIVINGSTON-ROOT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/LIVINGSTON-ROOT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:56:56 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, ObjectIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, iso, Bits, Gauge32, Counter32, Integer32, Counter64, NotificationType, IpAddress, Unsigned32, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "ObjectIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "iso", "Bits", "Gauge32", "Counter32", "Integer32", "Counter64", "NotificationType", "IpAddress", "Unsigned32", "ModuleIdentity")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
lucent = MibIdentifier((1, 3, 6, 1, 4, 1, 307))
product = MibIdentifier((1, 3, 6, 1, 4, 1, 307, 1))
lucentPMProduct = MibIdentifier((1, 3, 6, 1, 4, 1, 307, 1, 1))
lucentPMMib = MibIdentifier((1, 3, 6, 1, 4, 1, 307, 1, 2))
lucentPM3 = MibIdentifier((1, 3, 6, 1, 4, 1, 307, 1, 1, 1))
lucentPM4 = MibIdentifier((1, 3, 6, 1, 4, 1, 307, 1, 1, 2))
mibBuilder.exportSymbols("LIVINGSTON-ROOT-MIB", lucent=lucent, lucentPM4=lucentPM4, lucentPMMib=lucentPMMib, lucentPM3=lucentPM3, lucentPMProduct=lucentPMProduct, product=product)
| 98.761905
| 505
| 0.762295
|
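The identifiers registered above all hang off the Lucent enterprise arc; joining one of the tuples gives the familiar dotted OID form:

lucent_pm3 = (1, 3, 6, 1, 4, 1, 307, 1, 1, 1)
print('.'.join(map(str, lucent_pm3)))  # 1.3.6.1.4.1.307.1.1.1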
37cbe3187457307c4538aa23d8d17a7d58242e2c
| 5,947
|
py
|
Python
|
src/python/turicreate/test/test_graph_compute.py
|
pappasG/turicreate
|
494e313957a6c01333628b182a7d5bc6efea18f8
|
[
"BSD-3-Clause"
] | 2
|
2019-02-08T08:45:27.000Z
|
2020-09-07T05:55:18.000Z
|
src/python/turicreate/test/test_graph_compute.py
|
pappasG/turicreate
|
494e313957a6c01333628b182a7d5bc6efea18f8
|
[
"BSD-3-Clause"
] | 3
|
2022-02-15T04:42:24.000Z
|
2022-03-12T01:05:15.000Z
|
src/python/turicreate/test/test_graph_compute.py
|
pappasG/turicreate
|
494e313957a6c01333628b182a7d5bc6efea18f8
|
[
"BSD-3-Clause"
] | 1
|
2019-11-23T09:47:24.000Z
|
2019-11-23T09:47:24.000Z
|
# -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from .. import SGraph, Edge
import unittest
import time
import sys
if sys.version_info.major > 2:
unittest.TestCase.assertItemsEqual = unittest.TestCase.assertCountEqual
def degree_count_fn(source, edge, target, edge_dir, field):
if field is None:
target['in_degree'] += 1
source['out_degree'] += 1
target['all_degree'] += 1
source['all_degree'] += 1
else:
        if edge_dir == 'in' or edge_dir == 'all':
            target[field] = target[field] + 1
        if edge_dir == 'out' or edge_dir == 'all':
            source[field] = source[field] + 1
return (source, edge, target)
def exception_fn(source, edge, target):
raise RuntimeError
def return_none_fn(source, edge, target):
return None
def return_pair_fn(source, edge, target):
return (source, target)
class GraphTests(unittest.TestCase):
def test_simple_triple_apply(self):
def identity_fun(src, edge, dst):
return src, edge, dst
nverts = 100
ring_graph = SGraph().add_edges([Edge(i, 0) for i in range(1, nverts)])
ring_graph.vertices['id'] = ring_graph.vertices['__id']
ring_graph.edges['src'] = ring_graph.edges['__src_id']
ring_graph2 = ring_graph.triple_apply(identity_fun, ['id', 'src'])
self.assertSequenceEqual(list(ring_graph2.vertices['id']), list(ring_graph2.vertices['__id']))
self.assertSequenceEqual(list(ring_graph2.edges['src']), list(ring_graph2.edges['__src_id']))
for i in ring_graph.edges['__dst_id']:
self.assertEqual(i, 0)
def test_triple_apply(self):
nverts = 100
ring_graph = SGraph().add_edges([Edge(i, 0) for i in range(1, nverts)])
vdata = ring_graph.get_vertices()
vdata['in_degree'] = 0
vdata['out_degree'] = 0
vdata['all_degree'] = 0
vdata['do_not_touch'] = 0
ring_graph = ring_graph.add_vertices(vdata)
ret = ring_graph.triple_apply(lambda source, edge, target: degree_count_fn(source, edge, target, 'in', 'in_degree'), mutated_fields=['in_degree'], input_fields=['in_degree'])
self.assertItemsEqual(ret.get_fields(), ['__id', '__src_id', '__dst_id', 'in_degree'])
ret = ring_graph.triple_apply(lambda source, edge, target: degree_count_fn(source, edge, target, 'out', 'out_degree'), mutated_fields=['out_degree'], input_fields=['out_degree'])
self.assertItemsEqual(ret.get_fields(), ['__id', '__src_id', '__dst_id', 'out_degree'])
ret = ring_graph.triple_apply(lambda source, edge, target: degree_count_fn(source, edge, target, 'all', 'all_degree'), mutated_fields=['all_degree'], input_fields=['all_degree'])
self.assertItemsEqual(ret.get_fields(), ['__id', '__src_id', '__dst_id', 'all_degree'])
ring_graph = ring_graph.triple_apply(lambda source, edge, target: degree_count_fn(source, edge, target, 'all', None), ['in_degree', 'out_degree', 'all_degree'])
self.assertItemsEqual(ring_graph.get_fields(), ['__id', '__src_id', '__dst_id', 'in_degree', 'out_degree', 'all_degree', 'do_not_touch'])
vdata = ring_graph.get_vertices()
for v in vdata:
if (v['__id'] == 0):
self.assertEqual(v['in_degree'], nverts - 1)
self.assertEqual(v['out_degree'], 0)
else:
self.assertEqual(v['in_degree'], 0)
self.assertEqual(v['out_degree'], 1)
self.assertEqual(v['all_degree'], (v['in_degree'] + v['out_degree']))
# test lambda that changes fields that are not in the mutate_fields
ring_graph = ring_graph.triple_apply(lambda source, edge, target: degree_count_fn(source, edge, target, 'all', 'do_not_touch'), mutated_fields=['in_degree'])
vdata = ring_graph.get_vertices()
for v in vdata:
self.assertEqual(v['do_not_touch'], 0)
self.assertEqual(v['all_degree'], (v['in_degree'] + v['out_degree']))
# test change edge data
ring_graph.edges['src_id'] = 0
ring_graph.edges['dst_id'] = 0
def edge_update_fn(source, edge, target):
edge['src_id'] = source['__id']
edge['dst_id'] = target['__id']
return (source, edge, target)
ring_graph = ring_graph.triple_apply(edge_update_fn, mutated_fields=['src_id', 'dst_id'])
edata = ring_graph.get_edges()
for e in edata:
self.assertEqual(e['__src_id'], e['src_id'])
self.assertEqual(e['__dst_id'], e['dst_id'])
# test exception in lambda
self.assertRaises(RuntimeError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=['in_degree']))
# test lambda that does not return a tuple of dicts
self.assertRaises(RuntimeError, lambda: ring_graph.triple_apply(return_none_fn, mutated_fields=['in_degree']))
self.assertRaises(RuntimeError, lambda: ring_graph.triple_apply(return_pair_fn, mutated_fields=['in_degree']))
# test api input validation
self.assertRaises(TypeError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=None))
self.assertRaises(TypeError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=['in_degree'], input_fields={'a': 'b'}))
self.assertRaises(ValueError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=[]))
self.assertRaises(ValueError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=['field_not_exist']))
self.assertRaises(ValueError, lambda: ring_graph.triple_apply(exception_fn, mutated_fields=['__id']))
| 49.14876
| 186
| 0.666555
|
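The degree bookkeeping that triple_apply exercises above, restated in plain Python on a tiny edge list (no SGraph required), to make the expected in/out counts concrete:

edges = [(1, 0), (2, 0), (3, 0)]       # same shape as the ring graph: i -> 0
in_degree, out_degree = {}, {}
for src, dst in edges:
    out_degree[src] = out_degree.get(src, 0) + 1
    in_degree[dst] = in_degree.get(dst, 0) + 1
assert in_degree[0] == 3
assert all(out_degree[v] == 1 for v in (1, 2, 3))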
d2525117447840d95af45ecad78f35910c0b7622
| 2,076
|
py
|
Python
|
sms_post.py
|
kanazux/dev-test
|
c87084e1ed590a2058bbe5103e993fc7cf875d19
|
[
"BSD-2-Clause"
] | null | null | null |
sms_post.py
|
kanazux/dev-test
|
c87084e1ed590a2058bbe5103e993fc7cf875d19
|
[
"BSD-2-Clause"
] | null | null | null |
sms_post.py
|
kanazux/dev-test
|
c87084e1ed590a2058bbe5103e993fc7cf875d19
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
import base64
import binascii
from flask import g, abort, Flask, jsonify, request
ddd = ['11', '12', '13', '14', '15', '16', '17', '18', '19', '21', '22', '24', '27', '28', '31', '32', '33', '34', '35',
'37', '38', '41', '42', '43', '44', '45', '46', '47', '48', '49', '51', '53', '54', '55', '61', '62', '63', '64',
'65', '66', '67', '68', '69', '71', '73', '74', '75', '77', '79', '81', '82', '83', '84', '85', '86', '87', '88',
'89', '91', '92', '93', '94', '95', '96', '97', '98', '99']
def validate_phone(phone):
"""Validate phone numbers."""
if len(phone) < 12 or len(phone) > 13 or not phone.startswith("55") or phone[2:4] not in ddd:
return jsonify({"ERROR": {f"{phone}": "Is not a valid phone number."}})
return True
def validate_user(user):
"""Validate user hash."""
    if isinstance(user, str):
user = user.encode()
try:
base64.binascii.a2b_base64(user)
return True
except binascii.Error:
return jsonify({"ERROR": {f"{user}": "Is not a valid base64 hash."}})
def validate_text(text):
"""Validate text message."""
if len(text) < 3 or len(text) > 140:
return jsonify({"ERROR": {f"{text}": "Text is great than 140 characters or less than 3"}})
return True
app = Flask(__name__)
@app.route("/smspost", methods=["POST"])
def smspost():
    """Get data to send an sms."""
    try:
        data = json.loads(request.data)
    except json.decoder.JSONDecodeError:
        return jsonify({"ERROR": "This data is not a json format."})
    if not data:
        return jsonify({"ERROR": "DATA is empty."})
    try:
        # Each validator returns True on success, or an error response
        # (jsonify(...)) that should be handed straight back to the caller.
        checks = [
            validate_phone(data['to']),
            validate_phone(data['from']),
            validate_user(data['user']),
            validate_text(data['text']),
        ]
    except (KeyError, TypeError) as missing:
        return jsonify({"ERROR": f"Missing field: {missing}"})
    for check in checks:
        if check is not True:
            return check
    return jsonify({"Status": "Send"})
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5001, debug=True)
| 30.529412
| 120
| 0.542871
|
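Exercising the endpoint above with Flask's built-in test client; the payload values are made up ('dXNlcg==' is base64 for "user"), and the snippet assumes the module above is importable as-is:

client = app.test_client()
resp = client.post('/smspost', data=json.dumps({
    'from': '5511987654321',
    'to': '5521912345678',
    'user': 'dXNlcg==',
    'text': 'hello from the test suite',
}))
print(resp.get_json())  # {"Status": "Send"} once every validator passes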
4daa59d8264fd1915b960e58a9735f9477df0b17
| 841
|
py
|
Python
|
neighbours/urls.py
|
Janice-M/fireNation
|
dfd37d96bfbb4cc18a5bd68b9fabd592a17a8e20
|
[
"MIT"
] | null | null | null |
neighbours/urls.py
|
Janice-M/fireNation
|
dfd37d96bfbb4cc18a5bd68b9fabd592a17a8e20
|
[
"MIT"
] | 9
|
2021-03-19T01:23:55.000Z
|
2022-03-11T23:59:05.000Z
|
neighbours/urls.py
|
Janice-M/fireNation
|
dfd37d96bfbb4cc18a5bd68b9fabd592a17a8e20
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.homepage, name='homepage'),
url(r'^add/hood$', views.add_hood, name='add_hood'),
url(r'^join_hood/(\d+)', views.join_hood, name='join_hood'),
url(r'^leave_hood/(\d+)', views.leave_hood, name='leave_hood'),
url(r'^add/biz$', views.add_biz, name='add_biz'),
url(r'^add/post$', views.add_post, name='add_post'),
url(r'^search_results/', views.search_results, name='search_results'),
url(r'^user/(?P<username>\w+)', views.user_profile, name='user_profile'),
url(r'^new/profile$', views.add_profile, name='add_profile'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| 35.041667
| 77
| 0.674197
|
109983852c1ee58fdf1617223678b405cb41ced0
| 1,896
|
py
|
Python
|
perma_web/perma/settings/utils/environmental_settings.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
perma_web/perma/settings/utils/environmental_settings.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
perma_web/perma/settings/utils/environmental_settings.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
### environment settings overrides ###
# this is included by __init__.py
import os
# this lets us set values from the environment, like
# export DJANGO__SECRET_KEY=foo
# export DJANGO__INT__SITE_ID=1
# export DJANGO__DATABASES__default__NAME=perma
# export DJANGO__MIRRORS__0='http://127.0.0.1:8001'
def import_environmental_settings(settings):
for key, value in os.environ.iteritems():
if key.startswith("DJANGO__"):
try:
path = key.split('__')[1:]
if path[0] == 'INT':
# convert to int if second piece of path is 'INT'
value = int(value)
path = path[1:]
elif value=='True':
# convert to boolean
value=True
elif value=='False':
value=False
# starting with global settings, walk down the tree to find the intended value
target = settings
while len(path) > 1:
try:
# if it's an int, treat it as an array index
path[0] = int(path[0])
while len(target)<=path[0]:
target += [{}]
except ValueError:
# otherwise it's a dict key
if not path[0] in target:
target[path[0]] = {}
target = target[path.pop(0)]
# set value
try:
path[0] = int(path[0])
while len(target) <= path[0]:
target += [{}]
except ValueError:
pass
target[path[0]] = value
except Exception as e:
print "WARNING: Can't import environmental setting %s: %s" % (key, e)
| 37.92
| 94
| 0.46308
|
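A simplified Python 3 rendering of the walk above, handling only the dict and INT cases (no list indices), to make the DJANGO__ path splitting concrete:

def set_by_path(settings, key, value):
    path = key.split('__')[1:]
    if path[0] == 'INT':
        value, path = int(value), path[1:]
    target = settings
    for part in path[:-1]:
        target = target.setdefault(part, {})
    target[path[-1]] = value

cfg = {}
set_by_path(cfg, 'DJANGO__DATABASES__default__NAME', 'perma')
set_by_path(cfg, 'DJANGO__INT__SITE_ID', '1')
assert cfg == {'DATABASES': {'default': {'NAME': 'perma'}}, 'SITE_ID': 1}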
69d6307df69c55193a2684ffbfef28b4acb4d3d1
| 1,052
|
py
|
Python
|
python_practice/breadth_first_search.py
|
fourier11/interview
|
66aba2b91705e3018004915e6da9210a72846cc3
|
[
"MIT"
] | null | null | null |
python_practice/breadth_first_search.py
|
fourier11/interview
|
66aba2b91705e3018004915e6da9210a72846cc3
|
[
"MIT"
] | null | null | null |
python_practice/breadth_first_search.py
|
fourier11/interview
|
66aba2b91705e3018004915e6da9210a72846cc3
|
[
"MIT"
] | null | null | null |
from collections import deque
'''
Breadth-first search: check whether any of your friends (or friends of
friends) has a name ending in 'm'.
BFS can also be used to find shortest paths.
'''
graph = {}
graph["you"] = ["alice", "bob", "claire"]
graph["bob"] = ["anuj", "peggy"]
graph["alice"] = ["peggy"]
graph["claire"] = ["thom", "jonny"]
graph["anuj"] = []
graph["peggy"] = []
graph["thom"] = []
graph["jonny"] = []
def search(name):
search_queue = deque()
search_queue += graph[name]
    # This list keeps track of which people you've already searched (it marks visited nodes).
searched = []
while search_queue:
person = search_queue.popleft()
# Only search this person if you haven't already searched them.
if person not in searched:
if person_is_seller(person):
print(person + " is a mango seller!")
return True
else:
search_queue += graph[person]
# Marks this person as searched
searched.append(person)
return False
def person_is_seller(name):
return name[-1] == 'm'
if __name__ == "__main__":
search("you")
| 25.658537
| 84
| 0.588403
|
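The docstring above notes that BFS also finds shortest paths; a sketch that records hop counts over the same graph (assumes it is appended to the module above, so graph and deque are in scope):

def hops_from(start):
    dist = {start: 0}
    queue = deque([start])
    while queue:
        person = queue.popleft()
        for friend in graph[person]:
            if friend not in dist:
                dist[friend] = dist[person] + 1
                queue.append(friend)
    return dist

print(hops_from('you'))  # 'alice', 'bob', 'claire' at 1 hop; the rest at 2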
37bd1d9d0e190d23b4bf8c850195240f5e9847fc
| 5,595
|
py
|
Python
|
electrum_mona/gui/kivy/uix/dialogs/lightning_open_channel.py
|
david4neblio/electrum-mona
|
2d13b066be2d6205aeaa7ca859884c3ec1b92e83
|
[
"MIT"
] | 2
|
2019-12-27T09:13:48.000Z
|
2020-09-18T14:10:48.000Z
|
electrum_mona/gui/kivy/uix/dialogs/lightning_open_channel.py
|
david4neblio/electrum-mona
|
2d13b066be2d6205aeaa7ca859884c3ec1b92e83
|
[
"MIT"
] | 2
|
2020-07-31T20:14:43.000Z
|
2021-10-17T02:33:41.000Z
|
electrum_mona/gui/kivy/uix/dialogs/lightning_open_channel.py
|
david4neblio/electrum-mona
|
2d13b066be2d6205aeaa7ca859884c3ec1b92e83
|
[
"MIT"
] | 3
|
2020-03-08T19:40:17.000Z
|
2021-11-10T21:41:11.000Z
|
from kivy.lang import Builder
from kivy.factory import Factory
from electrum_mona.gui.kivy.i18n import _
from electrum_mona.lnaddr import lndecode
from electrum_mona.util import bh2u
from electrum_mona.bitcoin import COIN
import electrum_mona.simple_config as config
from .label_dialog import LabelDialog
Builder.load_string('''
<LightningOpenChannelDialog@Popup>
id: s
name: 'lightning_open_channel'
title: _('Open Lightning Channel')
pubkey: ''
amount: ''
ipport: ''
BoxLayout
spacing: '12dp'
padding: '12dp'
orientation: 'vertical'
SendReceiveBlueBottom:
id: blue_bottom
size_hint: 1, None
height: self.minimum_height
BoxLayout:
size_hint: 1, None
height: blue_bottom.item_height
Image:
source: 'atlas://electrum_mona/gui/kivy/theming/light/globe'
size_hint: None, None
size: '22dp', '22dp'
pos_hint: {'center_y': .5}
BlueButton:
text: s.pubkey if s.pubkey else _('Node ID')
shorten: True
#CardSeparator:
# color: blue_bottom.foreground_color
#BoxLayout:
# size_hint: 1, None
# height: blue_bottom.item_height
# Image:
# source: 'atlas://electrum_mona/gui/kivy/theming/light/network'
# size_hint: None, None
# size: '22dp', '22dp'
# pos_hint: {'center_y': .5}
# BlueButton:
# text: s.ipport if s.ipport else _('host:port')
# on_release: s.ipport_dialog()
CardSeparator:
color: blue_bottom.foreground_color
BoxLayout:
size_hint: 1, None
height: blue_bottom.item_height
Image:
source: 'atlas://electrum_mona/gui/kivy/theming/light/calculator'
size_hint: None, None
size: '22dp', '22dp'
pos_hint: {'center_y': .5}
BlueButton:
text: s.amount if s.amount else _('Amount')
on_release: app.amount_dialog(s, True)
BoxLayout:
size_hint: 1, None
IconButton:
icon: 'atlas://electrum_mona/gui/kivy/theming/light/copy'
size_hint: 0.5, None
height: '48dp'
on_release: s.do_paste()
IconButton:
icon: 'atlas://electrum_mona/gui/kivy/theming/light/camera'
size_hint: 0.5, None
height: '48dp'
on_release: app.scan_qr(on_complete=s.on_qr)
Button:
text: _('Suggest')
size_hint: 1, None
height: '48dp'
on_release: s.choose_node()
Button:
text: _('Open')
size_hint: 1, None
height: '48dp'
on_release: s.open_channel()
Widget:
size_hint: 1, 1
''')
class LightningOpenChannelDialog(Factory.Popup):
def ipport_dialog(self):
def callback(text):
self.ipport = text
d = LabelDialog(_('IP/port in format:\n[host]:[port]'), self.ipport, callback)
d.open()
def choose_node(self):
suggested = self.app.wallet.lnworker.suggest_peer()
if suggested:
self.pubkey = suggested.hex()
def __init__(self, app, lnaddr=None, msg=None):
super(LightningOpenChannelDialog, self).__init__()
self.app = app
self.lnaddr = lnaddr
self.msg = msg
def open(self, *args, **kwargs):
super(LightningOpenChannelDialog, self).open(*args, **kwargs)
if self.lnaddr:
fee = self.app.electrum_config.fee_per_kb()
if not fee:
fee = config.FEERATE_FALLBACK_STATIC_FEE
self.amount = self.app.format_amount_and_units(self.lnaddr.amount * COIN + fee * 2)
self.pubkey = bh2u(self.lnaddr.pubkey.serialize())
if self.msg:
self.app.show_info(self.msg)
def do_paste(self):
contents = self.app._clipboard.paste()
if not contents:
self.app.show_info(_("Clipboard is empty"))
return
self.pubkey = contents
def on_qr(self, conn_str):
self.pubkey = conn_str
def open_channel(self):
if not self.pubkey or not self.amount:
self.app.show_info(_('All fields must be filled out'))
return
conn_str = self.pubkey
if self.ipport:
conn_str += '@' + self.ipport.strip()
amount = self.app.get_amount(self.amount)
self.app.protected('Enter PIN to create a new channel', self.do_open_channel, (conn_str, amount))
self.dismiss()
def do_open_channel(self, conn_str, amount, password):
try:
chan = self.app.wallet.lnworker.open_channel(conn_str, amount, 0, password=password)
except Exception as e:
self.app.show_error(_('Problem opening channel: ') + '\n' + repr(e))
return
n = chan.constraints.funding_txn_minimum_depth
message = '\n'.join([
_('Channel established.'),
_('Remote peer ID') + ':' + chan.node_id.hex(),
_('This channel will be usable after {} confirmations').format(n)
])
self.app.show_info(message)
| 36.809211
| 105
| 0.546917
|
0a667e41152fd8c9ba2383b27f91dd4687ca1ed0
| 2,876
|
py
|
Python
|
users/migrations/0001_initial.py
|
boussier/CCPSMV_dashboard
|
5a0ac667713f7fd9e3e93c2a83e3da6f3657323f
|
[
"MIT"
] | 1
|
2021-04-21T11:05:15.000Z
|
2021-04-21T11:05:15.000Z
|
users/migrations/0001_initial.py
|
boussier/django_rest_api
|
fb0b25896370f59d33caf51d5852793e0c94d43c
|
[
"MIT"
] | null | null | null |
users/migrations/0001_initial.py
|
boussier/django_rest_api
|
fb0b25896370f59d33caf51d5852793e0c94d43c
|
[
"MIT"
] | 2
|
2022-03-10T21:14:13.000Z
|
2022-03-15T14:06:45.000Z
|
# Generated by Django 3.1.4 on 2020-12-10 11:49
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| 63.911111
| 329
| 0.664117
|
00008c8370b8df483a4c73eb81be1fb950b97c32
| 2,530
|
py
|
Python
|
gdsfactory/tests/test_import_gds_settings.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
gdsfactory/tests/test_import_gds_settings.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
gdsfactory/tests/test_import_gds_settings.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, List, Union
from gdsfactory.components import cells
skip_test = {
"version_stamp",
"extend_ports_list",
"extend_port",
"grating_coupler_tree",
"compensation_path",
"spiral_inner_io_with_gratings",
"component_sequence",
"straight_heater_metal_90_90",
"straight_heater_metal_undercut_90_90",
"mzi_phase_shifter_top_heater_metal",
}
components_to_test = set(cells.keys()) - skip_test
def tuplify(iterable: Union[List, Dict]) -> Any:
"""From a list or tuple returns a tuple."""
if isinstance(iterable, list):
return tuple(map(tuplify, iterable))
if isinstance(iterable, dict):
return {k: tuplify(v) for k, v in iterable.items()}
return iterable
def sort_dict(d: Dict[str, Any]) -> Dict[str, Any]:
return {k: d[k] for k in sorted(d)}
# @pytest.mark.parametrize("component_type", components_to_test)
# def test_properties_components(component_type: str) -> Component:
#     """Write component to GDS with settings written on a label.
# Then import the GDS and check that the settings imported match the original settings.
# """
# cnew = gf.Component()
# c1 = factory[component_type]()
# c1ref = cnew << c1
# add_settings_label(cnew, reference=c1ref)
# gdspath = cnew.write_gds_with_metadata()
# c2 = import_gds(gdspath)
# add_settings_from_label(c2)
# c1s = sort_dict(tuplify(OmegaConf.to_container(c1.settings.full)))
# c2s = sort_dict(tuplify(OmegaConf.to_container(c2.settings.full)))
# # c1s.pop("info")
# # c2s.pop("info")
# # c1s.pop("changed")
# # c2s.pop("changed")
# d = diff(c1s, c2s)
# # print(c1s)
# print(c2s)
# print(d)
# assert len(d) == 0, f"imported settings are different from original {d}"
# return c2
pass
# c = test_properties_components(component_type=list(component_names)[0])
# c = test_properties_components(component_type="ring_single")
# c = test_properties_components(component_type="mzit")
# c = test_properties_components(component_type="bezier")
# c = test_properties_components(component_type="wire_straight")
# c = test_properties_components(component_type="straight")
# c = test_properties_components(component_type="grating_coupler_tree")
# c = test_properties_components(component_type="wire")
# c = test_properties_components(component_type="bend_circular")
# c = test_properties_components(component_type="mzi_arm")
# c = test_properties_components(component_type="straight_pin")
# c.show()
| 32.435897
| 91
| 0.709881
|
1082a6dfcf6dc6bcec8340eb74b238999870f989
| 10,339
|
py
|
Python
|
odk_logger/migrations/0031_auto__add_field_xform_last_submission_time.py
|
curenamo/ssmreleva
|
6e50b8819713c81f42a638dfbe9aa37bd943400f
|
[
"BSD-2-Clause"
] | 123
|
2015-01-08T09:21:05.000Z
|
2021-11-14T19:45:23.000Z
|
odk_logger/migrations/0031_auto__add_field_xform_last_submission_time.py
|
cybernetics/formhub
|
578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04
|
[
"BSD-2-Clause"
] | 16
|
2015-02-13T16:56:42.000Z
|
2021-02-20T23:58:43.000Z
|
odk_logger/migrations/0031_auto__add_field_xform_last_submission_time.py
|
cybernetics/formhub
|
578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04
|
[
"BSD-2-Clause"
] | 110
|
2015-01-19T14:34:06.000Z
|
2021-02-01T14:55:11.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'XForm.last_submission_time'
db.add_column(u'odk_logger_xform', 'last_submission_time',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'XForm.last_submission_time'
db.delete_column(u'odk_logger_xform', 'last_submission_time')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'odk_logger.attachment': {
'Meta': {'object_name': 'Attachment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'mimetype': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'})
},
'odk_logger.instance': {
'Meta': {'object_name': 'Instance'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': u"orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.instancehistory': {
'Meta': {'object_name': 'InstanceHistory'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform_instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submission_history'", 'to': "orm['odk_logger.Instance']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.surveytype': {
'Meta': {'object_name': 'SurveyType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'odk_logger.xform': {
'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'), ('user', 'sms_id_string'))", 'object_name': 'XForm'},
'allows_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bamboo_dataset': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '60'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'encrypted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'is_crowd_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'json': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'last_submission_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'num_of_submissions': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shared_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sms_id_string': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50'}),
'surveys_with_geopoints': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': u"orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32'}),
'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'xml': ('django.db.models.fields.TextField', [], {})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['odk_logger']
| 75.467153
| 187
| 0.565238
|
cf8e9d6c526580bbbd85e6c83b9f9f5196453799
| 17,818
|
py
|
Python
|
poetry/mixology/version_solver.py
|
colour-science/poetry
|
f0855d984d9b0b38657c8b8522d3cfa36280b47c
|
[
"MIT"
] | 2
|
2019-06-19T15:07:58.000Z
|
2019-11-24T14:08:55.000Z
|
poetry/mixology/version_solver.py
|
djetelina/poetry
|
1aa1ab2962bb8b6aed33c2308cf8352809d91685
|
[
"MIT"
] | 1
|
2021-02-15T17:05:58.000Z
|
2021-02-15T17:05:58.000Z
|
poetry/mixology/version_solver.py
|
djetelina/poetry
|
1aa1ab2962bb8b6aed33c2308cf8352809d91685
|
[
"MIT"
] | 1
|
2019-06-19T15:08:05.000Z
|
2019-06-19T15:08:05.000Z
|
# -*- coding: utf-8 -*-
import time
from typing import Dict
from typing import List
from typing import Union
from poetry.packages import Dependency
from poetry.packages import ProjectPackage
from poetry.packages import Package
from poetry.puzzle.provider import Provider
from poetry.semver import Version
from poetry.semver import VersionRange
from .failure import SolveFailure
from .incompatibility import Incompatibility
from .incompatibility_cause import ConflictCause
from .incompatibility_cause import NoVersionsCause
from .incompatibility_cause import PackageNotFoundCause
from .incompatibility_cause import RootCause
from .partial_solution import PartialSolution
from .result import SolverResult
from .set_relation import SetRelation
from .term import Term
_conflict = object()
class VersionSolver:
"""
The version solver that finds a set of package versions that satisfy the
root package's dependencies.
See https://github.com/dart-lang/pub/tree/master/doc/solver.md for details
on how this solver works.
"""
def __init__(
self,
root, # type: ProjectPackage
provider, # type: Provider
locked=None, # type: Dict[str, Package]
use_latest=None, # type: List[str]
):
self._root = root
self._provider = provider
self._locked = locked or {}
if use_latest is None:
use_latest = []
self._use_latest = use_latest
self._incompatibilities = {} # type: Dict[str, List[Incompatibility]]
self._solution = PartialSolution()
@property
def solution(self): # type: () -> PartialSolution
return self._solution
def solve(self): # type: () -> SolverResult
"""
Finds a set of dependencies that match the root package's constraints,
or raises an error if no such set is available.
"""
start = time.time()
root_dependency = Dependency(self._root.name, self._root.version)
root_dependency.is_root = True
self._add_incompatibility(
Incompatibility([Term(root_dependency, False)], RootCause())
)
try:
next = self._root.name
while next is not None:
self._propagate(next)
next = self._choose_package_version()
return self._result()
except Exception:
raise
finally:
self._log(
"Version solving took {:.3f} seconds.\n"
"Tried {} solutions.".format(
time.time() - start, self._solution.attempted_solutions
)
)
def _propagate(self, package): # type: (str) -> None
"""
Performs unit propagation on incompatibilities transitively
related to package to derive new assignments for _solution.
"""
changed = set()
changed.add(package)
while changed:
package = changed.pop()
# Iterate in reverse because conflict resolution tends to produce more
# general incompatibilities as time goes on. If we look at those first,
# we can derive stronger assignments sooner and more eagerly find
# conflicts.
for incompatibility in reversed(self._incompatibilities[package]):
result = self._propagate_incompatibility(incompatibility)
if result is _conflict:
# If the incompatibility is satisfied by the solution, we use
# _resolve_conflict() to determine the root cause of the conflict as a
# new incompatibility.
#
# It also backjumps to a point in the solution
# where that incompatibility will allow us to derive new assignments
# that avoid the conflict.
root_cause = self._resolve_conflict(incompatibility)
# Back jumping erases all the assignments we did at the previous
# decision level, so we clear [changed] and refill it with the
# newly-propagated assignment.
changed.clear()
changed.add(str(self._propagate_incompatibility(root_cause)))
break
elif result is not None:
changed.add(result)
def _propagate_incompatibility(
self, incompatibility
): # type: (Incompatibility) -> Union[str, _conflict, None]
"""
If incompatibility is almost satisfied by _solution, adds the
negation of the unsatisfied term to _solution.
If incompatibility is satisfied by _solution, returns _conflict. If
incompatibility is almost satisfied by _solution, returns the
unsatisfied term's package name.
Otherwise, returns None.
"""
# The first entry in incompatibility.terms that's not yet satisfied by
# _solution, if one exists. If we find more than one, _solution is
# inconclusive for incompatibility and we can't deduce anything.
unsatisfied = None
for term in incompatibility.terms:
relation = self._solution.relation(term)
if relation == SetRelation.DISJOINT:
# If term is already contradicted by _solution, then
# incompatibility is contradicted as well and there's nothing new we
# can deduce from it.
return
elif relation == SetRelation.OVERLAPPING:
# If more than one term is inconclusive, we can't deduce anything about
# incompatibility.
if unsatisfied is not None:
return
# If exactly one term in incompatibility is inconclusive, then it's
# almost satisfied and [term] is the unsatisfied term. We can add the
# inverse of the term to _solution.
unsatisfied = term
# If *all* terms in incompatibility are satisfied by _solution, then
# incompatibility is satisfied and we have a conflict.
if unsatisfied is None:
return _conflict
self._log(
"derived: {}{}".format(
"not " if unsatisfied.is_positive() else "", unsatisfied.dependency
)
)
self._solution.derive(
unsatisfied.dependency, not unsatisfied.is_positive(), incompatibility
)
return unsatisfied.dependency.name
def _resolve_conflict(
self, incompatibility
): # type: (Incompatibility) -> Incompatibility
"""
        Given an incompatibility that's satisfied by _solution, performs
        `conflict resolution`_: constructs a new incompatibility that
        encapsulates the root cause of the conflict and backtracks _solution
        until the new incompatibility will allow _propagate() to deduce new
        assignments.
Adds the new incompatibility to _incompatibilities and returns it.
.. _conflict resolution: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution
"""
self._log("conflict: {}".format(incompatibility))
new_incompatibility = False
while not incompatibility.is_failure():
# The term in incompatibility.terms that was most recently satisfied by
# _solution.
most_recent_term = None
# The earliest assignment in _solution such that incompatibility is
# satisfied by _solution up to and including this assignment.
most_recent_satisfier = None
# The difference between most_recent_satisfier and most_recent_term;
# that is, the versions that are allowed by most_recent_satisfier and not
# by most_recent_term. This is None if most_recent_satisfier totally
# satisfies most_recent_term.
difference = None
# The decision level of the earliest assignment in _solution *before*
# most_recent_satisfier such that incompatibility is satisfied by
# _solution up to and including this assignment plus
# most_recent_satisfier.
#
# Decision level 1 is the level where the root package was selected. It's
# safe to go back to decision level 0, but stopping at 1 tends to produce
# better error messages, because references to the root package end up
# closer to the final conclusion that no solution exists.
previous_satisfier_level = 1
for term in incompatibility.terms:
satisfier = self._solution.satisfier(term)
if most_recent_satisfier is None:
most_recent_term = term
most_recent_satisfier = satisfier
elif most_recent_satisfier.index < satisfier.index:
previous_satisfier_level = max(
previous_satisfier_level, most_recent_satisfier.decision_level
)
most_recent_term = term
most_recent_satisfier = satisfier
difference = None
else:
previous_satisfier_level = max(
previous_satisfier_level, satisfier.decision_level
)
if most_recent_term == term:
# If most_recent_satisfier doesn't satisfy most_recent_term on its
# own, then the next-most-recent satisfier may be the one that
# satisfies the remainder.
difference = most_recent_satisfier.difference(most_recent_term)
if difference is not None:
previous_satisfier_level = max(
previous_satisfier_level,
self._solution.satisfier(difference.inverse).decision_level,
)
            # If most_recent_satisfier is the only satisfier left at its decision
# level, or if it has no cause (indicating that it's a decision rather
# than a derivation), then incompatibility is the root cause. We then
# backjump to previous_satisfier_level, where incompatibility is
# guaranteed to allow _propagate to produce more assignments.
if (
previous_satisfier_level < most_recent_satisfier.decision_level
or most_recent_satisfier.cause is None
):
self._solution.backtrack(previous_satisfier_level)
if new_incompatibility:
self._add_incompatibility(incompatibility)
return incompatibility
# Create a new incompatibility by combining incompatibility with the
# incompatibility that caused most_recent_satisfier to be assigned. Doing
# this iteratively constructs an incompatibility that's guaranteed to be
# true (that is, we know for sure no solution will satisfy the
# incompatibility) while also approximating the intuitive notion of the
# "root cause" of the conflict.
new_terms = []
for term in incompatibility.terms:
if term != most_recent_term:
new_terms.append(term)
for term in most_recent_satisfier.cause.terms:
if term.dependency != most_recent_satisfier.dependency:
new_terms.append(term)
# The most_recent_satisfier may not satisfy most_recent_term on its own
# if there are a collection of constraints on most_recent_term that
# only satisfy it together. For example, if most_recent_term is
# `foo ^1.0.0` and _solution contains `[foo >=1.0.0,
# foo <2.0.0]`, then most_recent_satisfier will be `foo <2.0.0` even
# though it doesn't totally satisfy `foo ^1.0.0`.
#
# In this case, we add `not (most_recent_satisfier \ most_recent_term)` to
# the incompatibility as well, See the `algorithm documentation`_ for
# details.
#
# .. _algorithm documentation: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution
if difference is not None:
new_terms.append(difference.inverse)
incompatibility = Incompatibility(
new_terms, ConflictCause(incompatibility, most_recent_satisfier.cause)
)
new_incompatibility = True
partially = "" if difference is None else " partially"
bang = "!"
self._log(
"{} {} is{} satisfied by {}".format(
bang, most_recent_term, partially, most_recent_satisfier
)
)
self._log(
'{} which is caused by "{}"'.format(bang, most_recent_satisfier.cause)
)
self._log("{} thus: {}".format(bang, incompatibility))
raise SolveFailure(incompatibility)
def _choose_package_version(self): # type: () -> Union[str, None]
"""
Tries to select a version of a required package.
Returns the name of the package whose incompatibilities should be
propagated by _propagate(), or None indicating that version solving is
complete and a solution has been found.
"""
unsatisfied = self._solution.unsatisfied
if not unsatisfied:
return
# Prefer packages with as few remaining versions as possible,
# so that if a conflict is necessary it's forced quickly.
def _get_min(dependency):
if dependency.name in self._use_latest:
# If we're forced to use the latest version of a package, it effectively
# only has one version to choose from.
return 1
if dependency.name in self._locked:
return 1
try:
return len(self._provider.search_for(dependency))
except ValueError:
return 0
if len(unsatisfied) == 1:
dependency = unsatisfied[0]
else:
dependency = min(*unsatisfied, key=_get_min)
locked = self._get_locked(dependency.name)
if locked is None or not dependency.constraint.allows(locked.version):
try:
packages = self._provider.search_for(dependency)
except ValueError as e:
self._add_incompatibility(
Incompatibility([Term(dependency, True)], PackageNotFoundCause(e))
)
return dependency.name
try:
version = packages[0]
except IndexError:
version = None
else:
version = locked
if version is None:
# If there are no versions that satisfy the constraint,
# add an incompatibility that indicates that.
self._add_incompatibility(
Incompatibility([Term(dependency, True)], NoVersionsCause())
)
return dependency.name
version = self._provider.complete_package(version)
conflict = False
for incompatibility in self._provider.incompatibilities_for(version):
self._add_incompatibility(incompatibility)
# If an incompatibility is already satisfied, then selecting version
# would cause a conflict.
#
# We'll continue adding its dependencies, then go back to
# unit propagation which will guide us to choose a better version.
conflict = conflict or all(
[
term.dependency.name == dependency.name
or self._solution.satisfies(term)
for term in incompatibility.terms
]
)
if not conflict:
self._solution.decide(version)
self._log(
"selecting {} ({})".format(version.name, version.full_pretty_version)
)
return dependency.name
def _excludes_single_version(self, constraint): # type: (Any) -> bool
return isinstance(VersionRange().difference(constraint), Version)
def _result(self): # type: () -> SolverResult
"""
Creates a #SolverResult from the decisions in _solution
"""
decisions = self._solution.decisions
return SolverResult(
self._root,
[p for p in decisions if not p.is_root()],
self._solution.attempted_solutions,
)
def _add_incompatibility(self, incompatibility): # type: (Incompatibility) -> None
self._log("fact: {}".format(incompatibility))
for term in incompatibility.terms:
if term.dependency.name not in self._incompatibilities:
self._incompatibilities[term.dependency.name] = []
if incompatibility in self._incompatibilities[term.dependency.name]:
continue
self._incompatibilities[term.dependency.name].append(incompatibility)
def _get_locked(self, package_name): # type: (str) -> Union[Package, None]
if package_name in self._use_latest:
return
locked = self._locked.get(package_name)
if not locked:
return
for dep in self._root.all_requires:
if dep.name == locked.name:
locked.requires_extras = dep.extras
return locked
def _log(self, text):
self._provider.debug(text, self._solution.attempted_solutions)
| 39.861298
| 121
| 0.606521
|
5aac1cfb2a107202013b2247c44955cb8b765a39
| 2,986
|
py
|
Python
|
tests/test_config_flow.py
|
mchwalisz/home-assistant-senec
|
b15843daaefdef34e9f774956eaa4c01b9169fcb
|
[
"Apache-2.0"
] | 10
|
2021-02-25T12:26:58.000Z
|
2022-03-01T07:32:46.000Z
|
tests/test_config_flow.py
|
mchwalisz/home-assistant-senec
|
b15843daaefdef34e9f774956eaa4c01b9169fcb
|
[
"Apache-2.0"
] | 16
|
2020-09-18T19:29:15.000Z
|
2022-03-09T11:23:47.000Z
|
tests/test_config_flow.py
|
mchwalisz/home-assistant-senec
|
b15843daaefdef34e9f774956eaa4c01b9169fcb
|
[
"Apache-2.0"
] | 10
|
2020-10-01T20:59:31.000Z
|
2022-03-01T07:32:19.000Z
|
"""Test the senec config flow."""
from homeassistant import config_entries, setup
from homeassistant.components.senec.config_flow import CannotConnect, InvalidAuth
from homeassistant.components.senec.const import DOMAIN
from tests.async_mock import patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.senec.config_flow.PlaceholderHub.authenticate",
return_value=True,
), patch(
"homeassistant.components.senec.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.senec.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "create_entry"
assert result2["title"] == "Name of the device"
assert result2["data"] == {
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.senec.config_flow.PlaceholderHub.authenticate",
side_effect=InvalidAuth,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.senec.config_flow.PlaceholderHub.authenticate",
side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
| 32.456522
| 81
| 0.616209
|
20476db7a51b053fd3d80fc08b74a0f7f6755876
| 474
|
py
|
Python
|
fixture/creator.py
|
Lucas1985/kurs_pythona_new
|
552f1ceec2ff1427dd6ff9d1c69ab5a892271682
|
[
"Apache-2.0"
] | null | null | null |
fixture/creator.py
|
Lucas1985/kurs_pythona_new
|
552f1ceec2ff1427dd6ff9d1c69ab5a892271682
|
[
"Apache-2.0"
] | null | null | null |
fixture/creator.py
|
Lucas1985/kurs_pythona_new
|
552f1ceec2ff1427dd6ff9d1c69ab5a892271682
|
[
"Apache-2.0"
] | null | null | null |
class CreatorHelper:
    def __init__(self, app):
self.app = app
def change_field_value(self, field_value, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_value).click()
wd.find_element_by_name(field_value).clear()
wd.find_element_by_name(field_value).send_keys(text)
def select_first_checkbox(self):
wd = self.app.wd
wd.find_element_by_name("selected[]").click()
| 29.625
| 64
| 0.64557
|
f980b8dc1de1f91d5b1dd6f67fd1771681224c64
| 1,164
|
py
|
Python
|
NYRP/migrations/0019_questionbug.py
|
WalterSchaertl/NYRP
|
bd9554fba80ed11f9c8efbc6c19b5a5cb987e3b6
|
[
"MIT"
] | 1
|
2018-09-27T01:44:48.000Z
|
2018-09-27T01:44:48.000Z
|
NYRP/migrations/0019_questionbug.py
|
WalterSchaertl/NYRP
|
bd9554fba80ed11f9c8efbc6c19b5a5cb987e3b6
|
[
"MIT"
] | 5
|
2021-04-08T18:23:14.000Z
|
2021-09-22T17:37:53.000Z
|
NYRP/migrations/0019_questionbug.py
|
WalterSchaertl/NYRP
|
bd9554fba80ed11f9c8efbc6c19b5a5cb987e3b6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-09-24 01:14
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('NYRP', '0018_auto_20170919_1313'),
]
operations = [
migrations.CreateModel(
name='QuestionBug',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bug_choices', models.CharField(choices=[('1', "This question doesn't belong in this topic."), ('2', "This question doesn't belong in this unit."), ('3', 'The given answer is incorrect.'), ('4', 'There is a formatting/typo error in the question.'), ('5', 'Other (please specify below).')], default='5', max_length=1)),
('description', models.TextField()),
('time', models.DateTimeField(blank=True, default=datetime.datetime.now)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='NYRP.Question')),
],
),
]
| 41.571429
| 335
| 0.628007
|
c63896c89730d94778a16854a79d972b459605d2
| 1,412
|
py
|
Python
|
cfg/caro_file_prep.py
|
a1247418/MT18_LH_human-sleep-classification
|
c4a40571390aaa14b1cc8a458100e21252fe05d2
|
[
"MIT"
] | null | null | null |
cfg/caro_file_prep.py
|
a1247418/MT18_LH_human-sleep-classification
|
c4a40571390aaa14b1cc8a458100e21252fe05d2
|
[
"MIT"
] | null | null | null |
cfg/caro_file_prep.py
|
a1247418/MT18_LH_human-sleep-classification
|
c4a40571390aaa14b1cc8a458100e21252fe05d2
|
[
"MIT"
] | null | null | null |
# Use the following to get rid of files with the old filter
# find -type f -regextype posix-extended ! -iregex '.*_newfilt\.mat$' -delete
#
# This script is used to unify the naming convention of the sleep recording files
import os
data_dir = "/cluster/scratch/llorenz/data/caro_new/"
files = [f for f in os.listdir(data_dir) if
os.path.isfile(os.path.join(data_dir, f))]
cnt = 0
for file in files:
if "_corr" in file:
# Use corrected files
os.rename(data_dir + file, data_dir + file.replace("_corr", ""))
file = file.replace("_corr","")
if "V_ML" in file:
# unify in verum naming scheme
os.rename(data_dir + file, data_dir + file.replace("V_ML", "S_ML").replace("_N","_N00"))
file = file.replace("V_ML","S_ML").replace("_N","_N00")
if not file.startswith("WESA"):
new_name = "_".join(prt for prt in file.split("_") if not "msco" in prt)
new_name = "WESA_" + new_name
new_name = new_name.replace("_ML", "_N1S_ML" if "msco1" in file else "_N2S_ML")
new_name = new_name.replace("_newfilt", "")
os.rename(data_dir+file, data_dir+new_name)
print(file,"--->",new_name)
cnt += 1
else:
new_name = file.replace("_newfilt", "")
os.rename(data_dir+file, data_dir+new_name)
print(file,"--->",new_name)
cnt += 1
print("Done.", f"Renamed {cnt} files.")
| 35.3
| 96
| 0.616856
|
2132401b72e9f377e3b3e301afce0dfea030998b
| 272
|
py
|
Python
|
web/impact/impact/views/api_registration_view.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 5
|
2017-10-19T15:11:52.000Z
|
2020-03-08T07:16:21.000Z
|
web/impact/impact/views/api_registration_view.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 182
|
2017-06-21T19:32:13.000Z
|
2021-03-22T13:38:16.000Z
|
web/impact/impact/views/api_registration_view.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 1
|
2018-06-23T11:53:18.000Z
|
2018-06-23T11:53:18.000Z
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.urls import reverse
from registration.backends.simple.views import RegistrationView
class APIRegistrationView(RegistrationView):
def get_success_url(self, user):
return reverse('api-root')
| 22.666667
| 63
| 0.772059
|
cf852852484ccf979e059170505d800849d97458
| 1,807
|
py
|
Python
|
app/doll/models/doll.py
|
Younlab/GFS-Backend
|
06bd2d14bc1e3226a458089fb99496516273f296
|
[
"MIT"
] | 2
|
2019-03-03T10:59:55.000Z
|
2019-03-03T11:00:07.000Z
|
app/doll/models/doll.py
|
Younlab/GFS-Backend
|
06bd2d14bc1e3226a458089fb99496516273f296
|
[
"MIT"
] | null | null | null |
app/doll/models/doll.py
|
Younlab/GFS-Backend
|
06bd2d14bc1e3226a458089fb99496516273f296
|
[
"MIT"
] | null | null | null |
from django.contrib.postgres.fields import ArrayField
from django.db import models
__all__ = (
'Doll',
'Status',
)
class Doll(models.Model):
id = models.PositiveIntegerField(unique=True, primary_key=True)
code_name = models.CharField(max_length=50)
rank = models.PositiveSmallIntegerField()
type = models.CharField(max_length=10)
build_time = models.PositiveIntegerField()
grow = models.PositiveIntegerField()
image = models.ImageField(upload_to='doll_image')
image_d = models.ImageField(upload_to='doll_image_d')
obtain = ArrayField(
ArrayField(
models.PositiveSmallIntegerField()
),
)
slot_01 = ArrayField(
ArrayField(
models.CharField(max_length=20, blank=True, null=True),
),
)
slot_02 = ArrayField(
ArrayField(
models.CharField(max_length=20, blank=True, null=True),
),
)
slot_03 = ArrayField(
ArrayField(
models.CharField(max_length=20, blank=True, null=True),
),
)
def __str__(self):
return self.code_name
class Status(models.Model):
doll = models.ForeignKey(
'Doll',
on_delete=models.CASCADE,
)
hp = models.PositiveIntegerField()
pow = models.PositiveIntegerField()
hit = models.PositiveIntegerField()
dodge = models.PositiveIntegerField()
rate = models.PositiveIntegerField()
armor_piercing = models.PositiveIntegerField()
critical_harm_rate = models.PositiveIntegerField()
critical_percent = models.PositiveIntegerField()
bullet = models.PositiveIntegerField()
speed = models.PositiveIntegerField()
night_view = models.PositiveIntegerField()
armor = models.PositiveIntegerField()
class Meta:
ordering = ['id']
| 28.234375
| 67
| 0.667405
|
c268d77d12b97290f021d9f26570c7008a4777aa
| 10,236
|
py
|
Python
|
torchani/utils.py
|
eric-erki/ccurate-Neural-Network-Potential-on-PyTorch-
|
e1c78efe0e8b471f6f666d5806b0f6ee529d0cc0
|
[
"MIT"
] | null | null | null |
torchani/utils.py
|
eric-erki/ccurate-Neural-Network-Potential-on-PyTorch-
|
e1c78efe0e8b471f6f666d5806b0f6ee529d0cc0
|
[
"MIT"
] | null | null | null |
torchani/utils.py
|
eric-erki/ccurate-Neural-Network-Potential-on-PyTorch-
|
e1c78efe0e8b471f6f666d5806b0f6ee529d0cc0
|
[
"MIT"
] | null | null | null |
import torch
import torch.utils.data
import math
from collections import defaultdict
def pad(species):
"""Put different species together into single tensor.
If the species are from molecules of different number of total atoms, then
ghost atoms with atom type -1 will be added to make it fit into the same
shape.
Arguments:
species (:class:`collections.abc.Sequence`): sequence of species.
Species must be of shape ``(N, A)``, where ``N`` is the number of
3D structures, ``A`` is the number of atoms.
Returns:
:class:`torch.Tensor`: species batched together.
"""
max_atoms = max([s.shape[1] for s in species])
padded_species = []
for s in species:
natoms = s.shape[1]
if natoms < max_atoms:
padding = torch.full((s.shape[0], max_atoms - natoms), -1,
dtype=torch.long, device=s.device)
s = torch.cat([s, padding], dim=1)
padded_species.append(s)
return torch.cat(padded_species)
def pad_atomic_properties(atomic_properties, padding_values=defaultdict(lambda: 0.0, species=-1)):
"""Put a sequence of atomic properties together into single tensor.
Inputs are `[{'species': ..., ...}, {'species': ..., ...}, ...]` and the outputs
are `{'species': padded_tensor, ...}`
Arguments:
        atomic_properties (:class:`collections.abc.Sequence`): sequence of
            atomic properties.
        padding_values (dict): the values used to pad tensors to the same size
"""
keys = list(atomic_properties[0])
anykey = keys[0]
max_atoms = max(x[anykey].shape[1] for x in atomic_properties)
padded = {k: [] for k in keys}
for p in atomic_properties:
num_molecules = max(v.shape[0] for v in p.values())
for k, v in p.items():
shape = list(v.shape)
padatoms = max_atoms - shape[1]
shape[1] = padatoms
padding = v.new_full(shape, padding_values[k])
v = torch.cat([v, padding], dim=1)
if v.shape[0] < num_molecules:
shape = list(v.shape)
shape[0] = num_molecules
v = v.expand(*shape)
padded[k].append(v)
return {k: torch.cat(v) for k, v in padded.items()}
# @torch.jit.script
def present_species(species):
"""Given a vector of species of atoms, compute the unique species present.
Arguments:
species (:class:`torch.Tensor`): 1D vector of shape ``(atoms,)``
Returns:
:class:`torch.Tensor`: 1D vector storing present atom types sorted.
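    Example (illustrative):
        >>> present_species(torch.tensor([[0, 1, -1], [2, -1, -1]]))
        tensor([0, 1, 2])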
"""
# present_species, _ = species.flatten()._unique(sorted=True)
present_species = species.flatten().unique(sorted=True)
if present_species[0].item() == -1:
present_species = present_species[1:]
return present_species
def strip_redundant_padding(atomic_properties):
"""Strip trailing padding atoms.
Arguments:
atomic_properties (dict): properties to strip
Returns:
dict: same set of properties with redundant padding atoms stripped.
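    Example (illustrative): a batch padded to 4 atoms where at most 3 are real
        >>> props = {'species': torch.tensor([[0, 1, 2, -1], [0, 1, -1, -1]])}
        >>> strip_redundant_padding(props)['species']
        tensor([[ 0,  1,  2],
                [ 0,  1, -1]])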
"""
species = atomic_properties['species']
non_padding = (species >= 0).any(dim=0).nonzero().squeeze()
for k in atomic_properties:
atomic_properties[k] = atomic_properties[k].index_select(1, non_padding)
return atomic_properties
def map2central(cell, coordinates, pbc):
"""Map atoms outside the unit cell into the cell using PBC.
Arguments:
cell (:class:`torch.Tensor`): tensor of shape (3, 3) of the three
vectors defining unit cell:
.. code-block:: python
tensor([[x1, y1, z1],
[x2, y2, z2],
[x3, y3, z3]])
coordinates (:class:`torch.Tensor`): Tensor of shape
``(molecules, atoms, 3)``.
pbc (:class:`torch.Tensor`): boolean vector of size 3 storing
if pbc is enabled for that direction.
Returns:
:class:`torch.Tensor`: coordinates of atoms mapped back to unit cell.
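    Example (illustrative): an 8 Å cubic cell with PBC on all three axes
        >>> cell = 8 * torch.eye(3)
        >>> coordinates = torch.tensor([[[11.0, -1.0, 5.0]]])
        >>> map2central(cell, coordinates, torch.tensor([True, True, True]))
        tensor([[[3., 7., 5.]]])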
"""
# Step 1: convert coordinates from standard cartesian coordinate to unit
# cell coordinates
inv_cell = torch.inverse(cell)
coordinates_cell = torch.matmul(coordinates, inv_cell)
# Step 2: wrap cell coordinates into [0, 1)
coordinates_cell -= coordinates_cell.floor() * pbc.to(coordinates_cell.dtype)
# Step 3: convert from cell coordinates back to standard cartesian
# coordinate
return torch.matmul(coordinates_cell, cell)
class EnergyShifter(torch.nn.Module):
"""Helper class for adding and subtracting self atomic energies
This is a subclass of :class:`torch.nn.Module`, so it can be used directly
in a pipeline as ``[input->AEVComputer->ANIModel->EnergyShifter->output]``.
Arguments:
self_energies (:class:`collections.abc.Sequence`): Sequence of floating
numbers for the self energy of each atom type. The numbers should
be in order, i.e. ``self_energies[i]`` should be atom type ``i``.
"""
def __init__(self, self_energies):
super(EnergyShifter, self).__init__()
self_energies = torch.tensor(self_energies, dtype=torch.double)
self.register_buffer('self_energies', self_energies)
def sae(self, species):
"""Compute self energies for molecules.
Padding atoms will be automatically excluded.
Arguments:
species (:class:`torch.Tensor`): Long tensor in shape
``(conformations, atoms)``.
Returns:
:class:`torch.Tensor`: 1D vector in shape ``(conformations,)``
for molecular self energies.
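        Example (illustrative, assuming two atom types with self energies
        -0.5 and -37.8):
            >>> shifter = EnergyShifter([-0.5, -37.8])
            >>> shifter.sae(torch.tensor([[0, 1, -1]]))  # last atom is padding
            tensor([-38.3000], dtype=torch.float64)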
"""
self_energies = self.self_energies[species]
self_energies[species == -1] = 0
return self_energies.sum(dim=1)
def subtract_from_dataset(self, atomic_properties, properties):
"""Transformer for :class:`torchani.data.BatchedANIDataset` that
subtract self energies.
"""
species = atomic_properties['species']
energies = properties['energies']
device = energies.device
energies = energies.to(torch.double) - self.sae(species).to(device)
properties['energies'] = energies
return atomic_properties, properties
def forward(self, species_energies):
"""(species, molecular energies)->(species, molecular energies + sae)
"""
species, energies = species_energies
sae = self.sae(species).to(energies.dtype).to(energies.device)
return species, energies + sae
class ChemicalSymbolsToInts:
"""Helper that can be called to convert chemical symbol string to integers
Arguments:
all_species (:class:`collections.abc.Sequence` of :class:`str`):
sequence of all supported species, in order.
"""
def __init__(self, all_species):
self.rev_species = {}
for i, s in enumerate(all_species):
self.rev_species[s] = i
def __call__(self, species):
"""Convert species from squence of strings to 1D tensor"""
rev = [self.rev_species[s] for s in species]
return torch.tensor(rev, dtype=torch.long)
def hessian(coordinates, energies=None, forces=None):
"""Compute analytical hessian from the energy graph or force graph.
Arguments:
coordinates (:class:`torch.Tensor`): Tensor of shape `(molecules, atoms, 3)`
energies (:class:`torch.Tensor`): Tensor of shape `(molecules,)`, if specified,
then `forces` must be `None`. This energies must be computed from
`coordinates` in a graph.
forces (:class:`torch.Tensor`): Tensor of shape `(molecules, atoms, 3)`, if specified,
then `energies` must be `None`. This forces must be computed from
`coordinates` in a graph.
Returns:
:class:`torch.Tensor`: Tensor of shape `(molecules, 3A, 3A)` where A is the number of
atoms in each molecule
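    Example (illustrative): `coordinates` must carry an autograd graph:
        >>> coordinates = torch.zeros(1, 2, 3, requires_grad=True)
        >>> energies = (coordinates ** 2).sum(dim=(1, 2))
        >>> hessian(coordinates, energies=energies).shape
        torch.Size([1, 6, 6])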
"""
if energies is None and forces is None:
raise ValueError('Energies or forces must be specified')
if energies is not None and forces is not None:
        raise ValueError('Energies and forces cannot be specified at the same time')
if forces is None:
forces = -torch.autograd.grad(energies.sum(), coordinates, create_graph=True)[0]
flattened_force = forces.flatten(start_dim=1)
force_components = flattened_force.unbind(dim=1)
return -torch.stack([
torch.autograd.grad(f.sum(), coordinates, retain_graph=True)[0].flatten(start_dim=1)
for f in force_components
], dim=1)
def vibrational_analysis(masses, hessian, unit='cm^-1'):
"""Computing the vibrational wavenumbers from hessian."""
if unit != 'cm^-1':
        raise ValueError('Only cm^-1 is supported right now')
    assert hessian.shape[0] == 1, 'Currently only one molecule at a time is supported'
# Solving the eigenvalue problem: Hq = w^2 * T q
# where H is the Hessian matrix, q is the normal coordinates,
# T = diag(m1, m1, m1, m2, m2, m2, ....) is the mass
# We solve this eigenvalue problem through Lowdin diagnolization:
# Hq = w^2 * Tq ==> Hq = w^2 * T^(1/2) T^(1/2) q
# Letting q' = T^(1/2) q, we then have
# T^(-1/2) H T^(-1/2) q' = w^2 * q'
inv_sqrt_mass = (1 / masses.sqrt()).repeat_interleave(3, dim=1) # shape (molecule, 3 * atoms)
mass_scaled_hessian = hessian * inv_sqrt_mass.unsqueeze(1) * inv_sqrt_mass.unsqueeze(2)
if mass_scaled_hessian.shape[0] != 1:
raise ValueError('The input should contain only one molecule')
mass_scaled_hessian = mass_scaled_hessian.squeeze(0)
eigenvalues, eigenvectors = torch.symeig(mass_scaled_hessian, eigenvectors=True)
angular_frequencies = eigenvalues.sqrt()
frequencies = angular_frequencies / (2 * math.pi)
# converting from sqrt(hartree / (amu * angstrom^2)) to cm^-1
wavenumbers = frequencies * 17092
modes = (eigenvectors.t() * inv_sqrt_mass).reshape(frequencies.numel(), -1, 3)
return wavenumbers, modes
__all__ = ['pad', 'pad_atomic_properties', 'present_species', 'hessian',
'vibrational_analysis', 'strip_redundant_padding',
'ChemicalSymbolsToInts']
| 39.068702
| 98
| 0.643318
|
5a36f92307c079d0b7a4b47be54f9c73e25f9cd6
| 562
|
py
|
Python
|
photos/tests/test_views.py
|
phacic/photos_album
|
1b67798284988322b10bcc29acfba6784d068aca
|
[
"Apache-2.0"
] | null | null | null |
photos/tests/test_views.py
|
phacic/photos_album
|
1b67798284988322b10bcc29acfba6784d068aca
|
[
"Apache-2.0"
] | null | null | null |
photos/tests/test_views.py
|
phacic/photos_album
|
1b67798284988322b10bcc29acfba6784d068aca
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import reverse
from faker import Faker
fake = Faker()
class TestPhoto:
def test_create_photo(self, create_image, auth_api_client, user_someone):
client = auth_api_client(user_someone)
with open(create_image.name, 'rb') as im:
url = reverse('photo-list')
data = {
"title": fake.name(),
"image": im
}
resp = client.post(path=url, data=data, format='multipart')
resp_data = resp.json()
assert resp.status_code == 201
| 24.434783
| 77
| 0.576512
|
2d1c5f82b41cf4b0491433cd0441af4154510624
| 309
|
py
|
Python
|
py/codechef/challenge2015/input/F_CaseGenerator.py
|
shhuan/algorithms
|
2830c7e2ada8dfd3dcdda7c06846116d4f944a27
|
[
"MIT"
] | null | null | null |
py/codechef/challenge2015/input/F_CaseGenerator.py
|
shhuan/algorithms
|
2830c7e2ada8dfd3dcdda7c06846116d4f944a27
|
[
"MIT"
] | null | null | null |
py/codechef/challenge2015/input/F_CaseGenerator.py
|
shhuan/algorithms
|
2830c7e2ada8dfd3dcdda7c06846116d4f944a27
|
[
"MIT"
] | 1
|
2022-03-09T04:52:55.000Z
|
2022-03-09T04:52:55.000Z
|
# -*- coding: utf-8 -*-
"""
created by huash at 2015-05-17 11:30
"""
__author__ = 'huash'
import sys
import os
import random
sys.stdout = open('sampleF-gen.txt', 'w')
caseCount = 100
print(caseCount)
for ci in range(caseCount):
N = random.randint(1, 10)
K = random.randint(1, 4)
print(N, K)
| 14.045455
| 41
| 0.634304
|
381e29283d417d73875a65eda0c9016fc5e28b6d
| 7,625
|
py
|
Python
|
pandas/tools/util.py
|
sandbox/pandas
|
fd5471208244ae1cb9cb426d6aa02ab408cfacba
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | 1
|
2019-04-28T13:48:34.000Z
|
2019-04-28T13:48:34.000Z
|
pandas/tools/util.py
|
sandbox/pandas
|
fd5471208244ae1cb9cb426d6aa02ab408cfacba
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | null | null | null |
pandas/tools/util.py
|
sandbox/pandas
|
fd5471208244ae1cb9cb426d6aa02ab408cfacba
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | 1
|
2019-04-28T13:48:35.000Z
|
2019-04-28T13:48:35.000Z
|
import numpy as np
import pandas.lib as lib
from pandas.types.common import (is_number,
is_numeric_dtype,
is_datetime_or_timedelta_dtype,
is_list_like,
_ensure_object,
is_decimal,
is_scalar as isscalar)
from pandas.types.cast import _possibly_downcast_to_dtype
import pandas as pd
from pandas.compat import reduce
from pandas.core.index import Index
from pandas.core import common as com
def match(needles, haystack):
haystack = Index(haystack)
needles = Index(needles)
return haystack.get_indexer(needles)
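# Illustrative: match(['b', 'd'], ['a', 'b', 'c']) -> array([ 1, -1]),
# i.e. the position of each needle in the haystack, -1 where not found.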
def cartesian_product(X):
"""
Numpy version of itertools.product or pandas.compat.product.
Sometimes faster (for large inputs)...
Parameters
----------
X : list-like of list-likes
Returns
-------
product : list of ndarrays
Examples
--------
>>> cartesian_product([list('ABC'), [1, 2]])
[array(['A', 'A', 'B', 'B', 'C', 'C'], dtype='|S1'),
array([1, 2, 1, 2, 1, 2])]
See also
--------
itertools.product : Cartesian product of input iterables. Equivalent to
nested for-loops.
pandas.compat.product : An alias for itertools.product.
"""
msg = "Input must be a list-like of list-likes"
if not is_list_like(X):
raise TypeError(msg)
for x in X:
if not is_list_like(x):
raise TypeError(msg)
if len(X) == 0:
return []
lenX = np.fromiter((len(x) for x in X), dtype=int)
cumprodX = np.cumproduct(lenX)
a = np.roll(cumprodX, 1)
a[0] = 1
if cumprodX[-1] != 0:
b = cumprodX[-1] / cumprodX
else:
# if any factor is empty, the cartesian product is empty
b = np.zeros_like(cumprodX)
return [np.tile(np.repeat(np.asarray(com._values_from_object(x)), b[i]),
np.product(a[i]))
for i, x in enumerate(X)]
def _compose2(f, g):
"""Compose 2 callables"""
return lambda *args, **kwargs: f(g(*args, **kwargs))
def compose(*funcs):
"""Compose 2 or more callables"""
assert len(funcs) > 1, 'At least 2 callables must be passed to compose'
return reduce(_compose2, funcs)
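# Illustrative: compose(str.strip, str.lower)("  AbC ") == "abc"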
def to_numeric(arg, errors='raise', downcast=None):
"""
Convert argument to a numeric type.
Parameters
----------
arg : list, tuple, 1-d array, or Series
errors : {'ignore', 'raise', 'coerce'}, default 'raise'
- If 'raise', then invalid parsing will raise an exception
- If 'coerce', then invalid parsing will be set as NaN
- If 'ignore', then invalid parsing will return the input
downcast : {'integer', 'signed', 'unsigned', 'float'} , default None
If not None, and if the data has been successfully cast to a
numerical dtype (or if the data was numeric to begin with),
downcast that resulting data to the smallest numerical dtype
possible according to the following rules:
- 'integer' or 'signed': smallest signed int dtype (min.: np.int8)
- 'unsigned': smallest unsigned int dtype (min.: np.uint8)
- 'float': smallest float dtype (min.: np.float32)
As this behaviour is separate from the core conversion to
numeric values, any errors raised during the downcasting
will be surfaced regardless of the value of the 'errors' input.
In addition, downcasting will only occur if the size
of the resulting data's dtype is strictly larger than
the dtype it is to be cast to, so if none of the dtypes
checked satisfy that specification, no downcasting will be
performed on the data.
.. versionadded:: 0.19.0
Returns
-------
ret : numeric if parsing succeeded.
Return type depends on input. Series if Series, otherwise ndarray
Examples
--------
Take separate series and convert to numeric, coercing when told to
>>> import pandas as pd
>>> s = pd.Series(['1.0', '2', -3])
>>> pd.to_numeric(s)
0 1.0
1 2.0
2 -3.0
dtype: float64
>>> pd.to_numeric(s, downcast='float')
0 1.0
1 2.0
2 -3.0
dtype: float32
>>> pd.to_numeric(s, downcast='signed')
0 1
1 2
2 -3
dtype: int8
>>> s = pd.Series(['apple', '1.0', '2', -3])
>>> pd.to_numeric(s, errors='ignore')
0 apple
1 1.0
2 2
3 -3
dtype: object
>>> pd.to_numeric(s, errors='coerce')
0 NaN
1 1.0
2 2.0
3 -3.0
dtype: float64
"""
if downcast not in (None, 'integer', 'signed', 'unsigned', 'float'):
raise ValueError('invalid downcasting method provided')
is_series = False
is_index = False
is_scalar = False
if isinstance(arg, pd.Series):
is_series = True
values = arg.values
elif isinstance(arg, pd.Index):
is_index = True
values = arg.asi8
if values is None:
values = arg.values
elif isinstance(arg, (list, tuple)):
values = np.array(arg, dtype='O')
elif isscalar(arg):
if is_decimal(arg):
return float(arg)
if is_number(arg):
return arg
is_scalar = True
values = np.array([arg], dtype='O')
elif getattr(arg, 'ndim', 1) > 1:
raise TypeError('arg must be a list, tuple, 1-d array, or Series')
else:
values = arg
try:
if is_numeric_dtype(values):
pass
elif is_datetime_or_timedelta_dtype(values):
values = values.astype(np.int64)
else:
values = _ensure_object(values)
coerce_numeric = False if errors in ('ignore', 'raise') else True
values = lib.maybe_convert_numeric(values, set(),
coerce_numeric=coerce_numeric)
except Exception:
if errors == 'raise':
raise
# attempt downcast only if the data has been successfully converted
# to a numerical dtype and if a downcast method has been specified
if downcast is not None and is_numeric_dtype(values):
typecodes = None
if downcast in ('integer', 'signed'):
typecodes = np.typecodes['Integer']
elif downcast == 'unsigned' and np.min(values) >= 0:
typecodes = np.typecodes['UnsignedInteger']
elif downcast == 'float':
typecodes = np.typecodes['Float']
# pandas support goes only to np.float32,
# as float dtypes smaller than that are
# extremely rare and not well supported
float_32_char = np.dtype(np.float32).char
float_32_ind = typecodes.index(float_32_char)
typecodes = typecodes[float_32_ind:]
if typecodes is not None:
# from smallest to largest
for dtype in typecodes:
if np.dtype(dtype).itemsize <= values.dtype.itemsize:
values = _possibly_downcast_to_dtype(
values, dtype)
# successful conversion
if values.dtype == dtype:
break
if is_series:
return pd.Series(values, index=arg.index, name=arg.name)
elif is_index:
# because we want to coerce to numeric if possible,
# do not use _shallow_copy_with_infer
return Index(values, name=arg.name)
elif is_scalar:
return values[0]
else:
return values
| 30.995935
| 77
| 0.580066
|
ac4ed5cb7896345e483c2d63ce7f637a86bd1df9
| 611
|
py
|
Python
|
data/multiphysics_preliminary/plank_10_slice/create_mgxs.py
|
gwenchee/2021-chee-prelim
|
e28fae5f64ab4a4464d73b4cc42cb5c767754e5a
|
[
"BSD-3-Clause"
] | null | null | null |
data/multiphysics_preliminary/plank_10_slice/create_mgxs.py
|
gwenchee/2021-chee-prelim
|
e28fae5f64ab4a4464d73b4cc42cb5c767754e5a
|
[
"BSD-3-Clause"
] | null | null | null |
data/multiphysics_preliminary/plank_10_slice/create_mgxs.py
|
gwenchee/2021-chee-prelim
|
e28fae5f64ab4a4464d73b4cc42cb5c767754e5a
|
[
"BSD-3-Clause"
] | null | null | null |
import openmc
from openmc_file import *
sp = openmc.StatePoint("statepoint.80.h5", autolink=False)
su = openmc.Summary("summary.h5")
sp.link_with_summary(su)
mgxs_lib.load_from_statepoint(sp)
mgxs_file = mgxs_lib.create_mg_library(
xs_type="macro",
xsdata_names=[
"bounds",
"graphite1",
"graphite2",
"prism_cell_1",
"prism_cell_2",
"prism_cell_3",
"prism_cell_4",
"prism_cell_5",
"prism_cell_6",
"prism_cell_7",
"prism_cell_8",
"prism_cell_9",
"prism_cell_10",
],
)
mgxs_file.export_to_hdf5()
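# Hedged sketch (not part of this script): mgxs_lib comes from the wildcard
# import of openmc_file above. It is presumably an openmc.mgxs.Library set up
# along these lines before the transport run; the geometry, group bounds, and
# domains below are placeholders, not values from the original input.
#
# geometry = openmc.Geometry(...)
# mgxs_lib = openmc.mgxs.Library(geometry)
# mgxs_lib.energy_groups = openmc.mgxs.EnergyGroups([0.0, 0.625, 2.0e7])
# mgxs_lib.mgxs_types = ['total', 'nu-fission', 'nu-scatter matrix', 'chi']
# mgxs_lib.domain_type = 'material'
# mgxs_lib.domains = geometry.get_all_materials().values()
# mgxs_lib.build_library()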
| 21.821429
| 58
| 0.620295
|
b457b1d851928af344a2878e37e36d9db4aa7e77
| 10,501
|
py
|
Python
|
src/ethereum/frontier/vm/instructions/system.py
|
LaudateCorpus1/execution-specs
|
1f2e9f925ffd7352da74408dfaa823c67cf469ff
|
[
"CC0-1.0"
] | null | null | null |
src/ethereum/frontier/vm/instructions/system.py
|
LaudateCorpus1/execution-specs
|
1f2e9f925ffd7352da74408dfaa823c67cf469ff
|
[
"CC0-1.0"
] | 1
|
2021-09-12T07:33:49.000Z
|
2021-09-12T07:33:49.000Z
|
src/ethereum/frontier/vm/instructions/system.py
|
LaudateCorpus1/execution-specs
|
1f2e9f925ffd7352da74408dfaa823c67cf469ff
|
[
"CC0-1.0"
] | null | null | null |
"""
Ethereum Virtual Machine (EVM) System Instructions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. contents:: Table of Contents
:backlinks: none
:local:
Introduction
------------
Implementations of the EVM system related instructions.
"""
from ethereum.base_types import U256, Bytes0, Uint
from ethereum.utils.safe_arithmetic import u256_safe_add
from ...state import (
account_has_code_or_nonce,
get_account,
increment_nonce,
set_account_balance,
)
from ...utils.address import compute_contract_address, to_address
from ...vm.error import OutOfGasError
from .. import Evm, Message
from ..gas import (
GAS_CREATE,
GAS_ZERO,
calculate_call_gas_cost,
calculate_gas_extend_memory,
calculate_message_call_gas_stipend,
subtract_gas,
)
from ..memory import extend_memory, memory_read_bytes, memory_write
from ..stack import pop, push
def create(evm: Evm) -> None:
"""
Creates a new account with associated code.
Parameters
----------
evm :
The current EVM frame.
"""
# This import causes a circular import error
# if it's not moved inside this method
from ...vm.interpreter import STACK_DEPTH_LIMIT, process_create_message
endowment = pop(evm.stack)
memory_start_position = Uint(pop(evm.stack))
memory_size = pop(evm.stack)
extend_memory_gas_cost = calculate_gas_extend_memory(
evm.memory, memory_start_position, memory_size
)
total_gas_cost = u256_safe_add(
GAS_CREATE,
extend_memory_gas_cost,
exception_type=OutOfGasError,
)
evm.gas_left = subtract_gas(evm.gas_left, total_gas_cost)
extend_memory(evm.memory, memory_start_position, memory_size)
sender_address = evm.message.current_target
sender = get_account(evm.env.state, sender_address)
evm.pc += 1
if sender.balance < endowment:
push(evm.stack, U256(0))
return None
if sender.nonce == Uint(2 ** 64 - 1):
push(evm.stack, U256(0))
return None
if evm.message.depth + 1 > STACK_DEPTH_LIMIT:
push(evm.stack, U256(0))
return None
call_data = memory_read_bytes(
evm.memory, memory_start_position, memory_size
)
increment_nonce(evm.env.state, evm.message.current_target)
create_message_gas = evm.gas_left
evm.gas_left = subtract_gas(evm.gas_left, create_message_gas)
contract_address = compute_contract_address(
evm.message.current_target,
get_account(evm.env.state, evm.message.current_target).nonce - U256(1),
)
is_collision = account_has_code_or_nonce(evm.env.state, contract_address)
if is_collision:
push(evm.stack, U256(0))
return
child_message = Message(
caller=evm.message.current_target,
target=Bytes0(),
gas=create_message_gas,
value=endowment,
data=b"",
code=call_data,
current_target=contract_address,
depth=evm.message.depth + 1,
code_address=None,
)
child_evm = process_create_message(child_message, evm.env)
if child_evm.has_erred:
push(evm.stack, U256(0))
else:
push(evm.stack, U256.from_be_bytes(child_evm.message.current_target))
evm.gas_left = child_evm.gas_left
evm.refund_counter += child_evm.refund_counter
evm.accounts_to_delete.update(child_evm.accounts_to_delete)
evm.logs += child_evm.logs
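# Hedged sketch (not part of the spec code): u256_safe_add above behaves like
# overflow-checked addition -- if the sum no longer fits in 256 bits, the
# given exception type (OutOfGasError at the call sites) is raised instead of
# wrapping around.
def _u256_safe_add_sketch(*values, exception_type=ArithmeticError):
    total = sum(values)
    if total > 2 ** 256 - 1:
        raise exception_type("U256 addition overflowed")
    return total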
def return_(evm: Evm) -> None:
"""
Halts execution returning output data.
Parameters
----------
evm :
The current EVM frame.
"""
memory_start_position = Uint(pop(evm.stack))
memory_size = pop(evm.stack)
gas_cost = GAS_ZERO + calculate_gas_extend_memory(
evm.memory, memory_start_position, memory_size
)
evm.gas_left = subtract_gas(evm.gas_left, gas_cost)
extend_memory(evm.memory, memory_start_position, memory_size)
evm.output = memory_read_bytes(
evm.memory, memory_start_position, memory_size
)
# HALT the execution
evm.running = False
def call(evm: Evm) -> None:
"""
Message-call into an account.
Parameters
----------
evm :
The current EVM frame.
"""
from ...vm.interpreter import STACK_DEPTH_LIMIT, process_message
gas = pop(evm.stack)
to = to_address(pop(evm.stack))
value = pop(evm.stack)
memory_input_start_position = Uint(pop(evm.stack))
memory_input_size = pop(evm.stack)
memory_output_start_position = Uint(pop(evm.stack))
memory_output_size = pop(evm.stack)
gas_input_memory = calculate_gas_extend_memory(
evm.memory, memory_input_start_position, memory_input_size
)
evm.gas_left = subtract_gas(evm.gas_left, gas_input_memory)
extend_memory(evm.memory, memory_input_start_position, memory_input_size)
gas_output_memory = calculate_gas_extend_memory(
evm.memory, memory_output_start_position, memory_output_size
)
evm.gas_left = subtract_gas(evm.gas_left, gas_output_memory)
extend_memory(evm.memory, memory_output_start_position, memory_output_size)
call_data = memory_read_bytes(
evm.memory, memory_input_start_position, memory_input_size
)
call_gas_fee = calculate_call_gas_cost(evm.env.state, gas, to, value)
message_call_gas_fee = u256_safe_add(
gas,
calculate_message_call_gas_stipend(value),
exception_type=OutOfGasError,
)
evm.gas_left = subtract_gas(evm.gas_left, call_gas_fee)
sender_balance = get_account(
evm.env.state, evm.message.current_target
).balance
evm.pc += 1
if sender_balance < value:
push(evm.stack, U256(0))
evm.gas_left += message_call_gas_fee
return None
if evm.message.depth + 1 > STACK_DEPTH_LIMIT:
push(evm.stack, U256(0))
evm.gas_left += message_call_gas_fee
return None
code = get_account(evm.env.state, to).code
child_message = Message(
caller=evm.message.current_target,
target=to,
gas=message_call_gas_fee,
value=value,
data=call_data,
code=code,
current_target=to,
depth=evm.message.depth + 1,
code_address=to,
)
child_evm = process_message(child_message, evm.env)
if child_evm.has_erred:
push(evm.stack, U256(0))
else:
push(evm.stack, U256(1))
actual_output_size = min(memory_output_size, U256(len(child_evm.output)))
memory_write(
evm.memory,
memory_output_start_position,
child_evm.output[:actual_output_size],
)
evm.gas_left += child_evm.gas_left
evm.refund_counter += child_evm.refund_counter
evm.accounts_to_delete.update(child_evm.accounts_to_delete)
evm.logs += child_evm.logs
def callcode(evm: Evm) -> None:
"""
Message-call into this account with alternative account’s code.
Parameters
----------
evm :
The current EVM frame.
"""
from ...vm.interpreter import STACK_DEPTH_LIMIT, process_message
gas = pop(evm.stack)
code_address = to_address(pop(evm.stack))
value = pop(evm.stack)
memory_input_start_position = Uint(pop(evm.stack))
memory_input_size = pop(evm.stack)
memory_output_start_position = Uint(pop(evm.stack))
memory_output_size = pop(evm.stack)
to = evm.message.current_target
gas_input_memory = calculate_gas_extend_memory(
evm.memory, memory_input_start_position, memory_input_size
)
evm.gas_left = subtract_gas(evm.gas_left, gas_input_memory)
extend_memory(evm.memory, memory_input_start_position, memory_input_size)
gas_output_memory = calculate_gas_extend_memory(
evm.memory, memory_output_start_position, memory_output_size
)
evm.gas_left = subtract_gas(evm.gas_left, gas_output_memory)
extend_memory(evm.memory, memory_output_start_position, memory_output_size)
call_data = memory_read_bytes(
evm.memory, memory_input_start_position, memory_input_size
)
call_gas_fee = calculate_call_gas_cost(evm.env.state, gas, to, value)
message_call_gas_fee = u256_safe_add(
gas,
calculate_message_call_gas_stipend(value),
exception_type=OutOfGasError,
)
evm.gas_left = subtract_gas(evm.gas_left, call_gas_fee)
sender_balance = get_account(
evm.env.state, evm.message.current_target
).balance
evm.pc += 1
if sender_balance < value:
push(evm.stack, U256(0))
evm.gas_left += message_call_gas_fee
return None
if evm.message.depth + 1 > STACK_DEPTH_LIMIT:
push(evm.stack, U256(0))
evm.gas_left += message_call_gas_fee
return None
code = get_account(evm.env.state, code_address).code
child_message = Message(
caller=evm.message.current_target,
target=to,
gas=message_call_gas_fee,
value=value,
data=call_data,
code=code,
current_target=to,
depth=evm.message.depth + 1,
code_address=code_address,
)
child_evm = process_message(child_message, evm.env)
if child_evm.has_erred:
push(evm.stack, U256(0))
else:
push(evm.stack, U256(1))
actual_output_size = min(memory_output_size, U256(len(child_evm.output)))
memory_write(
evm.memory,
memory_output_start_position,
child_evm.output[:actual_output_size],
)
evm.gas_left += child_evm.gas_left
evm.refund_counter += child_evm.refund_counter
evm.accounts_to_delete.update(child_evm.accounts_to_delete)
evm.logs += child_evm.logs
def selfdestruct(evm: Evm) -> None:
"""
Halt execution and register account for later deletion.
Parameters
----------
evm :
The current EVM frame.
"""
beneficiary = to_address(pop(evm.stack))
originator = evm.message.current_target
beneficiary_balance = get_account(evm.env.state, beneficiary).balance
originator_balance = get_account(evm.env.state, originator).balance
# First Transfer to beneficiary
set_account_balance(
evm.env.state, beneficiary, beneficiary_balance + originator_balance
)
# Next, Zero the balance of the address being deleted (must come after
# sending to beneficiary in case the contract named itself as the
# beneficiary).
set_account_balance(evm.env.state, originator, U256(0))
# register account for deletion
evm.accounts_to_delete.add(originator)
# HALT the execution
evm.running = False
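# Hedged illustration (not part of the spec code): the write ordering in
# selfdestruct matters when a contract names itself as beneficiary. The
# balances are read up front, so with a balance of 10 the transfer writes
# 10 + 10 = 20 and the later zeroing leaves the self-destructing account at
# 0; reversing the writes would instead leave the stale 20 in place.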
| 30.262248
| 79
| 0.687458
|
c7949b73e2165d9f065ce877ef70a9d9fb287e5e
| 4,264
|
py
|
Python
|
Documentation/conf.py
|
ryandgoulding/ovn
|
f792b1a00b439a949e3b7aae4951f8513340c1a1
|
[
"Apache-2.0"
] | null | null | null |
Documentation/conf.py
|
ryandgoulding/ovn
|
f792b1a00b439a949e3b7aae4951f8513340c1a1
|
[
"Apache-2.0"
] | null | null | null |
Documentation/conf.py
|
ryandgoulding/ovn
|
f792b1a00b439a949e3b7aae4951f8513340c1a1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Open vSwitch documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 30 09:57:36 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import string
import sys
try:
import ovs_sphinx_theme
use_ovs_theme = True
except ImportError:
print("Cannot find 'ovs-sphinx-theme' package. "
"Falling back to default theme.")
use_ovs_theme = False
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = '1.1'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'contents'
# General information about the project.
project = u'OVN'
copyright = u'2020, The OVN Development Community'
author = u'The OVN Development Community'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = None
filename = "../configure.ac"
with open(filename) as f:  # universal newlines are the default on Python 3
for line in f:
if 'AC_INIT' in line:
# Parse "AC_INIT(openvswitch, 2.7.90, bugs@openvswitch.org)":
release = line.split(',')[1].strip(string.whitespace + '[]')
break
if release is None:
    sys.stderr.write('%s: failed to determine OVN version\n'
                     % filename)
sys.exit(1)
# The short X.Y version.
#
# However, it's important to know the difference between, e.g., 2.7
# and 2.7.90, which can be very different versions (2.7.90 may be much
# closer to 2.8 than to 2.7), so check for that.
version = release if '.90' in release else '.'.join(release.split('.')[0:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# If true, check the validity of #anchors in links.
linkcheck_anchors = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
if use_ovs_theme:
html_theme = 'ovs'
# Add any paths that contain custom themes here, relative to this directory.
if use_ovs_theme:
html_theme_path = [ovs_sphinx_theme.get_theme_dir()]
else:
html_theme_path = []
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
html_logo = '_static/logo.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
_man_pages = [
('ovn-sim.1',
u'Open Virtual Network simulator environment'),
('ovsdb-server.7',
u'Open vSwitch Database Server Protocol'),
('ovsdb.5',
u'Open vSwitch Database (File Formats)'),
('ovsdb.7',
u'Open vSwitch Database (Overview)'),
]
# Generate list of (path, name, description, [author, ...], section)
man_pages = [
('ref/%s' % file_name, file_name.split('.', 1)[0],
description, [author], file_name.split('.', 1)[1])
for file_name, description in _man_pages]
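# Hedged illustration (not part of conf.py): the AC_INIT parsing above maps a
# configure.ac line to the bare release string, e.g.:
_ac_init_example = 'AC_INIT(ovn, 21.06.90, bugs@openvswitch.org)'
assert _ac_init_example.split(',')[1].strip(string.whitespace + '[]') == '21.06.90'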
| 32.30303
| 78
| 0.682458
|
79b7506bc94203d85c2726f99afa9b34e2c0320e
| 2,145
|
py
|
Python
|
automon/helpers/threading/worker_thread.py
|
TheShellLand/automonisaur
|
b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9
|
[
"MIT"
] | 2
|
2021-09-15T18:35:44.000Z
|
2022-01-18T05:36:54.000Z
|
automon/helpers/threading/worker_thread.py
|
TheShellLand/automonisaur
|
b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9
|
[
"MIT"
] | 16
|
2021-08-29T22:51:53.000Z
|
2022-03-09T16:08:19.000Z
|
automon/helpers/threading/worker_thread.py
|
TheShellLand/automonisaur
|
b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9
|
[
"MIT"
] | null | null | null |
import queue
import threading
import time
print_lock = threading.Lock()
def init_queue():
""" Initialize the Queue """
return queue.Queue()
def job(queue_item):
""" What is actually being worked on """
with print_lock:
        print('+ Job:', threading.current_thread().name)
args = dict()
for key in queue_item:
if key == 'headers':
args['headers'] = queue_item[key]
# Disabled Neo4j
# http.http_header(**args)
else:
args = queue_item
def worker(queue_list):
""" Worker puts the things to work """
while True:
queue_item = queue_list.get()
if queue_item:
with print_lock:
                print('+ Worker:', threading.current_thread().name)
# TODO: Find a way to figure out what data is before doing work on it
# TODO: Add failure routine if queue_item is a str and not a list
job(queue_item)
queue_list.task_done()
else:
return
def start_worker(queue_list):
""" This starts all the threading """
print('+ Queue:', queue_list.qsize())
try:
        num_worker_threads = min(queue_list.qsize(), 100)
threads = []
for _ in range(num_worker_threads):
t = threading.Thread(target=worker, args=(queue_list,))
            t.daemon = True
t.start()
threads.append(t)
start = time.time()
print('+ Threads started:', len(threads))
for _ in threads:
print('`-', _)
# queue_list.join() # block until all tasks are done
print('+ Entire job took:', time.time() - start)
print('+ Threads enumerate:', len(threading.enumerate()))
for _ in threading.enumerate():
print('`-', _)
for _ in range(num_worker_threads): # stop workers
queue_list.put(None)
# for _ in threading:
# _.join()
# print(threading.current_thread().getName(), 'is', threading.Thread.is_alive(threading.current_thread()))
except:
raise
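# Hedged usage sketch (not in the original file): queue a couple of jobs and
# let start_worker spin up the daemon threads; the dict payloads here are
# placeholders.
if __name__ == '__main__':
    work = init_queue()
    work.put({'headers': {'User-Agent': 'demo'}})
    work.put({'other': 'payload'})
    start_worker(work)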
| 24.101124
| 114
| 0.570629
|
a74d6eabcc929c89cd712b36ec9a6dfaf62781ab
| 3,457
|
py
|
Python
|
aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/CreatePlanRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/CreatePlanRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/CreatePlanRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbr.endpoint import endpoint_data
class CreatePlanRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hbr', '2017-09-08', 'CreatePlan','hbr')
self.set_protocol_type('https')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DiffPolicyId(self):
return self.get_query_params().get('DiffPolicyId')
def set_DiffPolicyId(self,DiffPolicyId):
self.add_query_param('DiffPolicyId',DiffPolicyId)
def get_ScheduleType(self):
return self.get_query_params().get('ScheduleType')
def set_ScheduleType(self,ScheduleType):
self.add_query_param('ScheduleType',ScheduleType)
def get_ClientId(self):
return self.get_query_params().get('ClientId')
def set_ClientId(self,ClientId):
self.add_query_param('ClientId',ClientId)
def get_ServerType(self):
return self.get_query_params().get('ServerType')
def set_ServerType(self,ServerType):
self.add_query_param('ServerType',ServerType)
def get_VaultId(self):
return self.get_query_params().get('VaultId')
def set_VaultId(self,VaultId):
self.add_query_param('VaultId',VaultId)
def get_IncPolicyId(self):
return self.get_query_params().get('IncPolicyId')
def set_IncPolicyId(self,IncPolicyId):
self.add_query_param('IncPolicyId',IncPolicyId)
def get_Source(self):
return self.get_query_params().get('Source')
def set_Source(self,Source):
self.add_query_param('Source',Source)
def get_PlanName(self):
return self.get_query_params().get('PlanName')
def set_PlanName(self,PlanName):
self.add_query_param('PlanName',PlanName)
def get_SourceType(self):
return self.get_query_params().get('SourceType')
def set_SourceType(self,SourceType):
self.add_query_param('SourceType',SourceType)
def get_FullPolicyId(self):
return self.get_query_params().get('FullPolicyId')
def set_FullPolicyId(self,FullPolicyId):
self.add_query_param('FullPolicyId',FullPolicyId)
def get_Retention(self):
return self.get_query_params().get('Retention')
def set_Retention(self,Retention):
self.add_query_param('Retention',Retention)
def get_ServerId(self):
return self.get_query_params().get('ServerId')
def set_ServerId(self,ServerId):
self.add_query_param('ServerId',ServerId)
def get_Token(self):
return self.get_query_params().get('Token')
def set_Token(self,Token):
self.add_query_param('Token',Token)
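# Hedged usage sketch (not part of the SDK file): populate the request via its
# setters and send it through an AcsClient. The credentials, region, and
# parameter values below are placeholders.
#
# from aliyunsdkcore.client import AcsClient
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = CreatePlanRequest()
# request.set_PlanName('nightly-backup')
# request.set_VaultId('v-000xxxxxxxxxxxxx')
# request.set_SourceType('ECS_FILE')
# response = client.do_action_with_exception(request)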
| 31.144144
| 74
| 0.75499
|
3d437e4017959469aae249b7c162626dda9f98fe
| 1,015
|
py
|
Python
|
LeetcodeAlgorithms/367. Valid Perfect Square/valid-perfect-square.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | 3
|
2019-08-20T06:54:38.000Z
|
2022-01-07T12:56:46.000Z
|
LeetcodeAlgorithms/367. Valid Perfect Square/valid-perfect-square.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | null | null | null |
LeetcodeAlgorithms/367. Valid Perfect Square/valid-perfect-square.py
|
Fenghuapiao/PyLeetcode
|
d804a62643fe935eb61808196a2c093ea9583654
|
[
"MIT"
] | 2
|
2018-06-07T02:56:39.000Z
|
2018-08-01T15:27:55.000Z
|
class Solution(object):
def isPerfectSquare(self, num):
"""
:type num: int
:rtype: bool
"""
        # Newton's method for the integer square root: start from num and
        # iterate r = (r + num // r) // 2 until r * r <= num; integer
        # division keeps the result exact for arbitrarily large num.
        r = num
        while r * r > num:
            r = (r + num // r) // 2
        return r * r == num
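# Hedged usage sketch (not in the original file):
if __name__ == '__main__':
    s = Solution()
    assert s.isPerfectSquare(16)
    assert not s.isPerfectSquare(14)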
| 29
| 36
| 0.263054
|
4c82fee24f893f4b481bcdcbf5d8bcc6f1746923
| 1,173
|
py
|
Python
|
bin/colors.py
|
PyreFramework/pyre
|
345c7449a3416eea1c1affa74fb32faff30a6aaa
|
[
"BSD-3-Clause"
] | null | null | null |
bin/colors.py
|
PyreFramework/pyre
|
345c7449a3416eea1c1affa74fb32faff30a6aaa
|
[
"BSD-3-Clause"
] | null | null | null |
bin/colors.py
|
PyreFramework/pyre
|
345c7449a3416eea1c1affa74fb32faff30a6aaa
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- Python -*-
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2022 all rights reserved
#
# support
import pyre
# the app
class Colors(pyre.application):
"""
A generator of colorized directory listings that is repository aware
"""
# user configurable state
palette = pyre.properties.str(default="x11")
palette.doc = "the set of colors to render"
# protocol obligations
@pyre.export
def main(self, *args, **kwds):
"""
The main entry point
"""
# grab my terminal
terminal = self.executive.terminal
# get the reset code
normal = terminal.ansi["normal"]
# get the palette
palette = getattr(terminal, self.palette)
# go through the color names
for name in palette.keys():
# print the name in its color
print(f"{palette[name]}{name}{normal}")
#
# all done
return 0
# bootstrap
if __name__ == "__main__":
# instantiate
app = Colors(name="colors")
# invoke
status = app.run()
# share
raise SystemExit(status)
# end of file
| 19.881356
| 72
| 0.585678
|
cf1d674011c98ffa7e444513c0302da93a086098
| 3,637
|
py
|
Python
|
tests/api/v2/upgrade_api_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
tests/api/v2/upgrade_api_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
tests/api/v2/upgrade_api_test.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
"""
upgrade_api_test.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import unittest
from netapp.santricity.rest import ApiException
from netapp.santricity.api.v2.upgrade_api import UpgradeApi
class UpgradeApiTest(unittest.TestCase):
def test_get_software_versions(self):
api = UpgradeApi()
upgrade_api = None
try:
upgrade_api = api.get_software_versions()
            # For the DELETE calls there's no response returned, so we treat that as a valid SDK call.
if upgrade_api is None:
upgrade_api = 1
except (ApiException, IOError) as exp:
            # The API call went through but got an HTTP error code, which means the SDK works.
upgrade_api = 1
self.assertNotEqual(upgrade_api, None)
def test_start_software_download(self):
api = UpgradeApi()
upgrade_api = None
try:
upgrade_api = api.start_software_download()
            # For the DELETE calls there's no response returned, so we treat that as a valid SDK call.
if upgrade_api is None:
upgrade_api = 1
except (ApiException, IOError) as exp:
            # The API call went through but got an HTTP error code, which means the SDK works.
upgrade_api = 1
self.assertNotEqual(upgrade_api, None)
def test_start_software_reload(self):
api = UpgradeApi()
upgrade_api = None
try:
upgrade_api = api.start_software_reload()
            # For the DELETE calls there's no response returned, so we treat that as a valid SDK call.
if upgrade_api is None:
upgrade_api = 1
except (ApiException, IOError) as exp:
            # The API call went through but got an HTTP error code, which means the SDK works.
upgrade_api = 1
self.assertNotEqual(upgrade_api, None)
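# Hedged addition (not in the original file): allow running this module
# directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()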
| 47.855263
| 844
| 0.698378
|
56634d1fbc9d52e88168177edeed0da1e03430f4
| 14,965
|
py
|
Python
|
scripts/exp-application/fwt_oTree/fwt/pages.py
|
ffzg-erudito/inter-testing-feedback-2018
|
368f3fb8960535dd0d99213a360491b1edde8b39
|
[
"CC0-1.0",
"CC-BY-4.0"
] | null | null | null |
scripts/exp-application/fwt_oTree/fwt/pages.py
|
ffzg-erudito/inter-testing-feedback-2018
|
368f3fb8960535dd0d99213a360491b1edde8b39
|
[
"CC0-1.0",
"CC-BY-4.0"
] | null | null | null |
scripts/exp-application/fwt_oTree/fwt/pages.py
|
ffzg-erudito/inter-testing-feedback-2018
|
368f3fb8960535dd0d99213a360491b1edde8b39
|
[
"CC0-1.0",
"CC-BY-4.0"
] | null | null | null |
from otree.api import Currency as c, currency_range
from ._builtin import Page
from .models import Constants
import time
import math
# this function outputs an integer in the range from 1 to 4 which
# represents a particular experimental situation
def switch(x):
return {
'a': 1,
'b': 2,
'c': 3,
'd': 4
}[x]
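# Hedged illustration (not in the original file): switch is a plain dict
# lookup, so switch('a') == 1 and switch('d') == 4, while any other key
# raises KeyError.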
# FIRST PAGE
class enter_id(Page):
template_name = 'fwt/auxiliary/enter_id.html'
form_model = 'player'
form_fields = ['identity']
class instructions(Page):
template_name = 'fwt/auxiliary/instructions.html'
pass
class timer_start(Page):
template_name = 'fwt/auxiliary/timer_start.html'
def before_next_page(self):
        # record when the participant started reading; used to estimate reading speed
self.participant.vars['timer_start'] = time.time()
# PRACTICE TEXT AND QUESTION PAGES
class practice_text(Page):
template_name = 'fwt/texts/practice_text.html'
def before_next_page(self):
        # estimate reading speed from the time spent on the practice text
self.participant.vars['reading_time_estimate'] = time.time() - self.participant.vars['timer_start']
class practice_q1(Page):
template_name = 'fwt/practice/q1.html'
form_model = 'player'
form_fields = ['practice_q1']
def before_next_page(self):
self.player.feedback(self.player.practice_q1)
print(self.player.practice_q1)
class practice_q2(Page):
template_name = 'fwt/practice/q2.html'
form_model = 'player'
form_fields = ['practice_q2']
class practice_q3(Page):
template_name = 'fwt/practice/q3.html'
form_model = 'player'
form_fields = ['practice_q3']
class practice_q4(Page):
template_name = 'fwt/practice/q4.html'
form_model = 'player'
form_fields = ['practice_q4']
# GET READY PAGE
class get_ready(Page):
template_name = 'fwt/auxiliary/get_ready.html'
pass
# SECTION DEFINING TEXT PAGES AND ACTIVITIES
# First text section
class text_1(Page):
template_name = 'fwt/texts/text_1.html'
def get_timeout_seconds(self):
estimate = self.participant.vars['reading_time_estimate'] * 3 # multiplied by 3 because the main text sections have about 3x more words
minutes = math.ceil(estimate / 60)
return minutes * 60
# PAGES FOR ACTIVITIES AFTER FIRST SECTION
class activity1_task1(Page):
template_name = 'fwt/content/test1/q1.html'
form_model = 'player'
form_fields = ['test1_q1']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q1.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q1.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q1.html'
# else:
# template_name = 'fwt/text_1.html'
pass
class activity1_task2(Page):
template_name = 'fwt/content/test1/q2.html'
form_model = 'player'
form_fields = ['test1_q2']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q2.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q2.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q2.html'
pass
class activity1_task3(Page):
template_name = 'fwt/content/test1/q3.html'
form_model = 'player'
form_fields = ['test1_q3']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q3.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q3.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q3.html'
pass
class activity1_task4(Page):
template_name = 'fwt/content/test1/q4.html'
form_model = 'player'
form_fields = ['test1_q4']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q4.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q4.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q4.html'
pass
class activity1_task5(Page):
template_name = 'fwt/content/test1/q5.html'
form_model = 'player'
form_fields = ['test1_q5']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q5.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q5.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q5.html'
pass
class activity1_task6(Page):
template_name = 'fwt/content/test1/q6.html'
form_model = 'player'
form_fields = ['test1_q6']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q6.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q6.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q6.html'
pass
class activity1_task7(Page):
template_name = 'fwt/content/test1/q7.html'
form_model = 'player'
form_fields = ['test1_q7']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q7.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q7.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q7.html'
pass
class activity1_task8(Page):
template_name = 'fwt/content/test1/q8.html'
form_model = 'player'
form_fields = ['test1_q8']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q8.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q8.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q8.html'
pass
class activity1_task9(Page):
template_name = 'fwt/content/test1/q9.html'
form_model = 'player'
form_fields = ['test1_q9']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q9.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q9.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q9.html'
pass
class activity1_task10(Page):
template_name = 'fwt/content/test1/q10.html'
form_model = 'player'
form_fields = ['test1_q10']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test1/q10.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test1/q10.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test1/q10.html'
pass
# SECOND TEXT SECTION
class text_2(Page):
template_name = 'fwt/texts/text_2.html'
def get_timeout_seconds(self):
estimate = self.participant.vars['reading_time_estimate'] * 3 # multiplied by 3 because the main text sections have about 3x more words
minutes = math.ceil(estimate / 60)
return minutes * 60
# PAGES FOR ACTIVITIES AFTER SECOND SECTION
class activity2_task1(Page):
template_name = 'fwt/content/test2/q1.html'
form_model = 'player'
form_fields = ['test2_q1']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q1.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q1.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q1.html'
# else:
# template_name = 'fwt/text_2.html'
pass
class activity2_task2(Page):
template_name = 'fwt/content/test2/q2.html'
form_model = 'player'
form_fields = ['test2_q2']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q2.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q2.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q2.html'
pass
class activity2_task3(Page):
template_name = 'fwt/content/test2/q3.html'
form_model = 'player'
form_fields = ['test2_q3']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q3.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q3.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q3.html'
pass
class activity2_task4(Page):
template_name = 'fwt/content/test2/q4.html'
form_model = 'player'
form_fields = ['test2_q4']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q4.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q4.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q4.html'
pass
class activity2_task5(Page):
template_name = 'fwt/content/test2/q5.html'
form_model = 'player'
form_fields = ['test2_q5']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q5.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q5.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q5.html'
pass
class activity2_task6(Page):
template_name = 'fwt/content/test2/q6.html'
form_model = 'player'
form_fields = ['test2_q6']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q6.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q6.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q6.html'
pass
class activity2_task7(Page):
template_name = 'fwt/content/test2/q7.html'
form_model = 'player'
form_fields = ['test2_q7']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q7.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q7.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q7.html'
pass
class activity2_task8(Page):
template_name = 'fwt/content/test2/q8.html'
form_model = 'player'
form_fields = ['test2_q8']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q8.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q8.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q8.html'
pass
class activity2_task9(Page):
template_name = 'fwt/content/test2/q9.html'
form_model = 'player'
form_fields = ['test2_q9']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q9.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q9.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q9.html'
pass
class activity2_task10(Page):
template_name = 'fwt/content/test2/q10.html'
form_model = 'player'
form_fields = ['test2_q10']
# exp_sit = switch()
# if exp_sit == 1:
# template_name = 'fwt/content/test2/q10.html'
# elif exp_sit == 2:
# template_name = 'fwt/general/test2/q10.html'
# elif exp_sit == 3:
# template_name = 'fwt/math/test2/q10.html'
pass
# THIRD AND FINAL TEXT SECTION
class text_3(Page):
template_name = 'fwt/texts/text_3.html'
def get_timeout_seconds(self):
estimate = self.participant.vars['reading_time_estimate'] * 3 # multiplied by 3 because the main text sections have about 3x more words
minutes = math.ceil(estimate / 60)
return minutes * 60
# PAGES FOR ACTIVITIES AFTER THIRD AND FINAL TEXT SECTION
class content_test3_q1(Page):
template_name = 'fwt/final/q1.html'
form_model = 'player'
form_fields = ['test3_q1']
pass
class content_test3_q2(Page):
template_name = 'fwt/final/q2.html'
form_model = 'player'
form_fields = ['test3_q2']
pass
class content_test3_q3(Page):
template_name = 'fwt/final/q3.html'
form_model = 'player'
form_fields = ['test3_q3']
pass
class content_test3_q4(Page):
template_name = 'fwt/final/q4.html'
form_model = 'player'
form_fields = ['test3_q4']
pass
class content_test3_q5(Page):
template_name = 'fwt/final/q5.html'
form_model = 'player'
form_fields = ['test3_q5']
pass
class content_test3_q6(Page):
template_name = 'fwt/final/q6.html'
form_model = 'player'
form_fields = ['test3_q6']
pass
class content_test3_q7(Page):
template_name = 'fwt/final/q7.html'
form_model = 'player'
form_fields = ['test3_q7']
pass
class content_test3_q8(Page):
template_name = 'fwt/final/q8.html'
form_model = 'player'
form_fields = ['test3_q8']
pass
class content_test3_q9(Page):
template_name = 'fwt/final/q9.html'
form_model = 'player'
form_fields = ['test3_q9']
pass
class content_test3_q10(Page):
template_name = 'fwt/final/q10.html'
form_model = 'player'
form_fields = ['test3_q10']
pass
# GOODBYE PAGE
class end_page(Page):
    template_name = 'fwt/auxiliary/end_page.html'
pass
# here define sequences depending on experimental situation
#exp_sit = switch() - if exp_sit 1-3, then choose this sequence
page_sequence = [
enter_id,
# instructions,
timer_start,
practice_text,
practice_q1,
practice_q2,
practice_q3,
practice_q4,
get_ready,
text_1,
activity1_task1,
activity1_task2,
activity1_task3,
activity1_task4,
activity1_task5,
activity1_task6,
activity1_task7,
activity1_task8,
activity1_task9,
activity1_task10,
get_ready,
text_2,
activity2_task1,
activity2_task2,
activity2_task3,
activity2_task4,
activity2_task5,
activity2_task6,
activity2_task7,
activity2_task8,
activity2_task9,
activity2_task10,
get_ready,
text_3,
content_test3_q1,
content_test3_q2,
content_test3_q3,
content_test3_q4,
content_test3_q5,
content_test3_q6,
content_test3_q7,
content_test3_q8,
content_test3_q9,
content_test3_q10,
end_page
]
# else choose this sequence
#page_sequence = [
# enter_id,
# instructions,
# timer_start,
# practice_text,
# practice_q1,
# practice_q2,
# practice_q3,
# text_1,
# text_1,
# text_2,
# text_2,
# text_3,
# content_test3_q1,
# content_test3_q2,
# content_test3_q3,
# content_test3_q4,
# content_test3_q5,
# content_test3_q6,
# content_test3_q7,
# content_test3_q8,
# content_test3_q9,
# content_test3_q10,
# end_page
# ]
| 25.067002
| 144
| 0.609288
|
3ed1db36c1caea286e3d582531bec037ca4be573
| 29,057
|
py
|
Python
|
kivymd/theming.py
|
yaxter/KivyMD
|
5094cdcab6298c63b1edac33a214601b8f022fa8
|
[
"MIT"
] | null | null | null |
kivymd/theming.py
|
yaxter/KivyMD
|
5094cdcab6298c63b1edac33a214601b8f022fa8
|
[
"MIT"
] | null | null | null |
kivymd/theming.py
|
yaxter/KivyMD
|
5094cdcab6298c63b1edac33a214601b8f022fa8
|
[
"MIT"
] | null | null | null |
"""
Themes/Theming
==============
.. seealso::
`Material Design spec, Material theming <https://material.io/design/material-theming>`_
Material App
------------
The main class of your application, which in `Kivy` inherits from the App class,
in `KivyMD` must inherit from the `MDApp` class. The `MDApp` class has
properties that allow you to control application properties
such as :attr:`color/style/font` of interface elements and much more.
Control material properties
---------------------------
The main application class inherited from the `MDApp` class has the :attr:`theme_cls`
attribute, with which you control the material properties of your application.
"""
from kivy.app import App
from kivy.atlas import Atlas
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.event import EventDispatcher
from kivy.metrics import dp
from kivy.properties import (
AliasProperty,
BooleanProperty,
ColorProperty,
DictProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.utils import get_color_from_hex
from kivymd import images_path
from kivymd.color_definitions import colors, hue, palette
from kivymd.font_definitions import theme_font_styles # NOQA: F401
from kivymd.material_resources import DEVICE_IOS, DEVICE_TYPE
class ThemeManager(EventDispatcher):
primary_palette = OptionProperty("Blue", options=palette)
"""
The name of the color scheme that the application will use.
All major `material` components will have the color
of the specified color theme.
Available options are: `'Red'`, `'Pink'`, `'Purple'`, `'DeepPurple'`,
`'Indigo'`, `'Blue'`, `'LightBlue'`, `'Cyan'`, `'Teal'`, `'Green'`,
`'LightGreen'`, `'Lime'`, `'Yellow'`, `'Amber'`, `'Orange'`, `'DeepOrange'`,
`'Brown'`, `'Gray'`, `'BlueGray'`.
To change the color scheme of an application:
.. code-block:: python
from kivy.uix.screenmanager import Screen
from kivymd.app import MDApp
from kivymd.uix.button import MDRectangleFlatButton
class MainApp(MDApp):
def build(self):
self.theme_cls.primary_palette = "Green" # "Purple", "Red"
screen = Screen()
screen.add_widget(
MDRectangleFlatButton(
text="Hello, World",
pos_hint={"center_x": 0.5, "center_y": 0.5},
)
)
return screen
MainApp().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/primary-palette.png
:attr:`primary_palette` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'Blue'`.
"""
primary_hue = OptionProperty("500", options=hue)
"""
The color hue of the application.
Available options are: `'50'`, `'100'`, `'200'`, `'300'`, `'400'`, `'500'`,
`'600'`, `'700'`, `'800'`, `'900'`, `'A100'`, `'A200'`, `'A400'`, `'A700'`.
To change the hue color scheme of an application:
.. code-block:: python
from kivy.uix.screenmanager import Screen
from kivymd.app import MDApp
from kivymd.uix.button import MDRectangleFlatButton
class MainApp(MDApp):
def build(self):
self.theme_cls.primary_palette = "Green" # "Purple", "Red"
self.theme_cls.primary_hue = "200" # "500"
screen = Screen()
screen.add_widget(
MDRectangleFlatButton(
text="Hello, World",
pos_hint={"center_x": 0.5, "center_y": 0.5},
)
)
return screen
MainApp().run()
With a value of ``self.theme_cls.primary_hue = "500"``:
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/primary-palette.png
With a value of ``self.theme_cls.primary_hue = "200"``:
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/primary-hue.png
:attr:`primary_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'500'`.
"""
primary_light_hue = OptionProperty("200", options=hue)
"""
Hue value for :attr:`primary_light`.
:attr:`primary_light_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'200'`.
"""
primary_dark_hue = OptionProperty("700", options=hue)
"""
Hue value for :attr:`primary_dark`.
:attr:`primary_light_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'700'`.
"""
def _get_primary_color(self):
return get_color_from_hex(
colors[self.primary_palette][self.primary_hue]
)
primary_color = AliasProperty(
_get_primary_color, bind=("primary_palette", "primary_hue")
)
"""
The color of the current application theme in ``rgba`` format.
:attr:`primary_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value of the current application theme, property is readonly.
"""
def _get_primary_light(self):
return get_color_from_hex(
colors[self.primary_palette][self.primary_light_hue]
)
primary_light = AliasProperty(
_get_primary_light, bind=("primary_palette", "primary_light_hue")
)
"""
Colors of the current application color theme in ``rgba`` format
(in lighter color).
.. code-block:: python
from kivy.lang import Builder
from kivymd.app import MDApp
KV = '''
Screen:
MDRaisedButton:
text: "primary_light"
pos_hint: {"center_x": 0.5, "center_y": 0.7}
md_bg_color: app.theme_cls.primary_light
MDRaisedButton:
text: "primary_color"
pos_hint: {"center_x": 0.5, "center_y": 0.5}
MDRaisedButton:
text: "primary_dark"
pos_hint: {"center_x": 0.5, "center_y": 0.3}
md_bg_color: app.theme_cls.primary_dark
'''
class MainApp(MDApp):
def build(self):
self.theme_cls.primary_palette = "Green"
return Builder.load_string(KV)
MainApp().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/primary-colors-light-dark.png
:align: center
:attr:`primary_light` is an :class:`~kivy.properties.AliasProperty` that
returns the value of the current application theme (in lighter color),
property is readonly.
"""
def _get_primary_dark(self):
return get_color_from_hex(
colors[self.primary_palette][self.primary_dark_hue]
)
primary_dark = AliasProperty(
_get_primary_dark, bind=("primary_palette", "primary_dark_hue")
)
"""
Colors of the current application color theme
in ``rgba`` format (in darker color).
:attr:`primary_dark` is an :class:`~kivy.properties.AliasProperty` that
returns the value of the current application theme (in darker color),
property is readonly.
"""
accent_palette = OptionProperty("Amber", options=palette)
"""
The application color palette used for items such as the tab indicator
in the :attr:`MDTabsBar` class and so on...
The image below shows the color schemes with the values
``self.theme_cls.accent_palette = 'Blue'``, ``Red'`` and ``Yellow'``:
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/accent-palette.png
:attr:`accent_palette` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'Amber'`.
"""
accent_hue = OptionProperty("500", options=hue)
"""Similar to :attr:`primary_hue`,
but returns a value for :attr:`accent_palette`.
:attr:`accent_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'500'`.
"""
accent_light_hue = OptionProperty("200", options=hue)
"""
Hue value for :attr:`accent_light`.
:attr:`accent_light_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'200'`.
"""
accent_dark_hue = OptionProperty("700", options=hue)
"""
Hue value for :attr:`accent_dark`.
:attr:`accent_dark_hue` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'700'`.
"""
def _get_accent_color(self):
return get_color_from_hex(colors[self.accent_palette][self.accent_hue])
accent_color = AliasProperty(
_get_accent_color, bind=["accent_palette", "accent_hue"]
)
"""Similar to :attr:`primary_color`,
but returns a value for :attr:`accent_color`.
:attr:`accent_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`accent_color`,
property is readonly.
"""
def _get_accent_light(self):
return get_color_from_hex(
colors[self.accent_palette][self.accent_light_hue]
)
accent_light = AliasProperty(
_get_accent_light, bind=["accent_palette", "accent_light_hue"]
)
"""Similar to :attr:`primary_light`,
but returns a value for :attr:`accent_light`.
:attr:`accent_light` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`accent_light`,
property is readonly.
"""
def _get_accent_dark(self):
return get_color_from_hex(
colors[self.accent_palette][self.accent_dark_hue]
)
accent_dark = AliasProperty(
_get_accent_dark, bind=["accent_palette", "accent_dark_hue"]
)
"""Similar to :attr:`primary_dark`,
but returns a value for :attr:`accent_dark`.
:attr:`accent_dark` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`accent_dark`,
property is readonly.
"""
theme_style = OptionProperty("Light", options=["Light", "Dark"])
"""App theme style.
.. code-block:: python
from kivy.uix.screenmanager import Screen
from kivymd.app import MDApp
from kivymd.uix.button import MDRectangleFlatButton
class MainApp(MDApp):
def build(self):
self.theme_cls.theme_style = "Dark" # "Light"
screen = Screen()
screen.add_widget(
MDRectangleFlatButton(
text="Hello, World",
pos_hint={"center_x": 0.5, "center_y": 0.5},
)
)
return screen
MainApp().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/theme-style.png
:attr:`theme_style` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'Light'`.
"""
def _get_theme_style(self, opposite):
if opposite:
return "Light" if self.theme_style == "Dark" else "Dark"
else:
return self.theme_style
def _get_bg_darkest(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
return get_color_from_hex(colors["Light"]["StatusBar"])
elif theme_style == "Dark":
return get_color_from_hex(colors["Dark"]["StatusBar"])
bg_darkest = AliasProperty(_get_bg_darkest, bind=["theme_style"])
"""
Similar to :attr:`bg_dark`,
but the color values are a tone lower (darker) than :attr:`bg_dark`.
.. code-block:: python
KV = '''
<Box@BoxLayout>:
bg: 0, 0, 0, 0
canvas:
Color:
rgba: root.bg
Rectangle:
pos: self.pos
size: self.size
BoxLayout:
Box:
bg: app.theme_cls.bg_light
Box:
bg: app.theme_cls.bg_normal
Box:
bg: app.theme_cls.bg_dark
Box:
bg: app.theme_cls.bg_darkest
'''
from kivy.lang import Builder
from kivymd.app import MDApp
class MainApp(MDApp):
def build(self):
self.theme_cls.theme_style = "Dark" # "Light"
return Builder.load_string(KV)
MainApp().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/bg-normal-dark-darkest.png
:attr:`bg_darkest` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`bg_darkest`,
property is readonly.
"""
def _get_op_bg_darkest(self):
return self._get_bg_darkest(True)
opposite_bg_darkest = AliasProperty(
_get_op_bg_darkest, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`bg_darkest`.
:attr:`opposite_bg_darkest` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_bg_darkest`,
property is readonly.
"""
def _get_bg_dark(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
return get_color_from_hex(colors["Light"]["AppBar"])
elif theme_style == "Dark":
return get_color_from_hex(colors["Dark"]["AppBar"])
bg_dark = AliasProperty(_get_bg_dark, bind=["theme_style"])
"""
Similar to :attr:`bg_normal`,
but the color values are one tone lower (darker) than :attr:`bg_normal`.
:attr:`bg_dark` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`bg_dark`,
property is readonly.
"""
def _get_op_bg_dark(self):
return self._get_bg_dark(True)
opposite_bg_dark = AliasProperty(_get_op_bg_dark, bind=["theme_style"])
"""
The opposite value of color in the :attr:`bg_dark`.
:attr:`opposite_bg_dark` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`opposite_bg_dark`,
property is readonly.
"""
def _get_bg_normal(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
return get_color_from_hex(colors["Light"]["Background"])
elif theme_style == "Dark":
return get_color_from_hex(colors["Dark"]["Background"])
bg_normal = AliasProperty(_get_bg_normal, bind=["theme_style"])
"""
Similar to :attr:`bg_light`,
but the color values are one tone lower (darker) than :attr:`bg_light`.
:attr:`bg_normal` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`bg_normal`,
property is readonly.
"""
def _get_op_bg_normal(self):
return self._get_bg_normal(True)
opposite_bg_normal = AliasProperty(_get_op_bg_normal, bind=["theme_style"])
"""
The opposite value of color in the :attr:`bg_normal`.
:attr:`opposite_bg_normal` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_bg_normal`,
property is readonly.
"""
def _get_bg_light(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
return get_color_from_hex(colors["Light"]["CardsDialogs"])
elif theme_style == "Dark":
return get_color_from_hex(colors["Dark"]["CardsDialogs"])
bg_light = AliasProperty(_get_bg_light, bind=["theme_style"])
""""
Depending on the style of the theme (`'Dark'` or `'Light`')
that the application uses, :attr:`bg_light` contains the color value
in ``rgba`` format for the widgets background.
:attr:`bg_light` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`bg_light`,
property is readonly.
"""
def _get_op_bg_light(self):
return self._get_bg_light(True)
opposite_bg_light = AliasProperty(_get_op_bg_light, bind=["theme_style"])
"""
The opposite value of color in the :attr:`bg_light`.
:attr:`opposite_bg_light` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_bg_light`,
property is readonly.
"""
def _get_divider_color(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
color = get_color_from_hex("000000")
elif theme_style == "Dark":
color = get_color_from_hex("FFFFFF")
color[3] = 0.12
return color
divider_color = AliasProperty(_get_divider_color, bind=["theme_style"])
"""
Color for dividing lines such as :class:`~kivymd.uix.card.MDSeparator`.
:attr:`divider_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`divider_color`,
property is readonly.
"""
def _get_op_divider_color(self):
return self._get_divider_color(True)
opposite_divider_color = AliasProperty(
_get_op_divider_color, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`divider_color`.
:attr:`opposite_divider_color` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_divider_color`,
property is readonly.
"""
def _get_text_color(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
color = get_color_from_hex("000000")
color[3] = 0.87
elif theme_style == "Dark":
color = get_color_from_hex("FFFFFF")
return color
text_color = AliasProperty(_get_text_color, bind=["theme_style"])
"""
Color of the text used in the :class:`~kivymd.uix.label.MDLabel`.
:attr:`text_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`text_color`,
property is readonly.
"""
def _get_op_text_color(self):
return self._get_text_color(True)
opposite_text_color = AliasProperty(
_get_op_text_color, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`text_color`.
:attr:`opposite_text_color` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_text_color`,
property is readonly.
"""
def _get_secondary_text_color(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
color = get_color_from_hex("000000")
color[3] = 0.54
elif theme_style == "Dark":
color = get_color_from_hex("FFFFFF")
color[3] = 0.70
return color
secondary_text_color = AliasProperty(
_get_secondary_text_color, bind=["theme_style"]
)
"""
    The color for the secondary text that is used in list item classes
    such as :class:`~kivymd.uix.list.TwoLineListItem`.
:attr:`secondary_text_color` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`secondary_text_color`,
property is readonly.
"""
def _get_op_secondary_text_color(self):
return self._get_secondary_text_color(True)
opposite_secondary_text_color = AliasProperty(
_get_op_secondary_text_color, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`secondary_text_color`.
:attr:`opposite_secondary_text_color`
is an :class:`~kivy.properties.AliasProperty` that returns the value
in ``rgba`` format for :attr:`opposite_secondary_text_color`,
property is readonly.
"""
def _get_icon_color(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
color = get_color_from_hex("000000")
color[3] = 0.54
elif theme_style == "Dark":
color = get_color_from_hex("FFFFFF")
return color
icon_color = AliasProperty(_get_icon_color, bind=["theme_style"])
"""
Color of the icon used in the :class:`~kivymd.uix.button.MDIconButton`.
:attr:`icon_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`icon_color`,
property is readonly.
"""
def _get_op_icon_color(self):
return self._get_icon_color(True)
opposite_icon_color = AliasProperty(
_get_op_icon_color, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`icon_color`.
:attr:`opposite_icon_color` is an :class:`~kivy.properties.AliasProperty`
that returns the value in ``rgba`` format for :attr:`opposite_icon_color`,
property is readonly.
"""
def _get_disabled_hint_text_color(self, opposite=False):
theme_style = self._get_theme_style(opposite)
if theme_style == "Light":
color = get_color_from_hex("000000")
color[3] = 0.38
elif theme_style == "Dark":
color = get_color_from_hex("FFFFFF")
color[3] = 0.50
return color
disabled_hint_text_color = AliasProperty(
_get_disabled_hint_text_color, bind=["theme_style"]
)
"""
Color of the disabled text used in the :class:`~kivymd.uix.textfield.MDTextField`.
:attr:`disabled_hint_text_color`
is an :class:`~kivy.properties.AliasProperty` that returns the value
in ``rgba`` format for :attr:`disabled_hint_text_color`,
property is readonly.
"""
def _get_op_disabled_hint_text_color(self):
return self._get_disabled_hint_text_color(True)
opposite_disabled_hint_text_color = AliasProperty(
_get_op_disabled_hint_text_color, bind=["theme_style"]
)
"""
The opposite value of color in the :attr:`disabled_hint_text_color`.
:attr:`opposite_disabled_hint_text_color`
is an :class:`~kivy.properties.AliasProperty` that returns the value
in ``rgba`` format for :attr:`opposite_disabled_hint_text_color`,
property is readonly.
"""
    # Hardcoded because the Material Design standard fixes this color
def _get_error_color(self):
return get_color_from_hex(colors["Red"]["A700"])
error_color = AliasProperty(_get_error_color)
"""
Color of the error text used
in the :class:`~kivymd.uix.textfield.MDTextField`.
:attr:`error_color` is an :class:`~kivy.properties.AliasProperty` that
returns the value in ``rgba`` format for :attr:`error_color`,
property is readonly.
"""
def _get_ripple_color(self):
return self._ripple_color
def _set_ripple_color(self, value):
self._ripple_color = value
_ripple_color = ColorProperty(get_color_from_hex(colors["Gray"]["400"]))
"""Private value."""
ripple_color = AliasProperty(
_get_ripple_color, _set_ripple_color, bind=["_ripple_color"]
)
"""
Color of ripple effects.
    :attr:`ripple_color` is an :class:`~kivy.properties.AliasProperty` that
    returns and sets the value in ``rgba`` format for :attr:`ripple_color`.
"""
def _determine_device_orientation(self, _, window_size):
if window_size[0] > window_size[1]:
self.device_orientation = "landscape"
elif window_size[1] >= window_size[0]:
self.device_orientation = "portrait"
device_orientation = StringProperty("")
"""
Device orientation.
:attr:`device_orientation` is an :class:`~kivy.properties.StringProperty`.
"""
def _get_standard_increment(self):
if DEVICE_TYPE == "mobile":
if self.device_orientation == "landscape":
return dp(48)
else:
return dp(56)
else:
return dp(64)
standard_increment = AliasProperty(
_get_standard_increment, bind=["device_orientation"]
)
"""
Value of standard increment.
    :attr:`standard_increment` is an :class:`~kivy.properties.AliasProperty`
    that returns the value in ``dp`` for :attr:`standard_increment`,
    property is readonly.
"""
def _get_horizontal_margins(self):
if DEVICE_TYPE == "mobile":
return dp(16)
else:
return dp(24)
horizontal_margins = AliasProperty(_get_horizontal_margins)
"""
Value of horizontal margins.
    :attr:`horizontal_margins` is an :class:`~kivy.properties.AliasProperty`
    that returns the value in ``dp`` for :attr:`horizontal_margins`,
    property is readonly.
"""
def on_theme_style(self, instance, value):
if (
hasattr(App.get_running_app(), "theme_cls")
and App.get_running_app().theme_cls == self
):
self.set_clearcolor_by_theme_style(value)
set_clearcolor = BooleanProperty(True)
def set_clearcolor_by_theme_style(self, theme_style):
if not self.set_clearcolor:
return
Window.clearcolor = get_color_from_hex(
colors[theme_style]["Background"]
)
# font name, size (sp), always caps, letter spacing (sp)
font_styles = DictProperty(
{
"H1": ["RobotoLight", 96, False, -1.5],
"H2": ["RobotoLight", 60, False, -0.5],
"H3": ["Roboto", 48, False, 0],
"H4": ["Roboto", 34, False, 0.25],
"H5": ["Roboto", 24, False, 0],
"H6": ["RobotoMedium", 20, False, 0.15],
"Subtitle1": ["Roboto", 16, False, 0.15],
"Subtitle2": ["RobotoMedium", 14, False, 0.1],
"Body1": ["Roboto", 16, False, 0.5],
"Body2": ["Roboto", 14, False, 0.25],
"Button": ["RobotoMedium", 14, True, 1.25],
"Caption": ["Roboto", 12, False, 0.4],
"Overline": ["Roboto", 10, True, 1.5],
"Icon": ["Icons", 24, False, 0],
}
)
"""
Data of default font styles.
Add custom font:
.. code-block:: python
KV = '''
Screen:
MDLabel:
text: "JetBrainsMono"
halign: "center"
font_style: "JetBrainsMono"
'''
from kivy.core.text import LabelBase
from kivy.lang import Builder
from kivymd.app import MDApp
from kivymd.font_definitions import theme_font_styles
class MainApp(MDApp):
def build(self):
LabelBase.register(
name="JetBrainsMono",
fn_regular="JetBrainsMono-Regular.ttf")
theme_font_styles.append('JetBrainsMono')
self.theme_cls.font_styles["JetBrainsMono"] = [
"JetBrainsMono",
16,
False,
0.15,
]
return Builder.load_string(KV)
MainApp().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/font-styles.png
:attr:`font_styles` is an :class:`~kivy.properties.DictProperty`.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.rec_shadow = Atlas(f"{images_path}rec_shadow.atlas")
self.rec_st_shadow = Atlas(f"{images_path}rec_st_shadow.atlas")
self.quad_shadow = Atlas(f"{images_path}quad_shadow.atlas")
self.round_shadow = Atlas(f"{images_path}round_shadow.atlas")
Clock.schedule_once(lambda x: self.on_theme_style(0, self.theme_style))
self._determine_device_orientation(None, Window.size)
Window.bind(size=self._determine_device_orientation)
class ThemableBehavior(EventDispatcher):
theme_cls = ObjectProperty()
"""
Instance of :class:`~ThemeManager` class.
:attr:`theme_cls` is an :class:`~kivy.properties.ObjectProperty`.
"""
device_ios = BooleanProperty(DEVICE_IOS)
"""
``True`` if device is ``iOS``.
:attr:`device_ios` is an :class:`~kivy.properties.BooleanProperty`.
"""
opposite_colors = BooleanProperty(False)
def __init__(self, **kwargs):
if self.theme_cls is not None:
pass
else:
try:
if not isinstance(
App.get_running_app().property("theme_cls", True),
ObjectProperty,
):
raise ValueError(
"KivyMD: App object must be inherited from "
"`kivymd.app.MDApp`"
)
except AttributeError:
raise ValueError(
"KivyMD: App object must be initialized before loading "
"root widget. See "
"https://github.com/kivymd/KivyMD/wiki/Modules-Material-App#exceptions"
)
self.theme_cls = App.get_running_app().theme_cls
super().__init__(**kwargs)
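# Added usage sketch (not part of the original module): a minimal smoke test
# of ThemeManager/ThemableBehavior, assuming KivyMD is installed. Guarded so
# importing this module stays side-effect free.
if __name__ == "__main__":
    from kivymd.app import MDApp
    from kivymd.uix.label import MDLabel

    class _ThemeDemo(MDApp):
        def build(self):
            # Switching the style re-evaluates every AliasProperty bound to
            # ``theme_style`` (bg_normal, text_color, icon_color, ...).
            self.theme_cls.theme_style = "Dark"
            print("text_color:", self.theme_cls.text_color)
            return MDLabel(text="ThemeManager demo", halign="center")

    _ThemeDemo().run()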
| 32.142699
| 115
| 0.625219
|
2162766a8a2dee0bd12ff7b69d20d3a4f58431f2
| 2,744
|
py
|
Python
|
conferences/list_participants.py
|
mickstevens/python3-twilio-sdkv6-examples
|
aac0403533b35fec4e8483de18d8fde2d783cfb2
|
[
"MIT"
] | 1
|
2018-11-23T20:11:27.000Z
|
2018-11-23T20:11:27.000Z
|
conferences/list_participants.py
|
mickstevens/python3-twilio-sdkv6-examples
|
aac0403533b35fec4e8483de18d8fde2d783cfb2
|
[
"MIT"
] | null | null | null |
conferences/list_participants.py
|
mickstevens/python3-twilio-sdkv6-examples
|
aac0403533b35fec4e8483de18d8fde2d783cfb2
|
[
"MIT"
] | null | null | null |
# *** List Conference Participants ***
# Code based on https://www.twilio.com/docs/voice/api/conference-participant
# Download Python 3 from https://www.python.org/downloads/
# Download the Twilio helper library from https://www.twilio.com/docs/python/install
import os
from twilio.rest import Client
# from datetime import date # | not required for this example
import logging
# Write requests & responses from Twilio to a log file, useful for debugging:
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(message)s',
filename='/usr/local/twilio/python3/sdkv6x/conferences/logs/twilio_conferences.log',
filemode='a')
# Your Account Sid and Auth Token from twilio.com/console & stored in Mac OS ~/.bash_profile in this example
account_sid = os.environ.get('TWILIO_ACCOUNT_SID')
auth_token = os.environ.get('TWILIO_AUTH_TOKEN')
client = Client(account_sid, auth_token)
# A list of conference participant parameters & their permissible values; comment out (#) any lines not required
participants = client.conferences('CFxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx') \
.participants \
.list()
# Print all conference participant properties to the console, useful for discovering the info available to work with
for record in participants:
print(record.account_sid)
print(record.call_sid)
print(record.conference_sid)
print(record.date_created)
print(record.date_updated)
print(record.end_conference_on_exit)
print(record.from_)
print(record.hold)
print(record.muted)
print(record.start_conference_on_enter)
print(record.status)
print(record.to)
print(record.uri)
#create variable for this participant
cdr = (record.conference_sid)
#open *.log file with cdr var as filename...
f = open("/usr/local/twilio/python3/sdkv6x/conferences/logs/" + str( cdr ) + ".log", "a")
#write list of all participant properties to above file...
f.write("Account SID : " + str(record.account_sid) + "\n")
f.write("Call SID : " + str(record.call_sid) + "\n")
f.write("Conference SID : " + str(record.conference_sid) + "\n")
f.write("Date Created : " + str(record.date_created) + "\n")
f.write("Date Updated : " + str(record.date_updated) + "\n")
f.write("End Conference on Exit : " + str(record.end_conference_on_exit) + "\n")
f.write("From : " + str(record.from_) + "\n")
f.write("Hold : " + str(record.hold) + "\n")
f.write("Mute : " + str(record.mute) + "\n")
f.write("Start Conference on Enter : " + str(record.start_conference_on_enter) + "\n")
f.write("Status : " + str(record.status) + "\n")
f.write("To : " + str(record.to) + "\n")
f.write("URI : " + str(record.uri) + "\n")
f.close()
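# Added sketch (not in the original example): updating one participant via the
# same Client. The SIDs below are placeholders; `muted` is a documented
# parameter of the participant update call.
def mute_participant(conference_sid, call_sid, muted=True):
    """Mute or unmute a single conference participant."""
    participant = client.conferences(conference_sid) \
                        .participants(call_sid) \
                        .update(muted=muted)
    return participant.muted
# Example (placeholder SIDs):
# mute_participant('CFxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
#                  'CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')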
| 46.508475
| 118
| 0.694971
|
3533f02307e66834392f24f3041ab6fdb1ae0f99
| 24,764
|
py
|
Python
|
netbox/dcim/api/views.py
|
promasu/netbox
|
14d2a499420aa1fd28508311781c769adfbab4d1
|
[
"Apache-2.0"
] | null | null | null |
netbox/dcim/api/views.py
|
promasu/netbox
|
14d2a499420aa1fd28508311781c769adfbab4d1
|
[
"Apache-2.0"
] | null | null | null |
netbox/dcim/api/views.py
|
promasu/netbox
|
14d2a499420aa1fd28508311781c769adfbab4d1
|
[
"Apache-2.0"
] | null | null | null |
import socket
from collections import OrderedDict
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db.models import F
from django.http import HttpResponseForbidden, HttpResponse
from django.shortcuts import get_object_or_404
from drf_yasg import openapi
from drf_yasg.openapi import Parameter
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import action
from rest_framework.mixins import ListModelMixin
from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.viewsets import GenericViewSet, ViewSet
from circuits.models import Circuit
from dcim import filters
from dcim.models import (
Cable, CablePath, ConsolePort, ConsolePortTemplate, ConsoleServerPort, ConsoleServerPortTemplate, Device, DeviceBay,
DeviceBayTemplate, DeviceRole, DeviceType, FrontPort, FrontPortTemplate, Interface, InterfaceTemplate,
Manufacturer, InventoryItem, Platform, PowerFeed, PowerOutlet, PowerOutletTemplate, PowerPanel, PowerPort,
PowerPortTemplate, Rack, RackGroup, RackReservation, RackRole, RearPort, RearPortTemplate, Region, Site,
VirtualChassis,
)
from extras.api.views import ConfigContextQuerySetMixin, CustomFieldModelViewSet
from ipam.models import Prefix, VLAN
from netbox.api.views import ModelViewSet
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.exceptions import ServiceUnavailable
from netbox.api.metadata import ContentTypeMetadata
from utilities.api import get_serializer_for_model
from utilities.utils import count_related
from virtualization.models import VirtualMachine
from . import serializers
from .exceptions import MissingFilterException
class DCIMRootView(APIRootView):
"""
DCIM API root view
"""
def get_view_name(self):
return 'DCIM'
# Mixins
class PathEndpointMixin(object):
@action(detail=True, url_path='trace')
def trace(self, request, pk):
"""
Trace a complete cable path and return each segment as a three-tuple of (termination, cable, termination).
"""
obj = get_object_or_404(self.queryset, pk=pk)
# Initialize the path array
path = []
for near_end, cable, far_end in obj.trace():
if near_end is None:
# Split paths
break
# Serialize each object
serializer_a = get_serializer_for_model(near_end, prefix='Nested')
x = serializer_a(near_end, context={'request': request}).data
if cable is not None:
y = serializers.TracedCableSerializer(cable, context={'request': request}).data
else:
y = None
if far_end is not None:
serializer_b = get_serializer_for_model(far_end, prefix='Nested')
z = serializer_b(far_end, context={'request': request}).data
else:
z = None
path.append((x, y, z))
return Response(path)
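# Added illustration (not part of NetBox upstream): how a client might consume
# the trace endpoint exposed by PathEndpointMixin. The host and token below
# are placeholders; the URL pattern follows NetBox's API router.
def _example_trace_request():
    import requests  # assumed available in the example environment

    resp = requests.get(
        "https://netbox.example.com/api/dcim/interfaces/123/trace/",
        headers={"Authorization": "Token 0123456789abcdef0123456789abcdef"},
    )
    # Each hop is a (termination, cable, termination) triple, as serialized
    # by the trace() action above.
    for near_end, cable, far_end in resp.json():
        print(near_end, cable, far_end)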
class PassThroughPortMixin(object):
@action(detail=True, url_path='paths')
def paths(self, request, pk):
"""
Return all CablePaths which traverse a given pass-through port.
"""
obj = get_object_or_404(self.queryset, pk=pk)
cablepaths = CablePath.objects.filter(path__contains=obj).prefetch_related('origin', 'destination')
serializer = serializers.CablePathSerializer(cablepaths, context={'request': request}, many=True)
return Response(serializer.data)
#
# Regions
#
class RegionViewSet(ModelViewSet):
queryset = Region.objects.add_related_count(
Region.objects.all(),
Site,
'region',
'site_count',
cumulative=True
)
serializer_class = serializers.RegionSerializer
filterset_class = filters.RegionFilterSet
#
# Sites
#
class SiteViewSet(CustomFieldModelViewSet):
queryset = Site.objects.prefetch_related(
'region', 'tenant', 'tags'
).annotate(
device_count=count_related(Device, 'site'),
rack_count=count_related(Rack, 'site'),
prefix_count=count_related(Prefix, 'site'),
vlan_count=count_related(VLAN, 'site'),
circuit_count=count_related(Circuit, 'terminations__site'),
virtualmachine_count=count_related(VirtualMachine, 'cluster__site')
)
serializer_class = serializers.SiteSerializer
filterset_class = filters.SiteFilterSet
#
# Rack groups
#
class RackGroupViewSet(ModelViewSet):
queryset = RackGroup.objects.add_related_count(
RackGroup.objects.all(),
Rack,
'group',
'rack_count',
cumulative=True
).prefetch_related('site')
serializer_class = serializers.RackGroupSerializer
filterset_class = filters.RackGroupFilterSet
#
# Rack roles
#
class RackRoleViewSet(ModelViewSet):
queryset = RackRole.objects.annotate(
rack_count=count_related(Rack, 'role')
)
serializer_class = serializers.RackRoleSerializer
filterset_class = filters.RackRoleFilterSet
#
# Racks
#
class RackViewSet(CustomFieldModelViewSet):
queryset = Rack.objects.prefetch_related(
'site', 'group__site', 'role', 'tenant', 'tags'
).annotate(
device_count=count_related(Device, 'rack'),
powerfeed_count=count_related(PowerFeed, 'rack')
)
serializer_class = serializers.RackSerializer
filterset_class = filters.RackFilterSet
@swagger_auto_schema(
responses={200: serializers.RackUnitSerializer(many=True)},
query_serializer=serializers.RackElevationDetailFilterSerializer
)
@action(detail=True)
def elevation(self, request, pk=None):
"""
Rack elevation representing the list of rack units. Also supports rendering the elevation as an SVG.
"""
rack = get_object_or_404(self.queryset, pk=pk)
serializer = serializers.RackElevationDetailFilterSerializer(data=request.GET)
if not serializer.is_valid():
return Response(serializer.errors, 400)
data = serializer.validated_data
if data['render'] == 'svg':
# Render and return the elevation as an SVG drawing with the correct content type
drawing = rack.get_elevation_svg(
face=data['face'],
user=request.user,
unit_width=data['unit_width'],
unit_height=data['unit_height'],
legend_width=data['legend_width'],
include_images=data['include_images'],
base_url=request.build_absolute_uri('/')
)
return HttpResponse(drawing.tostring(), content_type='image/svg+xml')
else:
# Return a JSON representation of the rack units in the elevation
elevation = rack.get_rack_units(
face=data['face'],
user=request.user,
exclude=data['exclude'],
expand_devices=data['expand_devices']
)
            # Enable filtering rack units by ID or name
q = data['q']
if q:
elevation = [u for u in elevation if q in str(u['id']) or q in str(u['name'])]
page = self.paginate_queryset(elevation)
if page is not None:
rack_units = serializers.RackUnitSerializer(page, many=True, context={'request': request})
return self.get_paginated_response(rack_units.data)
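# Added illustration (placeholder host/token): requesting a rack elevation
# rendered as SVG via the custom `elevation` action above.
def _example_elevation_request():
    import requests  # assumed available in the example environment

    resp = requests.get(
        "https://netbox.example.com/api/dcim/racks/42/elevation/",
        params={"render": "svg", "face": "front"},
        headers={"Authorization": "Token 0123456789abcdef0123456789abcdef"},
    )
    # Content-Type is image/svg+xml when render=svg; otherwise JSON rack units.
    with open("rack-42.svg", "wb") as fh:
        fh.write(resp.content)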
#
# Rack reservations
#
class RackReservationViewSet(ModelViewSet):
queryset = RackReservation.objects.prefetch_related('rack', 'user', 'tenant')
serializer_class = serializers.RackReservationSerializer
filterset_class = filters.RackReservationFilterSet
# Assign user from request
def perform_create(self, serializer):
serializer.save(user=self.request.user)
#
# Manufacturers
#
class ManufacturerViewSet(ModelViewSet):
queryset = Manufacturer.objects.annotate(
devicetype_count=count_related(DeviceType, 'manufacturer'),
inventoryitem_count=count_related(InventoryItem, 'manufacturer'),
platform_count=count_related(Platform, 'manufacturer')
)
serializer_class = serializers.ManufacturerSerializer
filterset_class = filters.ManufacturerFilterSet
#
# Device types
#
class DeviceTypeViewSet(CustomFieldModelViewSet):
queryset = DeviceType.objects.prefetch_related('manufacturer', 'tags').annotate(
device_count=count_related(Device, 'device_type')
)
serializer_class = serializers.DeviceTypeSerializer
filterset_class = filters.DeviceTypeFilterSet
brief_prefetch_fields = ['manufacturer']
#
# Device type components
#
class ConsolePortTemplateViewSet(ModelViewSet):
queryset = ConsolePortTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.ConsolePortTemplateSerializer
filterset_class = filters.ConsolePortTemplateFilterSet
class ConsoleServerPortTemplateViewSet(ModelViewSet):
queryset = ConsoleServerPortTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.ConsoleServerPortTemplateSerializer
filterset_class = filters.ConsoleServerPortTemplateFilterSet
class PowerPortTemplateViewSet(ModelViewSet):
queryset = PowerPortTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.PowerPortTemplateSerializer
filterset_class = filters.PowerPortTemplateFilterSet
class PowerOutletTemplateViewSet(ModelViewSet):
queryset = PowerOutletTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.PowerOutletTemplateSerializer
filterset_class = filters.PowerOutletTemplateFilterSet
class InterfaceTemplateViewSet(ModelViewSet):
queryset = InterfaceTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.InterfaceTemplateSerializer
filterset_class = filters.InterfaceTemplateFilterSet
class FrontPortTemplateViewSet(ModelViewSet):
queryset = FrontPortTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.FrontPortTemplateSerializer
filterset_class = filters.FrontPortTemplateFilterSet
class RearPortTemplateViewSet(ModelViewSet):
queryset = RearPortTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.RearPortTemplateSerializer
filterset_class = filters.RearPortTemplateFilterSet
class DeviceBayTemplateViewSet(ModelViewSet):
queryset = DeviceBayTemplate.objects.prefetch_related('device_type__manufacturer')
serializer_class = serializers.DeviceBayTemplateSerializer
filterset_class = filters.DeviceBayTemplateFilterSet
#
# Device roles
#
class DeviceRoleViewSet(ModelViewSet):
queryset = DeviceRole.objects.annotate(
device_count=count_related(Device, 'device_role'),
virtualmachine_count=count_related(VirtualMachine, 'role')
)
serializer_class = serializers.DeviceRoleSerializer
filterset_class = filters.DeviceRoleFilterSet
#
# Platforms
#
class PlatformViewSet(ModelViewSet):
queryset = Platform.objects.annotate(
device_count=count_related(Device, 'platform'),
virtualmachine_count=count_related(VirtualMachine, 'platform')
)
serializer_class = serializers.PlatformSerializer
filterset_class = filters.PlatformFilterSet
#
# Devices
#
class DeviceViewSet(ConfigContextQuerySetMixin, CustomFieldModelViewSet):
queryset = Device.objects.prefetch_related(
'device_type__manufacturer', 'device_role', 'tenant', 'platform', 'site', 'rack', 'parent_bay',
'virtual_chassis__master', 'primary_ip4__nat_outside', 'primary_ip6__nat_outside', 'tags',
)
filterset_class = filters.DeviceFilterSet
def get_serializer_class(self):
"""
Select the specific serializer based on the request context.
If the `brief` query param equates to True, return the NestedDeviceSerializer
If the `exclude` query param includes `config_context` as a value, return the DeviceSerializer
Else, return the DeviceWithConfigContextSerializer
"""
request = self.get_serializer_context()['request']
if request.query_params.get('brief', False):
return serializers.NestedDeviceSerializer
elif 'config_context' in request.query_params.get('exclude', []):
return serializers.DeviceSerializer
return serializers.DeviceWithConfigContextSerializer
@swagger_auto_schema(
manual_parameters=[
Parameter(
name='method',
in_='query',
required=True,
type=openapi.TYPE_STRING
)
],
responses={'200': serializers.DeviceNAPALMSerializer}
)
@action(detail=True, url_path='napalm')
def napalm(self, request, pk):
"""
Execute a NAPALM method on a Device
"""
device = get_object_or_404(self.queryset, pk=pk)
if device.platform is None:
raise ServiceUnavailable("No platform is configured for this device.")
if not device.platform.napalm_driver:
raise ServiceUnavailable(f"No NAPALM driver is configured for this device's platform: {device.platform}.")
# Check for primary IP address from NetBox object
if device.primary_ip:
host = str(device.primary_ip.address.ip)
else:
# Raise exception for no IP address and no Name if device.name does not exist
if not device.name:
raise ServiceUnavailable(
"This device does not have a primary IP address or device name to lookup configured."
)
try:
# Attempt to complete a DNS name resolution if no primary_ip is set
host = socket.gethostbyname(device.name)
except socket.gaierror:
# Name lookup failure
raise ServiceUnavailable(
f"Name lookup failure, unable to resolve IP address for {device.name}. Please set Primary IP or "
f"setup name resolution.")
# Check that NAPALM is installed
try:
import napalm
from napalm.base.exceptions import ModuleImportError
except ModuleNotFoundError as e:
if getattr(e, 'name') == 'napalm':
raise ServiceUnavailable("NAPALM is not installed. Please see the documentation for instructions.")
raise e
# Validate the configured driver
try:
driver = napalm.get_network_driver(device.platform.napalm_driver)
except ModuleImportError:
raise ServiceUnavailable("NAPALM driver for platform {} not found: {}.".format(
device.platform, device.platform.napalm_driver
))
# Verify user permission
if not request.user.has_perm('dcim.napalm_read_device'):
return HttpResponseForbidden()
napalm_methods = request.GET.getlist('method')
response = OrderedDict([(m, None) for m in napalm_methods])
username = settings.NAPALM_USERNAME
password = settings.NAPALM_PASSWORD
optional_args = settings.NAPALM_ARGS.copy()
if device.platform.napalm_args is not None:
optional_args.update(device.platform.napalm_args)
# Update NAPALM parameters according to the request headers
for header in request.headers:
if header[:9].lower() != 'x-napalm-':
continue
key = header[9:]
if key.lower() == 'username':
username = request.headers[header]
elif key.lower() == 'password':
password = request.headers[header]
elif key:
optional_args[key.lower()] = request.headers[header]
# Connect to the device
d = driver(
hostname=host,
username=username,
password=password,
timeout=settings.NAPALM_TIMEOUT,
optional_args=optional_args
)
try:
d.open()
except Exception as e:
raise ServiceUnavailable("Error connecting to the device at {}: {}".format(host, e))
# Validate and execute each specified NAPALM method
for method in napalm_methods:
if not hasattr(driver, method):
response[method] = {'error': 'Unknown NAPALM method'}
continue
if not method.startswith('get_'):
response[method] = {'error': 'Only get_* NAPALM methods are supported'}
continue
try:
response[method] = getattr(d, method)()
except NotImplementedError:
response[method] = {'error': 'Method {} not implemented for NAPALM driver {}'.format(method, driver)}
except Exception as e:
response[method] = {'error': 'Method {} failed: {}'.format(method, e)}
d.close()
return Response(response)
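# Added illustration (placeholder host/token): invoking the NAPALM action,
# overriding the credentials per request via the X-NAPALM-* headers that the
# view above parses.
def _example_napalm_request():
    import requests  # assumed available in the example environment

    resp = requests.get(
        "https://netbox.example.com/api/dcim/devices/17/napalm/",
        params={"method": "get_facts"},
        headers={
            "Authorization": "Token 0123456789abcdef0123456789abcdef",
            "X-NAPALM-Username": "netops",
            "X-NAPALM-Password": "secret",
        },
    )
    print(resp.json()["get_facts"])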
#
# Device components
#
class ConsolePortViewSet(PathEndpointMixin, ModelViewSet):
queryset = ConsolePort.objects.prefetch_related('device', '_path__destination', 'cable', '_cable_peer', 'tags')
serializer_class = serializers.ConsolePortSerializer
filterset_class = filters.ConsolePortFilterSet
brief_prefetch_fields = ['device']
class ConsoleServerPortViewSet(PathEndpointMixin, ModelViewSet):
queryset = ConsoleServerPort.objects.prefetch_related(
'device', '_path__destination', 'cable', '_cable_peer', 'tags'
)
serializer_class = serializers.ConsoleServerPortSerializer
filterset_class = filters.ConsoleServerPortFilterSet
brief_prefetch_fields = ['device']
class PowerPortViewSet(PathEndpointMixin, ModelViewSet):
queryset = PowerPort.objects.prefetch_related('device', '_path__destination', 'cable', '_cable_peer', 'tags')
serializer_class = serializers.PowerPortSerializer
filterset_class = filters.PowerPortFilterSet
brief_prefetch_fields = ['device']
class PowerOutletViewSet(PathEndpointMixin, ModelViewSet):
queryset = PowerOutlet.objects.prefetch_related('device', '_path__destination', 'cable', '_cable_peer', 'tags')
serializer_class = serializers.PowerOutletSerializer
filterset_class = filters.PowerOutletFilterSet
brief_prefetch_fields = ['device']
class InterfaceViewSet(PathEndpointMixin, ModelViewSet):
queryset = Interface.objects.prefetch_related(
'device', '_path__destination', 'cable', '_cable_peer', 'ip_addresses', 'tags'
)
serializer_class = serializers.InterfaceSerializer
filterset_class = filters.InterfaceFilterSet
brief_prefetch_fields = ['device']
class FrontPortViewSet(PassThroughPortMixin, ModelViewSet):
queryset = FrontPort.objects.prefetch_related('device__device_type__manufacturer', 'rear_port', 'cable', 'tags')
serializer_class = serializers.FrontPortSerializer
filterset_class = filters.FrontPortFilterSet
brief_prefetch_fields = ['device']
class RearPortViewSet(PassThroughPortMixin, ModelViewSet):
queryset = RearPort.objects.prefetch_related('device__device_type__manufacturer', 'cable', 'tags')
serializer_class = serializers.RearPortSerializer
filterset_class = filters.RearPortFilterSet
brief_prefetch_fields = ['device']
class DeviceBayViewSet(ModelViewSet):
queryset = DeviceBay.objects.prefetch_related('installed_device').prefetch_related('tags')
serializer_class = serializers.DeviceBaySerializer
filterset_class = filters.DeviceBayFilterSet
brief_prefetch_fields = ['device']
class InventoryItemViewSet(ModelViewSet):
queryset = InventoryItem.objects.prefetch_related('device', 'manufacturer').prefetch_related('tags')
serializer_class = serializers.InventoryItemSerializer
filterset_class = filters.InventoryItemFilterSet
brief_prefetch_fields = ['device']
#
# Connections
#
class ConsoleConnectionViewSet(ListModelMixin, GenericViewSet):
queryset = ConsolePort.objects.prefetch_related('device', '_path').filter(
_path__destination_id__isnull=False
)
serializer_class = serializers.ConsolePortSerializer
filterset_class = filters.ConsoleConnectionFilterSet
class PowerConnectionViewSet(ListModelMixin, GenericViewSet):
queryset = PowerPort.objects.prefetch_related('device', '_path').filter(
_path__destination_id__isnull=False
)
serializer_class = serializers.PowerPortSerializer
filterset_class = filters.PowerConnectionFilterSet
class InterfaceConnectionViewSet(ListModelMixin, GenericViewSet):
queryset = Interface.objects.prefetch_related('device', '_path').filter(
# Avoid duplicate connections by only selecting the lower PK in a connected pair
_path__destination_type__app_label='dcim',
_path__destination_type__model='interface',
_path__destination_id__isnull=False,
pk__lt=F('_path__destination_id')
)
serializer_class = serializers.InterfaceConnectionSerializer
filterset_class = filters.InterfaceConnectionFilterSet
#
# Cables
#
class CableViewSet(ModelViewSet):
metadata_class = ContentTypeMetadata
queryset = Cable.objects.prefetch_related(
'termination_a', 'termination_b'
)
serializer_class = serializers.CableSerializer
filterset_class = filters.CableFilterSet
#
# Virtual chassis
#
class VirtualChassisViewSet(ModelViewSet):
queryset = VirtualChassis.objects.prefetch_related('tags').annotate(
member_count=count_related(Device, 'virtual_chassis')
)
serializer_class = serializers.VirtualChassisSerializer
filterset_class = filters.VirtualChassisFilterSet
brief_prefetch_fields = ['master']
#
# Power panels
#
class PowerPanelViewSet(ModelViewSet):
queryset = PowerPanel.objects.prefetch_related(
'site', 'rack_group'
).annotate(
powerfeed_count=count_related(PowerFeed, 'power_panel')
)
serializer_class = serializers.PowerPanelSerializer
filterset_class = filters.PowerPanelFilterSet
#
# Power feeds
#
class PowerFeedViewSet(PathEndpointMixin, CustomFieldModelViewSet):
queryset = PowerFeed.objects.prefetch_related(
'power_panel', 'rack', '_path__destination', 'cable', '_cable_peer', 'tags'
)
serializer_class = serializers.PowerFeedSerializer
filterset_class = filters.PowerFeedFilterSet
#
# Miscellaneous
#
class ConnectedDeviceViewSet(ViewSet):
"""
This endpoint allows a user to determine what device (if any) is connected to a given peer device and peer
interface. This is useful in a situation where a device boots with no configuration, but can detect its neighbors
via a protocol such as LLDP. Two query parameters must be included in the request:
* `peer_device`: The name of the peer device
* `peer_interface`: The name of the peer interface
"""
permission_classes = [IsAuthenticatedOrLoginNotRequired]
_device_param = Parameter(
name='peer_device',
in_='query',
description='The name of the peer device',
required=True,
type=openapi.TYPE_STRING
)
_interface_param = Parameter(
name='peer_interface',
in_='query',
description='The name of the peer interface',
required=True,
type=openapi.TYPE_STRING
)
def get_view_name(self):
return "Connected Device Locator"
@swagger_auto_schema(
manual_parameters=[_device_param, _interface_param],
responses={'200': serializers.DeviceSerializer}
)
def list(self, request):
peer_device_name = request.query_params.get(self._device_param.name)
peer_interface_name = request.query_params.get(self._interface_param.name)
if not peer_device_name or not peer_interface_name:
raise MissingFilterException(detail='Request must include "peer_device" and "peer_interface" filters.')
# Determine local interface from peer interface's connection
peer_interface = get_object_or_404(
Interface.objects.all(),
device__name=peer_device_name,
name=peer_interface_name
)
local_interface = peer_interface.connected_endpoint
if local_interface is None:
return Response()
return Response(serializers.DeviceSerializer(local_interface.device, context={'request': request}).data)
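# Added illustration (placeholder host/token): querying the connected-device
# locator with the two required filters documented in the docstring above.
def _example_connected_device_request():
    import requests  # assumed available in the example environment

    resp = requests.get(
        "https://netbox.example.com/api/dcim/connected-device/",
        params={"peer_device": "switch-01", "peer_interface": "Ethernet1"},
        headers={"Authorization": "Token 0123456789abcdef0123456789abcdef"},
    )
    print(resp.json())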
| 35.326676
| 120
| 0.705177
|
9fa5beb745f9ea932d9a8651f2fdda11849c49bf
| 591
|
py
|
Python
|
amp/models/model_garden.py
|
szczurek-lab/hydramp
|
be4eb6defe227fe3fef4fe9882a85a0b717301c6
|
[
"MIT"
] | 4
|
2022-03-04T15:57:24.000Z
|
2022-03-24T11:13:01.000Z
|
amp/models/model_garden.py
|
szczurek-lab/hydramp
|
be4eb6defe227fe3fef4fe9882a85a0b717301c6
|
[
"MIT"
] | null | null | null |
amp/models/model_garden.py
|
szczurek-lab/hydramp
|
be4eb6defe227fe3fef4fe9882a85a0b717301c6
|
[
"MIT"
] | 1
|
2022-03-07T16:44:11.000Z
|
2022-03-07T16:44:11.000Z
|
from amp.models.discriminators import veltri_amp_classifier
from amp.models.discriminators import amp_classifier_noCONV
from amp.models.decoders import amp_expanded_decoder
from amp.models.encoders import amp_expanded_encoder
from amp.models.master import master
MODEL_GARDEN = {
'VeltriAMPClassifier': veltri_amp_classifier.VeltriAMPClassifier,
'NoConvAMPClassifier': amp_classifier_noCONV.NoConvAMPClassifier,
'AMPExpandedDecoder': amp_expanded_decoder.AMPDecoder,
'AMPExpandedEncoder': amp_expanded_encoder.AMPEncoder,
'MasterAMPTrainer': master.MasterAMPTrainer,
}
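# Added usage sketch: models are looked up by name from the registry, e.g.
# when a config names the architecture to instantiate. The constructor
# keyword arguments are model-specific and left to the caller.
def build_model(name, **kwargs):
    """Instantiate a registered model class by its registry key."""
    try:
        model_cls = MODEL_GARDEN[name]
    except KeyError:
        raise KeyError(
            "Unknown model '{}'. Known: {}".format(name, sorted(MODEL_GARDEN))
        )
    return model_cls(**kwargs)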
| 39.4
| 69
| 0.840948
|
572f33e8473bc647acef8757ec7244338ad24054
| 2,323
|
py
|
Python
|
experiments/networks/mnist/dcgan.py
|
yashkhasbage25/LTH
|
4a793980247d50846173071740e9036ab7bbe1a4
|
[
"MIT"
] | null | null | null |
experiments/networks/mnist/dcgan.py
|
yashkhasbage25/LTH
|
4a793980247d50846173071740e9036ab7bbe1a4
|
[
"MIT"
] | null | null | null |
experiments/networks/mnist/dcgan.py
|
yashkhasbage25/LTH
|
4a793980247d50846173071740e9036ab7bbe1a4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import torch
import torch.nn as nn
class Generator(nn.Module):
def __init__(self, d=4, z_len=100):
super(Generator, self).__init__()
layers = [
nn.ConvTranspose2d(z_len, d * 8, 4, 1, 0, bias=False), # (8d, 4, 4)
nn.BatchNorm2d(d * 8),
nn.ReLU(True),
nn.ConvTranspose2d(d * 8, d * 4, 4, 2, 1, bias=False), # (4d, 8, 8)
nn.BatchNorm2d(d * 4),
nn.ReLU(True),
nn.ConvTranspose2d(d * 4, d * 2, 4, 2, 1, bias=False), # (2d, 16, 16)
nn.BatchNorm2d(d * 2),
nn.ReLU(True),
nn.ConvTranspose2d(d * 2, d, 2, 2, 2, bias=False), # (d, 28, 28)
nn.BatchNorm2d(d),
nn.ReLU(True),
nn.ConvTranspose2d(d, 1, 3, 1, 1, bias=False), # (1, 28, 28)
nn.Tanh()
]
self.net = nn.Sequential(*layers)
        # self._modules iterates over names; walk the real submodules instead
        # and use nn.init for the DCGAN-style weight initialization.
        for m in self.modules():
            if isinstance(m, nn.ConvTranspose2d):
                nn.init.normal_(m.weight.data, 0.0, 0.02)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.normal_(m.weight.data, 1.0, 0.02)
                if m.bias is not None:
                    nn.init.constant_(m.bias.data, 0)
def forward(self, x):
return self.net(x)
class Discriminator(nn.Module):
def __init__(self, d=4):
super(Discriminator, self).__init__()
layers = [
nn.Conv2d(1, d, 4, 2, 3, bias=False), # (d, 16, 16)
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(d, d * 2, 4, 2, 1, bias=False), # (2d, 8, 8)
nn.BatchNorm2d(d * 2),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(d * 2, d * 4, 4, 2, 1, bias=False), # (4d, 4, 4)
nn.BatchNorm2d(d * 4),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(d * 4, d * 8, 4, 2, 1, bias=False), # (8d, 2, 2)
nn.BatchNorm2d(d * 8),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(d * 8, 1, 4, 2, 1, bias=False), # (1, 1, 1)
nn.Sigmoid()
]
self.net = nn.Sequential(*layers)
        # Walk the real submodules and initialize conv weights the DCGAN way;
        # the original called a non-existent m.init and omitted the tensor arg.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.normal_(m.weight.data, 0.0, 0.02)
def forward(self, x):
return self.net(x)
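# Added smoke test: checks the tensor shapes documented in the layer comments.
# Guarded so importing this module stays side-effect free.
if __name__ == "__main__":
    z = torch.randn(2, 100, 1, 1)   # batch of latent vectors
    g = Generator(d=4, z_len=100)
    fake = g(z)
    print(fake.shape)               # expected: torch.Size([2, 1, 28, 28])
    dsc = Discriminator(d=4)
    print(dsc(fake).shape)          # expected: torch.Size([2, 1, 1, 1])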
| 31.391892
| 81
| 0.480844
|
dea8fe0a992dc364cee290980afdcd9094d16816
| 1,651
|
py
|
Python
|
ds_info/utils/io_utils.py
|
camgbus/dataset_statistics
|
8aa38c2f3c24c0db83fc8c5b57c2d211ab732e35
|
[
"MIT"
] | null | null | null |
ds_info/utils/io_utils.py
|
camgbus/dataset_statistics
|
8aa38c2f3c24c0db83fc8c5b57c2d211ab732e35
|
[
"MIT"
] | null | null | null |
ds_info/utils/io_utils.py
|
camgbus/dataset_statistics
|
8aa38c2f3c24c0db83fc8c5b57c2d211ab732e35
|
[
"MIT"
] | null | null | null |
import os
import SimpleITK as sitk
def list_files(dataset_path):
"""List files in a dataset directory with the format of the Medical
Segmentation Decathlon.
Parameters:
dataset_path (str): path to a dataset
Returns:
lst(str): full file names
"""
train_img_path = os.path.join(dataset_path, 'imagesTr')
file_names = [f for f in os.listdir(train_img_path) if
os.path.isfile(os.path.join(train_img_path, f))]
return file_names
def get_img_label(dataset_path, file_name):
"""Load an image or label map.
Parameters:
dataset_path (str): path to a dataset with the Medical Segmentation
Decathlon structure
file_name (str): name of the file, including ending
Returns:
(SimpleITK.SimpleITK.Image, SimpleITK.SimpleITK.Image): image and label map
"""
img_path = os.path.join(dataset_path, 'imagesTr', file_name)
label_path = os.path.join(dataset_path, 'labelsTr', file_name)
return sitk.ReadImage(img_path), sitk.ReadImage(label_path)
def get_arrays_from_img_label(img, label, img_mode=None):
"""Transform a SimpleITK image and label map into numpy arrays, and
optionally select a channel.
Parameters:
img (SimpleITK.SimpleITK.Image): image
label (SimpleITK.SimpleITK.Image): label map
img_mode (int or None): optional mode channel, so output is 3D
Returns:
(numpy.ndarray, numpy.ndarray): image and label in numpy format
"""
img_np = sitk.GetArrayFromImage(img)
if img_mode is not None:
img_np = img_np[img_mode]
label_np = sitk.GetArrayFromImage(label)
return img_np, label_np.astype(int)
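# Added usage sketch (hypothetical dataset path): list the training images of
# a Medical Segmentation Decathlon task and load the first case as arrays.
if __name__ == "__main__":
    dataset_path = "/data/Task01_BrainTumour"   # placeholder path
    names = list_files(dataset_path)
    print(f"{len(names)} training images, e.g. {names[0]}")
    img, label = get_img_label(dataset_path, names[0])
    # Multi-modal images are 4D; pick channel 0 to get a 3D volume.
    img_np, label_np = get_arrays_from_img_label(img, label, img_mode=0)
    print(img_np.shape, label_np.shape)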
| 33.02
| 79
| 0.706844
|
bb75e8baadc1411acd4d3d210b1b2a804a65b3c0
| 11,647
|
py
|
Python
|
main.py
|
RussellPacheco/Discord-Message-Deleter-Bot
|
f724a08c277ff86c6bc4c9979e5797172a8ef0ba
|
[
"MIT",
"Unlicense"
] | null | null | null |
main.py
|
RussellPacheco/Discord-Message-Deleter-Bot
|
f724a08c277ff86c6bc4c9979e5797172a8ef0ba
|
[
"MIT",
"Unlicense"
] | null | null | null |
main.py
|
RussellPacheco/Discord-Message-Deleter-Bot
|
f724a08c277ff86c6bc4c9979e5797172a8ef0ba
|
[
"MIT",
"Unlicense"
] | null | null | null |
from dotenv import load_dotenv
import discord
from discord.ext import commands
import logging
from datetime import datetime, timedelta
import os
load_dotenv()
####################
#
# Channels
#
####################
OFF_TOPIC_CHANNEL = 824643646748885022
BB_GENERAL_CHANNEL = 711441407914672162
BB_TEST_CHANNEL = 858188090610155530
BOBSANDERS_TEST_SERVER_GENERAL_CHANNEL = 857208225974452247
####################
#
# Allowed Users
#
####################
BOBSANDERS_ID = 636913307605008407
PERSONAL_ID_LIST = [BOBSANDERS_ID, ]
####################
#
# Passive Deletion Time Config
#
####################
TIME = "day" # "day", "hour", or "minute"
AMOUNT_TIME = 1
AMOUNT_TO_PASSIVE_DELETE = 24  # How many messages to passively delete per pass.
####################
#
# Setup
#
####################
#### LOGGER
logger = logging.getLogger('discord')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename='discord_debug.log', encoding='utf-8', mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
### DISCORD
intents = discord.Intents.default()
intents.members = True
description = "A bot for deleting old messages!"
bot = commands.Bot(command_prefix="$", description=description, intents=intents)
####################
#
# Util Functions
#
####################
def log_login():
file = open("bot_login_times.log", "a")
file.write(f"\n{datetime.now()}")
file.close()
print(f"Bot has logged in at <{datetime.now()}>")
async def log_all_messages_id(channel):
all_messages = await channel.history(limit=None, oldest_first=True).flatten()
file = open("message_ids.txt", "w")
for message in all_messages:
file.write(str(message.id) + "\n")
file.close()
def check_messages_for_deletion(message):
if message.channel.id == BB_TEST_CHANNEL:
file = open("deletion_times.log", "a")
file.close()
login_times_file = open("bot_login_times.log", "r")
deletion_times_file = open("deletion_times.log", "r")
deletion_times = deletion_times_file.readlines()
current_time = datetime.now()
login_times_file_data = login_times_file.readlines()
login_times_file_length = len(login_times_file_data)
if login_times_file_length > 0:
if len(deletion_times) > 0:
            last_deletion_unparsed = deletion_times[-1].strip()
            last_deletion = datetime.fromisoformat(last_deletion_unparsed)
            # timedelta arithmetic avoids replace(day=...) overflowing at month
            # boundaries; the old hour ("/ 60") and minute ("% 60") math was wrong.
            if TIME == "day":
                last_deletion_plus_time = last_deletion + timedelta(days=AMOUNT_TIME)
            elif TIME == "hour":
                last_deletion_plus_time = last_deletion + timedelta(hours=AMOUNT_TIME)
            else:  # "minute"
                last_deletion_plus_time = last_deletion + timedelta(minutes=AMOUNT_TIME)
deletion_times_file.close()
if last_deletion_plus_time <= current_time:
return True
else:
return False
else:
            last_login_time_unparsed = login_times_file_data[-1].strip()
            last_login_time = datetime.fromisoformat(last_login_time_unparsed)
            if TIME == "day":
                last_login_time_plus_time = last_login_time + timedelta(days=AMOUNT_TIME)
            elif TIME == "hour":
                last_login_time_plus_time = last_login_time + timedelta(hours=AMOUNT_TIME)
            else:  # "minute"
                last_login_time_plus_time = last_login_time + timedelta(minutes=AMOUNT_TIME)
login_times_file.close()
if last_login_time_plus_time <= current_time:
return True
else:
return False
else:
return False
async def delete_old_messages(channel):
file = open("message_ids.txt", "a")
file.close()
file = open("message_ids.txt", "r")
all_saved_ids = file.readlines()
file.close()
    counter = 0
    # fetch_message() needs an int ID, and the saved lines carry a trailing
    # newline; pop from the front so the list is not mutated mid-iteration.
    while counter < AMOUNT_TO_PASSIVE_DELETE and len(all_saved_ids) > 0:
        message_id = int(all_saved_ids.pop(0).strip())
        message = await channel.fetch_message(message_id)
        await channel.send(f"I am deleting {message.content} by {message.author} made on {message.created_at}.")
        await message.delete()
        counter += 1
file = open("message_ids.txt", "w")
for line in all_saved_ids:
file.write(line)
file.close()
file = open("deletion_times.log", "r")
past_deletion_times = file.readlines()
file.close()
file = open("deletion_times.log", "w")
    for times in past_deletion_times:
        file.write(times.rstrip("\n") + "\n")
file.write(f"{datetime.now()}")
file.close()
await log_all_messages_id(channel)
####################
#
# Events
#
####################
@bot.event
async def on_ready():
channel = bot.get_channel(BB_TEST_CHANNEL)
message = await channel.history().flatten()
if check_messages_for_deletion(message[0]):
await delete_old_messages(channel)
log_login()
await log_all_messages_id(channel)
print('We have logged in as {0.user}'.format(bot))
print(f"The channel I am focusing on is {channel}")
await channel.send("Hey guys, I'm new!")
@bot.event
async def on_message(message):
channel = bot.get_channel(BB_TEST_CHANNEL)
await log_all_messages_id(channel)
if check_messages_for_deletion(message):
await delete_old_messages(channel)
if message.author == bot.user:
return
    if message.author.id in PERSONAL_ID_LIST and message.content.startswith("$"):
if message.author.id == BOBSANDERS_ID:
await message.channel.send("Hey-o Bobby! You got a command for me? Right away, sir!")
await bot.process_commands(message)
else:
await message.channel.send(f"Hey there {message.author}. I'll take care of that for you.")
    elif message.content.startswith("$"):
await message.channel.send(f"Nice try @{message.author}, but this is a private bot only for God's hands.")
@bot.command()
async def getallmessages(ctx):
try:
counter = 0
async for message in ctx.history(limit=None):
counter += 1
await ctx.send(f"There are {counter - 2} previous messages.")
except Exception as e:
await ctx.send(f"Error: {e}")
@bot.command()
async def getfirstmessage(ctx):
try:
first_message = await ctx.history(oldest_first=True, limit=2).flatten()
await ctx.send(f"The first message ID is {first_message[0].id}")
await ctx.send(f"The first message author is {first_message[0].author}")
await ctx.send(f"The first message created_at is {first_message[0].created_at}")
await ctx.send(f"The first message is {first_message[0].content}")
except Exception as e:
await ctx.send(e)
@bot.command()
async def deletemsg(ctx, id):
try:
message_to_delete = await ctx.fetch_message(id)
created_at = message_to_delete.created_at
content = message_to_delete.content
author = message_to_delete.author
await message_to_delete.delete()
await ctx.send(f"Deleted {content}. Created At: {created_at}. Author: {author}")
except Exception as e:
await ctx.send(f"Error: {e}")
@bot.command()
async def deleteallmsgs(ctx):
try:
all_messages = await ctx.history().flatten()
await ctx.send(f"I will delete {len(all_messages)} messages now.")
counter = 0
for message in all_messages:
counter += 1
await message.delete()
await ctx.send(f"I deleted {counter} message(s)")
except Exception as e:
await ctx.send(f"Error: {e}")
@bot.command()
async def deletefirst(ctx, num):
try:
first_n_msgs = await ctx.history(limit=int(num), oldest_first=True).flatten()
await ctx.send(f"I will be deleting the first {len(first_n_msgs)} msg(s).")
counter = 0
for message in first_n_msgs:
counter += 1
await ctx.send(f"{counter}: Deleted {message.content} posted by {message.author} on {message.created_at}")
await message.delete()
await ctx.send(f"Deleted {counter} msg(s).")
except Exception as e:
await ctx.send(f"Error: {e}")
@bot.command(name="getcommands", description="Returns all commands available")
async def getcommands(ctx):
helptext = "```"
for command in bot.commands:
helptext+=f"{command}\n"
helptext += "```"
await ctx.send(helptext)
@bot.command()
async def deletionschedule(ctx):
bot_login_times_file = open("bot_login_times.log", "r")
bot_login_times = bot_login_times_file.readlines()
latest_bot_login = bot_login_times[-1]
deletion_times_files = open("deletion_times.log", "r")
deletion_times = deletion_times_files.readlines()
current_time = datetime.now()
current_time = current_time.replace(microsecond=0)
await ctx.send(f"This bot is scheduled to delete {AMOUNT_TO_PASSIVE_DELETE} messages every {AMOUNT_TIME} {TIME}(s)")
if len(deletion_times) > 0:
        latest_deletion = datetime.fromisoformat(
            deletion_times[-1].strip()
        ).replace(microsecond=0)
        if TIME == "day":
            latest_deletion_plus_time = latest_deletion + timedelta(days=AMOUNT_TIME)
        elif TIME == "hour":
            latest_deletion_plus_time = latest_deletion + timedelta(hours=AMOUNT_TIME)
        else:  # "minute"
            latest_deletion_plus_time = latest_deletion + timedelta(minutes=AMOUNT_TIME)
if latest_deletion_plus_time > current_time:
await ctx.send(f"There is {latest_deletion_plus_time - current_time} left before the next deletion")
else:
        latest_bot_login = datetime.fromisoformat(
            latest_bot_login.strip()
        ).replace(microsecond=0)
        if TIME == "day":
            latest_bot_login_plus_time = latest_bot_login + timedelta(days=AMOUNT_TIME)
        elif TIME == "hour":
            latest_bot_login_plus_time = latest_bot_login + timedelta(hours=AMOUNT_TIME)
        else:  # "minute"
            latest_bot_login_plus_time = latest_bot_login + timedelta(minutes=AMOUNT_TIME)
if latest_bot_login_plus_time > current_time:
await ctx.send(f"There is {latest_bot_login_plus_time - current_time} left before the next deletion")
@bot.command()
async def getmessagecount(ctx):
file = open("message_ids.txt", "r")
all_ids = file.readlines()
total_messages = await ctx.history().flatten()
file.close()
await ctx.send(f"There are {len(all_ids)} message IDs registered in the database.")
await ctx.send(f"There are {len(total_messages)} messages in the channel")
bot.run(os.getenv("BOT_TOKEN"))
| 30.730871
| 126
| 0.643857
|
9cba158ae328844fbf94650cf0eefe57d5e57d71
| 45,514
|
py
|
Python
|
algorithms/puzzles.py
|
jcarreiro/jmc-python
|
979c67ca489d4e80e6be96d23f8bcecacabbee59
|
[
"MIT"
] | null | null | null |
algorithms/puzzles.py
|
jcarreiro/jmc-python
|
979c67ca489d4e80e6be96d23f8bcecacabbee59
|
[
"MIT"
] | null | null | null |
algorithms/puzzles.py
|
jcarreiro/jmc-python
|
979c67ca489d4e80e6be96d23f8bcecacabbee59
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Algorithmic puzzles!
#
# Copyright (c) 2013, Jason M. Carreiro.
#
# See LICENSE file for license information.
#
# todo: It would be nice to be able to view the call graph for the
# tree-recursive problems using pylab.
#
# problem todos:
# - add two binary strings
# - anagram printing
# - return index of max value from array (uniformly at random)
# - largest runway on island
# - validate BST
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections # namedtuple
import os
import random
import sys
import unittest
# Fix up import path.
#
# TODO: figure out how to do module-relative imports
sys.path.append(os.path.expanduser('~/src/python'))
from jmc.decorators import countcalls, memoized
# -----------------------------------------------------------------------------
# Permutations
#
# Question
# --------
# Given a string, "generate" all of the permutations of the string.
#
# Solution
# --------
# This is a straightforward tree-recursive problem with a simple solution.
# -----------------------------------------------------------------------------
def permutations(s):
def helper(s, t):
if len(s) == 0:
print(t)
else:
            for i in range(0, len(s)):
helper(s[0:i] + s[i+1:], t + s[i])
helper(s, '')
# More than one candidate has given me this "solution". Since it only prints
# n^2 strings and there are n! permutations, it can't possibly be correct...
def permutations_wrong(s):
    for i in range(len(s)):
        c = s[i]
        t = s[0:i] + s[i+1:]
        for j in range(len(t) + 1):
yield t[0:j] + c + t[j:]
# Another bad solution attempt -- this one tries to print one character at a
# time, which can't work since we need to print the character at position N
# in the string (N-1)! times (once for each permutation in which it occurs at
# that position).
def permutations_wrong2(s):
if not s:
print() # print a newline at the end
    for i in range(len(s)):
print(s[i], end='')
permutations_wrong2(s[0:i] + s[i+1:])
# This version returns a list -- which is bad idea, since the number of list
# entries grows as O(n!).
def permutations_list(s):
if len(s) == 1:
return [s]
l = []
    for i in range(len(s)):
for p in permutations_list(s[0:i] + s[i+1:]):
l += [s[i] + p]
return l
def permutations_yield(s):
if len(s) == 1:
yield s
elif len(s) == 0:
raise ValueError()
else:
for i in range(len(s)):
rest = s[0:i] + s[i+1:]
for p in permutations_yield(rest):
yield s[i] + p
# An elegant solution that builds the permutation in place inside the input
# string.
def permutations_swap(s):
def swap(s, a, b):
x = s[a]
s[a] = s[b]
s[b] = x
def helper(s, i):
if i == len(s):
# print ''.join(s)
return
for j in range(i, len(s)):
swap(s, i, j)
helper(s, i+1)
swap(s, i, j)
helper(list(s), 0)
# More than one candidate has given me this solution. Initially I thought the
# complexity was worse than factorial but now I'm not so sure.
def permutations_dfs(s):
def helper(s, p, d):
if len(p) == len(s):
return # print p
else:
for c in s:
if not d[c]:
d[c] = True
helper(s, p + c, d)
d[c] = False
d = {}
for c in s:
d[c] = False
helper(s, '', d)
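# Added sanity check (illustrative): the yield-based solution should agree
# with itertools.permutations on inputs with distinct characters, since the
# helpers above treat positions rather than multiset counts.
def _check_permutations_yield(s='abc'):
    import itertools
    expected = sorted(''.join(p) for p in itertools.permutations(s))
    assert sorted(permutations_yield(s)) == expected
    return expected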
# ------------------------------------------------------------------------------
# Towers of Hanoi
#
# Question
# --------
#
# Solution
# --------
#
# ------------------------------------------------------------------------------
def towers_of_hanoi():
pass
# -----------------------------------------------------------------------------
# Tree to (Doubly-linked) List
# -----------------------------------------------------------------------------
#
# Question
# --------
# Given a binary tree, convert the tree into a list, in place.
#
# Solution
# --------
# A recursive solution in C is easy ... but how do we do it in Python?
#
# Ref: Interview question used at Facebook.
# -----------------------------------------------------------------------------
def tree_to_list(t):
pass
# ------------------------------------------------------------------------------
# Missing Integer and Variations
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given an array, A[1..N], containing all of the numbers from 0 to N, except for
# one, find the missing integer in linear time. Additionally, you may not access
# a complete integer; the elements of A are stored in binary, and the only
# operation supported is fetching the j-th bit of A[i], in constant time.
#
# Solution
# --------
# The solution is to recursively divide the array into two parts based on the
# j-th bit, one of which contains all of the integers where the bit is set, the
# other of which contains all of the integers where it isn't. If the array
# contained every number from 0 to N, then we would expect both parts to be the
# same size, so the smaller half must 'contain' the missing integer. Hence the
# j-th bit of the missing integer must be equal to a 1 if the '1's part is
# smaller or a zero otherwise.
#
# This insight alone is only enough to get us to an O(N lg N) solution, if we
# examine the entire array for each possible bit position. However we can also
# eliminate half the remaining entries at each step, since we only need to
# recurse on the half that 'contains' the missing integer. This leads us to the
# recurrence:
#
# T(N) = T(N/2) + \Theta(N)
#
# The solution to which is O(N) by the master method:
#
# If T(n) = aT(n/b) + f(n), then
#
# (1) If f(n) = O(n^{log_b a - \epsilon}) for some constant \epsilon > 0,
# then T(n) = \Theta(n^{log_b a})
# (2) If f(n) = \Theta(n^{log_b a}, then T(n) = \Theta(n^{log_b a} lg n)
# (3) If f(n) = \Omega(n^{log_b a + \epsilon}) for some constant \epsilon > 0,
# and af(n/b) \le cf(n) for some constant c < 1 and all sufficiently large
# n, then T(n) = \Theta(f(n)).
#
# For our recurrence, n^{log_b a} = n^{log_2 1} = 0. But f(n) = \Theta(n) =
# \Theta(n^{log_2 1 + \epsilon}), where \epsilon = 1. For sufficiently large n,
# af(n/b) = n/2 = cf(n) for c = 1/2. Consequently, by case 3, the solution to
# the recurrence is T(n) = \Theta(n).
#
# Ref: Intro. Algorithms (pages?), interview question used at Microsoft.
# ------------------------------------------------------------------------------
def missing_integer(A):
def missing_integer_int(A, j):
# base case -- finished searching entire array
if len(A) == 0:
return 0
bit_set = [] # entries where j-th bit was set
bit_unset = [] # entries where j-th bit was unset
for x in A:
# check j-th bit
if x & 2**j:
bit_set.append(x)
else:
bit_unset.append(x)
if len(bit_set) < len(bit_unset):
# the missing bit is a 1
return 2**j + missing_integer_int(bit_set, j+1)
else:
# the missing bit is a 0
#
# Note that we break ties by picking 0, this is required for the
# algorithm to work when N is even!
return missing_integer_int(bit_unset, j+1)
return missing_integer_int(A, 0)
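# Added worked example: for A = [0, 1, 2, 4] (3 is missing), the first pass
# splits on bit 0 into odds [1] and evens [0, 2, 4]; the odd side is smaller,
# so bit 0 of the answer is 1 and the recursion continues on [1].
def _check_missing_integer():
    assert missing_integer([0, 1, 2, 4]) == 3
    assert missing_integer([1, 2, 3, 0, 5]) == 4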
# This is a variation of the missing integer problem. You are given a sequence
# A[1..N] of N numbers that follow some arithmetic progression, with one of the
# numbers missing. Find the missing number. For example, the sequence: [1, 3, 7,
# 9, 11, 13], the missing number is 5.
#
# Our solution takes advantage of the fact that the input array is sorted to
# find the missing number in logarithmic time. We calculate the difference
# between the terms (taking advantage of the fact that we must always have the
# first and last terms in the sequence), then we check the element in the middle
# of the sequence to see if it matches what we expect. If it does, we know the
# missing integer must be in the top half of the array, so we recurse on that
# half (and vice versa if the element isn't what we expect).
def missing_integer_arithmetic_progression(A):
print(A)
    d = (A[-1]-A[0])//len(A) # floor division so this also works in Python 3
if len(A) == 2:
return A[0]+d
else:
        p = len(A)//2
if A[p] == A[0]+p*d:
return missing_integer_arithmetic_progression(A[p:])
else:
return missing_integer_arithmetic_progression(A[0:p+1])
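# A small added check (not from the original) for the example given above.
def test_missing_integer_arithmetic_progression():
    assert missing_integer_arithmetic_progression([1, 3, 7, 9, 11, 13]) == 5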
# -----------------------------------------------------------------------------
# Randomly return index of max value from array
# -----------------------------------------------------------------------------
#
# Question
# --------
# Given an array of integers, return the index of the maximum element. If there
# are several elements reaching the maximum, choose one uniformly at random.
#
# Solution
# --------
# As we loop over the array, we store the index of the max value that we plan to
# return. Every time we see another instance of the max value, we replace the
# saved index with the index of that instance with probability 1/x, where x is
# number of max values we have seen so far. This is also known as "reservoir
# sampling" (https://en.wikipedia.org/wiki/Reservoir_sampling).
# -----------------------------------------------------------------------------
# This is a basic solution which requires O(N) time, and O(N) additional space
# (in the worst case).
def get_index_of_max_value_from_array_basic(A):
max_value = A[0]
max_indices = [0]
    # start at 1: index 0 is already recorded in the initial values above
    for i in xrange(1, len(A)):
if A[i] > max_value:
max_value = A[i]
max_indices = [i]
elif A[i] == max_value:
max_indices.append(i)
return random.choice(max_indices)
# This is a slightly better solution: still O(N) time, but O(1) extra space.
def get_index_of_max_value_from_array_better(A):
max_value = A[0]
max_count = 1
    # start at 1: A[0] is already counted in the initial values above
    for i in xrange(1, len(A)):
if A[i] > max_value:
max_value = A[i]
max_count = 1
elif A[i] == max_value:
max_count += 1
j = random.randint(0, max_count - 1)
max_count = 0
for i in xrange(0, len(A)):
if A[i] == max_value:
if max_count == j:
return i
else:
max_count += 1
# not reached, we should always return a value above
raise Exception('not reached')
# This version is the best posible: we do a single pass over the array, and only
# use a constant amount of additional space. The trick is to understand that we
# need to keep track of the index that we plan to return; each time we see the
# max value again, we overwrite our saved index with the current index with
# probability 1/x, where x is the number of times we've seen the max value so
# far.
def get_index_of_max_value_from_array_best(A):
max_value = A[0]
max_count = 1
j = 0 # saved index to return
    # start at 1: A[0] is already counted in the initial values above
    for i in xrange(1, len(A)):
if A[i] > max_value:
max_value = A[i]
max_count = 1
j = i
elif A[i] == max_value:
max_count += 1
if random.uniform(0, 1) < 1.0 / max_count:
j = i # lucky winner
return j
# This version just picks a uniformly distributed random number each time we see
# the max, and saves the index of the new instance if the new random number is
# greater than the saved value.
def get_index_of_max_value_from_array_best_alt(A):
max_value = A[0]
r = 0 # random number from last max
j = 0 # saved index to return
for i in xrange(0, len(A)):
if A[i] > max_value:
max_value = A[i]
r = random.uniform(0, 1)
j = i
elif A[i] == max_value:
s = random.uniform(0, 1)
if s > r:
r = s
j = i # lucky winner
return j
# ------------------------------------------------------------------------------
# Making Change
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given an amount, N, find the total number of ways to make change for M, using
# pennies, nickels, dimes, and quarters.
#
# Solution
# --------
# We can break this down into two subproblems:
#
# 1. The number of ways to make change for N, after having used a coin of the
# largest denomination.
#
# 2. The number of ways to make change for N, without using any coins of the
# largest denomination.
#
# Solving the subproblems recursively and combining the answers gives us the
# answer to the problem.
#
# Ref: SICP (pages?)
# ------------------------------------------------------------------------------
@memoized
@countcalls
def make_change(n, d):
if n == 0:
return 1
elif n < 0 or len(d) == 0:
return 0
else:
return make_change(n - d[-1], d) + make_change(n, d[:-1])
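# An added sanity check (not from the original). A tuple is used for the
# denominations so the memoized decorator can hash the arguments; with US
# coins there are 242 ways to make change for a dollar.
def test_make_change():
    assert make_change(100, (1, 5, 10, 25)) == 242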
# ------------------------------------------------------------------------------
# Adding Up
# ------------------------------------------------------------------------------
#
# Like making change, but we count the ways to sum the numbers (1, N-1) to make
# N.
#
# Interestingly, these numbers are the partition numbers (Sloane's A000041),
# with the only difference being that we don't count 0 + N as a partition of
# N.
#
# See also http://mathworld.wolfram.com/PartitionFunctionP.html.
#
# Ref: interview question used at Facebook.
# ------------------------------------------------------------------------------
@memoized
@countcalls
def number_of_partitions(n):
def count_partitions(n, d):
if n == 0:
return 1
elif n < 0 or len(d) == 0:
return 0
else:
return count_partitions(n - d[-1], d) + count_partitions(n, d[:-1])
return count_partitions(n, range(1, n))
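# An added check (not from the original): the partitions of 4 counted here
# are 3+1, 2+2, 2+1+1 and 1+1+1+1 (4 itself is excluded, as noted above).
def test_number_of_partitions():
    assert number_of_partitions(4) == 4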
# ------------------------------------------------------------------------------
# Counting Palindromes
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given a string, S, count all of the palindromes in the string. For example,
# the string 'abba' contains the following palindromes: 'a', 'b', 'b', 'a',
# 'bb', and 'abba', for a total of 6.
#
# Solution
# --------
# This is an O(n^2) solution that counts the number of palindromes at each index
# of the string.
#
# TODO: can we do this in linear time? (Manacher's algorithm computes the
# palindromic radius around every center in O(n), so summing those radii
# should answer this.)
#
# Ref: interview question used at Facebook.
# ------------------------------------------------------------------------------
def count_palindromes(s):
def count_odd_palindromes(s, i):
c = 1 # trivial palindrome
d = 1 # distance to check
while i - d >= 0 and i + d < len(s) and s[i-d] == s[i+d]:
print('Found odd palindrome {0} at i = {1}, d = {2}'.format(s[i-d:i+d+1], i, d))
c += 1
d += 1
return c
def count_even_palindromes(s, i):
c = 0
d = 1
while i - (d - 1) >= 0 and i + d < len(s) and s[i-(d-1)] == s[i+d]:
print('Found even palindrome {0} at i = {1}, d = {2}'.format(s[i-(d-1):i+d+1], i, d))
c += 1
d += 1
return c
c = 0
for i in range(0, len(s)):
print('Checking index {0}'.format(i))
c += count_odd_palindromes(s, i)
c += count_even_palindromes(s, i)
return c
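# An added check (not from the original) for the example in the question.
def test_count_palindromes():
    assert count_palindromes('abba') == 6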
# ------------------------------------------------------------------------------
# Maximum Sub-Array and Variations
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given an array of pairs of the form (a, b), find a sub-array s.t. the 1st
# element of each pair is in increasing order and the sum of the second element
# of each pair is the maximum possible.
#
# Solution
# --------
# Kadane's algorithm can be used to find the maximum sub-array, so let's start
# with that. Then we modify it s.t. we reset the maximum sum seen so far each
# time we see an inversion in the first element in the pair.
#
# Ref: interview question used at Google.
# ------------------------------------------------------------------------------
def maximum_subarray(A):
max_ending_here = 0
max_so_far = 0
for x in A:
max_ending_here = max(0, max_ending_here+x)
max_so_far = max(max_so_far, max_ending_here)
return max_so_far
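# An added check (not from the original): Kadane on a mixed array; the best
# contiguous sum is 3 + 4 = 7.
def test_maximum_subarray():
    assert maximum_subarray([1, -2, 3, 4]) == 7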
# This variant of Kadane's algorithm also handles the case where the array
# contains only negative numbers (in which case, the maximum contiguous sum is
# simply the largest number in the array).
def maximum_subarray_v2(A):
max_ending_here = A[0]
max_so_far = A[0]
    # A[0] is already accounted for in the initial values above
    for x in A[1:]:
print('max_ending_here: {0}, max_so_far: {1}'.format(max_ending_here, max_so_far))
max_ending_here = max(x, max_ending_here+x)
max_so_far = max(max_so_far, max_ending_here)
    return max_so_far
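# An added check (not from the original): v2 handles all-negative input.
def test_maximum_subarray_v2():
    assert maximum_subarray_v2([-3, -1, -2]) == -1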
def maximum_subarray_in_order(A):
max_ending_here = max_so_far = A[0][1]
max_so_far_start = max_so_far_end = 0
start = end = 0
for i in range(1, len(A)):
if A[i][0] <= A[i-1][0]:
# the first element in this pair isn't in increasing order, so
# reset the sum
max_ending_here = A[i][1]
start = end = i
        else:
            # keep going, but drop the prefix if its running sum has gone
            # negative (the usual Kadane reset)
            if max_ending_here < 0:
                max_ending_here = A[i][1]
                start = i
            else:
                max_ending_here = max_ending_here + A[i][1]
            end = i
if max_ending_here > max_so_far:
max_so_far_start = start
max_so_far_end = end
max_so_far = max_ending_here
print('Max sequence was {0} with sum {1}.'.format(A[max_so_far_start:max_so_far_end+1], max_so_far))
return A[max_so_far_start:max_so_far_end+1]
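# An added usage sketch (not from the original): the first elements reset at
# (0, 3), after which the best increasing run is (0, 3), (1, 4) with sum 7.
def test_maximum_subarray_in_order():
    A = [(1, 5), (2, -2), (0, 3), (1, 4)]
    assert maximum_subarray_in_order(A) == [(0, 3), (1, 4)]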
# ------------------------------------------------------------------------------
# Phone Numbers
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given a phone number, for example 555-1212, print out all possible strings
# that can be made from that number by substituting the letters found on the
# keypad of a telephone. For example, 623-6537 => 'MCDNLDS' (among others).
#
# Solution
# --------
# We simply use a tree recursive process. For each position in the number, we
# try each of the possible letters in turn, recursing on the remaining digits
# each time. When we run out of digits, we print the string.
#
# Ref: interview question used at Nvidia.
# ------------------------------------------------------------------------------
def phone_numbers(D):
digits = { 0: '0', 1: '1', 2: '2ABC', 3: '3DEF', 4: '4GHI', 5: '5JKL',
6: '6MNO', 7: '7PQRS', 8: '8TUV', 9: '9WXYZ' }
def phone_numbers_int(D, s):
if len(D) == 0:
print(s)
else:
for d in digits[D[0]]:
phone_numbers_int(D[1:], s+d)
phone_numbers_int(D, '')
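# An added usage sketch (not from the original): prints '2', 'A', 'B', 'C'.
def test_phone_numbers():
    phone_numbers([2])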
# ------------------------------------------------------------------------------
# String Reversal
# ------------------------------------------------------------------------------
#
# Question
# --------
# Write a function to reverse the order of the words in a string in place.
#
# Solution
# --------
# Simply go through the string, swapping the first character with the last, then
# the second with the second-to-last, etc. Then, go through the string again,
# looking for spaces. For each word found this way, reverse the characters in
# the word.
#
# This solution doesn't handle punctuation.
#
# Ref: interview question used at Google.
# ------------------------------------------------------------------------------
def reverse_string(S):
S = list(S)
def swap(i,j):
x = S[i]
S[i] = S[j]
S[j] = x
    for i in range(0, len(S)//2):
swap(i, len(S)-(i+1))
i = 0
while i < len(S):
j = i+1
while j < len(S) and S[j] != ' ':
j += 1
k = j-1
while i < k:
swap(i,k)
i += 1
k -= 1
i = j+1
return ''.join(S)
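# An added check (not from the original).
def test_reverse_string():
    assert reverse_string('hello world') == 'world hello'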
# ------------------------------------------------------------------------------
# Order Statistics
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given an unordered array of numbers, find the i-th largest, where the 0-th
# largest is the smallest number in the array.
#
# A variation of this asks about minimizing the sum of the perpendicular
# distances to a given line (for example, choosing the location of a new
# street given the (x,y) positions of a set of houses). The solution is to
# find the median distance to the x axis; putting the road there minimizes
# the overall distance from all of the houses.
#
# Solution
# --------
# We use the partition algorithm from quicksort with a random pivot. This gives
# us a solution that runs in linear time on average. We can improve this to be
# linear in the worst case if we use a 'median-of-medians' approach to select
# the pivot -- this code doesn't do that, however.
#
# Ref: interview question used at Facebook.
# ------------------------------------------------------------------------------
def find_ith_largest_number(A,i):
def swap(A,i,j):
print('Swapped A[{0}]={1} and A[{2}]={3}'.format(i,A[i],j,A[j]))
x = A[i]
A[i] = A[j]
A[j] = x
def partition(A,p,r):
x = A[p]
i = p-1
j = r+1
while True:
j -= 1
while A[j] > x:
j -= 1
i += 1
while A[i] < x:
i += 1
if i < j:
swap(A,i,j)
else:
return j
def randomized_partition(A,p,r):
i = random.randint(p,r)
print('Pivot is A[{0}] = {1}'.format(i,A[i]))
swap(A,i,p)
return partition(A,p,r)
def select(A,p,r,i):
print('Select({0}, {1}, {2}, {3})'.format(A,p,r,i))
print(A)
if p == r:
return A[p]
q = randomized_partition(A,p,r)
k = q-p+1 # number of elements in lower half
print('q =', q, ', k =', k)
if i < k:
return select(A,p,q,i)
else:
return select(A,q+1,r,i-k)
return select(A,0,len(A)-1,i)
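# An added usage sketch (not from the original); note that select() partitions
# the list in place, and that the 0-th largest is the smallest, per the
# question.
def test_find_ith_largest_number():
    assert find_ith_largest_number([5, 2, 9, 1, 7], 0) == 1
    assert find_ith_largest_number([5, 2, 9, 1, 7], 4) == 9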
# ------------------------------------------------------------------------------
# Graph and Tree Problems
# ------------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Question
# --------
# Given an undirected graph G = (V, E), print all possible paths from a given
# source vertex a to a given target vertex b.
#
# A variant is to ask for the total number of distinct vertices visited on all
# simple paths from a -> b; this is trivially done by adding each vertex on every
# path we discover to a set, then counting the keys in the set once we've
# finished.
#
# Solution
# --------
# We use a variant of depth-first search.
#
# Ref: interview question used at Facebook.
# ------------------------------------------------------------------------------
def find_all_paths(G,a,b):
Q = {} # set of visited nodes
def find_path(G,a,b,P):
if a == b:
print(P + [b])
return
# color this node to prevent us from entering it again
Q[a] = 1
# recursively try each edge incident on a
for v in G[a]:
if not v in Q:
find_path(G,v,b,P+[a])
del Q[a]
find_path(G,a,b,[])
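# An added usage sketch (not from the original): G is an adjacency mapping.
# This should print ['a', 'b', 'd'] and ['a', 'c', 'd'].
def test_find_all_paths():
    G = {'a': ['b', 'c'], 'b': ['a', 'd'], 'c': ['a', 'd'], 'd': ['b', 'c']}
    find_all_paths(G, 'a', 'd')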
# ------------------------------------------------------------------------------
# Question: Given a binary tree (but not necessarily a binary search tree), find
# the length of the shortest path between two nodes.
#
# Solution: We perform a depth-first search, returning from each recursive call
# a state vector where the first element is:
# - 0 if neither node has been found;
# - 1 if one node has been found, and;
# - 2 otherwise;
#
# and the second element is the current path length, which is the sum of the
# path lengths for the left and right subtrees, plus 1 if we have seen one of
# the nodes we're searching for, but not both.
#
# This solution assumes that nodes in the tree are unique.
#
# Reference: Interview question used at Facebook.
# ------------------------------------------------------------------------------
def length_of_shortest_path_in_tree(T, a, b):
def helper(T, a, b):
if len(T) == 0:
return (0, 0)
l = helper(T[1], a, b)
r = helper(T[2], a, b)
s = l[0] + r[0]
if T[0] == a or T[0] == b:
s += 1
n = l[1] + r[1]
if s == 1:
n += 1
return (s, n)
return helper(T, a, b)[1]
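# An added usage sketch (not from the original): nodes are (value, left,
# right) tuples and () is the empty tree; the b -> a -> c -> d path has 3
# edges.
def test_length_of_shortest_path_in_tree():
    T = ('a', ('b', (), ()), ('c', (), ('d', (), ())))
    assert length_of_shortest_path_in_tree(T, 'b', 'd') == 3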
# Class used to represent a node in a binary tree.
BNode = collections.namedtuple('BNode', ['value', 'left', 'right'])
# ------------------------------------------------------------------------------
# Question: Given a binary tree, T, find the value of the maximum path in T.
#
# Solution: We define a path as a sequence of nodes (a, b, ..., g) s.t. each
# node in the sequence has an edge connecting it to the previous and the next
# node in the sequence. We define the value of a path as the sum of the values
# of the nodes in the sequence, v(p) = \sum n forall n in p.
#
# This problem admits an easy recursive solution. Consider an arbitrary node
# n in the tree. Then there are three possible cases we need to consider:
#
# 1. The max path is contained within the node's left subtree.
# 2. The max path goes through this node.
# 3. The max path is contained within the node's right subtree.
# ------------------------------------------------------------------------------
def max_path_in_tree(t):
def helper(t):
if t is None:
return (0, 0)
l_contained, l_through = helper(t.left)
r_contained, r_through = helper(t.right)
# Our max is one of:
# 1. the best path contained in our left subtree
# 2. the path through us that continues into our left subtree
# 3. the path through us that continues into our right subtree
# 4. the best path contained in our right subtree
# 5. the best path contained in our left subtree, through us,
# into our right subtree
# (whichever is greater).
max_contained = max(l_contained,
r_contained,
l_through + r_through + t.value)
max_through = max(l_through + t.value, r_through + t.value)
return (max_contained, max_through)
a, b = helper(t)
return max(a, b)
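# An added check (not from the original): the best path is 2 -> 1 -> 3, with
# value 6.
def test_max_path_in_tree():
    assert max_path_in_tree(BNode(1, BNode(2, None, None),
                                  BNode(3, None, None))) == 6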
# -----------------------------------------------------------------------------
# Word Walk
#
# Given a starting word, an ending word, and a dictionary, return a list of
# valid words which transforms the start word into the end word. Successive
# words in the list can differ by at most one character. All words on the list
# must be in the provided dictionary.
#
# For example, given the start "Walk" and the end "Bard", and assuming the
# dictionary is a typical dictionary of English words, one possible result
# could be: ["walk", "balk", "bald", "bard"].
#
# Solution:
#
# Ref: Interview question used at Facebook (and I swear I saw this in an ad
# on the T one time...).
# -----------------------------------------------------------------------------
def word_walk(start, end, dictionary):
pass
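# The Solution section above was left blank, so here is a hedged BFS sketch
# (not the author's solution). It assumes `dictionary` is an iterable of
# lowercase words containing the end word, that start and end have the same
# length, and that the collections module is imported at the top of this file
# (it is already used for BNode above). BFS yields a shortest transformation
# sequence, or None if none exists.
def word_walk_bfs(start, end, dictionary):
    words = set(w for w in dictionary if len(w) == len(start))
    parent = {start: None}
    queue = collections.deque([start])
    while queue:
        w = queue.popleft()
        if w == end:
            path = []  # follow the parent links back to the start
            while w is not None:
                path.append(w)
                w = parent[w]
            return path[::-1]
        for i in range(len(w)):
            for c in 'abcdefghijklmnopqrstuvwxyz':
                n = w[:i] + c + w[i+1:]
                if n in words and n not in parent:
                    parent[n] = w
                    queue.append(n)
    return None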
# -----------------------------------------------------------------------------
# Array and List Problems
# -----------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# Question: Determine if any three integers in an array sum to 0.
#
# Solution: We assume that repetitions are allowed. The naive solution is simply
# to check all possible combinations, one at a time. This has O(n^3) complexity.
#
# We can do slightly better by first inserting all of the values in the array
# into a hash table; this optimizes our inner loop, reducing the complexity to
# O(n^2) but requiring O(n) space for the hash table.
#
# Finally we can maintain the O(n^2) complexity but reduce the extra space
# required to O(1) by sorting the input list, then, for each element in the
# list, performing a linear search over the sorted list to find the pair of
# elements (if any) for which the sum of all three elements is zero.
#
# Ref: Interview question used at Facebook.
#-------------------------------------------------------------------------------
def list_three_int_sum_to_zero_naive(A):
solutions = []
for i in range(len(A)):
for j in range(i, len(A)):
for k in range(j, len(A)):
if A[i] + A[j] + A[k] == 0:
solutions.append((i, j, k))
return solutions
# This solution uses a hash table to slightly improve the complexity (to O(n^2))
# but requires O(n) extra space.
def list_three_int_sum_to_zero_hashtable(A):
solutions = []
d = {}
for i in range(len(A)):
d[A[i]] = i
for i in range(len(A)):
for j in range(i, len(A)):
sum = -(A[i] + A[j])
if sum in d:
solutions.append((i, j, d[sum]))
return solutions
# This solution first sorts the list, then uses a linear search over the right-
# hand side of the list for each element in it. This maintains the quadratic
# complexity, but requires no extra space.
def list_three_int_sum_to_zero(A):
solutions = []
A.sort()
for i in range(len(A) - 2):
j = i + 1
k = len(A) - 1
while j <= k:
sum = A[i] + A[j] + A[k]
if sum == 0:
solutions.append((i, j, k))
j += 1
k -= 1
elif sum < 0:
# We need a larger number; move the left end of the range up
j += 1
else:
# We need a smaller number; move the right end of the range down
k -= 1
# There are some special cases to handle. Any 0s in the array are also a
# solution (simply pick 0 three times). Finally, if we find A[j] = -2 * A[i]
# in the array for some i, j, then (i, i, j) is also a solution.
#
# Note that this additional quadratic step doesn't change our asymptotic
# complexity.
for i in range(len(A)):
if A[i] == 0:
solutions.append((i, i, i))
else:
for j in range(i + 1, len(A)):
if 2 * A[i] + A[j] == 0:
solutions.append((i, i, j))
return solutions
# ------------------------------------------------------------------------------
# Island Airport
#
# Question: Consider a grid where each cell may either be water or land. The
# non-water cells make up a set of "islands" (note that there may be more than
# one disconnected island). You would like to build an airport on one of the
# islands, subject to two constraints:
#
# 1. The airport must have two runways, one aligned N/S, the other E/W. They
# must intersect at exactly one point (forming a cross).
#
# 2. You want the runways to have the largest area possible.
#
# Return the (x, y) coordinates of the intersection.
#
# Solution:
#
# ------------------------------------------------------------------------------
# G is the grid, as a list of lists, where G[i][j] is the cell at (i, j). The
# value at each cell is 1 if the cell is "land" and 0 otherwise.
#
# This is the most naive solution -- just try to grow the runways from every
# cell and remember the max. If the grid has length N on each side, then this
# solution is O(N^3), since for each of the N^2 grid intersections, we examine
# (in the worst case) 2N cells (N cells along each axis).
def island_runway(G):
def grow(G, i, j, d):
# grow the runway starting at (i,j) as far as possible in direction d;
# then return the length in that direction. d should be a tuple of the
# form (dy, dx).
y = i
x = j
        while 0 <= y < len(G) and 0 <= x < len(G[y]) and G[y][x] == 1:
y += d[0]
x += d[1]
# XXX this assumes we only ever move along one axis
return abs(y - i) + abs(x - j)
p = None
max_length = 0
for i in range(0, len(G)):
for j in range(0, len(G[i])):
print('Considering ({i}, {j})...'.format(i=i, j=j))
if G[i][j] != 1:
# this cell isn't even land
continue
length = grow(G, i, j, ( 0, -1)) # go west
length += grow(G, i, j, ( 0, 1)) # go east
length += grow(G, i, j, ( 1, 0)) # go north
length += grow(G, i, j, (-1, 0)) # go south
if length > max_length:
# found a new longest runway
p = (i, j)
max_length = length
return p
# The solution above repeatedly recomputes the longest runway we can build for
# each grid row and column, so if we could avoid doing that, we could solve the
# problem in less time. One insight that might help is to realize that, even
# for rows (columns) which aren't fully connected, we can compute the maximum
# connected length for each row (column); that's the longest that the runway can
# possibly be in that dimension if we pick that intersection! Then we look for
# the maximum combined extent in the obvious way. I think?
def island_runway_fast(G):
# row_lengths = []
# for i in range(0, len(G)):
# max_l = 0 # max length for this row
# l = 0 # current length for this row
# for j in range(0, len(G[i])):
# if G[i][j] == 1:
# l += 1
# else:
# l = 0 # reset when we hit water
# if l > max_l:
# max_l = l
#
pass
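# A hedged sketch (not the author's finished solution) along the lines
# suggested above, with one refinement: what matters at a cell is the length
# of the connected run *containing* that cell, per row and per column. Both
# tables can be filled in O(N^2) total, after which we maximize their sum
# over all land cells. Assumes a non-empty rectangular grid; the argmax
# agrees with island_runway, whose total merely over-counts the start cell.
def island_runway_fast_sketch(G):
    rows, cols = len(G), len(G[0])
    h = [[0] * cols for _ in range(rows)]  # horizontal run through each cell
    v = [[0] * cols for _ in range(rows)]  # vertical run through each cell
    for i in range(rows):
        j = 0
        while j < cols:
            if G[i][j] == 1:
                k = j
                while k < cols and G[i][k] == 1:
                    k += 1
                for m in range(j, k):  # every cell in a run shares its length
                    h[i][m] = k - j
                j = k
            else:
                j += 1
    for j in range(cols):
        i = 0
        while i < rows:
            if G[i][j] == 1:
                k = i
                while k < rows and G[k][j] == 1:
                    k += 1
                for m in range(i, k):
                    v[m][j] = k - i
                i = k
            else:
                i += 1
    best, p = 0, None
    for i in range(rows):
        for j in range(cols):
            if G[i][j] == 1 and h[i][j] + v[i][j] > best:
                best, p = h[i][j] + v[i][j], (i, j)
    return p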
def display_island(G):
key = {0: '~', 1: '_'}
for r in G:
for c in r:
print('{0} '.format(key[c]), end='')
print()
def expect(expected, actual):
if expected != actual:
raise ValueError('Expected {0} but got {1}!'.format(expected, actual))
def test_runway():
# The simplest possible island is no island at all.
G = [[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]]
p = island_runway(G)
print('No land:')
display_island(G)
expect(None, p)
print('Runway at: None.')
# If only one row and one column are land, then there's only one possible
# intersection.
G = [[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[1, 1, 1, 1, 1],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0]]
print('Cross-shaped island:')
p = island_runway(G)
display_island(G)
print('Runway at ({0}, {1}).'.format(p[1], p[0])) # put x first in output
expect((2, 2), p)
# Two disconnected islands, where the longest extent in x is on one and the
# longest extent in y is on the other. Tricky!
G = [[1, 1, 1, 1, 0],
[1, 1, 1, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 1, 1, 1]]
print('Tricky island:')
p = island_runway(G)
display_island(G)
print('Runway at ({0}, {1}).'.format(p[1], p[0])) # put x first in output
expect((4, 4), p)
# ------------------------------------------------------------------------------
# Card Shuffling
#
# Question: A deck of 2N cards is split into two piles of exactly N cards each
# and shuffled. Generate all possible arrangements of the deck after 1, 2, ...,
# S shuffles. Note that the shuffles are not perfect shuffles!
# ------------------------------------------------------------------------------
def card_shuffle(n, s):
pass
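# A hedged sketch (the body above was left as a stub), under the assumption
# that a "shuffle" means any riffle of the two piles: an interleaving that
# preserves each pile's internal order. Decks are tuples so they can live in
# a set.
def riffles(left, right):
    if not left:
        yield right
    elif not right:
        yield left
    else:
        for rest in riffles(left[1:], right):
            yield left[:1] + rest
        for rest in riffles(left, right[1:]):
            yield right[:1] + rest
def card_shuffle_sketch(n, s):
    decks = {tuple(range(2 * n))}
    for _ in range(s):
        decks = {r for d in decks for r in riffles(d[:n], d[n:])}
    return decks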
# ------------------------------------------------------------------------------
# McNugget Numbers
# ------------------------------------------------------------------------------
# Simple recursive solution; basically DFS on a tree, where the nodes are
# numbers and the edges represent buying a carton of one of the given sizes.
def mcnugget(n, sizes=[3, 6, 20]):
if n == 0:
return True
elif n < 0:
return False
else:
for s in sizes:
if mcnugget(n - s, sizes):
return True
return False
# Variant of the recursive solution that prints the path taken in the success case.
def mcnugget_again(n, p, sizes=[3, 6, 20]):
if n == 0:
print(', '.join([str(x) for x in p]))
return True
elif n < 0:
return False
else:
for s in sizes:
if mcnugget_again(n - s, p + [s], sizes):
return True
return False
# Builds the table for the DP solution and returns it.
def mcnugget_table(n, sizes=[3, 6, 20]):
# Build a table, starting from 0. For the k-th entry in the table, we store
# True if any of the {k-m for m \in sizes} slots are True, and False
# otherwise.
table = [False] * (n+1)
table[0] = True # we can always buy 0 McNuggets
for k in range(1, n+1):
y = False
for m in sizes:
if k - m >= 0 and table[k - m]:
y = True
table[k] = y
return table
def mcnugget_dp(n, sizes=[3, 6, 20]):
table = mcnugget_table(n, sizes)
return table[-1]
# Simple solution: we can buy N McNuggets if (given sizes of [3,6,20]),
# n % 20 == 0 or n % 3 == 0 or (n % 20) % 3 == 0.
#
# Note that this solution doesn't actually work! This is an example of a
# frequently encountered incorrect solution.
def mcnugget_simple(n, sizes=[3, 6, 20]):
for s in sizes[::-1]:
n = n % s
return n == 0
# Variation of the above where we remove sizes if we get a false result; still
# doesn't work.
#
# One counter-example, with the default sizes [3, 6, 20]: n = 41 is
# purchasable (41 = 20 + 7*3), but every greedy modulo pass fails, so
# mcnugget_simple_2(41) returns False. TODO: a general proof of why this
# won't work.
def mcnugget_simple_2(n, sizes=[3, 6, 20]):
s = sizes[:] # copy list so pop doesn't mutate original
while s:
if mcnugget_simple(n, s):
return True
s.pop() # didn't work, try again without largest size
return False
# Test another McNugget solution against the known good DP solution and return
# inputs where it fails.
def mcnugget_test(fn, sizes=[3, 6, 20]):
a = set(filter(lambda x: mcnugget_dp(x, sizes), range(1, 101)))
b = set(filter(lambda x: fn(x, sizes), range(1, 101)))
return a ^ b
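# An added check (not from the original) of the counter-example noted above:
# the DP oracle and mcnugget_simple_2 disagree on 41.
def test_mcnugget_simple_2():
    assert 41 in mcnugget_test(mcnugget_simple_2)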
# TODO: balance a mobile with unequal weights (David's question)
# ------------------------------------------------------------------------------
# Tree Print by Levels
# ------------------------------------------------------------------------------
class Node(object):
def __init__(self, data, left=None, right=None):
self.data = data
self.left = left
self.right = right
def __repr__(self):
s = 'Node({0}'.format(self.data)
if self.left:
s += ', '
s += repr(self.left)
if self.right:
s += ', '
s += repr(self.right)
s += ')'
return s
def tree_print_by_levels(T):
def helper(node, levels, depth):
if node:
l = levels.get(depth, [])
l.append(node)
levels[depth] = l
helper(node.left, levels, depth + 1)
helper(node.right, levels, depth + 1)
# dictionary, keys = depth in tree, values = list of nodes at that depth
levels = {}
    helper(T, levels, 0)
    # print the levels in order of depth
    for k in sorted(levels):
        print('{0}: {1}'.format(k, ', '.join([str(x.data) for x in levels[k]])))
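# An added usage sketch (not from the original); should print:
#   0: 1
#   1: 2, 3
#   2: 4
def test_tree_print_by_levels():
    tree_print_by_levels(Node(1, Node(2, Node(4)), Node(3)))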
# ------------------------------------------------------------------------------
# Balance Parentheses
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given a string, remove the minimum number of characters necessary to balance
# the parentheses in the string. For example:
#
# f("") => ""
# f("()") => "()"
# f(")a(b)c(") => "a(b)c"
#
# Solution
# --------
# We can do this in two passes: one to count the number of closing braces, the
# other to find the matching open braces and to do the mutations. Note that
# strings are immutable in Python; by requiring the input be a list, we can do
# the mutations in-place, so that we use only a constant amount of extra space.
# If we can't require the input to be a list then this solution requires a
# linear amount of space (we'd need to turn the string into a list ourselves so
# that we can mutate it). The resulting solution requires O(n) time.
#
# Ref: interview question used at Facebook.
# ------------------------------------------------------------------------------
# For brevity, we take a list, since they are mutable and strings are not.
#
# This solution is the least efficient. We simply do a brute force search for
# each open brace, looking for a matching close brace.
def balance_parens_bad(s):
pass
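# A hedged sketch of the brute force described above: for each open paren,
# scan right for the first unclaimed close paren; keep only matched parens
# and non-paren characters. O(n^2) time.
def balance_parens_bad_sketch(s):
    keep = [c not in '()' for c in s]
    for i in range(len(s)):
        if s[i] == '(':
            for j in range(i + 1, len(s)):
                if s[j] == ')' and not keep[j]:
                    keep[i] = keep[j] = True
                    break
    return [c for c, k in zip(s, keep) if k]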
# This solution is optimal: it uses linear time and constant additional space.
def balance_parens(s):
remaining_close = s.count(')') # pass one, count right braces
nest_count = 0
j = 0
for i in range(len(s)): # pass two, remove unmatched braces
c = s[i]
if c == ')':
remaining_close -= 1
if nest_count:
# this close brace matches an open brace
nest_count -= 1
else:
# this close brace doesn't match an open brace
continue
if s[i] == '(':
if nest_count == remaining_close: # no more close braces to match
continue
nest_count += 1
# if we got here, we want to output this character
s[j] = c
j += 1
return s[:j] # trim list to only output characters
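# An added check (not from the original) for the example in the question;
# note the input must be a mutable list.
def test_balance_parens():
    assert balance_parens(list(')a(b)c(')) == list('a(b)c')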
# -----------------------------------------------------------------------------
# List Problems
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Deep Copy List
#
# Question: Given a list of nodes, where each node has a pointer to the next
# node, and a pointer to another node on the list, return a deep copy of the
# list. The "extra" pointer in each node may point to any entry in the list,
# including the node itself, or null.
#
# Solution: We proceed from the head, copying nodes one at a time. When we
# encounter a node, there are 4 possible values for its "extra" pointer:
#
# 1) null
# 2) itself
# 3) a node we've already copied
# 4) a node we haven't encountered yet
#
# Cases (1) and (2) are trivial. To handle (3) and (4), we keep a map from
# nodes in the source list to nodes in the destination list. If the node is
# already present in the map, then we can use the value to set the "extra"
# pointer. If the node is not present yet, we allocate it and add it to the
# map.
#
# Ref: Interview question used at Facebook.
# -----------------------------------------------------------------------------
class ExtraNode(object):
def __init__(self, data, next_=None, extra=None):
self.data = data
self.next_ = next_
self.extra = extra
def print_extra_list(head):
while head:
extra_data = None
if head.extra:
extra_data = head.extra.data
print('{0} -> {1}'.format(head.data, extra_data))
head = head.next_
def deep_copy_list(head):
d = {}
prev = None
l = None
while head:
# Copy the node
n = None
if not head in d:
n = ExtraNode(head.data, None, None)
d[head] = n
else:
n = d[head]
print('n={}'.format(n))
# Save a pointer to the head of the copied list.
if not l:
l = n
# If we have a previous node, make it point to this one.
if prev:
prev.next_ = n
prev = n
# Fix up extra pointer.
ex = head.extra
if ex:
            if ex in d:
                n.extra = d[ex]  # point at the copy, not the original node
else:
n.extra = ExtraNode(ex.data, None, None)
d[ex] = n.extra
head = head.next_
return l
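# An added usage sketch (not from the original): two nodes whose extra
# pointers cross; the copy must not point back into the source list.
def test_deep_copy_list():
    a = ExtraNode('a')
    b = ExtraNode('b', None, a)
    a.next_ = b
    a.extra = b
    c = deep_copy_list(a)
    assert c is not a and c.next_ is not b
    assert c.extra is c.next_
    assert c.next_.extra is c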
# ------------------------------------------------------------------------------
# Has Duplicates
# ------------------------------------------------------------------------------
#
# Question
# --------
# Given a list of integers of length N, in which every number is between 1 and
# N, return True iff the list contains at least one duplicate entry.
#
# Solution
# --------
# Trivial.
#
# Ref: practice interview question used at Facebook.
# ------------------------------------------------------------------------------
# Linear time, linear space.
def has_duplicates(x):
seen = [False] * (len(x) + 1)
for i in x:
if seen[i]:
return True
else:
seen[i] = True
return False
# Sorting first lets us do this in-place, if we're allowed to modify the original
# list. This takes O(n lg n) time.
def has_duplicates_sort(x):
x.sort()
for i in range(0, len(x) - 1):
if x[i] == x[i+1]:
return True
return False
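# An added check (not from the original) exercising both variants.
def test_has_duplicates():
    assert has_duplicates([1, 2, 2])
    assert not has_duplicates_sort([3, 1, 2])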
| 34.823259
| 104
| 0.52909
|
22b06532cdd180bde8460253d9a2c781b4d77746
| 4,553
|
py
|
Python
|
examples/testapp/testapp/views.py
|
benthomasson/gevent-socketio
|
0f9bd2744af033b7cba57bfd5b82106592e9f667
|
[
"BSD-3-Clause"
] | 625
|
2015-01-05T04:11:59.000Z
|
2022-03-14T13:29:59.000Z
|
examples/testapp/testapp/views.py
|
benthomasson/gevent-socketio
|
0f9bd2744af033b7cba57bfd5b82106592e9f667
|
[
"BSD-3-Clause"
] | 53
|
2015-01-30T07:55:45.000Z
|
2021-02-28T10:50:34.000Z
|
examples/testapp/testapp/views.py
|
benthomasson/gevent-socketio
|
0f9bd2744af033b7cba57bfd5b82106592e9f667
|
[
"BSD-3-Clause"
] | 213
|
2015-01-05T10:18:51.000Z
|
2022-01-23T08:57:38.000Z
|
from pyramid.view import view_config
import os
import gevent
from socketio import socketio_manage
from socketio.namespace import BaseNamespace
from socketio.mixins import RoomsMixin, BroadcastMixin
from gevent import socket
def index(request):
""" Base view to load our template """
return {}
"""
ACK model:
The client sends a message of the sort:
{type: 'message',
id: 140,
ack: true,
endpoint: '/tobi',
data: ''
}
The 'ack' value is 'true', marking that we want an automatic 'ack' when it
receives the packet. The Node.js version sends the ack itself, without any
server-side code interaction. It dispatches the packet only after sending back
an ack, so the ack isn't really a reply. It's just marking the server received
it, but not if the event/message/json was properly processed.
The automated reply from such a request is:
{type: 'ack',
ackId: '140',
endpoint: '',
args: []
}
Where 'ackId' corresponds to the 'id' of the originating message. Upon
reception of this 'ack' message, the client then looks in an object if there
is a callback function to call associated with this message id (140). If so,
runs it, otherwise, drops the packet.
There is a second way to ask for an ack, sending a packet like this:
{type: 'event',
id: 1,
ack: 'data',
endpoint: '',
name: 'chat',
args: ['', '']
}
{type: 'json',
id: 1,
ack: 'data',
endpoint: '',
data: {a: 'b'}
}
.. the same goes for a 'message' packet, which has the 'ack' equal to 'data'.
When the server receives such a packet, it dispatches the corresponding event
(either the named event specified in an 'event' type packet, or 'message' or
'json', if the type is so), and *adds* as a parameter, in addition to the
'args' passed by the event (or 'data' for 'message'/'json'), the ack() function
to call (it encloses the packet 'id' already). Any number of arguments passed
to that 'ack()' function will be passed on to the client-side, and given as
parameter on the client-side function.
That is the returning 'ack' message, with the data ready to be passed as
arguments to the saved callback on the client side:
{type: 'ack',
ackId: '12',
endpoint: '',
args: ['woot', 'wa']
}
"""
class GlobalIONamespace(BaseNamespace, BroadcastMixin):
def on_chat(self, *args):
self.emit("bob", {'hello': 'world'})
print "Received chat message", args
self.broadcast_event_not_me('chat', *args)
def recv_connect(self):
print "CONNNNNNNN"
self.emit("you_just_connected", {'bravo': 'kid'})
self.spawn(self.cpu_checker_process)
def recv_json(self, data):
self.emit("got_some_json", data)
def on_bob(self, *args):
self.broadcast_event('broadcasted', args)
self.socket['/chat'].emit('bob')
def cpu_checker_process(self):
"""This will be a greenlet"""
ret = os.system("cat /proc/cpu/stuff")
self.emit("cpu_value", ret)
class ChatIONamespace(BaseNamespace, RoomsMixin):
def on_mymessage(self, msg):
print "In on_mymessage"
self.send("little message back")
self.send({'blah': 'blah'}, json=True)
for x in xrange(2):
self.emit("pack", {'the': 'more', 'you': 'can'})
def on_my_callback(self, packet):
return (1, 2)
def on_trigger_server_callback(self, superbob):
def cb():
print "OK, WE WERE CALLED BACK BY THE ACK! THANKS :)"
self.emit('callmeback', 'this is a first param',
'this is the last param', callback=cb)
def cb2(param1, param2):
print "OK, GOT THOSE VALUES BACK BY CB", param1, param2
self.emit('callmeback', 'this is a first param',
'this is the last param', callback=cb2)
def on_rtc_invite(self, sdp):
print "Got an RTC invite, now pushing to others..."
self.emit_to_room('room1', 'rtc_invite', self.session['nickname'],
sdp)
def recv_connect(self):
self.session['nickname'] = 'guest123'
self.join('room1')
def recv_message(self, data):
print "Received a 'message' with data:", data
def on_disconnect_me(self, data):
print "Disconnecting you buddy", data
self.disconnect()
nsmap = {'': GlobalIONamespace,
'/chat': ChatIONamespace}
@view_config(route_name='socket_io')
def socketio_service(request):
""" The view that will launch the socketio listener """
socketio_manage(request.environ, namespaces=nsmap, request=request)
return {}
| 29
| 79
| 0.654514
|
2ae0b105061b41403efcb0d9dad4680380011011
| 7,656
|
py
|
Python
|
tests/slack_bolt/context/test_ack.py
|
hirosassa/bolt-python
|
befc3a1463f3ac8dbb780d66decc304e2bdf3e7a
|
[
"MIT"
] | 504
|
2020-08-07T05:02:57.000Z
|
2022-03-31T14:32:46.000Z
|
tests/slack_bolt/context/test_ack.py
|
hirosassa/bolt-python
|
befc3a1463f3ac8dbb780d66decc304e2bdf3e7a
|
[
"MIT"
] | 560
|
2020-08-07T01:16:06.000Z
|
2022-03-30T00:40:56.000Z
|
tests/slack_bolt/context/test_ack.py
|
hirosassa/bolt-python
|
befc3a1463f3ac8dbb780d66decc304e2bdf3e7a
|
[
"MIT"
] | 150
|
2020-08-07T09:41:14.000Z
|
2022-03-30T04:54:51.000Z
|
from slack_sdk.models.blocks import PlainTextObject, DividerBlock
from slack_sdk.models.views import View
from slack_bolt import Ack, BoltResponse
class TestAck:
def setup_method(self):
pass
def teardown_method(self):
pass
def test_text(self):
ack = Ack()
response: BoltResponse = ack(text="foo")
assert (response.status, response.body) == (200, "foo")
sample_attachments = [
{
"fallback": "Plain-text summary of the attachment.",
"color": "#2eb886",
"pretext": "Optional text that appears above the attachment block",
"author_name": "Bobby Tables",
"author_link": "http://flickr.com/bobby/",
"author_icon": "http://flickr.com/icons/bobby.jpg",
"title": "Slack API Documentation",
"title_link": "https://api.slack.com/",
"text": "Optional text that appears within the attachment",
"fields": [{"title": "Priority", "value": "High", "short": False}],
"image_url": "http://my-website.com/path/to/image.jpg",
"thumb_url": "http://example.com/path/to/thumb.png",
"footer": "Slack API",
"footer_icon": "https://platform.slack-edge.com/img/default_application_icon.png",
"ts": 123456789,
}
]
def test_attachments(self):
ack = Ack()
response: BoltResponse = ack(text="foo", attachments=self.sample_attachments)
assert (response.status, response.body) == (
200,
'{"text": "foo", '
'"attachments": [{"fallback": "Plain-text summary of the attachment.", "color": "#2eb886", "pretext": "Optional text that appears above the attachment block", "author_name": "Bobby Tables", "author_link": "http://flickr.com/bobby/", "author_icon": "http://flickr.com/icons/bobby.jpg", "title": "Slack API Documentation", "title_link": "https://api.slack.com/", "text": "Optional text that appears within the attachment", "fields": [{"title": "Priority", "value": "High", "short": false}], "image_url": "http://my-website.com/path/to/image.jpg", "thumb_url": "http://example.com/path/to/thumb.png", "footer": "Slack API", "footer_icon": "https://platform.slack-edge.com/img/default_application_icon.png", "ts": 123456789}]'
"}",
)
def test_blocks(self):
ack = Ack()
response: BoltResponse = ack(text="foo", blocks=[{"type": "divider"}])
assert (response.status, response.body) == (
200,
'{"text": "foo", "blocks": [{"type": "divider"}]}',
)
def test_unfurl_options(self):
ack = Ack()
response: BoltResponse = ack(
text="foo",
blocks=[{"type": "divider"}],
unfurl_links=True,
unfurl_media=True,
)
assert (response.status, response.body) == (
200,
'{"text": "foo", "unfurl_links": true, "unfurl_media": true, "blocks": [{"type": "divider"}]}',
)
sample_options = [{"text": {"type": "plain_text", "text": "Maru"}, "value": "maru"}]
def test_options(self):
ack = Ack()
response: BoltResponse = ack(text="foo", options=self.sample_options)
assert response.status == 200
assert (
response.body
== '{"options": [{"text": {"type": "plain_text", "text": "Maru"}, "value": "maru"}]}'
)
sample_option_groups = [
{
"label": {"type": "plain_text", "text": "Group 1"},
"options": [
{"text": {"type": "plain_text", "text": "Option 1"}, "value": "1-1"},
{"text": {"type": "plain_text", "text": "Option 2"}, "value": "1-2"},
],
},
{
"label": {"type": "plain_text", "text": "Group 2"},
"options": [
{"text": {"type": "plain_text", "text": "Option 1"}, "value": "2-1"},
],
},
]
def test_option_groups(self):
ack = Ack()
response: BoltResponse = ack(
text="foo", option_groups=self.sample_option_groups
)
assert response.status == 200
assert response.body.startswith('{"option_groups":')
def test_response_type(self):
ack = Ack()
response: BoltResponse = ack(text="foo", response_type="in_channel")
assert (response.status, response.body) == (
200,
'{"text": "foo", "response_type": "in_channel"}',
)
def test_dialog_errors(self):
expected_body = '{"errors": [{"name": "loc_origin", "error": "Pickup Location must be longer than 3 characters"}]}'
errors = [
{
"name": "loc_origin",
"error": "Pickup Location must be longer than 3 characters",
}
]
ack = Ack()
response: BoltResponse = ack(errors=errors)
assert (response.status, response.body) == (200, expected_body)
response: BoltResponse = ack({"errors": errors})
assert (response.status, response.body) == (200, expected_body)
def test_view_errors(self):
ack = Ack()
response: BoltResponse = ack(
response_action="errors",
errors={
"block_title": "Title is required",
"block_description": "Description must be longer than 10 characters",
},
)
assert (response.status, response.body) == (
200,
'{"response_action": "errors", '
'"errors": {'
'"block_title": "Title is required", '
'"block_description": "Description must be longer than 10 characters"'
"}"
"}",
)
def test_view_update(self):
ack = Ack()
response: BoltResponse = ack(
response_action="update",
view={
"type": "modal",
"callback_id": "view-id",
"title": {
"type": "plain_text",
"text": "My App",
},
"close": {
"type": "plain_text",
"text": "Cancel",
},
"blocks": [{"type": "divider", "block_id": "b"}],
},
)
assert (response.status, response.body) == (
200,
'{"response_action": "update", '
'"view": {'
'"type": "modal", '
'"callback_id": "view-id", '
'"title": {"type": "plain_text", "text": "My App"}, '
'"close": {"type": "plain_text", "text": "Cancel"}, '
'"blocks": [{"type": "divider", "block_id": "b"}]'
"}"
"}",
)
def test_view_update_2(self):
ack = Ack()
response: BoltResponse = ack(
response_action="update",
view=View(
type="modal",
callback_id="view-id",
title=PlainTextObject(text="My App"),
close=PlainTextObject(text="Cancel"),
blocks=[DividerBlock(block_id="b")],
),
)
assert (response.status, response.body) == (
200,
""
'{"response_action": "update", '
'"view": {'
'"blocks": [{"block_id": "b", "type": "divider"}], '
'"callback_id": "view-id", '
'"close": {"text": "Cancel", "type": "plain_text"}, '
'"title": {"text": "My App", "type": "plain_text"}, '
'"type": "modal"'
"}"
"}",
)
| 37.714286
| 734
| 0.502874
|
2f9e1ef7cc33f61994ce9aa5e62ad860a795bc58
| 1,974
|
py
|
Python
|
src/beaches_test.py
|
nathaningle/beaches
|
d2aef7b635657d8c5b9f1551755d38b41c2611a2
|
[
"0BSD"
] | null | null | null |
src/beaches_test.py
|
nathaningle/beaches
|
d2aef7b635657d8c5b9f1551755d38b41c2611a2
|
[
"0BSD"
] | null | null | null |
src/beaches_test.py
|
nathaningle/beaches
|
d2aef7b635657d8c5b9f1551755d38b41c2611a2
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
#
# ISC License
#
# Copyright (c) 2021, Nathan Ingle
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
from ipaddress import IPv4Network
from beaches import make_net
class TestBeaches(unittest.TestCase):
"""Test suite for Beaches."""
def test_make_net(self):
self.assertEqual(make_net('0/0'), IPv4Network('0.0.0.0/0'))
self.assertEqual(make_net('10/8'), IPv4Network('10.0.0.0/8'))
self.assertEqual(make_net('172.16/16'), IPv4Network('172.16.0.0/16'))
self.assertEqual(make_net('192.168.0/24'), IPv4Network('192.168.0.0/24'))
self.assertEqual(make_net('10.0.0.1/32'), IPv4Network('10.0.0.1/32'))
# Underspecified networks should fail.
with self.assertRaises(ValueError):
make_net('10/9')
with self.assertRaises(ValueError):
make_net('172.16/17')
with self.assertRaises(ValueError):
make_net('192.168.0/25')
# Overspecified networks should fail.
with self.assertRaises(ValueError):
make_net('10/6')
with self.assertRaises(ValueError):
make_net('172.17/15')
with self.assertRaises(ValueError):
make_net('192.168.1/23')
if __name__ == '__main__':
unittest.main(verbosity=2)
| 37.245283
| 81
| 0.68997
|
d92e2f074ecb4b5b3d6acf05e7f95c05fd3ead07
| 2,297
|
py
|
Python
|
reviewboard/accounts/tests/test_x509_auth_middleware.py
|
pombredanne/reviewboard
|
15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d
|
[
"MIT"
] | null | null | null |
reviewboard/accounts/tests/test_x509_auth_middleware.py
|
pombredanne/reviewboard
|
15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d
|
[
"MIT"
] | null | null | null |
reviewboard/accounts/tests/test_x509_auth_middleware.py
|
pombredanne/reviewboard
|
15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d
|
[
"MIT"
] | null | null | null |
"""Unit tests for reviewboard.accounts.middleware.x509_auth_middleware."""
from django.contrib.auth.models import AnonymousUser
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpResponse
from django.test.client import RequestFactory
from djblets.siteconfig.models import SiteConfiguration
from reviewboard.accounts.backends import X509Backend
from reviewboard.accounts.middleware import x509_auth_middleware
from reviewboard.testing import TestCase
class X509AuthMiddlewareTests(TestCase):
"""Unit tests for reviewboard.accounts.middleware.x509_auth_middleware."""
fixtures = ['test_users']
def setUp(self):
super(X509AuthMiddlewareTests, self).setUp()
self.middleware = x509_auth_middleware(
lambda request: HttpResponse(''))
self.siteconfig = SiteConfiguration.objects.get_current()
self.enabled_settings = {
'auth_backend': X509Backend.backend_id,
}
self.request = RequestFactory().get('/')
self.request.user = AnonymousUser()
self.request.is_secure = lambda: True
SessionMiddleware().process_request(self.request)
def test_process_request_without_enabled(self):
"""Testing x509_auth_middleware without backend enabled
"""
self.request.environ['SSL_CLIENT_S_DN_CN'] = 'doc'
self.middleware(self.request)
self.assertFalse(self.request.user.is_authenticated)
def test_process_request_with_enabled_and_no_username(self):
"""Testing x509_auth_middleware with backend enabled and
no username environment variable present
"""
with self.siteconfig_settings(self.enabled_settings):
self.middleware(self.request)
self.assertFalse(self.request.user.is_authenticated)
def test_process_request_with_enabled_and_username(self):
"""Testing x509_auth_middleware with backend enabled and
username environment variable present
"""
self.request.environ['SSL_CLIENT_S_DN_CN'] = 'doc'
with self.siteconfig_settings(self.enabled_settings):
self.middleware(self.request)
self.assertTrue(self.request.user.is_authenticated)
self.assertEqual(self.request.user.username, 'doc')
| 35.338462
| 78
| 0.729212
|
f9efda3798f41cbe083aff647662f13dbce6aba6
| 2,719
|
py
|
Python
|
server/deploy_code.py
|
opendevops-cn/codo-publish
|
ff01ddbcaadc503314a3b4f1bb5919864b9c82ab
|
[
"MIT"
] | 13
|
2019-01-09T14:56:05.000Z
|
2021-11-16T03:12:56.000Z
|
server/deploy_code.py
|
opendevops-cn/codo-publish
|
ff01ddbcaadc503314a3b4f1bb5919864b9c82ab
|
[
"MIT"
] | null | null | null |
server/deploy_code.py
|
opendevops-cn/codo-publish
|
ff01ddbcaadc503314a3b4f1bb5919864b9c82ab
|
[
"MIT"
] | 25
|
2019-01-24T01:50:49.000Z
|
2021-12-08T14:30:53.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/12/7 13:42
# @Author : Fred Yang
# @File : deploy_code.py
# @Role : Deploy code: push it out and publish it to the final code directory
import os
import sys
Base_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(Base_DIR)
import uuid
from public import exec_shell
from public import exec_thread
from get_publish_info import get_publish_data, get_all_hosts
import fire
class DeployCode():
def __init__(self, data):
        self.publish_path = data.get('publish_path')  # publish directory
        self.repository = data.get('repository')  # code repository
        self.repo_name = self.repository.split('/')[-1].replace('.git', '')  # repository name
        self.uuid_file = '/tmp/publish_{}'.format(uuid.uuid1())  # marker file used to detect errors
def code_deploy(self, host):
if not isinstance(host, dict):
raise ValueError()
        '''The code under /tmp was uploaded by the upload_code script'''
tmp_code_path = '/tmp/{}'.format(self.repo_name)
# if not os.path.exists(tmp_code_path):
# print('[Error]: No code found')
# sys.exit(-100)
ip = host.get('ip')
port = host.get('port', 22)
user = host.get('user', 'root')
password = host.get('password')
        # code_path = self.publish_path + self.repo_name
        # deploy_cmd = "sshpass -p {} rsync -ahqzt --delete -e 'ssh -p {} -o StrictHostKeyChecking=no ' {} {}@{}:{}".format(
        #     password, port, tmp_code_path, user, ip, self.publish_path)
        rsync_cmd = 'rsync -ahqzt --delete {} {}'.format(tmp_code_path, self.publish_path)
        deploy_cmd = "sshpass -p {} ssh -p {} -o StrictHostKeyChecking=no {}@{} '{}'".format(
            password,
            port,
            user, ip,
            rsync_cmd)
        # print('[CMD:]', deploy_cmd)
        try:
            deploy_status, deploy_output = exec_shell(deploy_cmd)
            if deploy_status == 0:
                print('[Success]: Host:{} published successfully'.format(ip))
            else:
                os.mknod(self.uuid_file)
                print('[Error]: Host:{} failed, error: {}'.format(ip, deploy_output))
                exit(-3)
        except Exception as e:
            print(e)
            exit(-500)
def check_err(self):
if os.path.exists(self.uuid_file):
print('[Error]')
exit(-1)
def main(flow_id):
    print('[INFO]: This step pushes/syncs the code to your code directory')
    data = get_publish_data(flow_id)  # fetch the publish info
obj = DeployCode(data)
all_hosts = get_all_hosts(flow_id)
exec_thread(func=obj.code_deploy, iterable1=all_hosts)
obj.check_err()
if __name__ == '__main__':
fire.Fire(main)
| 32.759036
| 127
| 0.568224
|
33e6576780706d0357615f441c85472524869708
| 305
|
py
|
Python
|
2016/07/biomass-energy-20160622/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2016/07/biomass-energy-20160622/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2016/07/biomass-energy-20160622/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1Ckcepy7l-wOkqc_NFHuY3vyCVBccBZnPODrYWfuCm1A'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| 21.785714
| 77
| 0.816393
|
c9e93173482d493dc40fcca14e7f8b3c229139fa
| 2,742
|
py
|
Python
|
2017/get_all.py
|
badgercl/servel-election-results
|
68d5f746d0c8bc01f335954ad2cd992064f88908
|
[
"MIT"
] | null | null | null |
2017/get_all.py
|
badgercl/servel-election-results
|
68d5f746d0c8bc01f335954ad2cd992064f88908
|
[
"MIT"
] | null | null | null |
2017/get_all.py
|
badgercl/servel-election-results
|
68d5f746d0c8bc01f335954ad2cd992064f88908
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf8 -*-
import requests
import json
from time import sleep
import os
import sys
reload(sys)
sys.setdefaultencoding('utf8')
def getAll():
d = []
j = get_data(0,'all')
for r in j:
d.append(getCircSenatoriales(r))
return d
def getCircSenatoriales(c):
j = get_data(c['c'],'senatorial')
c['senatoriales'] = []
for r in j:
c['senatoriales'].append(getDistritos(r))
return c
def getDistritos(s):
j = get_data(s['c'], 'distritro')
s['distritos'] = []
for r in j:
s['distritos'].append( getComunas(r) )
return s
def getComunas(d):
j = get_data(d['c'], 'comuna')
d['comunas'] = []
for r in j:
d['comunas'].append( getCircElectorales(r) )
return d
def getCircElectorales(c):
j = get_data(c['c'], 'electoral')
c['electoral'] = []
for r in j:
c['electoral'].append( getLocales(r) )
return c
def getLocales(c):
j = get_data(c['c'], 'local')
c['mesas'] = []
for r in j:
c['mesas'].append(getMesas(r))
return c
def getMesas(l):
j = get_data(l['c'], 'mesa')
l['conteo'] = []
for r in j:
l['conteo'].append(getConteo(r))
return l
def getConteo(m):
j = get_data(m['c'], 'conteo')
return j
def get_url(i,t):
if t == 'all': url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/regiones/all.json"
elif t == 'senatorial': url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/circ_senatorial/byregion/{}.json".format(i)
elif t == 'distritro': url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/distritos/bycirc_senatorial/{}.json".format(i)
elif t == 'comuna':url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/comunas/bydistrito/{}.json".format(i)
elif t == 'electoral':url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/circ_electoral/bycomuna/{}.json".format(i)
elif t == 'local':url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/locales/bycirc_electoral/{}.json".format(i)
elif t == 'mesa':url = "http://www.servelelecciones.cl/data/elecciones_presidente/filters/mesas/bylocales/{}.json".format(i)
elif t == 'conteo':url= "http://www.servelelecciones.cl/data/elecciones_presidente/computomesas/{}.json".format(i)
return url
def get_data(i, t):
url = ""
path = "./data/{}_{}.json".format(t,i)
if os.path.isfile( path ) :
print("cache: {}".format(path))
f = open ( path )
res = json.loads(f.read())
f.close()
return res
else:
sleep(1)
url = get_url(i,t)
print("req: {}".format(url))
r = requests.get(url)
j = json.loads(r.text)
save(path, r.text)
return j
def save(path,txt):
f = open(path, 'w')
f.write(txt)
f.close()
res = getAll()
f = open('servel2017.json', 'w')
f.write(json.dumps(res))
f.close()
| 26.114286
| 143
| 0.670314
|