hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1242b70d24751dd58f2e174b2b6dafa4b0cf5800 | 600 | py | Python | netbox_secretstore/__init__.py | motad333/netbox-secretstore | 6ddb3f7bd1b83cc96f140338926aa56a99ce21f3 | [
"Apache-2.0"
] | null | null | null | netbox_secretstore/__init__.py | motad333/netbox-secretstore | 6ddb3f7bd1b83cc96f140338926aa56a99ce21f3 | [
"Apache-2.0"
] | null | null | null | netbox_secretstore/__init__.py | motad333/netbox-secretstore | 6ddb3f7bd1b83cc96f140338926aa56a99ce21f3 | [
"Apache-2.0"
] | null | null | null | from extras.plugins import PluginConfig
from django.utils.translation import gettext_lazy as _
class NetBoxSecretStore(PluginConfig):
    """Plugin configuration for the NetBox secret-store plugin."""

    # Internal plugin identifiers (module name and URL prefix).
    name = 'netbox_secretstore'
    base_url = 'netbox_secretstore'

    # Human-readable metadata shown in the NetBox plugin listing.
    verbose_name = _('Netbox Secret Store')
    description = _('A Secret Storage for NetBox')
    version = '1.0.8'
    author = 'NetBox Maintainers'
    author_email = ''

    # Minimum compatible NetBox version; no settings are mandatory.
    min_version = '3.0.0'
    required_settings = []

    # Cache all operations on every model of this plugin.
    caching_config = {'*': {'ops': 'all'}}

    # Defaults that installations may override via PLUGINS_CONFIG.
    default_settings = {'public_key_size': 2048}


# NetBox discovers the plugin through this module-level ``config`` attribute.
config = NetBoxSecretStore
| 24 | 54 | 0.638333 | 474 | 0.79 | 0 | 0 | 0 | 0 | 0 | 0 | 156 | 0.26 |
124700ddcb80e0e1797e3dbe2cb967b7c32e7eb3 | 722 | py | Python | functions/load_respiratory_disease_data.py | rlbarter/covid19-severity-prediction | 8846958576fe9cb54a5e111b5c6c71ab8cc8cd08 | [
"MIT"
] | 2 | 2020-05-15T14:42:02.000Z | 2020-05-22T08:51:47.000Z | functions/load_respiratory_disease_data.py | rahul263-stack/covid19-severity-prediction | f581adb2fccb12d5ab3f3c59ee120f484703edf5 | [
"MIT"
] | null | null | null | functions/load_respiratory_disease_data.py | rahul263-stack/covid19-severity-prediction | f581adb2fccb12d5ab3f3c59ee120f484703edf5 | [
"MIT"
] | null | null | null | import pandas as pd
def loadRespDiseaseSheet(sheet_name):
    """Load county-level respiratory-disease mortality from the IHME workbook.

    Parameters
    ----------
    sheet_name : str
        Name of the worksheet to read, e.g. "Chronic respiratory diseases".

    Returns
    -------
    pandas.DataFrame
        Two columns: 'countyFIPS' (int) and 'Respiratory Mortality'
        (the point estimate from the 'Mortality Rate, 2014*' column).
    """
    filepath = "data/respiratory_disease/IHME_USA_COUNTY_RESP_DISEASE_MORTALITY_1980_2014_NATIONAL_Y2017M09D26.XLSX"
    # BUG FIX: sheet_name was previously ignored — the sheet was hard-coded
    # to "Chronic respiratory diseases". Pass the argument through instead.
    orig_data = pd.read_excel(filepath,
                              sheet_name=sheet_name,
                              skiprows=1,
                              skipfooter=2)
    # Rows without a FIPS code are not county rows; drop them.
    orig_data = orig_data.dropna(subset=["FIPS"])
    # Omit the confidence intervals for now: the cell presumably reads like
    # "12.3 (11.0, 13.7)", so keep only the leading point estimate — TODO confirm.
    resp_mortality = orig_data['Mortality Rate, 2014*'].str.split(expand=True).iloc[:, 0]
    data = pd.DataFrame({'countyFIPS': orig_data['FIPS'].astype(int),
                         'Respiratory Mortality': resp_mortality})
    return data
1247081add64badc149af8da67683a988746de63 | 111,647 | py | Python | angr/procedures/definitions/win32_mfplat.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | angr/procedures/definitions/win32_mfplat.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | angr/procedures/definitions/win32_mfplat.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | # pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module-level logger for this auto-generated definitions module.
_l = logging.getLogger(name=__name__)
# SimLibrary collecting the mfplat.dll function prototypes below.
lib = SimLibrary()
# Register the default calling convention per architecture: stdcall on x86,
# the Microsoft x64 convention on AMD64 (standard for Win32 DLL exports).
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
# DLL name this library's definitions resolve against at load time.
lib.set_library_names("mfplat.dll")
prototypes = \
{
#
'MFSerializeAttributesToStream': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IStream")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttr", "dwOptions", "pStm"]),
#
'MFDeserializeAttributesFromStream': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IStream")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttr", "dwOptions", "pStm"]),
#
'MFCreateTransformActivate': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppActivate"]),
#
'MFCreateSourceResolver': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFSourceResolver"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppISourceResolver"]),
#
'CreatePropertyStore': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IPropertyStore"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppStore"]),
#
'MFGetSupportedSchemes': SimTypeFunction([SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": 
SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, 
align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), 
offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, 
align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPropVarSchemeArray"]),
#
'MFGetSupportedMimeTypes': SimTypeFunction([SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": 
SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, 
align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), 
offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, 
align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPropVarMimeTypeArray"]),
#
'MFGetSystemTime': SimTypeFunction([], SimTypeLongLong(signed=True, label="Int64")),
#
'MFCreateSystemTimeSource': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFPresentationTimeSource"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppSystemTimeSource"]),
#
'MFCreatePresentationDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFStreamDescriptor"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFPresentationDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cStreamDescriptors", "apStreamDescriptors", "ppPresentationDescriptor"]),
#
'MFSerializePresentationDescriptor': SimTypeFunction([SimTypeBottom(label="IMFPresentationDescriptor"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPD", "pcbData", "ppbData"]),
#
'MFDeserializePresentationDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFPresentationDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbData", "pbData", "ppPD"]),
#
'MFCreateStreamDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFStreamDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwStreamIdentifier", "cMediaTypes", "apMediaTypes", "ppDescriptor"]),
#
'MFCreateTrackedSample': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFTrackedSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFSample"]),
#
'MFCreateMFByteStreamOnStream': SimTypeFunction([SimTypeBottom(label="IStream"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pStream", "ppByteStream"]),
#
'MFCreateStreamOnMFByteStream': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="IStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pByteStream", "ppStream"]),
#
'MFCreateMFByteStreamOnStreamEx': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkStream", "ppByteStream"]),
#
'MFCreateStreamOnMFByteStreamEx': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pByteStream", "riid", "ppv"]),
#
'MFCreateMediaTypeFromProperties': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkStream", "ppMediaType"]),
#
'MFCreatePropertiesFromMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaType", "riid", "ppv"]),
#
'MFCreateContentProtectionDevice': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFContentProtectionDevice"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ProtectionSystemId", "ContentProtectionDevice"]),
#
'MFIsContentProtectionDeviceSupported': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ProtectionSystemId", "isSupported"]),
#
'MFCreateContentDecryptorContext': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IMFDXGIDeviceManager"), SimTypeBottom(label="IMFContentProtectionDevice"), SimTypePointer(SimTypeBottom(label="IMFContentDecryptorContext"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidMediaProtectionSystemId", "pD3DManager", "pContentProtectionDevice", "ppContentDecryptorContext"]),
#
'MFStartup': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["Version", "dwFlags"]),
#
'MFShutdown': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFLockPlatform': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFUnlockPlatform': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFPutWorkItem': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "pCallback", "pState"]),
#
'MFPutWorkItem2': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "Priority", "pCallback", "pState"]),
#
'MFPutWorkItemEx': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "pResult"]),
#
'MFPutWorkItemEx2': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "Priority", "pResult"]),
#
'MFPutWaitingWorkItem': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hEvent", "Priority", "pResult", "pKey"]),
#
'MFAllocateSerialWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue", "pdwWorkQueue"]),
#
'MFScheduleWorkItemEx': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypeLongLong(signed=True, label="Int64"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "Timeout", "pKey"]),
#
'MFScheduleWorkItem': SimTypeFunction([SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypeLongLong(signed=True, label="Int64"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pCallback", "pState", "Timeout", "pKey"]),
#
'MFCancelWorkItem': SimTypeFunction([SimTypeLongLong(signed=False, label="UInt64")], SimTypeInt(signed=True, label="Int32"), arg_names=["Key"]),
#
'MFGetTimerPeriodicity': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Periodicity"]),
#
'MFAddPeriodicCallback': SimTypeFunction([SimTypePointer(SimTypeFunction([SimTypeBottom(label="IUnknown")], SimTypeBottom(label="Void"), arg_names=["pContext"]), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Callback", "pContext", "pdwKey"]),
#
'MFRemovePeriodicCallback': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwKey"]),
#
'MFAllocateWorkQueueEx': SimTypeFunction([SimTypeInt(signed=False, label="MFASYNC_WORKQUEUE_TYPE"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["WorkQueueType", "pdwWorkQueue"]),
#
'MFAllocateWorkQueue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pdwWorkQueue"]),
#
'MFLockWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue"]),
#
'MFUnlockWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue"]),
#
'MFBeginRegisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "wszClass", "dwTaskId", "pDoneCallback", "pDoneState"]),
#
'MFBeginRegisterWorkQueueWithMMCSSEx': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "wszClass", "dwTaskId", "lPriority", "pDoneCallback", "pDoneState"]),
#
'MFEndRegisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "pdwTaskId"]),
#
'MFBeginUnregisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pDoneCallback", "pDoneState"]),
#
'MFEndUnregisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult"]),
#
'MFGetWorkQueueMMCSSClass': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pwszClass", "pcchClass"]),
#
'MFGetWorkQueueMMCSSTaskId': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pdwTaskId"]),
#
'MFRegisterPlatformWithMMCSS': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["wszClass", "pdwTaskId", "lPriority"]),
#
'MFUnregisterPlatformFromMMCSS': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFLockSharedWorkQueue': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["wszClass", "BasePriority", "pdwTaskId", "pID"]),
#
'MFGetWorkQueueMMCSSPriority': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "lPriority"]),
#
'MFCreateAsyncResult': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFAsyncResult"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkObject", "pCallback", "punkState", "ppAsyncResult"]),
#
'MFInvokeCallback': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAsyncResult"]),
#
'MFCreateFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "pwszFileURL", "ppIByteStream"]),
#
'MFCreateTempFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "ppIByteStream"]),
#
'MFBeginCreateFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "pwszFilePath", "pCallback", "pState", "ppCancelCookie"]),
#
'MFEndCreateFile': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "ppFile"]),
#
'MFCancelCreateFile': SimTypeFunction([SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["pCancelCookie"]),
#
'MFCreateMemoryBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbMaxLength", "ppBuffer"]),
#
'MFCreateMediaBufferWrapper': SimTypeFunction([SimTypeBottom(label="IMFMediaBuffer"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pBuffer", "cbOffset", "dwLength", "ppBuffer"]),
#
'MFCreateLegacyMediaBufferOnMFMediaBuffer': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypeBottom(label="IMFMediaBuffer"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pMFMediaBuffer", "cbOffset", "ppMediaBuffer"]),
#
'MFMapDX9FormatToDXGIFormat': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="DXGI_FORMAT"), arg_names=["dx9"]),
#
'MFMapDXGIFormatToDX9Format': SimTypeFunction([SimTypeInt(signed=False, label="DXGI_FORMAT")], SimTypeInt(signed=False, label="UInt32"), arg_names=["dx11"]),
#
'MFLockDXGIDeviceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFDXGIDeviceManager"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResetToken", "ppManager"]),
#
'MFUnlockDXGIDeviceManager': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFCreateDXSurfaceBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "fBottomUpWhenLinear", "ppBuffer"]),
#
'MFCreateWICBitmapBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "ppBuffer"]),
#
'MFCreateDXGISurfaceBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "uSubresourceIndex", "fBottomUpWhenLinear", "ppBuffer"]),
#
'MFCreateVideoSampleAllocatorEx': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "ppSampleAllocator"]),
#
'MFCreateDXGIDeviceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFDXGIDeviceManager"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["resetToken", "ppDeviceManager"]),
#
'MFCreateAlignedMemoryBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbMaxLength", "cbAligment", "ppBuffer"]),
#
'MFCreateMediaEvent': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": 
SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": 
SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": 
SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": 
SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaEvent"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["met", "guidExtendedType", "hrStatus", "pvValue", "ppEvent"]),
#
'MFCreateEventQueue': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFMediaEventQueue"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMediaEventQueue"]),
#
'MFCreateSample': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppIMFSample"]),
#
'MFCreateAttributes': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFAttributes", "cInitialSize"]),
#
'MFInitAttributesFromBlob': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pBuf", "cbBufSize"]),
#
'MFGetAttributesAsBlobSize': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pcbBufSize"]),
#
'MFGetAttributesAsBlob': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pBuf", "cbBufSize"]),
#
'MFTRegister': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeBottom(label="Guid"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeBottom(label="IMFAttributes")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes", "pAttributes"]),
#
'MFTUnregister': SimTypeFunction([SimTypeBottom(label="Guid")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT"]),
#
'MFTRegisterLocal': SimTypeFunction([SimTypeBottom(label="IClassFactory"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pClassFactory", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes"]),
#
'MFTUnregisterLocal': SimTypeFunction([SimTypeBottom(label="IClassFactory")], SimTypeInt(signed=True, label="Int32"), arg_names=["pClassFactory"]),
#
'MFTRegisterLocalByCLSID': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["clisdMFT", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes"]),
#
'MFTUnregisterLocalByCLSID': SimTypeFunction([SimTypeBottom(label="Guid")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT"]),
#
'MFTEnum': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypePointer(SimTypeBottom(label="Guid"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pAttributes", "ppclsidMFT", "pcMFTs"]),
#
'MFTEnumEx': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pppMFTActivate", "pnumMFTActivate"]),
#
'MFTEnum2': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pAttributes", "pppMFTActivate", "pnumMFTActivate"]),
#
'MFTGetInfo': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT", "pszName", "ppInputTypes", "pcInputTypes", "ppOutputTypes", "pcOutputTypes", "ppAttributes"]),
#
'MFGetPluginControl': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFPluginControl"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppPluginControl"]),
#
'MFGetMFTMerit': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFT", "cbVerifier", "verifier", "merit"]),
#
'MFRegisterLocalSchemeHandler': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFActivate")], SimTypeInt(signed=True, label="Int32"), arg_names=["szScheme", "pActivate"]),
#
'MFRegisterLocalByteStreamHandler': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFActivate")], SimTypeInt(signed=True, label="Int32"), arg_names=["szFileExtension", "szMimeType", "pActivate"]),
#
'MFCreateMFByteStreamWrapper': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pStream", "ppStreamWrapper"]),
#
'MFCreateMediaExtensionActivate': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["szActivatableClassId", "pConfiguration", "riid", "ppvObject"]),
#
'MFCreateMuxStreamAttributes': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributesToMux", "ppMuxAttribs"]),
#
'MFCreateMuxStreamMediaType': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaTypesToMux", "ppMuxMediaType"]),
#
'MFCreateMuxStreamSample': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSamplesToMux", "ppMuxSample"]),
#
'MFValidateMediaTypeSize': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FormatType", "pBlock", "cbSize"]),
#
'MFCreateMediaType': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFType"]),
#
'MFCreateMFVideoFormatFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": 
SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "ppMFVF", "pcbSize"]),
#
'MFCreateWaveFormatExFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "ppWF", "pcbSize", "Flags"]),
#
'MFInitMediaTypeFromVideoInfoHeader': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pVIH", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromVideoInfoHeader2': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "dwInterlaceFlags": SimTypeInt(signed=False, label="UInt32"), "dwCopyProtectFlags": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioX": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioY": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlFlags": SimTypeInt(signed=False, label="UInt32"), "dwReserved1": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER2", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pVIH2", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromMPEG1VideoInfo': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"hdr": SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER", pack=False, align=None), "dwStartTimeCode": SimTypeInt(signed=False, label="UInt32"), "cbSequenceHeader": SimTypeInt(signed=False, label="UInt32"), "bSequenceHeader": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="MPEG1VIDEOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pMP1VI", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromMPEG2VideoInfo': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"hdr": SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "dwInterlaceFlags": SimTypeInt(signed=False, label="UInt32"), "dwCopyProtectFlags": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioX": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioY": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlFlags": SimTypeInt(signed=False, label="UInt32"), "dwReserved1": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER2", pack=False, align=None), "dwStartTimeCode": SimTypeInt(signed=False, label="UInt32"), "cbSequenceHeader": SimTypeInt(signed=False, label="UInt32"), "dwProfile": SimTypeInt(signed=False, label="UInt32"), "dwLevel": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="MPEG2VIDEOINFO_FLAGS"), "dwSequenceHeader": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="MPEG2VIDEOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), 
arg_names=["pMFType", "pMP2VI", "cbBufSize", "pSubtype"]),
#
'MFCalculateBitmapImageSize': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pBMIH", "cbBufSize", "pcbImageSize", "pbKnown"]),
#
'MFCalculateImageSize': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidSubtype", "unWidth", "unHeight", "pcbImageSize"]),
#
'MFFrameRateToAverageTimePerFrame': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["unNumerator", "unDenominator", "punAverageTimePerFrame"]),
#
'MFAverageTimePerFrameToFrameRate': SimTypeFunction([SimTypeLongLong(signed=False, label="UInt64"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["unAverageTimePerFrame", "punNumerator", "punDenominator"]),
#
'MFInitMediaTypeFromMFVideoFormat': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": 
SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pMFVF", "cbBufSize"]),
#
'MFInitMediaTypeFromWaveFormatEx': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pWaveFormat", "cbBufSize"]),
#
'MFInitMediaTypeFromAMMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pAMType"]),
#
'MFInitAMMediaTypeFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="Guid"), SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "guidFormatBlockType", "pAMType"]),
#
'MFCreateAMMediaTypeFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="Guid"), SimTypePointer(SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "guidFormatBlockType", "ppAMType"]),
#
'MFCompareFullToPartialMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="IMFMediaType")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFTypeFull", "pMFTypePartial"]),
#
'MFWrapMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pOrig", "MajorType", "SubType", "ppWrap"]),
#
'MFUnwrapMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pWrap", "ppOrig"]),
#
'MFCreateVideoMediaType': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "ppIVideoMediaType"]),
#
'MFCreateVideoMediaTypeFromSubtype': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAMSubtype", "ppIVideoMediaType"]),
#
'MFCreateVideoMediaTypeFromBitMapInfoHeader': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="MFVideoInterlaceMode"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pbmihBitMapInfoHeader", "dwPixelAspectRatioX", "dwPixelAspectRatioY", "InterlaceMode", "VideoFlags", "qwFramesPerSecondNumerator", "qwFramesPerSecondDenominator", "dwMaxBitRate", "ppIVideoMediaType"]),
#
'MFGetStrideForBitmapInfoHeader': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["format", "dwWidth", "pStride"]),
#
'MFCreateVideoMediaTypeFromBitMapInfoHeaderEx': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="MFVideoInterlaceMode"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pbmihBitMapInfoHeader", "cbBitMapInfoHeader", "dwPixelAspectRatioX", "dwPixelAspectRatioY", "InterlaceMode", "VideoFlags", "dwFramesPerSecondNumerator", "dwFramesPerSecondDenominator", "dwMaxBitRate", "ppIVideoMediaType"]),
#
'MFCreateMediaTypeFromRepresentation': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidRepresentation", "pvRepresentation", "ppIMediaType"]),
#
'MFCreateAudioMediaType': SimTypeFunction([SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFAudioMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAudioFormat", "ppIAudioMediaType"]),
#
'MFGetUncompressedVideoFormat': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), 
"value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, 
name="MFVIDEOFORMAT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pVideoFormat"]),
#
'MFInitVideoFormat': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="MFStandardVideoFormat")], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "type"]),
#
'MFInitVideoFormat_RGB': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "dwWidth", "dwHeight", "D3Dfmt"]),
#
'MFConvertColorInfoToDXVA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": 
SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pdwToDXVA", "pFromFormat"]),
#
'MFConvertColorInfoFromDXVA': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pToFormat", "dwFromDXVA"]),
#
'MFCopyImage': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "lDestStride", "pSrc", "lSrcStride", "dwWidthInBytes", "dwLines"]),
#
'MFConvertFromFP16Array': SimTypeFunction([SimTypePointer(SimTypeFloat(size=32), label="LPArray", offset=0), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "pSrc", "dwCount"]),
#
'MFConvertToFP16Array': SimTypeFunction([SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypePointer(SimTypeFloat(size=32), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "pSrc", "dwCount"]),
#
'MFCreate2DMediaBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWidth", "dwHeight", "dwFourCC", "fBottomUp", "ppBuffer"]),
#
'MFCreateMediaBufferFromMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeLongLong(signed=True, label="Int64"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaType", "llDuration", "dwMinLength", "dwMinAlignment", "ppBuffer"]),
#
'MFCreateCollection': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFCollection"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppIMFCollection"]),
#
'MFHeapAlloc': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="EAllocationType")], SimTypePointer(SimTypeBottom(label="Void"), offset=0), arg_names=["nSize", "dwFlags", "pszFile", "line", "eat"]),
#
'MFHeapFree': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pv"]),
#
'MFllMulDiv': SimTypeFunction([SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64")], SimTypeLongLong(signed=True, label="Int64"), arg_names=["a", "b", "c", "d"]),
#
'MFGetContentProtectionSystemCLSID': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidProtectionSystemID", "pclsid"]),
#
'MFCombineSamples': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypeBottom(label="IMFSample"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pSampleToAdd", "dwMaxMergedDurationInMS", "pMerged"]),
#
'MFSplitSample': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypePointer(SimTypeBottom(label="IMFSample"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pOutputSamples", "dwOutputSampleMaxCount", "pdwOutputSampleCount"]),
}
lib.set_prototypes(prototypes)
| 360.151613 | 8,410 | 0.736276 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 36,169 | 0.323959 |
1247b2646a2dc945b275a2796fe324446a123943 | 9,319 | py | Python | src/flows.py | act65/mri-reconstruction | 2dcf30e10c37a482f1aab2524c5966d03eb72085 | [
"MIT"
] | 8 | 2018-12-30T10:33:44.000Z | 2021-07-16T09:59:09.000Z | src/flows.py | gongjizhang/mri-reconstruction | 2dcf30e10c37a482f1aab2524c5966d03eb72085 | [
"MIT"
] | 1 | 2018-06-01T03:51:35.000Z | 2018-08-27T03:39:34.000Z | src/flows.py | gongjizhang/mri-reconstruction | 2dcf30e10c37a482f1aab2524c5966d03eb72085 | [
"MIT"
] | 5 | 2018-12-30T10:33:45.000Z | 2021-11-15T11:19:56.000Z | import os
import numpy as np
import urllib
from absl import flags
import tensorflow as tf
import tensorflow_probability as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
flags.DEFINE_float(
"learning_rate", default=0.001, help="Initial learning rate.")
flags.DEFINE_integer(
"epochs", default=100, help="Number of training steps to run.")
flags.DEFINE_string(
"activation",
default="selu",
help="Activation function for all hidden layers.")
flags.DEFINE_integer(
"batch_size",
default=32,
help="Batch size.")
flags.DEFINE_string(
"data_dir",
default="/tmp/mnist",
help="Directory where data is stored (if using real data).")
flags.DEFINE_string(
"model_dir",
default="/tmp/critic/",
help="Directory to put the model's fit.")
flags.DEFINE_integer(
"viz_steps", default=500, help="Frequency at which to save visualizations.")
flags.DEFINE_bool(
"delete_existing",
default=False,
help="If true, deletes existing `model_dir` directory.")
FLAGS = flags.FLAGS
def non_square_det(x, reltol=1e-6):
    """Pseudo-determinant of a (possibly non-square) matrix.

    Computed as the product of the singular values of ``x``; for a full-rank
    matrix this equals sqrt(det(x . x^T)).
    Idea taken from
    https://www.quora.com/How-do-we-calculate-the-determinant-of-a-non-square-matrix

    Args:
        x (tf.Tensor): matrix with shape [..., a, b].
        reltol (float): currently unused -- kept for interface stability; the
            relative-tolerance thresholding below is commented out.

    Returns:
        Product of all singular values of ``x``.
        NOTE(review): ``tf.reduce_prod`` reduces over *every* dimension, so a
        batched input collapses the batch axis too -- confirm that is intended.
    """
    # Alternative (commented out): determinant via the Gram matrix.
    # squared_mat = tf.matmul(x, x, transpose_b=True)
    # return tf.sqrt(tf.linalg.det(squared_mat))
    s = tf.svd(x, compute_uv=False)
    # Alternative (commented out): replace near-zero singular values with 1
    # so that they do not zero out the product.
    # atol = tf.reduce_max(s) * reltol
    # s = tf.diag(tf.where(tf.greater(atol, tf.abs(s)), tf.ones_like(s), s))
    return tf.reduce_prod(s)
def pinv(A, reltol=1e-6):
    """Moore-Penrose pseudo-inverse of ``A`` via SVD.

    Singular values whose magnitude falls at or below
    ``max(s) * reltol`` are treated as zero (their reciprocal is dropped).

    Args:
        A (tf.Tensor): matrix to invert, shape [n, m].
        reltol (float): relative tolerance for discarding singular values.

    Returns:
        tf.Tensor of shape [m, n] such that pinv(A) @ A ~ I on A's row space.
    """
    sigma, left, right = tf.svd(A)
    cutoff = tf.reduce_max(sigma) * reltol
    significant = tf.greater(tf.abs(sigma), cutoff)
    reciprocal = tf.where(significant, 1.0 / sigma, tf.zeros_like(sigma))
    sigma_inv = tf.diag(reciprocal)
    return tf.matmul(right, tf.matmul(sigma_inv, left, transpose_b=True))
class Dense(tfb.Bijector):
    """Affine bijector ``forward(x) = x @ W + b`` mapping ``n_inputs``
    features to ``n_outputs`` features.

    Intended for a hierarchical flow: maps a low-dimensional distribution
    onto a manifold in a higher-dimensional space. Because ``W`` may be
    non-square, the inverse uses the Moore-Penrose pseudo-inverse and the
    log-determinants use the pseudo-determinant (``non_square_det``).
    Structure follows ``tfb.Bijector``.
    """
    def __init__(self, n_inputs, n_outputs, validate_args=False, name=''):
        """
        Args:
            n_inputs (int): the number of input features (last dim)
            n_outputs (int): the target number of features
            validate_args (bool): forwarded to ``tfb.Bijector``.
            name (str): suffix for the variable scope / bijector name.
        """
        # Fixed: `super(self.__class__, self)` breaks under subclassing
        # (infinite recursion); name the class explicitly.
        super(Dense, self).__init__(
            validate_args=validate_args,
            is_constant_jacobian=True,
            forward_min_event_ndims=1,
            name=name)
        self.n_inputs = n_inputs
        self.n_outputs = n_outputs
        with tf.variable_scope('dense'+name):
            self.weights = tf.get_variable(name='weights',
                                           shape=[n_inputs, n_outputs],
                                           dtype=tf.float32,
                                           # initializer=tf.initializers.orthogonal()
                                           )
            self.bias = tf.get_variable(name='bias',
                                        shape=[n_outputs],
                                        dtype=tf.float32,
                                        initializer=tf.initializers.zeros()
                                        )

    @property
    def _is_injective(self):
        return True

    def _forward_event_shape_tensor(self, input_shape):
        # forward maps [..., n_inputs] -> [..., n_outputs].
        # Fixed: previously returned tf.shape() of a python list (i.e. the
        # list's own shape) and used n_inputs/n_outputs swapped.
        return tf.concat([input_shape[:-1], [self.n_outputs]], axis=0)

    def _inverse_event_shape_tensor(self, output_shape):
        # inverse maps [..., n_outputs] -> [..., n_inputs].
        # Fixed: method name was misspelled `_invserse_event_shape_tensor`,
        # so the base class could never dispatch to it.
        return tf.concat([output_shape[:-1], [self.n_inputs]], axis=0)

    def _forward(self, x):
        return tf.matmul(x, self.weights) + self.bias

    def _inverse(self, y):
        weights_inv = pinv(self.weights)
        return tf.matmul(y - self.bias, weights_inv)

    def _forward_log_det_jacobian(self, x):
        # Constant w.r.t. x (is_constant_jacobian=True): log pseudo-det of W.
        return tf.log(non_square_det(self.weights))

    def _inverse_log_det_jacobian(self, y):
        return tf.log(non_square_det(pinv(self.weights)))
def make_mixture(latent_size, mixture_components):
    """Creates a mixture of Gaussians distribution.

    Args:
        latent_size: The dimensionality of the latent representation.
        mixture_components: Number of elements of the mixture.

    Returns:
        A `tf.distributions.Distribution` instance representing the
        distribution over encodings in the absence of any evidence: a fixed
        standard normal for a single component, otherwise a learnable
        mixture of diagonal Gaussians.
    """
    if mixture_components == 1:
        # Single component: fixed standard normal (no learned parameters).
        return tfd.MultivariateNormalDiag(
            loc=tf.zeros([latent_size]),
            scale_identity_multiplier=1.0)

    component_means = tf.get_variable(
        name="loc", shape=[mixture_components, latent_size])
    component_scales_raw = tf.get_variable(
        name="raw_scale_diag", shape=[mixture_components, latent_size])
    component_logits = tf.get_variable(
        name="mixture_logits", shape=[mixture_components])

    gaussians = tfd.MultivariateNormalDiag(
        loc=component_means,
        scale_diag=tf.nn.softplus(component_scales_raw))
    weights = tfd.Categorical(logits=component_logits)
    return tfd.MixtureSameFamily(
        components_distribution=gaussians,
        mixture_distribution=weights,
        name="prior")
def model_fn(features, labels, mode, params, config):
    """
    Builds the model function for use in an estimator.

    Fits a normalizing-flow density model: a learnable mixture-of-Gaussians
    base distribution (8 dims) pushed through a chain of `Dense` bijectors
    up to 784 dims (flattened 28x28 MNIST), trained by minimising
    mean(1 - p(x)).

    Arguments:
        features: The input features for the estimator (dict with key "x").
        labels: The labels, unused here.
        mode: Signifies whether it is train or test or predict.
        params: Some hyperparameters as a dictionary.
        config: The RunConfig, unused here.
    Returns:
        EstimatorSpec: A tf.estimator.EstimatorSpec instance.
    """
    x = features['x']
    global_step = tf.train.get_or_create_global_step()
    with tf.contrib.summary.record_summaries_every_n_global_steps(100, global_step=global_step):
        # construct a multilayer parameterised bijector
        # (8 latent dims -> 32 hidden -> 784 output pixels)
        n_hidden = 8
        width = 32
        n_outputs = 784
        fn = tfb.Chain([
            Dense(width, n_outputs, name='3'),
            # earlier deeper/nonlinear variants, kept for reference:
            # tfb.Softplus(),
            # Dense(width, width, name='2'),
            # tfb.Softplus(),
            # Dense(width, width, name='1'),
            Dense(n_hidden, width, name='0')
        ])
        # use the bijector to map a simple distribution into our density model
        dist = make_mixture(n_hidden, 10)
        # earlier experiments with categorical / Bernoulli base distributions:
        # logits = tf.get_variable(
        #     name="logits", shape=[n_outputs])
        # dist = tfd.RelaxedOneHotCategorical(logits=logits, temperature=1.0)
        # density = tfd.RelaxedBernoulli(logits=logits, temperature=100.0)
        density = tfd.TransformedDistribution(distribution=dist, bijector=fn)
        # maximise the likelihood of the data
        # NOTE(review): loss is mean(1 - p), not negative log-likelihood --
        # presumably deliberate, but confirm; the two weight samples differently.
        p = density.prob(x)
        loss = tf.reduce_mean(1-p) # - 0.1*density.entropy()
        # reg = -density.entropy()
        # tf.summary.scalar('entropy', reg)
        # generate some samples to visualise
        # HACK to get samples to work I had to comment out line 411 of transformed_distribution.py
        samples = density.sample(3)
        tf.summary.image('samples', tf.reshape(samples, [3, 28, 28, 1]))
        # mu = density.mean()
        # tf.summary.image('mean', tf.reshape(mu, [1, 28, 28, 1]))
        opt = tf.train.AdamOptimizer(0.0001)
        gnvs = opt.compute_gradients(loss)
        # clip gradient norms at 10; variables with no gradient get zeros
        gnvs = [(tf.clip_by_norm(g, 10.0) if g is not None else tf.zeros_like(v), v) for g, v in gnvs]
        train_step = opt.apply_gradients(gnvs, global_step=global_step)
    return tf.estimator.EstimatorSpec(
        mode=mode,
        loss=loss,
        train_op=train_step,
        eval_metric_ops={"eval_loss": tf.metrics.mean(loss)}
    )
def main(_):
    """Entry point: prepare the model dir, load MNIST, and run the
    train/evaluate loop for FLAGS.epochs rounds of FLAGS.viz_steps steps.

    Args:
        _: argv passed by tf.app.run, unused.
    """
    params = FLAGS.flag_values_dict()
    # resolve the activation flag (a string) to the actual tf.nn function
    params["activation"] = getattr(tf.nn, params["activation"])
    if FLAGS.delete_existing and tf.gfile.Exists(FLAGS.model_dir):
        tf.logging.warn("Deleting old log directory at {}".format(FLAGS.model_dir))
        tf.gfile.DeleteRecursively(FLAGS.model_dir)
    tf.gfile.MakeDirs(FLAGS.model_dir)
    mnist = tf.contrib.learn.datasets.load_dataset("mnist")
    train_data = mnist.train.images  # Returns np.array
    train_labels = np.asarray(mnist.train.labels, dtype=np.int32)
    eval_data = mnist.test.images  # Returns np.array
    eval_labels = np.asarray(mnist.test.labels, dtype=np.int32)
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": train_data},
        y=train_labels,
        batch_size=FLAGS.batch_size,
        num_epochs=1,
        shuffle=True)
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": eval_data},
        y=eval_labels,
        batch_size=FLAGS.batch_size,
        num_epochs=1,
        shuffle=False)
    estimator = tf.estimator.Estimator(
        model_fn,
        params=params,
        config=tf.estimator.RunConfig(
            model_dir=FLAGS.model_dir,
            save_checkpoints_steps=FLAGS.viz_steps,
        ),
    )
    # alternate training and evaluation; checkpoints/summaries are written
    # every FLAGS.viz_steps steps by the RunConfig above
    for _ in range(FLAGS.epochs):
        estimator.train(train_input_fn, steps=FLAGS.viz_steps)
        eval_results = estimator.evaluate(eval_input_fn)
        print("Evaluation_results:\n\t%s\n" % eval_results)
if __name__ == "__main__":
    # tf.app.run parses the absl flags then calls main(argv).
    tf.app.run()
| 32.81338 | 102 | 0.63054 | 2,064 | 0.221483 | 0 | 0 | 58 | 0.006224 | 0 | 0 | 3,325 | 0.356798 |
124a1d7c7ee9fa6fc3a3d429e6d76f675fb7017f | 227 | py | Python | src/utils/url_path.py | FP-DataSolutions/DeltaWarehouse | 2c57e6176d9461a287ee925f9006fdfef8da5653 | [
"Apache-2.0"
] | 6 | 2020-06-18T13:31:53.000Z | 2021-09-12T20:27:26.000Z | src/utils/url_path.py | FP-DataSolutions/DeltaWarehouse | 2c57e6176d9461a287ee925f9006fdfef8da5653 | [
"Apache-2.0"
] | null | null | null | src/utils/url_path.py | FP-DataSolutions/DeltaWarehouse | 2c57e6176d9461a287ee925f9006fdfef8da5653 | [
"Apache-2.0"
class UrlPath:
    """Helpers for assembling URL paths from individual segments."""

    @staticmethod
    def combine(*args):
        """Join the given segments, making each contribute exactly one
        trailing '/'.

        The result always ends with '/' when at least one segment is given,
        and is '' when called with no arguments.
        """
        return ''.join(
            segment if segment.endswith('/') else '{}/'.format(segment)
            for segment in args
        )
124ab5ee13504d68a3d114c277fcb2d99863ce77 | 5,229 | py | Python | data/model.py | depowered/mndot-bid-abstracts | bb02c005e7cbd5645502e21c4baebd76170d4be1 | [
"MIT"
] | null | null | null | data/model.py | depowered/mndot-bid-abstracts | bb02c005e7cbd5645502e21c4baebd76170d4be1 | [
"MIT"
] | null | null | null | data/model.py | depowered/mndot-bid-abstracts | bb02c005e7cbd5645502e21c4baebd76170d4be1 | [
"MIT"
] | null | null | null | from sqlalchemy import Column, Integer, String, Float, ForeignKey
from sqlalchemy.engine.create import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Module-level database plumbing shared by all models and by main().
# NOTE(review): the SQLite path is relative to the working directory --
# running from outside the project root will create a new db; confirm intended.
CONNECTION_STRING = "sqlite+pysqlite:///data/db.sqlite"
engine = create_engine(CONNECTION_STRING)
# Session factory; call Session() to obtain a new ORM session bound to engine.
Session = sessionmaker(engine)
# Declarative base class all mapped models below inherit from.
Base = declarative_base()
class Item2018(Base):
    """Bid item from the 2018 spec year catalog."""
    __tablename__ = "Item2018"

    ItemID_2018 = Column(Integer, primary_key=True, unique=True)
    SpecCode_2018 = Column(String)
    UnitCode_2018 = Column(String)
    ItemCode_2018 = Column(String)
    Description_2018 = Column(String)
    Unit_2018 = Column(String)

    def __str__(self) -> str:
        # Fixed: previously read self.Description / self.Unit, attributes
        # that do not exist on this model (they are suffixed with _2018),
        # so str() raised AttributeError.
        return f'Item(Description={self.Description_2018}, Unit={self.Unit_2018})'

    def __repr__(self) -> str:
        a = f'ItemID = {self.ItemID_2018}'
        b = f'SpecCode = {self.SpecCode_2018}'
        c = f'UnitCode = {self.UnitCode_2018}'
        d = f'ItemCode = {self.ItemCode_2018}'
        e = f'Description = {self.Description_2018}'
        f = f'Unit = {self.Unit_2018}'
        return ', '.join( [a, b, c, d, e, f] )
class Item2020(Base):
    """Bid item from the 2020 spec year catalog.

    Item2018_ID optionally links a 2020 item to its 2018 equivalent.
    """
    __tablename__ = "Item2020"

    ItemID_2020 = Column(Integer, primary_key=True, unique=True)
    SpecCode_2020 = Column(String)
    UnitCode_2020 = Column(String)
    ItemCode_2020 = Column(String)
    Description_2020 = Column(String)
    Unit_2020 = Column(String)
    Item2018_ID = Column(Integer)

    def __str__(self) -> str:
        # Fixed: previously read self.Description / self.Unit, attributes
        # that do not exist on this model (they are suffixed with _2020),
        # so str() raised AttributeError.
        return f'Item(Description={self.Description_2020}, Unit={self.Unit_2020})'

    def __repr__(self) -> str:
        a = f'ItemID = {self.ItemID_2020}'
        b = f'SpecCode = {self.SpecCode_2020}'
        c = f'UnitCode = {self.UnitCode_2020}'
        d = f'ItemCode = {self.ItemCode_2020}'
        e = f'Description = {self.Description_2020}'
        f = f'Unit = {self.Unit_2020}'
        return ', '.join( [a, b, c, d, e, f] )
class Abstract(Base):
    """One bid abstract document; Processed flags ingestion status."""
    __tablename__ = "Abstract"

    AbstractID = Column(Integer, primary_key=True, unique=True)
    Year = Column(Integer)
    Processed = Column(String)

    def __str__(self) -> str:
        return 'Abstract(AbstractID={})'.format(self.AbstractID)

    def __repr__(self) -> str:
        parts = [
            f'AbstractID = {self.AbstractID}',
            f'Year = {self.Year}',
            f'Processed = {self.Processed}',
        ]
        return ', '.join(parts)
class Contract(Base):
    """A let contract: identifying info plus up to three bidder references."""
    __tablename__ = "Contract"

    ContractID = Column(Integer, primary_key=True, unique=True)
    Year = Column(Integer)
    LetDate = Column(String)
    SPNumber = Column(String)
    District = Column(String)
    County = Column(String)
    BidderID_0 = Column(Integer, ForeignKey("Bidder.BidderID"))
    BidderID_1 = Column(Integer, ForeignKey("Bidder.BidderID"))
    BidderID_2 = Column(Integer, ForeignKey("Bidder.BidderID"))

    def __str__(self) -> str:
        return 'Contract(ContractID={})'.format(self.ContractID)

    def __repr__(self) -> str:
        pairs = (
            ('ContractID', self.ContractID),
            ('Year', self.Year),
            ('LetDate', self.LetDate),
            ('SPNumber', self.SPNumber),
            ('District', self.District),
            ('County', self.County),
            ('BidderID_0', self.BidderID_0),
            ('BidderID_1', self.BidderID_1),
            ('BidderID_2', self.BidderID_2),
        )
        return ', '.join('{} = {}'.format(label, value) for label, value in pairs)
class Bid(Base):
    """One line item of a contract: the engineer's estimate and the unit /
    total prices from up to three bidders."""
    __tablename__ = "Bid"

    BidID = Column(Integer, primary_key=True, unique=True)
    ContractID = Column(Integer, ForeignKey("Contract.ContractID"))
    ItemID = Column(Integer)
    SpecYear = Column(Integer)
    Quantity = Column(Float)
    Engineer_UnitPrice = Column(Float)
    Engineer_TotalPrice = Column(Float)
    BidderID_0_UnitPrice = Column(Float)
    BidderID_0_TotalPrice = Column(Float)
    BidderID_1_UnitPrice = Column(Float)
    BidderID_1_TotalPrice = Column(Float)
    BidderID_2_UnitPrice = Column(Float)
    BidderID_2_TotalPrice = Column(Float)

    def __str__(self) -> str:
        return f'Bid(BidID={self.BidID})'

    def __repr__(self) -> str:
        a = f'BidID = {self.BidID}'
        b = f'ContractID = {self.ContractID}'
        # Fixed: the ItemID entry previously interpolated self.ContractID,
        # so the repr showed the contract id under the ItemID label.
        c = f'ItemID = {self.ItemID}'
        d = f'Quantity = {self.Quantity}'
        e = f'Engineer_UnitPrice = {self.Engineer_UnitPrice}'
        f = f'Engineer_TotalPrice = {self.Engineer_TotalPrice}'
        g = f'BidderID_0_UnitPrice = {self.BidderID_0_UnitPrice}'
        h = f'BidderID_0_TotalPrice = {self.BidderID_0_TotalPrice}'
        i = f'BidderID_1_UnitPrice = {self.BidderID_1_UnitPrice}'
        j = f'BidderID_1_TotalPrice = {self.BidderID_1_TotalPrice}'
        k = f'BidderID_2_UnitPrice = {self.BidderID_2_UnitPrice}'
        l = f'BidderID_2_TotalPrice = {self.BidderID_2_TotalPrice}'
        return ', '.join( [a, b, c, d, e, f, g, h, i, j, k, l] )
class Bidder(Base):
    """A contractor that submitted bids; referenced by Contract foreign keys."""
    __tablename__ = "Bidder"

    BidderID = Column(Integer, primary_key=True, unique=True)
    Name = Column(String)

    def __str__(self) -> str:
        return 'Bidder(Name={})'.format(self.Name)

    def __repr__(self) -> str:
        return ', '.join([f'BidderID = {self.BidderID}', f'Name = {self.Name}'])
def main():
    """Create the SQLite database file with every table defined on Base."""
    # Creates blank database file (idempotent: existing tables are left as-is)
    Base.metadata.create_all(engine)
if __name__ == '__main__':
    # Script entry point. (Fixed: dataset-statistics residue that had been
    # fused onto this line is removed.)
    main()
124b91dae4ca83e7c2890e0dcfbf5f3b352095cb | 3,436 | py | Python | guess_number_bo/guess_number.py | scumabo/Number-Guessing-Game | 690bb1e6c1cb7705c4be6ef21306aa63ea015c33 | [
"MIT"
] | null | null | null | guess_number_bo/guess_number.py | scumabo/Number-Guessing-Game | 690bb1e6c1cb7705c4be6ef21306aa63ea015c33 | [
"MIT"
] | null | null | null | guess_number_bo/guess_number.py | scumabo/Number-Guessing-Game | 690bb1e6c1cb7705c4be6ef21306aa63ea015c33 | [
"MIT"
] | null | null | null | import random
from enum import Enum, auto
class Result(Enum):
    """Outcome of one guess relative to the hidden target number."""
    BINGO = auto()
    HIGH = auto()
    LOW = auto()


class GuessNumber:
    """The class randomly chooses an integer
    and then tells a human player if a guess is higher or lower than the number
    """
    def __init__(self):
        """
        Attributes:
            _low: low threshold for the random number
            _high: high threshold for the random number
            _player_cnt: number of players
            _target: a random number between (_low, _high)
        """
        self._low = 1
        self._high = 100
        self._player_cnt = 2
        self._target = random.randint(self._low, self._high)

    def guess(self, number: int) -> Result:
        """ Perform one guess

        Also narrows the advertised (_low, _high) window when the guess is
        tighter than the current bounds.

        Args:
            number: the number guessed
        Returns:
            Result.BINGO if number == _target
            Result.HIGH  if number >  _target
            Result.LOW   if number <  _target
        """
        if number == self._target:
            return Result.BINGO
        if number > self._target:
            self._high = number if number < self._high else self._high
            return Result.HIGH
        else:
            self._low = number if number > self._low else self._low
            return Result.LOW

    def play(self) -> None:
        """ Play a game """
        print("Welcome to guessing number game!")
        customize = input(
            '''The default game is for 2 players. \
            Do you want to customize your game? [N/y]''')
        if customize != '' and customize.upper() == 'Y':
            while True:
                try:
                    player_cnt = input("Enter number of players: ")
                    self._player_cnt = int(player_cnt)
                    break
                except ValueError:
                    print("No valid integer! Please try again ...")
            while True:
                try:
                    low = input("Enter the smallest possible value: ")
                    self._low = int(low)
                    break
                except ValueError:
                    print("No valid integer! Please try again ...")
            while True:
                try:
                    high = input("Enter the largest possible value: ")
                    self._high = int(high)
                    break
                except ValueError:
                    print("No valid integer! Please try again ...")
            self._target = random.randint(self._low, self._high)
        playerID = 0
        while True:
            # round-robin over players
            playerID = playerID % self._player_cnt
            print(f'Player {playerID + 1}\'s turn:')
            while True:
                try:
                    number = int(input(f'Please guess a number between {self._low} and {self._high}: '))
                    break
                except ValueError:
                    print("No valid integer! Please try again ...")
            result = self.guess(number)
            if result == Result.BINGO:
                print(f'Player {playerID + 1} wins! The number is {self._target}')
                return
            elif result == Result.HIGH:
                print(f'Too large!')
            elif result == Result.LOW:
                print(f'Too small!')
            playerID += 1

    def __repr__(self) -> str:
        # Fixed: this method previously just `pass`ed, so it returned None
        # and repr(obj) raised "TypeError: __repr__ returned non-string".
        return (f'GuessNumber(low={self._low}, high={self._high}, '
                f'players={self._player_cnt}, target={self._target})')
| 30.678571 | 104 | 0.501746 | 3,388 | 0.98603 | 0 | 0 | 0 | 0 | 0 | 0 | 1,257 | 0.365832 |
124cdc7703ebf2ea82fe5e53d675710d5f9da0a2 | 706 | py | Python | attic/concurrency/timer2.py | matteoshen/example-code | b54c22a1b8cee3fc53d1473cb38ca46eb179b4c3 | [
"MIT"
] | 5,651 | 2015-01-06T21:58:46.000Z | 2022-03-31T13:39:07.000Z | attic/concurrency/timer2.py | matteoshen/example-code | b54c22a1b8cee3fc53d1473cb38ca46eb179b4c3 | [
"MIT"
] | 42 | 2016-12-11T19:17:11.000Z | 2021-11-23T19:41:16.000Z | attic/concurrency/timer2.py | matteoshen/example-code | b54c22a1b8cee3fc53d1473cb38ca46eb179b4c3 | [
"MIT"
] | 2,394 | 2015-01-18T10:57:38.000Z | 2022-03-31T11:41:12.000Z | import asyncio
import sys
import contextlib
async def show_remaining(dots_task):
    """Print a 5-second countdown (one line per second), then cancel
    *dots_task* (the spinner) and terminate its dot line with a newline.

    Fixed: the generator-based `@asyncio.coroutine` / `yield from` style was
    deprecated in Python 3.8 and removed in 3.11; `async def` / `await` is
    the drop-in equivalent (the result is still awaitable by the caller).
    """
    remaining = 5
    while remaining:
        print('Remaining: ', remaining)
        sys.stdout.flush()
        await asyncio.sleep(1)
        remaining -= 1
    dots_task.cancel()
    print()
async def dots():
    """Print a dot every 0.1s forever; runs until cancelled by the caller.

    Fixed: converted from the `@asyncio.coroutine` / `yield from` style,
    which was removed in Python 3.11, to `async def` / `await`.
    """
    while True:
        print('.', sep='', end='')
        sys.stdout.flush()
        await asyncio.sleep(.1)
def main():
    """Run the dot spinner and the 5-second countdown concurrently; the
    countdown cancels the spinner when it finishes.

    Fixed for modern Python: `asyncio.Task(...)` outside a running loop and
    passing a bare coroutine to `asyncio.wait` both fail on 3.10+/3.11+, and
    `contextlib.closing(asyncio.get_event_loop())` relies on deprecated
    behaviour. `asyncio.run` now owns loop creation and cleanup.
    """
    async def _run():
        dots_task = asyncio.ensure_future(dots())
        timer_task = asyncio.ensure_future(show_remaining(dots_task))
        # wait() requires tasks/futures (not bare coroutines) on 3.11+
        await asyncio.wait([timer_task, dots_task])
    asyncio.run(_run())
if __name__ == '__main__':
    # Script entry point: run the 5-second countdown demo.
    main()
| 22.774194 | 62 | 0.634561 | 0 | 0 | 356 | 0.504249 | 394 | 0.558074 | 0 | 0 | 30 | 0.042493 |
124d0147a05244a749391c0acf979893ea8c65d5 | 6,372 | py | Python | nexpose_rest/nexpose_policy.py | Patralos/nexpose-rest | c03431a408afd1528b0ca5a00859467574953ea0 | [
"MIT"
] | null | null | null | nexpose_rest/nexpose_policy.py | Patralos/nexpose-rest | c03431a408afd1528b0ca5a00859467574953ea0 | [
"MIT"
] | null | null | null | nexpose_rest/nexpose_policy.py | Patralos/nexpose-rest | c03431a408afd1528b0ca5a00859467574953ea0 | [
"MIT"
] | null | null | null | from nexpose_rest.nexpose import _GET
def getPolicies(config, filter=None, scannedOnly=None):
getParameters=[]
if filter is not None:
getParameters.append('filter=' + filter)
if scannedOnly is not None:
getParameters.append('scannedOnly=' + scannedOnly)
code, data = _GET('/api/3/policies', config, getParameters=getParameters)
return data
def getPolicyRuleControls(config, policyId, ruleId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/controls', config, getParameters=getParameters)
return data
def getAssetPolicyRulesSummary(config, assetId, policyId):
getParameters=[]
code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/rules', config, getParameters=getParameters)
return data
def getPolicyGroup(config, policyId, groupId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '', config, getParameters=getParameters)
return data
def getPolicyRule(config, policyId, ruleId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '', config, getParameters=getParameters)
return data
def getPolicyRuleAssetResultProof(config, policyId, ruleId, assetId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/assets/' + str(assetId) + '/proof', config, getParameters=getParameters)
return data
def getDisabledPolicyRules(config, policyId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/disabled', config, getParameters=getParameters)
return data
def getPolicyChildren(config, id):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(id) + '/children', config, getParameters=getParameters)
return data
def getPolicyGroups(config, policyId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/groups', config, getParameters=getParameters)
return data
def getPolicyAssetResults(config, policyId, applicableOnly=None):
getParameters=[]
if applicableOnly is not None:
getParameters.append('applicableOnly=' + applicableOnly)
code, data = _GET('/api/3/policies/' + str(policyId) + '/assets', config, getParameters=getParameters)
return data
def getAssetPolicyChildren(config, assetId, policyId):
getParameters=[]
code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/children', config, getParameters=getParameters)
return data
def getPolicyRuleRationale(config, policyId, ruleId):
getParameters=[]
code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/rationale', config, getParameters=getParameters)
return data
def getPolicyGroupRulesWithAssetAssessment(config, assetId, policyId, groupId):
    """Fetch a policy group's rules together with their assessment for an asset."""
    query = []
    _, payload = _GET(f'/api/3/assets/{assetId!s}/policies/{policyId!s}/groups/{groupId!s}/rules', config, getParameters=query)
    return payload
def getPolicyRuleAssetResults(config, policyId, ruleId, applicableOnly=None):
    """Fetch per-asset results for a single policy rule.

    ``applicableOnly``, when given, is forwarded verbatim as a query parameter.
    """
    query = [] if applicableOnly is None else ['applicableOnly=' + applicableOnly]
    _, payload = _GET(f'/api/3/policies/{policyId!s}/rules/{ruleId!s}/assets', config, getParameters=query)
    return payload
def getAssetPolicyGroupChildren(config, assetId, policyId, groupId):
    """Fetch the children of a policy group in the context of an asset."""
    query = []
    _, payload = _GET(f'/api/3/assets/{assetId!s}/policies/{policyId!s}/groups/{groupId!s}/children', config, getParameters=query)
    return payload
def getPoliciesForAsset(config, assetId, applicableOnly=None):
    """Fetch the policies evaluated against an asset.

    ``applicableOnly``, when given, is forwarded verbatim as a query parameter.
    """
    query = [] if applicableOnly is None else ['applicableOnly=' + applicableOnly]
    _, payload = _GET(f'/api/3/assets/{assetId!s}/policies', config, getParameters=query)
    return payload
def getPolicyRuleRemediation(config, policyId, ruleId):
    """Fetch the remediation text of a policy rule."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/rules/{ruleId!s}/remediation', config, getParameters=query)
    return payload
def getPolicyRules(config, policyId):
    """Fetch all rules of a policy."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/rules', config, getParameters=query)
    return payload
def getPolicySummary(config):
    """Fetch the console-wide policy compliance summary."""
    query = []
    _, payload = _GET('/api/3/policy/summary', config, getParameters=query)
    return payload
def getPolicyGroupAssetResult(config, policyId, groupId, assetId):
    """Fetch a policy group's result for a single asset."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/groups/{groupId!s}/assets/{assetId!s}', config, getParameters=query)
    return payload
def getPolicyAssetResult(config, policyId, assetId):
    """Fetch a policy's result for a single asset."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/assets/{assetId!s}', config, getParameters=query)
    return payload
def getPolicyGroupAssetResults(config, policyId, groupId, applicableOnly=None):
    """Fetch per-asset results for a policy group.

    ``applicableOnly``, when given, is forwarded verbatim as a query parameter.
    """
    query = [] if applicableOnly is None else ['applicableOnly=' + applicableOnly]
    _, payload = _GET(f'/api/3/policies/{policyId!s}/groups/{groupId!s}/assets', config, getParameters=query)
    return payload
def getDescendantPolicyRules(config, policyId, groupId):
    """Fetch the rules that descend from a policy group."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/groups/{groupId!s}/rules', config, getParameters=query)
    return payload
def getPolicyRuleAssetResult(config, policyId, ruleId, assetId):
    """Fetch a policy rule's result for a single asset."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/rules/{ruleId!s}/assets/{assetId!s}', config, getParameters=query)
    return payload
def getPolicyGroupChildren(config, policyId, groupId):
    """Fetch the child elements of a policy group."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}/groups/{groupId!s}/children', config, getParameters=query)
    return payload
def getPolicy(config, policyId):
    """Fetch a single policy by id."""
    query = []
    _, payload = _GET(f'/api/3/policies/{policyId!s}', config, getParameters=query)
    return payload
| 37.482353 | 164 | 0.694131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 971 | 0.152385 |
124f69015efa6812e6854a40b97d1f640b2aa516 | 3,785 | py | Python | cybld/cybld_command_stats.py | dcvetko/cybld | 678979543bb3b1f6be696ae9ae2c97ba4d17bc8a | [
"MIT"
] | null | null | null | cybld/cybld_command_stats.py | dcvetko/cybld | 678979543bb3b1f6be696ae9ae2c97ba4d17bc8a | [
"MIT"
] | null | null | null | cybld/cybld_command_stats.py | dcvetko/cybld | 678979543bb3b1f6be696ae9ae2c97ba4d17bc8a | [
"MIT"
] | null | null | null | #!/usr/bin/python
# --------------------------------------------------------------------------
#
# MIT License
#
# --------------------------------------------------------------------------
from cybld import cybld_helpers
# --------------------------------------------------------------------------
class CyBldCommandStatsList:
    """Registry that keeps one CyBldCommandStats entry per distinct command.

    Only a single instance of this registry is expected to exist.
    """
    def __init__(self):
        self._command_stats = []

    def update_command_stats(self, command: str, success_or_fail: bool, run_time: int):
        """Record one run of *command*, creating its stats entry on first use.

        :param command: The command (as string)
        :param success_or_fail: Whether the command was successful or not
            (return code 0 means success)
        :param run_time: How long the command took (in seconds)
        """
        entry = next(
            (e for e in reversed(self._command_stats) if e.command == command),
            None)
        if entry is None:
            entry = CyBldCommandStats(command)
            self._command_stats.append(entry)
        entry.update_stats(success_or_fail, run_time)

    def get_command_stats(self, command: str) -> str:
        """Return the printable stats of *command*, or a placeholder string.

        :param command: The command as string
        """
        for entry in self._command_stats:
            if entry.command == command:
                return entry.get_stats_str()
        return 'No previous runs recorded'
# --------------------------------------------------------------------------
class CyBldCommandStats:
    """Rolling record of the five most recent runs of a single command.

    :param command: The command as string
    """
    def __init__(self, command: str):
        self._command = command
        self._exit_codes = []
        self._run_times = []

    @property
    def command(self) -> str:
        """The command this record belongs to."""
        return self._command

    def update_stats(self, success_or_fail: bool, run_time: int):
        """Append one run, dropping the oldest once five runs are stored.

        :param success_or_fail: Whether the command was successful or not
        :param run_time: How long the command took (in seconds)
        """
        # Keep at most the five most recent entries.
        if len(self._exit_codes) >= 5:
            del self._exit_codes[0]
        if len(self._run_times) >= 5:
            del self._run_times[0]
        self._exit_codes.append(success_or_fail)
        self._run_times.append(int(run_time))

    def get_stats_str(self):
        """Return a printable one-line representation of the stats."""
        icons = ""
        for ok in self._exit_codes:
            icons += (cybld_helpers.ICON_SUCCESS if ok
                      else cybld_helpers.ICON_FAIL) + " "
        # Pad with "unknown" markers until the icon column is wide enough.
        while len(icons) < 10:
            icons += cybld_helpers.ICON_UNKNOWN + " "
        return "previous runs: {0} (avg. {1} seconds)".format(
            icons, self._get_avg_runtime())

    def _get_avg_runtime(self):
        """Arithmetic mean of the recorded run times, truncated to int."""
        return int(sum(self._run_times) / len(self._run_times))
| 32.076271 | 87 | 0.566182 | 3,399 | 0.898018 | 0 | 0 | 68 | 0.017966 | 0 | 0 | 1,531 | 0.404491 |
124f826646c61b86d4bd5ee114177b7081ca0f74 | 130 | py | Python | core/src/zeit/content/article/edit/browser/interfaces.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | [
"BSD-3-Clause"
] | 5 | 2019-05-16T09:51:29.000Z | 2021-05-31T09:30:03.000Z | core/src/zeit/content/article/edit/browser/interfaces.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | [
"BSD-3-Clause"
] | 107 | 2019-05-24T12:19:02.000Z | 2022-03-23T15:05:56.000Z | src/zeit/content/article/edit/browser/interfaces.py | ZeitOnline/zeit.content.article | 4375baec7e7ff1f013402f4b920cc37305e44379 | [
"BSD-3-Clause"
] | 3 | 2020-08-14T11:01:17.000Z | 2022-01-08T17:32:19.000Z | import zope.interface
class IFoldable(zope.interface.Interface):
    """Marker interface for a block which can be collapsed."""
| 21.666667 | 62 | 0.753846 | 105 | 0.807692 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.446154 |
124f9adbc3629f60192bbc345789c5fe360c9cdf | 190 | py | Python | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/utils/context_processor.py | abahnihi/kn-django-cookiecutter | bf85aa47b6aae450d25551fdf68c943f41b5c6bd | [
"MIT"
] | 2 | 2020-07-26T07:33:08.000Z | 2020-08-14T09:40:21.000Z | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/utils/context_processor.py | abahnihi/kn-django-cookiecutter | bf85aa47b6aae450d25551fdf68c943f41b5c6bd | [
"MIT"
] | 7 | 2020-02-12T01:19:42.000Z | 2022-03-11T23:26:05.000Z | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/utils/context_processor.py | abahnihi/kn-django-cookiecutter | bf85aa47b6aae450d25551fdf68c943f41b5c6bd | [
"MIT"
] | 9 | 2020-09-22T10:42:23.000Z | 2021-07-28T05:52:26.000Z | from django.conf import settings
def google_analytics(request):
    """Template context processor exposing ``settings.GOOGLE_ANALYTICS``."""
    return dict(GOOGLE_ANALYTICS=settings.GOOGLE_ANALYTICS)
def debug_state(request):
    """Template context processor exposing ``settings.DEBUG``."""
    return dict(DEBUG=settings.DEBUG)
| 19 | 58 | 0.763158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.131579 |
12545f6564e3ca3f83db07ea8b386b6c9ab46b3f | 4,256 | py | Python | src/search/src/search-service/app.py | Young-ook/retail-demo-store | c54108d5d56566f766514cab77ee68e91b202360 | [
"MIT-0"
] | 1 | 2021-04-26T14:29:36.000Z | 2021-04-26T14:29:36.000Z | src/search/src/search-service/app.py | Young-ook/retail-demo-store | c54108d5d56566f766514cab77ee68e91b202360 | [
"MIT-0"
] | null | null | null | src/search/src/search-service/app.py | Young-ook/retail-demo-store | c54108d5d56566f766514cab77ee68e91b202360 | [
"MIT-0"
] | 2 | 2021-06-22T12:45:05.000Z | 2021-11-23T22:40:14.000Z | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from aws_xray_sdk.core import patch_all
patch_all()
from flask import Flask
from flask import request
from flask_cors import CORS
from datetime import datetime
from elasticsearch import Elasticsearch
import json
import uuid
import os, sys
import pprint
import boto3
import time
import uuid
# Elasticsearch connection settings come from the environment; only the host
# is mandatory, scheme and port fall back to HTTPS defaults.
es_search_domain_scheme = os.environ.get('ES_SEARCH_DOMAIN_SCHEME', 'https')
es_search_domain_host = os.environ['ES_SEARCH_DOMAIN_HOST']
es_search_domain_port = os.environ.get('ES_SEARCH_DOMAIN_PORT', 443)
# Index that holds the product documents queried by the routes below.
es_products_index_name = 'products'
es = Elasticsearch(
    [es_search_domain_host],
    scheme=es_search_domain_scheme,
    port=es_search_domain_port,
)
# -- Logging
class LoggingMiddleware(object):
    """WSGI wrapper that dumps every request and response to ``wsgi.errors``."""

    def __init__(self, app):
        self._app = app

    def __call__(self, environ, resp):
        stream = environ['wsgi.errors']
        pprint.pprint(('REQUEST', environ), stream=stream)

        def log_response(status, headers, *args):
            # Log the response line before delegating to the real
            # start_response callable.
            pprint.pprint(('RESPONSE', status, headers), stream=stream)
            return resp(status, headers, *args)

        return self._app(environ, log_response)
# -- End Logging
# -- Handlers
# Flask application with permissive CORS and X-Ray request tracing.
app = Flask(__name__)
corps = CORS(app)  # NOTE(review): presumably meant "cors" — verify before renaming
xray_recorder.configure(service='Search Service')
XRayMiddleware(app, xray_recorder)
@app.route('/')
def index():
    # Landing/health endpoint identifying this service.
    return 'Search Service'
@app.route('/search/products', methods=['GET', 'POST'])
def searchProducts():
    # GET: free-text search of the products index via the ``searchTerm``
    # query parameter; responds with a JSON list of {"itemId": ...} objects.
    if request.method == 'GET':
        try:
            # NOTE(review): a missing searchTerm makes args.get() return None
            # and .lower() raise; the except below then returns the error text
            # with a 200 status — confirm intended.
            searchTerm = request.args.get('searchTerm').lower()
            app.logger.info(searchTerm)
            # Best-clause query: prefix wildcards on name (boost 1.2) and
            # description (boost 0.6), exact terms on category/style; the top
            # clause scores fully, the rest contribute via the 0.7 tie-breaker.
            results = es.search(index = es_products_index_name, body={
                "query": {
                    "dis_max" : {
                        "queries" : [
                            { "wildcard" : { "name" : { "value": '{}*'.format(searchTerm), "boost": 1.2 }}},
                            { "term" : { "category" : searchTerm }},
                            { "term" : { "style" : searchTerm }},
                            { "wildcard" : { "description" : { "value": '{}*'.format(searchTerm), "boost": 0.6 }}}
                        ],
                        "tie_breaker" : 0.7
                    }
                }
            })
            app.logger.info(json.dumps(results))
            # Reduce each hit to just its document id.
            found_items = []
            for item in results['hits']['hits']:
                found_items.append({
                    'itemId': item['_id']
                })
            return json.dumps(found_items)
        except Exception as e:
            app.logger.error(e)
            return str(e)
    if request.method == 'POST':
        # NOTE(review): the POST branch only logs and falls through without a
        # return value, which Flask treats as an error — confirm intended.
        app.logger.info("Request Received, Processing")
@app.route('/similar/products', methods=['GET'])
def similarProducts():
    # Return products similar to the one given by the ``productId`` query
    # parameter, as a JSON list of {"itemId": ...} objects.
    try:
        productId = request.args.get('productId')
        app.logger.info(productId)
        # "More like this" query seeded by the reference document; matches on
        # name/category/style/description with loose term-frequency limits.
        results = es.search(index = es_products_index_name,
            body={
                "query": {
                    "more_like_this": {
                        "fields": ["name", "category", "style", "description"],
                        "like": [{
                            "_index": es_products_index_name,
                            "_id": productId
                        }],
                        "min_term_freq" : 1,
                        "max_query_terms" : 10
                    }
                }
            })
        app.logger.info(json.dumps(results))
        # Reduce each hit to just its document id.
        found_items = []
        for item in results['hits']['hits']:
            found_items.append({
                'itemId': item['_id']
            })
        return json.dumps(found_items)
    except Exception as e:
        # NOTE(review): errors are returned as plain text with a 200 status —
        # confirm intended.
        app.logger.error(e)
        return str(e)
if __name__ == '__main__':
    # Development entrypoint: wrap the WSGI app so every request/response is
    # dumped to wsgi.errors before serving.
    app.wsgi_app = LoggingMiddleware(app.wsgi_app)
app.run(debug=True,host='0.0.0.0', port=80) | 29.351724 | 114 | 0.528195 | 450 | 0.105733 | 0 | 0 | 2,631 | 0.618186 | 0 | 0 | 744 | 0.174812 |
1254a9857e6bd6e919dc89f8ba7391c2e619fe3c | 1,605 | py | Python | utils/iou.py | taimur1871/cutter_detect_app | dd4b2251645b1a1c588739fc60e8c943f40b70df | [
"MIT"
] | null | null | null | utils/iou.py | taimur1871/cutter_detect_app | dd4b2251645b1a1c588739fc60e8c943f40b70df | [
"MIT"
] | null | null | null | utils/iou.py | taimur1871/cutter_detect_app | dd4b2251645b1a1c588739fc60e8c943f40b70df | [
"MIT"
] | 1 | 2022-01-19T03:17:26.000Z | 2022-01-19T03:17:26.000Z | # check for duplicate detections
import numpy as np
def iou(box1, box2):
    """Intersection-over-union of two axis-aligned boxes (x1, y1, x2, y2)."""
    # Corners of the overlap rectangle.
    left = max(box1[0], box2[0])
    top = max(box1[1], box2[1])
    right = min(box1[2], box2[2])
    bottom = min(box1[3], box2[3])
    # Clamp negative extents to zero: disjoint boxes have no overlap.
    overlap = max(right - left, 0) * max(bottom - top, 0)
    if overlap == 0:
        return 0
    area1 = abs((box1[2] - box1[0]) * (box1[3] - box1[1]))
    area2 = abs((box2[2] - box2[0]) * (box2[3] - box2[1]))
    # Union = sum of the two areas minus the shared overlap.
    return overlap / float(area1 + area2 - overlap)
def iou_check(cutter_list):
    """Remove duplicate detections (IoU > 0.5) from *cutter_list* in place.

    Each overlapping pair contributes both of its indices to ``dupl_temp``
    (once per loop orientation), so removing the boxes named by the second
    half of that list keeps one box per simple pair.
    NOTE(review): for chains of three or more mutually-overlapping boxes this
    heuristic may keep/drop unexpected boxes — confirm against real data.
    """
    # record indices of duplicates
    dupl_temp = []
    for i in range(len(cutter_list)):
        for j in range(len(cutter_list)):
            if i == j:
                continue
            elif iou(cutter_list[i], cutter_list[j]) > 0.5:
                dupl_temp.append(i)
    # get only first half of indices
    if len(dupl_temp) != 0:
        # remove the indices from original list
        to_remove = []
        for i in dupl_temp[len(dupl_temp)//2:]:
            to_remove.append(cutter_list[i])
        for j in to_remove:
            try:
                cutter_list.remove(j)
            except ValueError:
                # Box already removed via an earlier duplicate entry.
                continue
        return cutter_list
    else:
        return cutter_list
| 30.283019 | 68 | 0.58567 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 441 | 0.274766 |
1254d16c4e176a58c1a20487d3f11f36c8056c00 | 256 | py | Python | tests/i18n/patterns/urls/path_unused.py | webjunkie/django | 5dbca13f3baa2e1bafd77e84a80ad6d8a074712e | [
"BSD-3-Clause"
] | 790 | 2015-01-03T02:13:39.000Z | 2020-05-10T19:53:57.000Z | AppServer/lib/django-1.5/tests/regressiontests/i18n/patterns/urls/path_unused.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | [
"Apache-2.0"
] | 1,361 | 2015-01-08T23:09:40.000Z | 2020-04-14T00:03:04.000Z | AppServer/lib/django-1.5/tests/regressiontests/i18n/patterns/urls/path_unused.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | [
"Apache-2.0"
] | 155 | 2015-01-08T22:59:31.000Z | 2020-04-08T08:01:53.000Z | from django.conf.urls import url
from django.conf.urls import patterns
from django.views.generic import TemplateView
# Single URL pattern rendering a dummy template; the name marks it as a
# deliberately untranslated route for the i18n pattern tests.
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = patterns('',
    url(r'^nl/foo/', view, name='not-translated'),
)
| 23.272727 | 55 | 0.753906 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.160156 |
12569e9c0d171a9d7527cfe600f2b89b30a16992 | 222 | py | Python | backend/profiles/serializers.py | stevethompsonstar/django-react-blog | 88af926454901c826acc9e2996addd0d53b0626a | [
"MIT"
] | 592 | 2017-03-07T04:29:08.000Z | 2020-09-21T00:36:58.000Z | backend/profiles/serializers.py | stevethompsonstar/django-react-blog | 88af926454901c826acc9e2996addd0d53b0626a | [
"MIT"
] | 8 | 2017-03-08T01:22:36.000Z | 2020-08-20T15:45:42.000Z | backend/profiles/serializers.py | stevethompsonstar/django-react-blog | 88af926454901c826acc9e2996addd0d53b0626a | [
"MIT"
] | 102 | 2017-03-07T05:42:47.000Z | 2020-08-28T20:02:20.000Z | from rest_framework import serializers
from .models import Subscriber
class SubscriberSerializer(serializers.ModelSerializer):
    """Serializes a Subscriber, exposing only its email address."""
    class Meta:
        model = Subscriber
        fields = (
            'email',
        )
| 20.181818 | 56 | 0.662162 | 149 | 0.671171 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.031532 |
1256d712ac9ca51e3deef82acc5b59bca5098eea | 4,592 | py | Python | src/opendr/perception/activity_recognition/datasets/utils/transforms.py | makistsantekidis/opendr | 07dee3b59d3487b9c5a93d6946317178a02c9890 | [
"Apache-2.0"
] | 3 | 2021-06-24T01:54:25.000Z | 2021-12-12T16:21:24.000Z | src/opendr/perception/activity_recognition/datasets/utils/transforms.py | makistsantekidis/opendr | 07dee3b59d3487b9c5a93d6946317178a02c9890 | [
"Apache-2.0"
] | 79 | 2021-06-23T10:40:10.000Z | 2021-12-16T07:59:42.000Z | src/opendr/perception/activity_recognition/datasets/utils/transforms.py | makistsantekidis/opendr | 07dee3b59d3487b9c5a93d6946317178a02c9890 | [
"Apache-2.0"
] | 5 | 2021-07-04T07:38:50.000Z | 2021-12-12T16:18:47.000Z | # Copyright 2020-2021 OpenDR Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import math
from typing import Callable, Tuple
from torchvision.transforms import Compose
from torchvision.transforms._transforms_video import (
CenterCropVideo,
NormalizeVideo,
RandomCropVideo,
RandomHorizontalFlipVideo,
ToTensorVideo,
)
Transform = Callable[[torch.Tensor], torch.Tensor]
def standard_video_transforms(
    spatial_pixels: int = 224,
    horizontal_flip=True,
    mean=(0.45, 0.45, 0.45),
    std=(0.225, 0.225, 0.225)
) -> Tuple[Transform, Transform]:
    """Build the (train, eval) transform pipelines for video recognition.

    Args:
        spatial_pixels (int, optional): Spatial size (i.e. height or width) to resize to. Defaults to 224.
        horizontal_flip (bool, optional): Whether horizontal flipping (p = 0.5) is used. Defaults to True.
        mean (tuple, optional): Mean RGB values used in standardization. Defaults to (0.45, 0.45, 0.45).
        std (tuple, optional): Std RGB values used in standardization. Defaults to (0.225, 0.225, 0.225).

    Returns:
        Tuple[Transform, Transform]: training and evaluation transform pipelines.
    """
    # Training short-side jitter spans [0.8/0.7, 1/0.7] of the target size,
    # rounded down to an even pixel count.
    jitter_scale = (1 / 0.7 * 0.8, 1 / 0.7)
    jitter_min = (spatial_pixels * jitter_scale[0]) // 2 * 2
    jitter_max = (spatial_pixels * jitter_scale[1]) // 2 * 2

    train_steps = [
        ToTensorVideo(),
        RandomShortSideScaleJitterVideo(min_size=jitter_min, max_size=jitter_max),
        RandomCropVideo(spatial_pixels),
    ]
    if horizontal_flip:
        train_steps.append(RandomHorizontalFlipVideo())
    train_steps.append(NormalizeVideo(mean=mean, std=std))
    train_transforms = Compose(train_steps)

    # Evaluation uses a deterministic resize (min == max) and a center crop.
    eval_transforms = Compose([
        ToTensorVideo(),
        RandomShortSideScaleJitterVideo(min_size=spatial_pixels, max_size=spatial_pixels),
        CenterCropVideo(spatial_pixels),
        NormalizeVideo(mean=mean, std=std),
    ])

    return train_transforms, eval_transforms
class RandomShortSideScaleJitterVideo:
    """Resize a clip so its short side lands on a randomly jittered length."""

    def __init__(self, min_size: int, max_size: int, inverse_uniform_sampling=False):
        """
        Args:
            min_size (int): the minimal size to scale the frames.
            max_size (int): the maximal size to scale the frames.
            inverse_uniform_sampling (bool): if True, sample uniformly in
                [1 / max_scale, 1 / min_scale] and take a reciprocal to get the
                scale. If False, take a uniform sample from [min_scale, max_scale].
        """
        self.min_size = min_size
        self.max_size = max_size
        self.inverse_uniform_sampling = inverse_uniform_sampling

    def _sample_target_size(self) -> int:
        """Draw the target short-side length for one call."""
        if self.inverse_uniform_sampling:
            reciprocal = np.random.uniform(1.0 / self.max_size, 1.0 / self.min_size)
            return int(round(1.0 / reciprocal))
        return int(round(np.random.uniform(self.min_size, self.max_size)))

    def __call__(self, images: torch.Tensor) -> torch.Tensor:
        """Scale *images* so the short side equals the sampled size.

        Args:
            images (tensor): images to perform scale jitter. Dimension is
                `num frames` x `channel` x `height` x `width`.
        Returns:
            (tensor): the scaled images, `num frames` x `channel` x
                `new height` x `new width`.
        """
        size = self._sample_target_size()
        height, width = images.shape[2], images.shape[3]
        # Nothing to do when the short side already matches the target.
        if (width <= height and width == size) or (height <= width and height == size):
            return images
        if width < height:
            new_width = size
            new_height = int(math.floor((float(height) / width) * size))
        else:
            new_height = size
            new_width = int(math.floor((float(width) / height) * size))
        return torch.nn.functional.interpolate(
            images, size=(new_height, new_width), mode="bilinear", align_corners=False,
        )
| 37.950413 | 107 | 0.635017 | 1,974 | 0.429878 | 0 | 0 | 0 | 0 | 0 | 0 | 1,955 | 0.42574 |
1257bf72104e89e7cfc8426086636e97ad97788b | 799 | py | Python | coinbase/models/util.py | EU-institution/coinbase_python | 1e0d5d162cb40c1094775ceb5c267a5bdedf0949 | [
"Unlicense",
"MIT"
] | 53 | 2015-01-05T08:42:17.000Z | 2022-03-01T20:52:41.000Z | coinbase/models/util.py | EU-institution/coinbase_python | 1e0d5d162cb40c1094775ceb5c267a5bdedf0949 | [
"Unlicense",
"MIT"
] | 10 | 2015-01-08T04:09:25.000Z | 2021-10-08T21:43:17.000Z | coinbase/models/util.py | mhluongo/coinbase_python | 2e29d4fa1c501495b41005bbcc770cb29fba6ad1 | [
"MIT",
"Unlicense"
] | 34 | 2016-09-18T23:18:44.000Z | 2022-02-19T17:31:05.000Z | import collections
# Python 2/3 string-type shim: ``basestring`` only exists on Python 2.
# A bare ``except:`` here would also swallow KeyboardInterrupt/SystemExit,
# so catch exactly the NameError raised on Python 3.
try:
    stringtype = basestring # python 2
except NameError:
    stringtype = str # python 3
def coerce_to_list(x):
    """Normalize *x* to a list: split comma/space strings, map falsy to []."""
    if not isinstance(x, stringtype):
        return x or []
    # Treat commas as just another separator, then split on whitespace.
    return x.replace(',', ' ').split()
def namedtuple(name, args=None, optional=None):
    """Build a ``collections.namedtuple`` whose *optional* fields default to None.

    Both *args* and *optional* may be sequences or comma/space separated
    strings (see ``coerce_to_list``).
    """
    required = coerce_to_list(args)
    defaults = coerce_to_list(optional)
    cls = collections.namedtuple(name, required + defaults)
    none_defaults = tuple([None] * len(defaults))
    # Attach defaults to the generated constructor; the attribute name
    # differs between Python 2 and Python 3.
    if hasattr(cls.__new__, 'func_defaults'): # python 2
        cls.__new__.func_defaults = none_defaults
    elif hasattr(cls.__new__, '__defaults__'): # python 3
        cls.__new__.__defaults__ = none_defaults
    else:
        raise Exception('???')
    return cls
def optional(fn):
    """Wrap *fn* so that a None argument short-circuits to None."""
    def opt(x):
        return fn(x) if x is not None else None
    return opt
| 24.212121 | 63 | 0.625782 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.100125 |
1258185b72665406006c911438c249e31103916b | 632 | py | Python | main.py | Spain-AI/dark_helper | c2a5d774b455b2a374d6ca5e2715f7a560f5fe5b | [
"Apache-2.0"
] | null | null | null | main.py | Spain-AI/dark_helper | c2a5d774b455b2a374d6ca5e2715f7a560f5fe5b | [
"Apache-2.0"
] | 8 | 2020-11-13T18:59:55.000Z | 2022-03-12T00:39:43.000Z | main.py | Spain-AI/dark_helper | c2a5d774b455b2a374d6ca5e2715f7a560f5fe5b | [
"Apache-2.0"
] | 1 | 2020-07-10T19:16:37.000Z | 2020-07-10T19:16:37.000Z | from capture_monitor import CaptureMonitor
from face_lib import FaceSystem
from visualizer import Visualizer #MainWindow
#from PyQt5 import QtCore, QtGui, QtWidgets
import os
# Screen-capture source; assumes bb is the capture bounding box — TODO confirm
# its (left, top, width, height) interpretation against CaptureMonitor.
monitor = CaptureMonitor(bb=(0, 0, 600, 480))
# Face recognition system loaded from the local bio database file.
face_system = FaceSystem(os.path.abspath("./bio/bio.json"))
def thread_process():
    """Grab one frame and return it with every face the face system yields."""
    frame = monitor.get_frame()
    faces = list(face_system(frame))
    return frame, faces
if __name__ == '__main__':
    # The commented-out PyQt MainWindow front-end was retired; Visualizer
    # drives the loop directly with the monitor and per-frame callback.
    app = Visualizer(monitor, thread_process)
    app.run()
12582c39f2a4992d63610ad2f63f31ac78eb799c | 4,454 | py | Python | tests/mixins.py | armstrong/armstrong.apps.embeds | 6042f4ab39e752c4e78826e44f7c2aa82bc04e6a | [
"Apache-2.0"
] | 1 | 2016-08-02T09:33:35.000Z | 2016-08-02T09:33:35.000Z | tests/mixins.py | armstrong/armstrong.apps.embeds | 6042f4ab39e752c4e78826e44f7c2aa82bc04e6a | [
"Apache-2.0"
] | null | null | null | tests/mixins.py | armstrong/armstrong.apps.embeds | 6042f4ab39e752c4e78826e44f7c2aa82bc04e6a | [
"Apache-2.0"
] | 1 | 2018-03-04T20:30:15.000Z | 2018-03-04T20:30:15.000Z | import fudge
from armstrong.apps.embeds.mixins import TemplatesByEmbedTypeMixin
from .support.models import Parent, Child, TypeModel
from ._utils import TestCase
class TemplateCompareTestMixin(object):
    """Helpers for asserting template-name lists built from %-patterns."""

    def path_opts(self, obj, use_fallback=False, use_type=False):
        """Build the %-interpolation dict for the expected template paths."""
        return dict(
            base=obj.base_layout_directory,
            app=obj._meta.app_label,
            model=obj._meta.object_name.lower(),
            typemodel=self.type_name if use_type else "fail",
            type=self.type_slug if use_type else "fail",
            tpl=obj.fallback_template_name if use_fallback else self.tpl_name)

    def compare_templates(self, obj, expected, **kwargs):
        """Assert that *obj* resolves exactly the interpolated *expected* names."""
        opts = self.path_opts(obj, **kwargs)
        resolved = obj.get_layout_template_name(self.tpl_name)
        self.assertEqual(resolved, [pattern % opts for pattern in expected])
class TemplatesByEmbedTypeTestCase(TemplateCompareTestMixin, TestCase):
    """Covers TemplatesByEmbedTypeMixin.get_layout_template_name for plain
    objects, Parent models, and Child models, with and without a valid
    embed response."""
    def setUp(self):
        self.tpl_name = "tpl"
        self.type_name = TypeModel()._meta.object_name.lower()
        self.type_slug = "photo"
    def test_object_requires_response(self):
        with self.assertRaisesRegexp(AttributeError, "has no attribute 'response'"):
            TemplatesByEmbedTypeMixin().get_layout_template_name(self.tpl_name)
    def test_object_response_checks_validity(self):
        # The fudge expectation fails the test if is_valid() is never called.
        obj = TemplatesByEmbedTypeMixin()
        obj.response = fudge.Fake().expects('is_valid')
        obj.get_layout_template_name(self.tpl_name)
    def test_non_model_without_response_returns_empty(self):
        obj = TemplatesByEmbedTypeMixin()
        obj.response = None
        self.assertEqual(obj.get_layout_template_name(self.tpl_name), [])
    def test_non_model_with_valid_response_returns_empty(self):
        obj = TemplatesByEmbedTypeMixin()
        obj.response = fudge.Fake().expects('is_valid').returns(True)
        obj.type = fudge.Fake()
        self.assertEqual(obj.get_layout_template_name(self.tpl_name), [])
    def test_model_without_response_uses_fallback(self):
        obj = Parent()
        expected = ['%(base)s/%(app)s/%(model)s/%(tpl)s.html']
        self.compare_templates(obj, expected, use_fallback=True)
    def test_model_with_invalid_response_uses_fallback(self):
        obj = Parent()
        obj.response = fudge.Fake().expects('is_valid').returns(False)
        expected = ['%(base)s/%(app)s/%(model)s/%(tpl)s.html']
        self.compare_templates(obj, expected, use_fallback=True)
    def test_model_with_valid_response(self):
        # A valid response prepends the type-specific template path.
        obj = Parent(type=TypeModel(slug=self.type_slug))
        obj.response = fudge.Fake().expects('is_valid').returns(True)
        expected = [
            '%(base)s/%(app)s/%(typemodel)s/%(type)s/%(tpl)s.html',
            '%(base)s/%(app)s/%(model)s/%(tpl)s.html']
        self.compare_templates(obj, expected, use_type=True)
    def test_model_can_specify_templates_that_dont_fallback(self):
        obj = Parent()
        obj.templates_without_fallbacks.append(self.tpl_name)
        expected = ['%(base)s/%(app)s/%(model)s/%(tpl)s.html']
        self.compare_templates(obj, expected)
    def test_model_can_change_fallback_template(self):
        obj = Parent()
        obj.fallback_template_name = 'usethisone'
        expected = ['%(base)s/%(app)s/%(model)s/%(tpl)s.html']
        self.compare_templates(obj, expected, use_fallback=True)
    def test_child_model_without_response_uses_fallback(self):
        # Child models fall back through their parent's template directory.
        obj = Child()
        expected = [
            '%(base)s/%(app)s/child/%(tpl)s.html',
            '%(base)s/%(app)s/parent/%(tpl)s.html']
        self.compare_templates(obj, expected, use_fallback=True)
    def test_child_model_with_invalid_response_uses_fallback(self):
        obj = Child()
        obj.response = fudge.Fake().expects('is_valid').returns(False)
        expected = [
            '%(base)s/%(app)s/child/%(tpl)s.html',
            '%(base)s/%(app)s/parent/%(tpl)s.html']
        self.compare_templates(obj, expected, use_fallback=True)
    def test_child_model_with_valid_response(self):
        obj = Child(type=TypeModel(slug=self.type_slug))
        obj.response = fudge.Fake().expects('is_valid').returns(True)
        expected = [
            '%(base)s/%(app)s/%(typemodel)s/%(type)s/%(tpl)s.html',
            '%(base)s/%(app)s/child/%(tpl)s.html',
            '%(base)s/%(app)s/parent/%(tpl)s.html']
        self.compare_templates(obj, expected, use_type=True)
| 41.626168 | 84 | 0.662101 | 4,285 | 0.962057 | 0 | 0 | 0 | 0 | 0 | 0 | 663 | 0.148855 |
12597459b30dbd3bd8a1d404996a35025ae53e68 | 196 | py | Python | app/api/__init__.py | correaleyval/Telezon-S3 | 1a6de581c73f7b2391207bfd717f0dfc42de0223 | [
"MIT"
] | 12 | 2021-03-18T20:42:19.000Z | 2021-06-08T18:43:05.000Z | app/api/__init__.py | luiscib3r/Telezon-S3 | 1a6de581c73f7b2391207bfd717f0dfc42de0223 | [
"MIT"
] | 1 | 2021-03-19T14:08:51.000Z | 2021-03-19T23:09:55.000Z | app/api/__init__.py | luiscib3r/Telezon-S3 | 1a6de581c73f7b2391207bfd717f0dfc42de0223 | [
"MIT"
] | 1 | 2021-04-11T04:35:14.000Z | 2021-04-11T04:35:14.000Z | from fastapi import APIRouter
from app.api.auth import router as auth
from app.api.v1 import router as v1
# Aggregate router mounted under /api; auth and v1 sub-routers supply the
# actual endpoints.
router = APIRouter(prefix='/api')
router.include_router(auth)
router.include_router(v1)
| 21.777778 | 39 | 0.795918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.030612 |
125c6575edea789ef3f36e0a837caccf4d5a15ab | 1,439 | py | Python | TestData/Soccer/ImageDownloader.py | hundyoung/yolo3_keras | e0f306e579c2a1515e9e9eedaeabc2fb4a9773eb | [
"MIT"
] | null | null | null | TestData/Soccer/ImageDownloader.py | hundyoung/yolo3_keras | e0f306e579c2a1515e9e9eedaeabc2fb4a9773eb | [
"MIT"
] | null | null | null | TestData/Soccer/ImageDownloader.py | hundyoung/yolo3_keras | e0f306e579c2a1515e9e9eedaeabc2fb4a9773eb | [
"MIT"
] | null | null | null | import requests
import urllib
import os
import re
from bs4 import BeautifulSoup
import json
# Downloads every search-result image from the PA Images API into save_path,
# skipping files that already exist on disk.
import urllib.request  # ensure the request submodule is loaded explicitly

# save_path = "./foul/"
save_path = "./non_foul/"
url = "https://images.api.press.net/api/v2/search/?category=A,S,E&ck=public&cond=not&crhPriority=1&fields_0=all&fields_1=all&imagesonly=1&limit=2000&orientation=both&page=1&q=football+foul&words_0=all&words_1=all"
# NOTE(review): "&timestamp=" appears to have been entity-decoded to "×tamp="
# at some point; restored here. The leading space in the URL is kept as found.
non_foul_url = " https://images.api.press.net/api/v2/search/?category=A,S,E&ck=public&cond=not&crhPriority=1&fields_0=all&fields_1=all&imagesonly=1&limit=2000&orientation=both&page=2&q=soccer&text=soccer&timestamp=1582204096&totalresults=2483919&words_0=all&words_1=all"

response = requests.get(non_foul_url)
print(response.text)
# Parse into its own name; the original rebound ``json`` itself, shadowing
# the json module for the rest of the script.
payload = json.loads(response.text)
resultList = payload["results"]
save_count = 0
unsave_count = 0
for result in resultList:
    # fileName = result["description_text"]
    # The "sample" rendition carries the downloadable preview URL.
    renditions = result["renditions"]
    sampleSize = renditions["sample"]
    href = sampleSize["href"]
    fileName = str(href).split('/')[-1]
    if not os.path.exists(save_path + fileName):
        print(href)
        try:
            urllib.request.urlretrieve(href, save_path + fileName)
            save_count += 1
        except Exception:  # narrowed from a bare except; stay best-effort
            print("Fail")
    else:
        unsave_count += 1
print("save", save_count, "images")
print("exists", unsave_count, "images")
| 34.261905 | 268 | 0.71647 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 732 | 0.508687 |
125d654e6f98cfb2e9d1009b4b18f7574fef6306 | 785 | py | Python | twitchchatbot/lib/commands/addcom.py | Amperture/twitch-sbc-integration | 71ee86688a7735e6bb3d18c9896c1b8c7a3662d7 | [
"MIT"
] | 10 | 2017-04-20T15:15:51.000Z | 2021-11-17T20:08:01.000Z | twitchchatbot/lib/commands/addcom.py | Amperture/twitch-sbc-integration | 71ee86688a7735e6bb3d18c9896c1b8c7a3662d7 | [
"MIT"
] | null | null | null | twitchchatbot/lib/commands/addcom.py | Amperture/twitch-sbc-integration | 71ee86688a7735e6bb3d18c9896c1b8c7a3662d7 | [
"MIT"
] | 2 | 2020-02-08T04:15:43.000Z | 2021-11-04T09:18:43.000Z | from twitchchatbot.lib.commands.parsing import commands
import json
def addcom(user, args):
    """Register a new chat command and persist the command table to disk.

    :param user: display name of the requesting user (echoed in the reply)
    :param args: list whose first item is the command name (without "!") and
        whose remaining items form the response text; mutated in place
        (args[0] is deleted).
    :return: dict with a ``msg`` entry describing the outcome.
    """
    # Concatenate a list of strings down to a single, space delimited string.
    queueEvent = {}
    if len(args) < 2:
        queueEvent['msg'] = "Proper usage: !addcom <cmd> <Text to send>"
    else:
        commandHead = "!" + args[0]
        # New commands start moderator-only; 'limit' is presumably a cooldown
        # in seconds — verify against the parsing module.
        commands[commandHead] = {
            'limit' : 10,
            'userbadge' : 'moderator',
            'last_used' : 0
        }
        del args[0]
        commands[commandHead]['return'] = " ".join(args)
        # Persist the in-memory table so the command survives restarts.
        with open("commands.json", "w") as f:
            json.dump(commands, f, indent=1)
        queueEvent['msg'] = "%s has added the %s command!" %( \
            user, commandHead)
    return queueEvent
| 26.166667 | 77 | 0.540127 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 229 | 0.29172 |
125da2debfb3bcd25523d38067e1548fefba9b8b | 858 | py | Python | fog/fog-client/setup.py | breakEval13/tor_dev | 1040d906474d1da463f4de57b3c5f72ae14f550d | [
"Apache-2.0"
] | 1 | 2020-07-21T01:23:28.000Z | 2020-07-21T01:23:28.000Z | fog/fog-client/setup.py | breakEval13/tor_dev | 1040d906474d1da463f4de57b3c5f72ae14f550d | [
"Apache-2.0"
] | null | null | null | fog/fog-client/setup.py | breakEval13/tor_dev | 1040d906474d1da463f4de57b3c5f72ae14f550d | [
"Apache-2.0"
] | null | null | null | from distutils.core import setup
import py2exe
# if py2exe complains "can't find P", try one of the following workarounds:
#
# a. py2exe doesn't support zipped eggs - http://www.py2exe.org/index.cgi/ExeWithEggs
# You should give the --always-unzip option to easy_install, or you can use setup.py directly
# $ python setup.py install --record install.log --single-version-externally-managed
# Don't forget to remove the previous zipped egg.
#
# b. Add an empty __init__.py to the P/ top-level directory, if it's missing
# - this is due to a bug (or misleading documentation) in python's imp.find_module()
# Build a standalone Windows executable for the fog-client pluggable-transport
# script via py2exe; the listed packages are bundled explicitly because py2exe
# cannot discover them automatically (see the workaround notes above).
setup(
    console=["fog-client"],
    zipfile="py2exe-fog-client.zip",
    options={
        "py2exe": {
            "includes": ["pyptlib", "twisted", "txsocksx"],
            "packages": ["ometa", "terml", "zope.interface"],
        },
    },
)
| 35.75 | 94 | 0.67366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 673 | 0.784382 |
125f0b131e3fd90aefb488268f7b2509f369413b | 16,807 | py | Python | generate_imagery.py | AnthonyLapadula/pytorch-GANs | 81e5138a0e1e97c35a29b9d3ac6a79de6e9dfd6f | [
"MIT"
] | 278 | 2020-09-07T20:17:47.000Z | 2021-01-31T10:24:33.000Z | generate_imagery.py | AnthonyLapadula/pytorch-GANs | 81e5138a0e1e97c35a29b9d3ac6a79de6e9dfd6f | [
"MIT"
] | 1 | 2020-09-08T08:49:15.000Z | 2020-09-09T07:51:02.000Z | generate_imagery.py | AnthonyLapadula/pytorch-GANs | 81e5138a0e1e97c35a29b9d3ac6a79de6e9dfd6f | [
"MIT"
] | 23 | 2020-09-07T21:50:29.000Z | 2021-01-08T01:25:26.000Z | import os
import shutil
import argparse
import torch
from torch import nn
from torchvision.utils import save_image, make_grid
import matplotlib.pyplot as plt
import numpy as np
import cv2 as cv
import utils.utils as utils
from utils.constants import *
class GenerationMode(enum.Enum):
    """Selects what generate_new_images() should produce."""
    # NOTE: the original definition had trailing commas ("SINGLE_IMAGE = 0,"),
    # which silently made every member's .value an accidental 1-tuple such as
    # (0,). Member identity (the only thing compared elsewhere in this file)
    # is unchanged by this fix.
    SINGLE_IMAGE = 0
    INTERPOLATION = 1
    VECTOR_ARITHMETIC = 2
def postprocess_generated_img(generated_img_tensor):
    """Turn a generator output tensor into a displayable (H, W, 3) numpy image in [0, 1].

    Takes the 0th batch element, moves channels last, tiles grayscale output
    to 3 channels, and min-max normalizes the pixel values.
    """
    assert isinstance(generated_img_tensor, torch.Tensor), f'Expected PyTorch tensor but got {type(generated_img_tensor)}.'

    # CPU numpy copy of the 0th batch element, channel-last (CHW -> HWC).
    image = np.moveaxis(generated_img_tensor.to('cpu').numpy()[0], 0, 2)

    # Grayscale generators (e.g. trained on MNIST) emit one channel - tile to RGB.
    if image.shape[2] == 1:
        image = np.repeat(image, 3, axis=2)

    # Min-max normalize into [0, 1] (the generator's tanh output is in [-1, 1]).
    image -= np.min(image)
    image /= np.max(image)

    return image
def generate_from_random_latent_vector(generator, cgan_digit=None):
    """Sample one Gaussian latent vector, run the generator and return the result.

    Returns (post-processed HWC image in [0, 1], latent vector as a 1D numpy array).
    cgan_digit: if not None, one-hot condition a cGAN generator on this digit.
    """
    with torch.no_grad():
        # Batch of 1, placed on the same device as the generator's parameters.
        latent_vector = utils.get_gaussian_latent_batch(1, next(generator.parameters()).device)

        if cgan_digit is None:
            generated_img = postprocess_generated_img(generator(latent_vector))
        else:  # condition and generate the digit specified by cgan_digit
            ref_label = torch.tensor([cgan_digit], dtype=torch.int64)
            ref_label_one_hot_encoding = torch.nn.functional.one_hot(ref_label, MNIST_NUM_CLASSES).type(torch.FloatTensor).to(next(generator.parameters()).device)
            generated_img = postprocess_generated_img(generator(latent_vector, ref_label_one_hot_encoding))

    return generated_img, latent_vector.to('cpu').numpy()[0]
def generate_from_specified_numpy_latent_vector(generator, latent_vector):
    """Run the generator on a caller-supplied 1D numpy latent vector.

    Returns the post-processed HWC image in [0, 1].
    """
    assert isinstance(latent_vector, np.ndarray), f'Expected latent vector to be numpy array but got {type(latent_vector)}.'

    with torch.no_grad():
        # Add a batch dimension and move the vector onto the generator's device.
        batched_latent_vector = torch.unsqueeze(torch.tensor(latent_vector, device=next(generator.parameters()).device), dim=0)
        return postprocess_generated_img(generator(batched_latent_vector))
def linear_interpolation(t, p0, p1):
    """Linearly interpolate between p0 and p1: returns p0 at t == 0 and p1 at t == 1."""
    direction = p1 - p0
    return p0 + direction * t
def spherical_interpolation(t, p0, p1):
    """Spherical linear interpolation (slerp) between two n-dimensional vectors.

    Formula reference: https://en.wikipedia.org/wiki/Slerp
    (also discussed at https://github.com/soumith/ganhacks).

    Args:
        t (float): interpolation parameter, clamped to [0, 1]
        p0 (numpy array): first endpoint vector
        p1 (numpy array): second endpoint vector

    Returns:
        The spherically interpolated vector.
    """
    # Degenerate cases: out-of-range t and (near-)identical endpoints.
    if t <= 0:
        return p0
    if t >= 1:
        return p1
    if np.allclose(p0, p1):
        return p0

    # Angle between the unit-normalized endpoints.
    unit_p0 = p0 / np.linalg.norm(p0)
    unit_p1 = p1 / np.linalg.norm(p1)
    omega = np.arccos(np.dot(unit_p0, unit_p1))
    sin_omega = np.sin(omega)  # syntactic sugar

    weight_p0 = np.sin((1.0 - t) * omega) / sin_omega
    weight_p1 = np.sin(t * omega) / sin_omega
    return weight_p0 * p0 + weight_p1 * p1
def display_vector_arithmetic_results(imgs_to_display):
    """Lay out the 7 stages of the latent-vector arithmetic demo in one matplotlib figure.

    imgs_to_display must be exactly 7 images, in order: happy-women strip,
    happy-woman average, neutral-women strip, neutral-woman average,
    neutral-men strip, neutral-man average, and the final "happy man" result.
    """
    fig = plt.figure(figsize=(6, 6))
    title_fontsize = 'x-small'
    num_display_imgs = 7
    titles = ['happy women', 'happy woman (avg)', 'neutral women', 'neutral woman (avg)', 'neutral men', 'neutral man (avg)', 'result - happy man']
    ax = np.zeros(num_display_imgs, dtype=object)
    assert len(imgs_to_display) == num_display_imgs, f'Expected {num_display_imgs} got {len(imgs_to_display)} images.'

    # 5x4 grid: rows 0-2 each hold a 3-image strip (3 columns) plus its
    # average (1 column); the final result sits centered in the bottom cells.
    gs = fig.add_gridspec(5, 4, left=0.02, right=0.98, wspace=0.05, hspace=0.3)
    ax[0] = fig.add_subplot(gs[0, :3])
    ax[1] = fig.add_subplot(gs[0, 3])
    ax[2] = fig.add_subplot(gs[1, :3])
    ax[3] = fig.add_subplot(gs[1, 3])
    ax[4] = fig.add_subplot(gs[2, :3])
    ax[5] = fig.add_subplot(gs[2, 3])
    ax[6] = fig.add_subplot(gs[3:, 1:3])

    # 3x nearest-neighbor upscale keeps the small generator outputs legible.
    for i in range(num_display_imgs):
        ax[i].imshow(cv.resize(imgs_to_display[i], (0, 0), fx=3, fy=3, interpolation=cv.INTER_NEAREST))
        ax[i].set_title(titles[i], fontsize=title_fontsize)
        ax[i].tick_params(which='both', bottom=False, left=False, labelleft=False, labelbottom=False)

    plt.show()
def generate_new_images(model_name, cgan_digit=None, generation_mode=GenerationMode.SINGLE_IMAGE, slerp=True, a=None, b=None, should_display=True):
    """ Generate imagery using pre-trained generator (using vanilla_generator_000000.pth by default)

    Args:
        model_name (str): model name you want to use (default lookup location is BINARIES_PATH).
        cgan_digit (int): if specified generate that exact digit.
        generation_mode (GenerationMode): generate a single image from a random vector, interpolate between the 2
            chosen latent vectors, or perform arithmetic over latent vectors (note: not every mode is supported for
            every model type). BUGFIX: the default used to be the bool True, which matched no branch below and
            always raised; it is now GenerationMode.SINGLE_IMAGE.
        slerp (bool): if True use spherical interpolation otherwise use linear interpolation.
        a, b (numpy arrays): latent vectors, if set to None you'll be prompted to choose images you like,
            and use corresponding latent vectors instead.
        should_display (bool): Display the generated images before saving them.
    """
    model_path = os.path.join(BINARIES_PATH, model_name)
    assert os.path.exists(model_path), f'Could not find the model {model_path}. You first need to train your generator.'

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Prepare the correct (vanilla, cGAN, DCGAN, ...) model, load the weights and put the model into evaluation mode
    model_state = torch.load(model_path)
    gan_type = model_state["gan_type"]
    print(f'Found {gan_type} GAN!')
    _, generator = utils.get_gan(device, gan_type)
    generator.load_state_dict(model_state["state_dict"], strict=True)
    generator.eval()

    # Generate a single image, save it and potentially display it
    if generation_mode == GenerationMode.SINGLE_IMAGE:
        generated_imgs_path = os.path.join(DATA_DIR_PATH, 'generated_imagery')
        os.makedirs(generated_imgs_path, exist_ok=True)

        generated_img, _ = generate_from_random_latent_vector(generator, cgan_digit if gan_type == GANType.CGAN.name else None)
        utils.save_and_maybe_display_image(generated_imgs_path, generated_img, should_display=should_display)

    # Pick 2 images you like between which you'd like to interpolate (by typing 'y' into console)
    elif generation_mode == GenerationMode.INTERPOLATION:
        assert gan_type == GANType.VANILLA.name or gan_type == GANType.DCGAN.name, f'Got {gan_type} but only VANILLA/DCGAN are supported for the interpolation mode.'

        interpolation_name = "spherical" if slerp else "linear"
        interpolation_fn = spherical_interpolation if slerp else linear_interpolation

        grid_interpolated_imgs_path = os.path.join(DATA_DIR_PATH, 'interpolated_imagery')  # combined results dir
        decomposed_interpolated_imgs_path = os.path.join(grid_interpolated_imgs_path, f'tmp_{gan_type}_{interpolation_name}_dump')  # dump separate results
        if os.path.exists(decomposed_interpolated_imgs_path):
            shutil.rmtree(decomposed_interpolated_imgs_path)
        os.makedirs(grid_interpolated_imgs_path, exist_ok=True)
        os.makedirs(decomposed_interpolated_imgs_path, exist_ok=True)

        latent_vector_a, latent_vector_b = [None, None]

        # If a and b were not specified loop until the user picked the 2 images he/she likes.
        found_good_vectors_flag = False
        if a is None or b is None:
            while not found_good_vectors_flag:
                generated_img, latent_vector = generate_from_random_latent_vector(generator)
                plt.imshow(generated_img); plt.title('Do you like this image?'); plt.show()
                user_input = input("Do you like this generated image? [y for yes]:")
                if user_input == 'y':
                    if latent_vector_a is None:
                        latent_vector_a = latent_vector
                        print('Saved the first latent vector.')
                    elif latent_vector_b is None:
                        latent_vector_b = latent_vector
                        print('Saved the second latent vector.')
                        found_good_vectors_flag = True
                else:
                    print('Well lets generate a new one!')
                    continue
        else:
            print('Skipping latent vectors selection section and using cached ones.')
            latent_vector_a, latent_vector_b = [a, b]

        # Cache latent vectors
        if a is None or b is None:
            np.save(os.path.join(grid_interpolated_imgs_path, 'a.npy'), latent_vector_a)
            np.save(os.path.join(grid_interpolated_imgs_path, 'b.npy'), latent_vector_b)

        print(f'Lets do some {interpolation_name} interpolation!')
        interpolation_resolution = 47  # number of images between the vectors a and b
        num_interpolated_imgs = interpolation_resolution + 2  # + 2 so that we include a and b

        generated_imgs = []
        for i in range(num_interpolated_imgs):
            t = i / (num_interpolated_imgs - 1)  # goes from 0. to 1.
            current_latent_vector = interpolation_fn(t, latent_vector_a, latent_vector_b)
            generated_img = generate_from_specified_numpy_latent_vector(generator, current_latent_vector)

            print(f'Generated image [{i+1}/{num_interpolated_imgs}].')
            utils.save_and_maybe_display_image(decomposed_interpolated_imgs_path, generated_img, should_display=should_display)

            # Move from channel last to channel first (CHW->HWC), PyTorch's save_image function expects BCHW format
            generated_imgs.append(torch.tensor(np.moveaxis(generated_img, 2, 0)))

        interpolated_block_img = torch.stack(generated_imgs)
        interpolated_block_img = nn.Upsample(scale_factor=2.5, mode='nearest')(interpolated_block_img)
        save_image(interpolated_block_img, os.path.join(grid_interpolated_imgs_path, utils.get_available_file_name(grid_interpolated_imgs_path)), nrow=int(np.sqrt(num_interpolated_imgs)))

    elif generation_mode == GenerationMode.VECTOR_ARITHMETIC:
        assert gan_type == GANType.DCGAN.name, f'Got {gan_type} but only DCGAN is supported for arithmetic mode.'

        # Generate num_options face images and create a grid image from them
        num_options = 100
        generated_imgs = []
        latent_vectors = []
        padding = 2
        for i in range(num_options):
            generated_img, latent_vector = generate_from_random_latent_vector(generator)
            generated_imgs.append(torch.tensor(np.moveaxis(generated_img, 2, 0)))  # make_grid expects CHW format
            latent_vectors.append(latent_vector)
        stacked_tensor_imgs = torch.stack(generated_imgs)
        final_tensor_img = make_grid(stacked_tensor_imgs, nrow=int(np.sqrt(num_options)), padding=padding)
        display_img = np.moveaxis(final_tensor_img.numpy(), 0, 2)

        # For storing latent vectors
        num_of_vectors_per_category = 3
        happy_woman_latent_vectors = []
        neutral_woman_latent_vectors = []
        neutral_man_latent_vectors = []

        # Make it easy - by clicking on the plot you pick the image.
        def onclick(event):
            if event.dblclick:
                pass
            else:  # single click
                if event.button == 1:  # left click
                    x_coord = event.xdata
                    y_coord = event.ydata
                    # 10x10 grid of 64px cells (plus padding) - map click to row/column.
                    column = int(x_coord / (64 + padding))
                    row = int(y_coord / (64 + padding))

                    # Store latent vector corresponding to the image that the user clicked on.
                    if len(happy_woman_latent_vectors) < num_of_vectors_per_category:
                        happy_woman_latent_vectors.append(latent_vectors[10*row + column])
                        print(f'Picked image row={row}, column={column} as {len(happy_woman_latent_vectors)}. happy woman.')
                    elif len(neutral_woman_latent_vectors) < num_of_vectors_per_category:
                        neutral_woman_latent_vectors.append(latent_vectors[10*row + column])
                        print(f'Picked image row={row}, column={column} as {len(neutral_woman_latent_vectors)}. neutral woman.')
                    elif len(neutral_man_latent_vectors) < num_of_vectors_per_category:
                        neutral_man_latent_vectors.append(latent_vectors[10*row + column])
                        print(f'Picked image row={row}, column={column} as {len(neutral_man_latent_vectors)}. neutral man.')
                    else:
                        plt.close()

        plt.figure(figsize=(10, 10))
        plt.imshow(display_img)
        # This is just an example you could also pick 3 neutral woman images with sunglasses, etc.
        plt.title('Click on 3 happy women, 3 neutral women and \n 3 neutral men images (order matters!)')
        cid = plt.gcf().canvas.mpl_connect('button_press_event', onclick)
        plt.show()
        plt.gcf().canvas.mpl_disconnect(cid)
        print('Done choosing images.')

        # Calculate the average latent vector for every category (happy woman, neutral woman, neutral man)
        happy_woman_avg_latent_vector = np.mean(np.array(happy_woman_latent_vectors), axis=0)
        neutral_woman_avg_latent_vector = np.mean(np.array(neutral_woman_latent_vectors), axis=0)
        neutral_man_avg_latent_vector = np.mean(np.array(neutral_man_latent_vectors), axis=0)

        # By subtracting neutral woman from the happy woman we capture the "vector of smiling". Adding that vector
        # to a neutral man we get a happy man's latent vector! Our latent space has amazingly beautiful structure!
        happy_man_latent_vector = neutral_man_avg_latent_vector + (happy_woman_avg_latent_vector - neutral_woman_avg_latent_vector)

        # Generate images from these latent vectors
        happy_women_imgs = np.hstack([generate_from_specified_numpy_latent_vector(generator, v) for v in happy_woman_latent_vectors])
        neutral_women_imgs = np.hstack([generate_from_specified_numpy_latent_vector(generator, v) for v in neutral_woman_latent_vectors])
        neutral_men_imgs = np.hstack([generate_from_specified_numpy_latent_vector(generator, v) for v in neutral_man_latent_vectors])

        happy_woman_avg_img = generate_from_specified_numpy_latent_vector(generator, happy_woman_avg_latent_vector)
        neutral_woman_avg_img = generate_from_specified_numpy_latent_vector(generator, neutral_woman_avg_latent_vector)
        neutral_man_avg_img = generate_from_specified_numpy_latent_vector(generator, neutral_man_avg_latent_vector)

        happy_man_img = generate_from_specified_numpy_latent_vector(generator, happy_man_latent_vector)

        display_vector_arithmetic_results([happy_women_imgs, happy_woman_avg_img, neutral_women_imgs, neutral_woman_avg_img, neutral_men_imgs, neutral_man_avg_img, happy_man_img])
    else:
        raise Exception('Generation mode not yet supported.')
if __name__ == "__main__":
    # CLI entry point: parse options and dispatch to generate_new_images.
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", type=str, help="Pre-trained generator model name", default=r'VANILLA_000000.pth')
    parser.add_argument("--cgan_digit", type=int, help="Used only for cGAN - generate specified digit", default=3)
    # NOTE(review): argparse's type=bool does NOT parse booleans - any
    # non-empty string (even "False") is truthy - and --generation_mode cannot
    # actually accept a GenerationMode value from the command line this way.
    # Consider action='store_true' for the flags and choices over the
    # GenerationMode member names instead.
    parser.add_argument("--generation_mode", type=bool, help="Pick between 3 generation modes", default=GenerationMode.SINGLE_IMAGE)
    parser.add_argument("--slerp", type=bool, help="Should use spherical interpolation (default No)", default=False)
    parser.add_argument("--should_display", type=bool, help="Display intermediate results", default=True)
    args = parser.parse_args()

    # The first time you start generation in the interpolation mode it will cache a and b
    # which you'll choose the first time you run the it.
    a_path = os.path.join(DATA_DIR_PATH, 'interpolated_imagery', 'a.npy')
    b_path = os.path.join(DATA_DIR_PATH, 'interpolated_imagery', 'b.npy')
    latent_vector_a = np.load(a_path) if os.path.exists(a_path) else None
    latent_vector_b = np.load(b_path) if os.path.exists(b_path) else None

    generate_new_images(
        args.model_name,
        args.cgan_digit,
        generation_mode=args.generation_mode,
        slerp=args.slerp,
        a=latent_vector_a,
        b=latent_vector_b,
        should_display=args.should_display)
| 51.873457 | 187 | 0.699292 | 103 | 0.006128 | 0 | 0 | 0 | 0 | 0 | 0 | 5,170 | 0.30761 |
125fa03a8c2ad33884edfc2ff0b21e48fff11c77 | 2,207 | py | Python | toontown/dmenu/DMenuDisclaimer.py | LittleNed/toontown-stride | 1252a8f9a8816c1810106006d09c8bdfe6ad1e57 | [
"Apache-2.0"
] | 1 | 2018-06-16T23:06:38.000Z | 2018-06-16T23:06:38.000Z | toontown/dmenu/DMenuDisclaimer.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | null | null | null | toontown/dmenu/DMenuDisclaimer.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | 4 | 2019-06-20T23:45:23.000Z | 2020-10-14T20:30:15.000Z | from direct.gui.DirectGui import OnscreenText, DirectButton
from panda3d.core import *
from direct.interval.IntervalGlobal import *
from direct.showbase.DirectObject import DirectObject
from toontown.toonbase import ToontownGlobals
class DMenuDisclaimer(DirectObject):
    """Fullscreen fair-use disclaimer screen with I Agree / I Disagree buttons.

    On agreement it swaps in a loading message, sends the "AgreeToGame" event
    and marks the client repository as accepted; on refusal it exits the game.
    """
    # NOTE(review): directNotify is not imported in this module - presumably a
    # Panda3D/Toontown builtin injected at startup; confirm before reuse.
    notify = directNotify.newCategory('DisclaimerScreen')

    def __init__(self):
        DirectObject.__init__(self)
        base.setBackgroundColor(0, 0, 0)
        disclaimerText = "Project Altis is a not-for-profit fanmade parody made under Fair Use. Project Altis is not affiliated with The Walt Disney Company and/or the Disney Interactive Media Group (collectively referred to as \"Disney\") by clicking I agree you hereby agree that you acknowledge this fact."
        self.disclaimer = OnscreenText(text = disclaimerText, font = ToontownGlobals.getMinnieFont(), style = 3, wordwrap = 30, scale = .08, pos = (0, .3, 0))
        gui = loader.loadModel('phase_3/models/gui/tt_m_gui_mat_mainGui.bam')
        yesUp = gui.find('**/tt_t_gui_mat_okUp')
        yesDown = gui.find('**/tt_t_gui_mat_okDown')
        noUp = gui.find('**/tt_t_gui_mat_closeUp')
        noDown = gui.find('**/tt_t_gui_mat_closeDown')
        # NOTE(review): assigning the buttons to self.accept / self.deny
        # shadows both the accept()/deny() methods below and
        # DirectObject.accept() (Panda3D's event-binding method). The
        # command= callbacks still point at the methods because the
        # right-hand side is evaluated before the attribute assignment,
        # but this shadowing is fragile - consider renaming the attributes
        # (e.g. self.acceptButton / self.denyButton).
        self.accept = DirectButton(parent = aspect2d, relief = None, image = (yesUp, yesDown, yesUp), image_scale = (0.6, 0.6, 0.6), image1_scale = (0.7, 0.7, 0.7), image2_scale = (0.7, 0.7, 0.7), text = ('', 'I Agree', 'I Agree'), text_pos=(0, -0.175), text_style = 3, text_scale=0.08, pos = (.4, 0, -.5), command = self.accept)
        self.deny = DirectButton(parent = aspect2d, relief = None, image = (noUp, noDown, noUp), image_scale = (0.6, 0.6, 0.6), image1_scale = (0.7, 0.7, 0.7), image2_scale = (0.7, 0.7, 0.7), text = ('', 'I Disagree', 'I Disagree'), text_pos=(0, -0.175), text_style = 3, text_scale=0.08, pos = (-.4, 0, -.5), command = self.deny)

    def accept(self):
        # Player agreed: show a loading message, tear down the buttons and
        # notify the rest of the client.
        self.disclaimer['text'] = 'Loading...'
        self.accept.destroy()  # self.accept is the DirectButton here (see note in __init__)
        self.deny.destroy()
        base.graphicsEngine.renderFrame()
        messenger.send("AgreeToGame")
        base.cr.hasAccepted = True
        self.disclaimer.removeNode()

    def deny(self):
        # Player refused the disclaimer: shut the game down.
        base.exitFunc()
125fbe96b358bea289f744b98621a56277d4b0a4 | 7,012 | py | Python | geomloss/sinkhorn_images.py | ismedina/geomloss | bbd6289a139174effedb6855e1e992eb77772c67 | [
"MIT"
] | null | null | null | geomloss/sinkhorn_images.py | ismedina/geomloss | bbd6289a139174effedb6855e1e992eb77772c67 | [
"MIT"
] | null | null | null | geomloss/sinkhorn_images.py | ismedina/geomloss | bbd6289a139174effedb6855e1e992eb77772c67 | [
"MIT"
] | null | null | null | import torch
from .utils import log_dens, pyramid, upsample, softmin_grid
from .sinkhorn_divergence import epsilon_schedule, scaling_parameters
from .sinkhorn_divergence import sinkhorn_cost, sinkhorn_loop
def extrapolate(f_ba, g_ab, eps, damping, C_xy, b_log, C_xy_fine):
    """Coarse-to-fine transition for the grid solver: upsample the dual potential f_ba.

    The remaining arguments are accepted only to match the extrapolation hook
    signature expected by sinkhorn_loop; they are unused on regular grids.
    """
    return upsample(f_ba)
def kernel_truncation(
    C_xy,
    C_yx,
    C_xy_fine,
    C_yx_fine,
    f_ba,
    g_ab,
    eps,
    truncate=None,
    cost=None,
    verbose=False,
):
    """No-op kernel truncation hook for grids: return the fine-level "cost matrices" unchanged.

    The signature mirrors the generic multiscale hook expected by
    sinkhorn_loop; no actual truncation is performed here.
    """
    return C_xy_fine, C_yx_fine
def sinkhorn_divergence(
    a,
    b,
    p=2,
    blur=None,
    reach=None,
    axes=None,
    scaling=0.5,
    cost=None,
    debias=True,
    potentials=False,
    verbose=False,
    multiscale=True,  # IM: Added multiscale argument, otherwise multiscale triggered automatically
    **kwargs,
):
    r"""Sinkhorn divergence between measures supported on 1D/2D/3D grids.

    Args:
        a ((B, Nx), (B, Nx, Ny) or (B, Nx, Ny, Nz) Tensor): Weights :math:`\alpha_i`
            for the first measure, with a batch dimension.

        b ((B, Nx), (B, Nx, Ny) or (B, Nx, Ny, Nz) Tensor): Weights :math:`\beta_j`
            for the second measure, with a batch dimension.

        p (int, optional): Exponent of the ground cost function
            :math:`C(x_i,y_j)`, which is equal to
            :math:`\tfrac{1}{p}\|x_i-y_j\|^p` if it is not provided
            explicitly through the `cost` optional argument.
            Defaults to 2.

        blur (float or None, optional): Target value for the blurring scale
            of the "point spread function" or Gibbs kernel
            :math:`K_{i,j} = \exp(-C(x_i,y_j)/\varepsilon) = \exp(-\|x_i-y_j\|^p / p \text{blur}^p)`.
            In the Sinkhorn algorithm, the temperature :math:`\varepsilon`
            is computed as :math:`\text{blur}^p`.
            Defaults to None: we pick the smallest pixel size across
            the Nx, Ny and Nz dimensions (if applicable).

        axes (tuple of pairs of floats or None (= [0, 1)^(1/2/3)), optional):
            Dimensions of the image domain, specified through a 1/2/3-uple
            of [vmin, vmax] bounds.
            For instance, if the batched 2D images correspond to sampled
            measures on [-10, 10) x [-3, 5), you may use "axes = ([-10, 10], [-3, 5])".
            The (implicit) pixel coordinates are computed using a "torch.linspace(...)"
            across each dimension: along any given axis, the spacing between two pixels
            is equal to "(vmax - vmin) / npixels".
            Defaults to None: we assume that the signal / image / volume
            is sampled on the unit interval [0, 1) / square [0, 1)^2 / cube [0, 1)^3.

        scaling (float in (0, 1), optional): Ratio between two successive
            values of the blur radius in the epsilon-scaling annealing descent.
            Defaults to 0.5.

        cost (function or None, optional): ...
            Defaults to None: we use a Euclidean cost
            :math:`C(x_i,y_j) = \tfrac{1}{p}\|x_i-y_j\|^p`.

        debias (bool, optional): Should we used the "de-biased" Sinkhorn divergence
            :math:`\text{S}_{\varepsilon, \rho}(\al,\be)` instead
            of the "raw" entropic OT cost
            :math:`\text{OT}_{\varepsilon, \rho}(\al,\be)`?
            This slows down the OT solver but guarantees that our approximation
            of the Wasserstein distance will be positive and definite
            - up to convergence of the Sinkhorn loop.
            For a detailed discussion of the influence of this parameter,
            see e.g. Fig. 3.21 in Jean Feydy's PhD thesis.
            Defaults to True.

        potentials (bool, optional): Should we return the optimal dual potentials
            instead of the cost value?
            Defaults to False.

    Returns:
        (B,) Tensor or pair of (B, Nx, ...), (B, Nx, ...) Tensors: If `potentials` is True,
            we return a pair of (B, Nx, ...), (B, Nx, ...) Tensors that encode the optimal
            dual vectors, respectively supported by :math:`x_i` and :math:`y_j`.
            Otherwise, we return a (B,) Tensor of values for the Sinkhorn divergence.
    """
    if blur is None:
        # Default target blur: the (unit-domain) pixel width of the finest axis.
        blur = 1 / a.shape[-1]

    # Pre-compute a multiscale decomposition (=Binary/Quad/OcTree)
    # of the input measures, stored as logarithms
    if multiscale:
        a_s, b_s = pyramid(a)[1:], pyramid(b)[1:]
    else:
        a_s, b_s = [a], [b]
    a_logs = list(map(log_dens, a_s))
    b_logs = list(map(log_dens, b_s))

    # By default, our cost function :math:`C(x_i,y_j)` is a halved,
    # squared Euclidean distance (p=2) or a simple Euclidean distance (p=1):
    depth = len(a_logs)
    if cost is None:
        C_s = [p] * depth  # Dummy "cost matrices"
    else:
        raise NotImplementedError()

    # Diameter of the configuration:
    diameter = 1
    # The target temperature eps = blur**p, the eps-scaling schedule and the
    # marginal-constraint strength rho = reach**p are all computed by
    # scaling_parameters below (the previous in-line pre-computations of eps
    # and rho were dead code, immediately overwritten).
    if scaling < 0.5:
        raise ValueError(
            f"Scaling value of {scaling} is too small: please use a number in [0.5, 1)."
        )

    diameter, eps, eps_list, rho = scaling_parameters(
        None, None, p, blur, reach, diameter, scaling
    )

    # List of pixel widths:
    pyramid_scales = [diameter / a.shape[-1] for a in a_s]
    if verbose:
        print("Pyramid scales:", pyramid_scales)

    # Decide at which iterations of the eps-schedule we "jump" to the next
    # (finer) pyramid level: as soon as the temperature drops below the
    # current pixel width raised to the power p.
    current_scale = pyramid_scales.pop(0)
    jumps = []
    if multiscale:
        # NOTE: the loop variable deliberately reuses the name "eps"; after the
        # loop, eps holds the last (finest) temperature, which is the value
        # passed to sinkhorn_cost below.
        for i, eps in enumerate(eps_list[1:]):
            if current_scale ** p > eps:
                jumps.append(i + 1)
                current_scale = pyramid_scales.pop(0)

    if verbose:
        print("Temperatures: ", eps_list)
        print("Jumps: ", jumps)

    assert (
        len(jumps) == len(a_s) - 1
    ), "There's a bug in the multicale pre-processing..."

    # Use an optimal transport solver to retrieve the dual potentials:
    f_aa, g_bb, g_ab, f_ba = sinkhorn_loop(
        softmin_grid,
        a_logs,
        b_logs,
        C_s,
        C_s,
        C_s,
        C_s,
        eps_list,
        rho,
        jumps=jumps,
        kernel_truncation=kernel_truncation,
        extrapolate=extrapolate,
        debias=debias,
    )

    # Optimal transport cost:
    return sinkhorn_cost(
        eps,
        rho,
        a,
        b,
        f_aa,
        g_bb,
        g_ab,
        f_ba,
        batch=True,
        debias=debias,
        potentials=potentials,
    )
125ff27f92e4698c1a23719a23bf8d2e592956d2 | 31,947 | py | Python | time_trials/indexedvalues_timetrials.py | eric-s-s/share-with-z | 60a4c9788085ba55c6214114447e1c16bc49f7ce | [
"MIT"
] | 5 | 2016-07-29T18:28:04.000Z | 2019-06-17T19:49:11.000Z | time_trials/indexedvalues_timetrials.py | eric-s-s/dice-tables | 60a4c9788085ba55c6214114447e1c16bc49f7ce | [
"MIT"
] | 16 | 2015-11-12T02:13:30.000Z | 2020-12-05T21:35:00.000Z | time_trials/indexedvalues_timetrials.py | eric-s-s/dice-tables | 60a4c9788085ba55c6214114447e1c16bc49f7ce | [
"MIT"
] | null | null | null | """a module solely for finding how add_a_list and add_tuple_list compare.
it's effectively the empirical proof for how LongIntTable.add() chooses
the fastest method with it's get_fastest_method() function."""
from __future__ import print_function
from math import log10
import time
import random
from os import getcwd
from itertools import cycle
import matplotlib.pyplot as plt
import numpy as np
from dicetables.additiveevents import AdditiveEvents
WELCOME_TXT = 'hi'
def input_py_2_and_3(question):
    """Prompt the user, working on both Python 2 (raw_input) and Python 3 (input)."""
    try:
        return raw_input(question)
    except NameError:
        # raw_input does not exist on Python 3 - fall back to input().
        return input(question)
def generate_tuple_list_with_increasing_number_of_events(first_event, start_length, event_occurrences,
                                                         len_increase_step=1):
    """Yield an ever-growing list of (event, event_occurrences) tuples.

    The first yield holds start_length consecutive events beginning at
    first_event; every subsequent yield appends len_increase_step new
    consecutive events.  The same list object is mutated and re-yielded.

    :param first_event:
    :param start_length:
    :param event_occurrences:
    :param len_increase_step: =1
    :return: generator(next)
    """
    events = [(first_event + offset, event_occurrences) for offset in range(start_length)]
    while True:
        yield events
        next_event = events[-1][0] + 1
        events.extend((next_event + step, event_occurrences) for step in range(len_increase_step))
def generate_tuple_list_with_increasing_occurrences(first_event, start_length, increment, exponential_increase=True):
    """Yield fixed-length tuple lists whose occurrence counts grow each iteration.

    The first yield uses 1 occurrence per event.  Afterwards a growth
    accumulator rises by increment each time, and every event's count becomes
    int(2 ** growth) (exponential) or int(growth) (linear).  A fresh list is
    built per yield.

    :param first_event:
    :param start_length:
    :param increment:
    :param exponential_increase: =True
    :return: generator(next)
    """
    event_values = range(first_event, first_event + start_length)
    current_list = [(event, 1) for event in event_values]
    growth = 0.0
    while True:
        yield current_list
        growth += increment
        occurrences = int(2 ** growth) if exponential_increase else int(growth)
        current_list = [(event, occurrences) for event in event_values]
def generate_tuple_list_with_increasing_gaps(first_event, start_length, event_occurrences=1, gaps_per_iteration=1,
                                             randomize=True):
    """Yield a fixed-length tuple list in which more and more occurrence
    counts are zeroed out ("gaps"), until only two non-zero events remain.

    :param first_event:
    :param start_length:
    :param event_occurrences: =1
    :param gaps_per_iteration: =1  (gaps punched between successive yields)
    :param randomize: =True  (pick the gap position randomly vs. from the end)
    :return: generator
    """
    tuple_list_of_events = [(first_event + index, event_occurrences) for index in range(start_length)]
    # Stop once only two events still carry their original occurrence count.
    while sum([event[1] for event in tuple_list_of_events]) > 2 * event_occurrences:
        yield tuple_list_of_events
        for _ in range(gaps_per_iteration):
            # Choose where to begin searching for a still-intact event
            # (never the very first or very last slot when randomizing).
            if randomize:
                start_search_index = random.randrange(1, start_length - 1)
            else:
                start_search_index = len(tuple_list_of_events) - 2
            only_occurrences = [event[1] for event in tuple_list_of_events]
            # Walk the start index backwards until the slice (excluding the
            # final element) contains an event with its original count, so
            # the first and last events are never zeroed.
            while not only_occurrences[start_search_index:-1].count(event_occurrences) and start_search_index:
                start_search_index -= 1
            index_to_make_zero = only_occurrences[start_search_index:].index(event_occurrences) + start_search_index
            event_value = tuple_list_of_events[index_to_make_zero][0]
            tuple_list_of_events[index_to_make_zero] = (event_value, 0)
def get_generator(variable_name, first_event, start_length,
                  growth_increment=1.,
                  event_occurrences=1,
                  len_increase_step=1,
                  gaps_per_iteration=1,
                  randomize=True,
                  exponential_increase=True):
    """Dispatch to one of the tuple-list generators by name.

    :param variable_name: 'list_length', 'event_occurrences', 'increasing_gaps'
    :param first_event:
    :param start_length:
    :param growth_increment: =1.0
    :param event_occurrences: =1
    :param len_increase_step: =1
    :param gaps_per_iteration: =1
    :param randomize: True
    :param exponential_increase: =True
    :return: the matching generator, or None for an unrecognized name
    """
    # Lambdas keep construction lazy: only the requested generator is created.
    factories = {
        'list_length': lambda: generate_tuple_list_with_increasing_number_of_events(
            first_event, start_length, event_occurrences, len_increase_step),
        'event_occurrences': lambda: generate_tuple_list_with_increasing_occurrences(
            first_event, start_length, growth_increment, exponential_increase),
        'increasing_gaps': lambda: generate_tuple_list_with_increasing_gaps(
            first_event, start_length, event_occurrences, gaps_per_iteration, randomize),
    }
    factory = factories.get(variable_name)
    if factory is not None:
        return factory()
def one_time_trial(combine_times, events_tuples, input_dict_size=1, use_exponential_occurrences=True):
    """
    :param combine_times:
    :param events_tuples:
    :param input_dict_size: =1
    :param use_exponential_occurrences: =True
    :return: (list_len, # occurrences, log10(# occurrences), range/events, start dict size)\n
        , control time, IndexedValues time
    """
    # Only echo the list when the occurrence counts are small enough to print sanely.
    if events_tuples[0][1] < 10**100:
        print('one_time_trial prepped list [{} .. {}]'.format(events_tuples[0], events_tuples[-1]))
    input_dict = get_input_dict(input_dict_size, use_exponential_occurrences)
    # Drop zero-occurrence placeholders produced by the "increasing_gaps" generator.
    events_tuples = [pair for pair in events_tuples if pair[1]]
    control_time, indexed_values_time = get_control_and_indexed_values_times(combine_times, events_tuples, input_dict)

    # Independent variables describing this trial (order documented above).
    list_length = float(len(events_tuples))
    event_occurrences = float(events_tuples[0][1])
    event_occurrences_exponent = log10(events_tuples[0][1])
    events_range_vs_events = (max(events_tuples)[0] - min(events_tuples)[0] + 1) / float(list_length)
    start_dict_size = float(input_dict_size)
    y_axis_variables = (list_length, event_occurrences, event_occurrences_exponent, events_range_vs_events,
                        start_dict_size)
    return y_axis_variables, control_time, indexed_values_time
def get_input_dict(input_dict_size, use_exponential_occurrences):
    """Build the starting events dict {event: occurrences} for a trial.

    Occurrences are 1 + 2**(event % 1000) when use_exponential_occurrences is
    truthy, otherwise 1 + event % 1000.
    """
    if use_exponential_occurrences:
        occurrences_for = lambda event: 1 + 2 ** (event % 1000)
    else:
        occurrences_for = lambda event: 1 + event % 1000
    return {event: occurrences_for(event) for event in range(input_dict_size)}
def get_control_and_indexed_values_times(combine_times, events_tuples, input_dict):
    """Time the control combine method and combine_by_indexed_values on the same inputs.

    Returns (control_time, indexed_values_time) in seconds.
    """
    # time.clock was removed in Python 3.8; prefer perf_counter and fall back
    # to clock only on interpreters that predate it (e.g. Python 2).
    timer = getattr(time, 'perf_counter', None) or time.clock

    control_events_action = get_control_action(input_dict, events_tuples)
    events_for_indexed_values = AdditiveEvents(input_dict)
    events_to_add = AdditiveEvents(dict(events_tuples))

    indexed_values_start = timer()
    events_for_indexed_values.combine_by_indexed_values(events_to_add, combine_times)
    indexed_values_time = timer() - indexed_values_start

    control_start = timer()
    control_events_action(events_to_add, combine_times)
    control_time = timer() - control_start
    return control_time, indexed_values_time
def get_control_action(input_dict, events_tuples):
    """Return the bound AdditiveEvents combine method to use as the control.

    Flattened-list combination is used when every event occurs once;
    otherwise the dictionary-based combination is used.
    """
    control_events = AdditiveEvents(input_dict)
    method_lookup = {
        'tuple_list': control_events.combine_by_dictionary,
        'flattened_list': control_events.combine_by_flattened_list,
    }
    return method_lookup[get_control_method_str(events_tuples)]
def get_control_method_str(prepped_list):
    """'flattened_list' when the first event occurs exactly once, else 'tuple_list'."""
    every_event_once = prepped_list[0][1] == 1
    return 'flattened_list' if every_event_once else 'tuple_list'
def time_trial_vary_start_dict(events_tuple_list, input_dict_start_size=1000, input_dict_downward_step=5,
                               number_of_adds=1, use_exponential_occurrences=True):
    """Time control vs indexed-values combines while the start dict shrinks.

    :param events_tuple_list: list of (event, occurrences) tuples added each trial
    :param input_dict_start_size: initial size of the starting dict (default 1000)
    :param input_dict_downward_step: how much the dict shrinks per trial (default 5)
    :param number_of_adds: combine repetitions per trial (default 1)
    :param use_exponential_occurrences: start-dict occurrences grow as 2**n
        when True (default True)
    :return: (variable_values, variable_name, control_times, indexed_values_times)
    """
    adds_per_trial = number_of_adds
    variable_name = 'start_dict_size'
    variable_values = []
    control_times = []
    indexed_values_times = []
    print('please wait for the down to reach zero')
    input_dict_size = input_dict_start_size
    while input_dict_size > 0:
        print('adds {}'.format(adds_per_trial))
        y_axis, control_time, indexed_values_time = one_time_trial(
            adds_per_trial,
            events_tuple_list,
            input_dict_size=input_dict_size,
            use_exponential_occurrences=use_exponential_occurrences
        )
        input_dict_size -= input_dict_downward_step
        # y_axis[4] is the start-dict size recorded by one_time_trial
        variable = y_axis[4]
        print('results: variable: {:.2}, control: {:.3e}, IndexedValues: {:.3e}'.format(variable,
                                                                                        control_time,
                                                                                        indexed_values_time))
        print('count down: {}\n'.format(input_dict_size))
        variable_values.append(variable)
        control_times.append(control_time)
        indexed_values_times.append(indexed_values_time)
    return variable_values, variable_name, control_times, indexed_values_times
def time_trial(generator, variable_name, adds_per_trial=1, automatic_adds_per_trial=False, input_dict_size=1,
               number_of_data_pts=100):
    """Collect timing data points by pulling one tuple list per trial from a generator.

    :param generator: yields the (event, occurrences) tuple list for each trial
    :param variable_name: 'list_length', 'event_occurrences_linear', 'event_occurrences', 'increasing_gaps'
    :param adds_per_trial: combines per trial (default 1); recomputed each trial when automatic
    :param automatic_adds_per_trial: when True, scale adds_per_trial so that
        len(tuple_list) * adds stays near a constant workload (default False)
    :param input_dict_size: size of the starting dict (default 1)
    :param number_of_data_pts: maximum number of trials (default 100); may stop
        early if the generator is exhausted
    :return: variable_values, variable_name, control_times, indexed_values_times
    """
    # target for len(tuple_list) * adds_per_trial when adds are automatic
    tuple_list_length_times_add_times = 2200
    variable_values = []
    control_times = []
    indexed_values_times = []
    count = number_of_data_pts
    print('please wait for the count-up/down to reach zero')
    while count > 0:
        try:
            tuple_list_for_trial = next(generator)
        except StopIteration:
            break
        if automatic_adds_per_trial:
            adds_per_trial = int(max(1, tuple_list_length_times_add_times / len(tuple_list_for_trial)))
        print('adds {}'.format(adds_per_trial))
        y_axis, control_time, indexed_values_time = one_time_trial(adds_per_trial, tuple_list_for_trial,
                                                                   input_dict_size=input_dict_size)
        # pick the y-axis element matching the requested sweep variable
        variable_order = ['list_length', 'event_occurrences_linear', 'event_occurrences', 'increasing_gaps']
        index = variable_order.index(variable_name)
        variable = y_axis[index]
        print('results: variable: {:.2}, control: {:.3e}, IndexedValues: {:.3e}'.format(variable,
                                                                                        control_time,
                                                                                        indexed_values_time))
        print('count down: {}\n'.format(count))
        count -= 1
        variable_values.append(variable)
        control_times.append(control_time)
        indexed_values_times.append(indexed_values_time)
    return variable_values, variable_name, control_times, indexed_values_times
def plot_trial_with_ratio(variable_values, variable_name, control_times, iv_times, title='none', figure=1,
                          style='bo-', label='', base_line=False):
    """Plot the speed ratio (control time / indexed-values time) vs the swept variable.

    A ratio above 1.0 means the indexed-values method was faster.

    :param variable_values: x-axis values
    :param variable_name: 'list_length', 'event_occurrences', 'event_occurrences_linear', 'increasing_gaps', 'dict_size'
    :param control_times: per-trial times for the control method
    :param iv_times: per-trial times for the indexed-values method
    :param title: figure title
    :param figure: unused in this body; kept for call compatibility
    :param style: matplotlib format string for this series (default 'bo-')
    :param label: legend label for this series
    :param base_line: when True, also draw the ratio == 1.0 reference line
    :return: None
    """
    plt.ion()
    speed_ratios = []
    equality_line = [1.0] * len(control_times)
    for index, numerator in enumerate(control_times):
        speed_ratios.append(numerator / iv_times[index])
    plt.plot(variable_values, speed_ratios, style, label=label)
    if base_line:
        plt.plot(variable_values, equality_line, 'g-', label='equal speed')
    plt.ylabel('speed of indexed values over speed of control')
    x_labels = {'list_length': 'size of tuple list',
                'event_occurrences': '10 ** exponent event occurrences',
                'event_occurrences_linear': 'event occurrences',
                'increasing_gaps': 'ratio of events range to non-zero events',
                'start_dict_size': 'number of events in starting dictionary'}
    plt.xlabel(x_labels[variable_name])
    plt.legend()
    plt.title(title)
    plt.pause(0.01)
def plot_trial_two_lines(variable_values, variable_name, control_times, iv_times, title='none', figure=1):
    """Plot control and indexed-values times as two curves plus their linear fits.

    :param variable_values: x-axis values
    :param variable_name: 'list_length', 'event_occurrences', 'increasing_gaps', 'dict_size'
    :param control_times: per-trial times for the control method
    :param iv_times: per-trial times for the indexed-values method
    :param title: base title; the fit intersection is appended to it
    :param figure: matplotlib figure number to draw on (cleared first)
    :return: x value where the two first-order fits intersect
    """
    plt.ion()
    use_figure = plt.figure(figure)
    use_figure.clf()
    plt.plot(variable_values, control_times, 'bo-', label='control')
    plt.plot(variable_values, iv_times, 'r*-', label='IndexedValues')
    plt.ylabel('time')
    x_labels = {'list_length': 'size of tuple list',
                'event_occurrences': '10 ** exponent event occurrences',
                'increasing_gaps': 'ratio of events range to non-zero events',
                'start_dict_size': 'number of events in starting dictionary'}
    plt.xlabel(x_labels[variable_name])
    plt.legend()
    intersection, control_fit, iv_fit = get_poly_fit_and_intersection(variable_values, control_times, iv_times)
    title += '\nintersection = {}'.format(intersection)
    plt.title(title)
    # draw both linear fits in cyan so the crossing point is visible
    plt.plot(variable_values, control_fit, 'c-')
    plt.plot(variable_values, iv_fit, 'c-')
    plt.pause(0.01)
    return intersection
def get_poly_fit_and_intersection(variable_values, control_times, iv_times):
    """Fit a line to each timing series and find where the two lines cross.

    :return: (intersection_x, control_fit_values, iv_fit_values) where the fit
        values are the fitted-line y values at each x in variable_values
    """
    c_slope, c_const = np.polyfit(variable_values, control_times, 1)
    i_slope, i_const = np.polyfit(variable_values, iv_times, 1)
    crossing = (c_const - i_const) / (i_slope - c_slope)
    control_line = [c_slope * x_value + c_const for x_value in variable_values]
    iv_line = [i_slope * x_value + i_const for x_value in variable_values]
    return crossing, control_line, iv_line
def get_welcome():
    """Return the contents of welcome_message.txt from the current directory,
    or an apology string if the file cannot be read.

    Fixes: hard-coded '\\' separator (Windows-only) replaced with
    os.path.join, and the file handle is now closed via a with-block.
    """
    from os import path  # local import keeps this fix self-contained
    welcome_file_name = path.join(getcwd(), 'welcome_message.txt')
    try:
        with open(welcome_file_name, 'r') as welcome_file:
            welcome_message = welcome_file.read()
    except IOError:
        welcome_message = 'took a guess where "welcome_' \
                          'message.txt" was, and I was wrong.'
    return welcome_message
def get_int(question):
    """Keep prompting until the reply parses as an int; 'q' exits the program.

    :param question: text shown above the '>>>' prompt
    :return: the entered integer
    :raises SystemExit: when the user enters 'q'
    """
    prompt = question + '\n>>> '
    while True:
        reply = input_py_2_and_3(prompt)
        if reply == 'q':
            raise SystemExit
        try:
            return int(reply)
        except ValueError:
            print('must be int OR "q" to quit')
def get_answer(question, min_val, max_val):
    """Prompt for an int (bounds shown in the prompt) and clamp the reply
    into [min_val, max_val]."""
    bounded_question = '{} between {} and {}'.format(question, min_val, max_val)
    raw_val = get_int(bounded_question)
    return min(max_val, max(min_val, raw_val))
def get_plot_style_generator():
    """Endlessly yield matplotlib format strings like 'bo-'.

    Colors and markers cycle at different periods, so consecutive series
    get visually distinct styles.
    """
    markers = cycle(['o', '<', '>', 'v', 's', 'p', '*',
                     '+', 'x', 'D', 'd'])
    hues = cycle(['b', 'y', 'r', 'c', 'm', 'k', 'g'])
    for hue, marker in zip(hues, markers):
        yield '{}{}-'.format(hue, marker)
def do_trials_vary_start_dict(add_list_len=10, occurrences_are_many=False, use_exponential_occurrences=True,
                              adds_list=(1, 2, 5)):
    """Run and plot one ratio series per adds value, varying the start-dict size.

    :param add_list_len: length of the tuple list being added (default 10)
    :param occurrences_are_many: each event occurs 10 times when True, else once (default False)
    :param use_exponential_occurrences: start-dict occurrences grow as 2**n when True (default True)
    :param adds_list: combine counts, one plot series per value (default (1, 2, 5))
    :return: None (draws onto the current matplotlib figure)
    """
    style_generator = get_plot_style_generator()
    if occurrences_are_many:
        occurrences = 10
    else:
        occurrences = 1
    # take a single fixed tuple list; only the start dict varies per trial
    list_for_vary_start_dict = get_generator('list_length', 0, add_list_len, event_occurrences=occurrences)
    tuple_list_for_time_trial = next(list_for_vary_start_dict)
    for add_variable in adds_list:
        title = 'vary size of start dict. number of adds = {}\n'.format(add_variable)
        title += 'input occurrences = {}. input list length = {}'.format(occurrences, add_list_len)
        results = time_trial_vary_start_dict(tuple_list_for_time_trial, input_dict_start_size=1000,
                                             input_dict_downward_step=10, number_of_adds=add_variable,
                                             use_exponential_occurrences=use_exponential_occurrences)
        # only the last series draws the ratio == 1 reference line
        do_base_line = False
        if add_variable == adds_list[-1]:
            do_base_line = True
        plot_trial_with_ratio(*results, figure=1, title=title, label='add: {}'.format(add_variable),
                              style=next(style_generator), base_line=do_base_line)
def do_trials_vary_event_occurrences(add_list_len=10, start_dict_size=1, adds_list=(1, 2, 5), exponential_growth=True):
    """Run and plot trials where the input events' occurrences grow each trial.

    :param add_list_len: length of the tuple list being added (default 10)
    :param start_dict_size: size of the starting dict (default 1)
    :param adds_list: combine counts, one plot series per value (default (1, 2, 5))
    :param exponential_growth: occurrences grow exponentially when True (default True)
    :return: None (draws onto the current matplotlib figure)
    """
    style_generator = get_plot_style_generator()
    for add_variable in adds_list:
        # exponential growth sweeps an exponent in small steps; linear growth
        # sweeps the occurrence count directly
        if exponential_growth:
            increment = 0.2
            time_trial_variable = 'event_occurrences'
        else:
            increment = 1
            time_trial_variable = 'event_occurrences_linear'
        event_occurrences_generator = get_generator('event_occurrences', 0, add_list_len, growth_increment=increment,
                                                    exponential_increase=exponential_growth)
        results = time_trial(event_occurrences_generator, time_trial_variable, adds_per_trial=add_variable,
                             input_dict_size=start_dict_size, number_of_data_pts=100)
        title = 'increasing event occurrences.\n'
        title += 'starting dict size={}. input list length = {}'.format(start_dict_size, add_list_len)
        # only the last series draws the ratio == 1 reference line
        do_base_line = False
        if add_variable == adds_list[-1]:
            do_base_line = True
        plot_trial_with_ratio(*results, figure=1, title=title, label='add: {}'.format(add_variable),
                              style=next(style_generator), base_line=do_base_line)
def do_trials_vary_list_length(start_dict_size=1, occurrences_are_many=False, adds_list=(1, 2, 5)):
    """Run and plot trials where the added tuple list grows by one event per trial.

    :param start_dict_size: size of the starting dict (default 1)
    :param occurrences_are_many: each event occurs 10 times when True, else once (default False)
    :param adds_list: combine counts, one plot series per value (default (1, 2, 5))
    :return: None (draws onto the current matplotlib figure)
    """
    style_generator = get_plot_style_generator()
    if occurrences_are_many:
        occurrences = 10
    else:
        occurrences = 1
    for add_variable in adds_list:
        list_length_generator = get_generator('list_length', 0, 2, event_occurrences=occurrences, len_increase_step=1)
        results = time_trial(list_length_generator, 'list_length', adds_per_trial=add_variable,
                             input_dict_size=start_dict_size, number_of_data_pts=100)
        title = 'increasing list length.\n'
        title += 'starting dict size={}. input list occurrences = {}'.format(start_dict_size, occurrences)
        # only the last series draws the ratio == 1 reference line
        do_base_line = False
        if add_variable == adds_list[-1]:
            do_base_line = True
        plot_trial_with_ratio(*results, figure=1, title=title, label='add: {}'.format(add_variable),
                              style=next(style_generator), base_line=do_base_line)
def do_trials_vary_gaps_in_list(add_list_len=100, start_dict_size=1, occurrences_are_many=False, randomize_gaps=True,
                                adds_list=(1, 2, 5)):
    """Run and plot trials where more and more gaps are punched into the added list.

    :param add_list_len: length of the tuple list being added (default 100)
    :param start_dict_size: size of the starting dict (default 1)
    :param occurrences_are_many: each event occurs 10 times when True, else once (default False)
    :param randomize_gaps: gaps are placed at random positions when True (default True)
    :param adds_list: combine counts, one plot series per value (default (1, 2, 5))
    :return: None (draws onto the current matplotlib figure)
    """
    style_generator = get_plot_style_generator()
    if occurrences_are_many:
        occurrences = 10
    else:
        occurrences = 1
    # scale gaps-per-step with list size, at least one gap per step
    gaps_per_iteration = max(1, add_list_len // 100)
    for add_variable in adds_list:
        increasing_gaps_generator = get_generator('increasing_gaps', 0, add_list_len, event_occurrences=occurrences,
                                                  gaps_per_iteration=gaps_per_iteration, randomize=randomize_gaps)
        results = time_trial(increasing_gaps_generator, 'increasing_gaps', adds_per_trial=add_variable,
                             input_dict_size=start_dict_size, number_of_data_pts=100)
        title = 'making many gaps in list.\n'
        title += 'starting dict size={}. input list length: {}, occurrences: {}'.format(start_dict_size,
                                                                                        add_list_len,
                                                                                        occurrences)
        # only the last series draws the ratio == 1 reference line
        do_base_line = False
        if add_variable == adds_list[-1]:
            do_base_line = True
        plot_trial_with_ratio(*results, figure=1, title=title, label='add: {}'.format(add_variable),
                              style=next(style_generator), base_line=do_base_line)
def graphing_ui():
    """Interactive loop: ask which variable to sweep, gather kwargs, run the
    matching do_trials_* function and plot the results (new figure per round).

    Loops until get_answer()/get_int() receives "q" (which raises SystemExit).
    """
    print(WELCOME_TXT)
    # Menu choice -> swept-variable name and the trial function that sweeps it.
    # (Hoisted out of the loop: these never change between iterations; the
    # original also had a stray no-op string literal here, now removed.)
    variable_dict = {1: 'list_length',
                     2: 'event_occurrences',
                     3: 'increasing_gaps',
                     4: 'dict_size'}
    action_dict = {1: do_trials_vary_list_length,
                   2: do_trials_vary_event_occurrences,
                   3: do_trials_vary_gaps_in_list,
                   4: do_trials_vary_start_dict}
    plt_figure = 1
    while True:
        plt.figure(plt_figure)
        plt_figure += 1
        plt.ion()
        variable_choice = get_answer('enter "1" for varying input events\' length\n' +
                                     'enter "2" for varying input events\' # of occurrences\n' +
                                     'enter "3" for varying input events\' gaps in values\n' +
                                     'enter "4" for varying the size of the start dictionary',
                                     1, 4)
        variable = variable_dict[variable_choice]
        action = action_dict[variable_choice]
        print('chose {}'.format(variable))
        input_variables = get_kwargs(variable)
        action(**input_variables)  # runs the trials and draws onto the current figure
        plt.pause(0.1)
def get_kwargs(request):
    """Interactively build the keyword arguments for one do_trials_* function.

    :param request: 'list_length', 'event_occurrences', 'increasing_gaps' or 'dict_size'
    :return: kwargs dict; always includes an 'adds_list' entry
    """
    default_adds_list = [1, 2, 3, 4, 5]
    # questions[i] and min_max[i] correspond to keys[i]; min_max carries one
    # spare (0, 1) entry beyond the four keys
    keys = ['start_dict_size', 'add_list_len', 'occurrences_are_many', 'exponential_growth']
    questions = ['what size for starting dictionary?',
                 'how large a list to add?',
                 'should the list have many occurrences? 1=True, 0=False',
                 'should the occurrences increase exponentially? 1=True, 0=False'
                 ]
    min_max = [(1, 2000), (2, 500), (0, 1), (0, 1), (0, 1)]
    if request == 'dict_size':
        min_max[1] = (2, 100)
    # which question indices apply to each request type
    request_and_indices = {'list_length': (0, 2),
                           'event_occurrences': (0, 1, 3),
                           'increasing_gaps': (0, 1, 2),
                           'dict_size': (1, 2)}
    output_kwargs = {}
    for index in request_and_indices[request]:
        output_kwargs[keys[index]] = get_answer(questions[index], *min_max[index])
        # a (0, 1) range marks a yes/no question; store the answer as a bool
        if min_max[index] == (0, 1):
            output_kwargs[keys[index]] = bool(output_kwargs[keys[index]])
    if request != 'list_length':
        adds_list = get_adds_list(output_kwargs)
        output_kwargs['adds_list'] = adds_list
    else:
        output_kwargs['adds_list'] = default_adds_list
    return output_kwargs
def get_adds_list(dictionary):
    """Choose which 'adds' values to sweep, capped by the workload implied
    by the start-dict size and the add-list length.

    :param dictionary: kwargs dict; must contain 'add_list_len' and may
        contain 'start_dict_size' (defaults to 1000)
    :return: prefix of [1, 2, 3, 4, 5, 10, 50, 100, 500] up to the cap
    """
    start_size = dictionary.get('start_dict_size', 1000)
    add_list_size = dictionary['add_list_len']
    complete_add_list = [1, 2, 3, 4, 5, 10, 50, 100, 500]
    # (max list size, allowed adds): small start dicts tolerate more adds
    if start_size <= 100:
        thresholds = [(3, 500), (6, 100), (9, 50), (20, 10), (10000, 5)]
    else:
        thresholds = [(4, 50), (9, 10), (10000, 5)]
    max_adds = next((adds for limit, adds in thresholds if add_list_size <= limit), 5)
    return complete_add_list[:complete_add_list.index(max_adds) + 1]
def get_tuple_list(size, many_occurrences=False, step=1):
    """Build a list of (event, occurrences) tuples for events in range(0, size, step).

    Occurrences are 10 when many_occurrences is truthy, else 1.
    """
    occurrences = 10 if many_occurrences else 1
    return [(event, occurrences) for event in range(0, size, step)]
def get_indexed_advantage_ratio(start_dict_size, adds, tuple_list_sizes, many_occurrences):
    """Return control_time / indexed_values_time for one parameter combination.

    Values above 1.0 mean the indexed-values method was faster.
    """
    tuples_to_add = get_tuple_list(tuple_list_sizes, many_occurrences)
    starting_dict = get_input_dict(start_dict_size, True)
    control_time, indexed_time = get_control_and_indexed_values_times(adds, tuples_to_add, starting_dict)
    return control_time / indexed_time
def get_data_list(many_occurrences):
    """Generate the full grid of timing ratios used by the text-table reports.

    :param many_occurrences: passed through to get_tuple_list (10 vs 1 occurrences)
    :return: list of rows; row 0 is the label tuple, the rest are float 5-tuples
    """
    titles = ('ADDS', 'DICT SIZE', 'LIST SIZE', 'OCCUR MANY', 'RESULT')
    adds = [1, 2, 3, 4, 5, 10, 20, 50, 100, 500, 1000, 2000]
    start_dict_sizes = [1, 10, 50, 100, 200, 500, 1000, 2000, 5000]
    tuple_list_sizes = [2, 3, 4, 6, 8, 10, 20, 50, 100]
    all_data = [titles]
    for add_time in adds:
        print(add_time)
        for start_size in start_dict_sizes:
            for tuple_size in tuple_list_sizes:
                # cap adds * list-size so no single datum takes too long
                if add_time * tuple_size <= 4000:
                    datum = get_indexed_advantage_ratio(start_size, add_time, tuple_size, many_occurrences)
                    data_line = (float(add_time), float(start_size), float(tuple_size), float(many_occurrences), datum)
                    all_data.append(data_line)
    return all_data
def data_grouper(data_list, index_priority=(0, 1, 2, 3, 4)):
    """Reorder every row's columns by index_priority, then sort the data rows.

    Row 0 (the labels) is reordered too but stays first; the remaining rows
    are sorted lexicographically in their new column order.
    """
    reordered = [tuple(row[i] for i in index_priority) for row in data_list]
    header = reordered[0]
    return [header] + sorted(reordered[1:])
def get_result_str(data_list):
    """Format grouped data rows into a text table.

    data_list[0] holds the column labels; every later row is a tuple of
    floats.  The RESULT column is printed with 3 decimals, OCCUR MANY as a
    bool, everything else as an int.  A fresh header is printed whenever the
    first column's value changes, a '-' rule whenever the second changes,
    and rows whose RESULT exceeds 1.0 are starred.
    """
    labels = data_list[0]
    result_index = labels.index('RESULT')
    bool_index = labels.index('OCCUR MANY')
    star_the_result = 1.0
    number_of_labels = len(labels)
    middle_just = '10'
    template = '\n' + ('{:^' + middle_just + '}|') * number_of_labels
    # str.rstrip returns a new string; the original discarded the result,
    # leaving a trailing '|' on every row.  Assign it so the strip applies.
    template = template.rstrip('|')
    table_descriptor = template.format(*labels)
    line_len = len(table_descriptor)
    table_descriptor = add_sep_line(table_descriptor, line_len, '*')
    table_descriptor = '\n' + line_len * '=' + table_descriptor
    first_element = -1
    second_element = -1
    output_str = ''
    for line in data_list[1:]:
        new_first_element = int(line[0])
        new_second_element = int(line[1])
        if new_first_element != first_element:
            output_str += table_descriptor
        if new_second_element != second_element:
            output_str = add_sep_line(output_str, line_len, '-')
        first_element = new_first_element
        second_element = new_second_element
        line_strings = []
        for index, element in enumerate(line):
            if index == result_index:
                to_add = '{:.3f}'.format(element)
            elif index == bool_index:
                to_add = str(bool(element))
            else:
                to_add = str(int(element))
            line_strings.append(to_add)
        output_str += template.format(*line_strings)
        result = float(line[result_index])
        if result > star_the_result:
            output_str += ' *** '
    return output_str
def add_sep_line(input_str, line_length, separator):
    """Append a newline plus a separator rule of the given length."""
    return '{}\n{}'.format(input_str, separator * line_length)
def save_data_pts(data_flat, data_bumpy):
    """Persist both data-point tables as .npy files in the working directory."""
    np.save('save_flat_data', np.array(data_flat))
    np.save('save_bumpy_data', np.array(data_bumpy))
def load_data_pts(full_file_name):
    """Load a saved .npy table back into a list of tuples.

    Numeric rows come back as tuples of floats; rows with non-numeric
    entries (e.g. the label row) are kept as-is.
    """
    output = []
    for row in np.load(full_file_name).tolist():
        try:
            output.append(tuple(float(value) for value in row))
        except ValueError:
            output.append(tuple(row))
    return output
def get_saved_data():
    """Reload the two previously saved data tables (flat and bumpy)."""
    return (load_data_pts('save_flat_data.npy'),
            load_data_pts('save_bumpy_data.npy'))
def data_points_ui():
    """Interactive report builder: load or regenerate the timing data, then
    repeatedly write text tables ('<name>_flat.txt', '<name>_many.txt',
    '<name>_combined.txt') grouped by a user-chosen column order.

    Loops until 'q' is entered at the order prompt.
    """
    try:
        get_new_data = input_py_2_and_3('generate new data pts (will take some minutes)? type "y" for yes.\n>>> ')
        if get_new_data == 'y':
            raise IOError
        data_points_flat, data_points_bumpy = get_saved_data()
    except IOError:
        # reached when the user asked for new data or the saved .npy files
        # could not be loaded
        print('generating data points. this will take a few minutes')
        data_points_flat = get_data_list(False)
        data_points_bumpy = get_data_list(True)
        save_data_pts(data_points_flat, data_points_bumpy)
    labels_dict = dict(enumerate(data_points_flat[0]))
    intro = """
    here are the values whose order you may change
    {}
    at the prompt put in a new 5-digit string showing how you want the data ordered
    so "01234" will order the data by ('ADDS', 'DICT SIZE', 'LIST SIZE', 'OCCUR MANY', 'RESULT')
    "21034" will order the data by ('LIST SIZE', 'DICT SIZE', 'ADDS', 'OCCUR MANY', 'RESULT')
    when prompted, enter the base name for the file.
    "test" would create 3 files.
    "test_flat.txt", "test_many.txt", "test_combined.txt". they will be text files showing the data
    grouped accordingly. flat show adding events that occurred once and many shows events that occurred 10 times.
    the result column shows how many times faster the index_values method is and so any time
    indexed values is faster, it is starred.
    """
    print(intro.format(str(labels_dict).replace(',', '\n')))
    while True:
        print(str(labels_dict).replace(',', '\n'))
        new_order = input_py_2_and_3('new order or "q" quits >>> ')
        if new_order == 'q':
            break
        # e.g. "21034" -> [2, 1, 0, 3, 4] column priority for data_grouper
        change_list = []
        for digit in new_order:
            change_list.append(int(digit))
        result_to_print_flat = data_grouper(data_points_flat, change_list)
        result_to_print_bumpy = data_grouper(data_points_bumpy, change_list)
        flat = get_result_str(result_to_print_flat)
        many = get_result_str(result_to_print_bumpy)
        name = input_py_2_and_3('file base name >>> ')
        with open(name + '_flat.txt', 'w') as file:
            file.write(flat)
        with open(name + '_many.txt', 'w') as file:
            file.write(many)
        with open(name + '_combined.txt', 'w') as file:
            file.write(get_side_by_side_data(flat, many))
def get_side_by_side_data(left_answer, right_answer):
    """Join two multi-line reports line by line, left column padded to 64 chars.

    Indexes into the right report by line number, so the right report must be
    at least as long as the left (same as the original behavior).
    """
    column_width = 64
    right_lines = right_answer.split('\n')
    merged = ['{:<{}}{}'.format(left_line, column_width, right_lines[line_number])
              for line_number, left_line in enumerate(left_answer.split('\n'))]
    return '\n'.join(merged)
if __name__ == '__main__':
    # Entry point: launch the interactive plotting UI.  Swap the two lines
    # below to build the text-table reports instead.
    graphing_ui()
    # data_points_ui()
| 39.538366 | 120 | 0.644348 | 0 | 0 | 3,188 | 0.09979 | 0 | 0 | 0 | 0 | 7,179 | 0.224716 |
12613531f2e6b78eb8019166a8391274bea54aaa | 14,814 | py | Python | orangecontrib/wonder/widgets/wonder/ow_lorentz_polarization.py | WONDER-project/OASYS1-WONDER | cf6e3620f95c0b14c5c33d13161f615f2ac23b14 | [
"Unlicense"
] | null | null | null | orangecontrib/wonder/widgets/wonder/ow_lorentz_polarization.py | WONDER-project/OASYS1-WONDER | cf6e3620f95c0b14c5c33d13161f615f2ac23b14 | [
"Unlicense"
] | null | null | null | orangecontrib/wonder/widgets/wonder/ow_lorentz_polarization.py | WONDER-project/OASYS1-WONDER | cf6e3620f95c0b14c5c33d13161f615f2ac23b14 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# #########################################################################
# Copyright (c) 2020, UChicago Argonne, LLC. All rights reserved. #
# #
# Copyright 2020. UChicago Argonne, LLC. This software was produced #
# under U.S. Government contract DE-AC02-06CH11357 for Argonne National #
# Laboratory (ANL), which is operated by UChicago Argonne, LLC for the #
# U.S. Department of Energy. The U.S. Government has rights to use, #
# reproduce, and distribute this software. NEITHER THE GOVERNMENT NOR #
# UChicago Argonne, LLC MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR #
# ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE. If software is #
# modified to produce derivative works, such modified software should #
# be clearly marked, so as not to confuse it with the version available #
# from ANL. #
# #
# Additionally, redistribution and use in source and binary forms, with #
# or without modification, are permitted provided that the following #
# conditions are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of UChicago Argonne, LLC, Argonne National #
# Laboratory, ANL, the U.S. Government, nor the names of its #
# contributors may be used to endorse or promote products derived #
# from this software without specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY UChicago Argonne, LLC AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UChicago #
# Argonne, LLC OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, #
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, #
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT #
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
# #########################################################################
import sys
from orangewidget.settings import Setting
from orangewidget import gui as orangegui
from orangecontrib.wonder.widgets.gui.ow_generic_parameter_widget import OWGenericInstrumentalDiffractionPatternParametersWidget, ParameterBox
from orangecontrib.wonder.util.gui_utility import gui
from oasys.widgets import congruence
from orangecontrib.wonder.fit.parameters.instrument.polarization_parameters import Beampath, LorentzFormula, PolarizationParameters
class OWLorentzPolarization(OWGenericInstrumentalDiffractionPatternParametersWidget):
name = "Lorentz-Polarization Factors"
description = "Define Lorentz-Polarization Factor"
icon = "icons/lorentz_polarization.png"
priority = 9
use_lorentz_factor = Setting([1])
lorentz_formula = Setting([LorentzFormula.Shkl_Shkl])
use_polarization_factor = Setting([0])
degree_of_polarization = Setting([0.0])
beampath = Setting([Beampath.PRIMARY])
use_twotheta_mono = Setting([1])
twotheta_mono = Setting([28.443])
def __init__(self):
super().__init__()
def get_max_height(self):
return 500
def get_parameter_name(self):
return "Lorentz-Polarization"
def get_current_dimension(self):
return len(self.use_lorentz_factor)
def get_parameter_box_instance(self, parameter_tab, index):
return PolarizationParametersBox(widget=self,
parent=parameter_tab,
index=index,
use_lorentz_factor=self.use_lorentz_factor[index],
lorentz_formula=self.lorentz_formula[index],
use_polarization_factor=self.use_polarization_factor[index],
degree_of_polarization=self.degree_of_polarization[index],
beampath=self.beampath[index],
use_twotheta_mono=self.use_twotheta_mono[index],
twotheta_mono=self.twotheta_mono[index])
def get_empty_parameter_box_instance(self, parameter_tab, index):
return PolarizationParametersBox(widget=self, parent=parameter_tab, index=index)
def set_parameter_data(self):
self.fit_global_parameters.set_instrumental_profile_parameters([self.get_parameter_box(index).get_lorentz_polarization() for index in range(self.get_current_dimension())])
def get_parameter_array(self):
return self.fit_global_parameters.get_instrumental_profile_parameters(PolarizationParameters.__name__)
def get_parameter_item(self, diffraction_pattern_index):
return self.fit_global_parameters.get_instrumental_profile_parameters_item(PolarizationParameters.__name__, diffraction_pattern_index)
def get_instrumental_parameter_array(self, instrumental_parameters):
return instrumental_parameters.get_instrumental_profile_parameters(PolarizationParameters.__name__)
def get_instrumental_parameter_item(self, instrumental_parameters, diffraction_pattern_index):
return instrumental_parameters.get_instrumental_profile_parameters_item(PolarizationParameters.__name__, diffraction_pattern_index)
def dumpSettings(self):
self.dump_use_lorentz_factor()
self.dump_lorentz_formula()
self.dump_use_polarization_factor()
self.dump_degree_of_polarization()
self.dump_beampath()
self.dump_use_twotheta_mono()
self.dump_twotheta_mono()
def dump_use_lorentz_factor(self): self.dump_variable("use_lorentz_factor")
def dump_lorentz_formula(self): self.dump_variable("lorentz_formula")
def dump_use_polarization_factor(self): self.dump_variable("use_polarization_factor")
def dump_degree_of_polarization(self): self.dump_variable("degree_of_polarization")
def dump_beampath(self): self.dump_variable("beampath")
def dump_use_twotheta_mono(self): self.dump_variable("use_twotheta_mono")
def dump_twotheta_mono(self): self.dump_variable("twotheta_mono")
class PolarizationParametersBox(ParameterBox):
def __init__(self,
widget=None,
parent=None,
index=0,
use_lorentz_factor=1,
lorentz_formula=LorentzFormula.Shkl_Shkl,
use_polarization_factor=0,
degree_of_polarization=0.0,
beampath=Beampath.PRIMARY,
use_twotheta_mono=1,
twotheta_mono=28.443):
super(PolarizationParametersBox, self).__init__(widget=widget,
parent=parent,
index=index,
use_lorentz_factor=use_lorentz_factor,
lorentz_formula = lorentz_formula,
use_polarization_factor = use_polarization_factor,
degree_of_polarization = degree_of_polarization,
beampath = beampath,
use_twotheta_mono = use_twotheta_mono,
twotheta_mono = twotheta_mono)
def init_fields(self, **kwargs):
self.use_lorentz_factor = kwargs["use_lorentz_factor"]
self.lorentz_formula = kwargs["lorentz_formula"]
self.use_polarization_factor = kwargs["use_polarization_factor"]
self.degree_of_polarization = kwargs["degree_of_polarization"]
self.beampath = kwargs["beampath"]
self.use_twotheta_mono = kwargs["use_twotheta_mono"]
self.twotheta_mono = kwargs["twotheta_mono"]
def init_gui(self, container):
orangegui.comboBox(container, self, "use_lorentz_factor", label="Add Lorentz Factor", items=["No", "Yes"], labelWidth=300, orientation="horizontal", callback=self.set_LorentzFactor)
self.lorentz_box = gui.widgetBox(container, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=30)
self.lorentz_box_empty = gui.widgetBox(container, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=30)
orangegui.comboBox(self.lorentz_box, self, "lorentz_formula", label="Formula", items=LorentzFormula.tuple(), labelWidth=300, orientation="horizontal", callback=self.widget.dump_lorentz_formula)
self.set_LorentzFactor()
orangegui.separator(container)
orangegui.comboBox(container, self, "use_polarization_factor", label="Add Polarization Factor", items=["No", "Yes"], labelWidth=300,
orientation="horizontal", callback=self.set_Polarization)
self.polarization_box = gui.widgetBox(container, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=200)
self.polarization_box_empty = gui.widgetBox(container, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=200)
gui.lineEdit(self.polarization_box, self, "degree_of_polarization", "Deg. Pol. (0\u2264Q\u22641)", labelWidth=300, valueType=float, callback=self.widget.dump_degree_of_polarization)
orangegui.comboBox(self.polarization_box, self, "use_twotheta_mono", label="Use Monochromator", items=["No", "Yes"], labelWidth=300,
orientation="horizontal", callback=self.set_Monochromator)
self.monochromator_box = gui.widgetBox(self.polarization_box, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=95)
self.monochromator_box_empty = gui.widgetBox(self.polarization_box, "", orientation="vertical", width=self.CONTROL_AREA_WIDTH - 20, height=95)
orangegui.comboBox(self.monochromator_box, self, "beampath", label="Beampath", items=Beampath.tuple(), labelWidth=300,
orientation="horizontal", callback=self.widget.dump_beampath)
gui.lineEdit(self.monochromator_box, self, "twotheta_mono", "2\u03B8 Monochromator [deg]", labelWidth=300, valueType=float, callback=self.widget.dump_twotheta_mono)
self.set_Polarization()
def set_LorentzFactor(self):
self.lorentz_box.setVisible(self.use_lorentz_factor==1)
self.lorentz_box_empty.setVisible(self.use_lorentz_factor==0)
if not self.is_on_init: self.widget.dump_use_lorentz_factor()
def set_Monochromator(self):
self.monochromator_box.setVisible(self.use_twotheta_mono==1)
self.monochromator_box_empty.setVisible(self.use_twotheta_mono==0)
if not self.is_on_init: self.widget.dump_use_twotheta_mono()
def set_Polarization(self):
self.polarization_box.setVisible(self.use_polarization_factor==1)
self.polarization_box_empty.setVisible(self.use_polarization_factor==0)
if self.use_polarization_factor==1: self.set_Monochromator()
if not self.is_on_init: self.widget.dump_use_polarization_factor()
def get_basic_parameter_prefix(self):
return PolarizationParameters.get_parameters_prefix()
def get_lorentz_polarization(self):
if self.use_polarization_factor == 1:
congruence.checkPositiveNumber(self.degree_of_polarization, "Deg. Pol.")
congruence.checkLessOrEqualThan(self.degree_of_polarization, 1.0, "Deg. Pol.", "1.0")
if self.use_polarization_factor == 1 and self.use_twotheta_mono==1:
congruence.checkStrictlyPositiveAngle(self.twotheta_mono, "2\u03B8 Monochromator")
return PolarizationParameters(use_lorentz_factor=self.use_lorentz_factor == 1,
lorentz_formula=self.lorentz_formula,
use_polarization_factor=self.use_polarization_factor,
twotheta_mono=None if (self.use_polarization_factor == 0 or self.use_twotheta_mono == 0) else self.twotheta_mono,
beampath=self.beampath,
degree_of_polarization=self.degree_of_polarization)
def set_data(self, polarization_parameters):
self.use_lorentz_factor = 1 if polarization_parameters.use_lorentz_factor else self.use_lorentz_factor
self.lorentz_formula = polarization_parameters.lorentz_formula
self.use_polarization_factor = 1 if polarization_parameters.use_polarization_factor else self.use_polarization_factor
if self.use_polarization_factor == 1:
self.degree_of_polarization = polarization_parameters.degree_of_polarization
twotheta_mono = polarization_parameters.twotheta_mono
if not twotheta_mono is None:
self.use_twotheta_mono = 1
self.twotheta_mono = twotheta_mono
self.beampath = polarization_parameters.beampath
else:
self.use_twotheta_mono = 0
self.set_LorentzFactor()
self.set_Polarization()
from PyQt5.QtWidgets import QApplication
# Manual test entry point: run the widget standalone in its own Qt app.
if __name__ == "__main__":
    a = QApplication(sys.argv)
    ow = OWLorentzPolarization()
    ow.show()
    a.exec_()
    # persist the widget settings once the event loop exits
    ow.saveSettings()
| 56.976923 | 201 | 0.64115 | 10,771 | 0.727082 | 0 | 0 | 0 | 0 | 0 | 0 | 4,214 | 0.284461 |
126232b6ea1b59bc9e926fc7b1a66519b1a1e9b1 | 2,891 | py | Python | tests/test_local_tile_server.py | FlorianPignol/telluric | 285c7e195b2da630b4c76f2552465a424bcdaeb2 | [
"MIT"
] | null | null | null | tests/test_local_tile_server.py | FlorianPignol/telluric | 285c7e195b2da630b4c76f2552465a424bcdaeb2 | [
"MIT"
] | null | null | null | tests/test_local_tile_server.py | FlorianPignol/telluric | 285c7e195b2da630b4c76f2552465a424bcdaeb2 | [
"MIT"
] | null | null | null | from os import path
from unittest import mock
from common_for_tests import make_test_raster
from tornado.testing import gen_test, AsyncHTTPTestCase
from tornado.concurrent import Future
import telluric as tl
from telluric.util.local_tile_server import TileServer, make_app, TileServerHandler
# Tile coordinates exercised by the tests; each tuple is (x, y, zoom).
# NOTE(review): all four appear to address the same map location at
# successive zoom levels (coordinates halve as zoom decreases) — confirm.
tiles = [(131072, 131072, 18), (65536, 65536, 17), (32768, 32768, 16), (16384, 16384, 15)]
# Three synthetic single-band test rasters (band0..band2), 400x300 each.
rasters = [
    make_test_raster(i, band_names=["band%i" % i], height=300, width=400)
    for i in range(3)
]
class TestFCLocalTileServer(AsyncHTTPTestCase):
    """Tile-server tests for a FeatureCollection source (multiple rasters)."""
    def get_app(self):
        # Register a collection built from the module-level test rasters and
        # serve it through the tile-server app under test.
        self.fc = tl.FeatureCollection([tl.GeoFeature.from_raster(r, {}) for r in rasters])
        TileServer.add_object(self.fc, self.fc.envelope)
        return make_app(TileServer.objects)
    def test_server_is_alive(self):
        """The health-check endpoint responds with the liveness body."""
        response = self.fetch('/ok')
        self.assertEqual(response.code, 200)
        self.assertEqual(response.body, b"i'm alive")
    # Decorators apply bottom-up: _merge_rasters is patched last, so it is the
    # first mock argument (mock_merge), followed by mock_get_tile.
    @mock.patch.object(TileServerHandler, '_get_raster_png_tile')
    @mock.patch.object(TileServerHandler, '_merge_rasters')
    def test_raster_collection_merges_data(self, mock_merge, mock_get_tile):
        """Each tile request fetches every raster of the collection once and
        merges them exactly once at the requested zoom level."""
        # The patched coroutines must return resolved futures for the handler.
        future_1 = Future()
        future_1.set_result(rasters[1])
        mock_merge.return_value = future_1
        future_2 = Future()
        future_2.set_result(rasters[2])
        mock_get_tile.return_value = future_2
        for tile in tiles:
            uri = "/%i/%i/%i/%i.png" % (id(self.fc), tile[0], tile[1], tile[2])
            response = self.fetch(uri)
            self.assertEqual(response.code, 200)
            self.assertNotEqual(response.body, b"")
            # one per-raster tile fetch per collection member
            self.assertEqual(mock_get_tile.call_count, 3)
            self.assertEqual(mock_merge.call_count, 1)
            # second positional arg of _merge_rasters is the zoom level
            self.assertEqual(mock_merge.call_args[0][1], tile[2])
            for r in mock_merge.call_args[0][0]:
                self.assertIsInstance(r, tl.GeoRaster2)
            self.assertEqual(len(mock_merge.call_args[0][0]), 3)
            # reset counters so each tile iteration is checked independently
            mock_get_tile.reset_mock()
            mock_merge.reset_mock()
class TestRasterLocalTileServer(AsyncHTTPTestCase):
    """Tile-server tests for a single GeoRaster2 source."""
    def get_app(self):
        # Serve one of the module-level test rasters through the app under test.
        self.raster = rasters[1]
        TileServer.add_object(self.raster, self.raster.footprint())
        return make_app(TileServer.objects)
    def test_server_is_alive(self):
        """The health-check endpoint responds with the liveness body."""
        response = self.fetch('/ok')
        self.assertEqual(response.code, 200)
        self.assertEqual(response.body, b"i'm alive")
    def test_raster_collection_merges_data(self):
        """Every requested tile decodes back into a 3-band 256x256 raster."""
        for tile in tiles:
            uri = "/%i/%i/%i/%i.png" % (id(self.raster), tile[0], tile[1], tile[2])
            response = self.fetch(uri)
            self.assertEqual(response.code, 200)
            self.assertNotEqual(response.body, b"")
            # round-trip the PNG payload to verify shape (bands, height, width)
            raster = tl.GeoRaster2.from_bytes(response.body, self.raster.affine, self.raster.crs)
            self.assertEqual(raster.shape, (3, 256, 256))
| 38.546667 | 97 | 0.662746 | 2,389 | 0.826358 | 0 | 0 | 1,094 | 0.378416 | 0 | 0 | 122 | 0.0422 |
1262334f0b76e271530a07f67a720a33c98f152f | 208 | py | Python | utilities/error.py | pskanade/stretch | 5320769f73a1f49e91cdaaaede3570550a236d9f | [
"MIT"
] | null | null | null | utilities/error.py | pskanade/stretch | 5320769f73a1f49e91cdaaaede3570550a236d9f | [
"MIT"
] | 2 | 2018-08-29T18:39:52.000Z | 2018-08-29T19:32:35.000Z | utilities/error.py | pskanade/stretch | 5320769f73a1f49e91cdaaaede3570550a236d9f | [
"MIT"
] | null | null | null | class Error():
def __init__(self):
print("An error has occured !")
# NOTE(review): this class shadows the builtin ``TypeError`` within this
# module — renaming would change the public interface, so only flagging it.
class TypeError(Error):
    def __init__(self):
        # Error.__init__ is intentionally not called; this prints its own message.
        print("This is Type Error\nThere is a type mismatch.. ! Please fix it.")
12630eab0d2633dd229a8455f46121e72a20a96c | 437 | py | Python | skypy/galaxies/__init__.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 88 | 2020-04-06T15:48:17.000Z | 2022-02-16T12:01:54.000Z | skypy/galaxies/__init__.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 332 | 2020-04-04T07:30:08.000Z | 2022-03-30T14:49:08.000Z | skypy/galaxies/__init__.py | itrharrison/skypy-itrharrison | cea1f02d1b2cd3b689266d7ae9bca1a4cfe986a2 | [
"BSD-3-Clause"
] | 41 | 2020-04-03T13:50:43.000Z | 2022-03-24T16:10:03.000Z | """
This module contains methods that model the intrinsic properties of galaxy
populations.
"""
__all__ = [
'schechter_lf',
]
from . import luminosity # noqa F401,F403
from . import morphology # noqa F401,F403
from . import redshift # noqa F401,F403
from . import spectrum # noqa F401,F403
from . import stellar_mass # noqa F401,F403
from ._schechter import schechter_lf # noqa
from ._schechter import schechter_smf # noqa
| 24.277778 | 74 | 0.743707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 201 | 0.459954 |
1263a63dc231b8ecbb32bb0b79ff0d5017758d64 | 84 | py | Python | tests/__init__.py | s-leroux/sql-moins | beb65300e4602a0d1dcaccf534df39c071060d40 | [
"Apache-2.0"
] | null | null | null | tests/__init__.py | s-leroux/sql-moins | beb65300e4602a0d1dcaccf534df39c071060d40 | [
"Apache-2.0"
] | null | null | null | tests/__init__.py | s-leroux/sql-moins | beb65300e4602a0d1dcaccf534df39c071060d40 | [
"Apache-2.0"
] | null | null | null | from tests.parser import *
from tests.formatter import *
from tests.utils import *
| 16.8 | 29 | 0.77381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
126834642203c16a6f094647812a7f04fd742d0c | 853 | py | Python | appshell/endpoints.py | adh/appshell | 94901df8045336e3217eb2fd5eae77cb7c639340 | [
"MIT"
] | 3 | 2015-08-21T22:22:52.000Z | 2018-07-14T03:32:51.000Z | appshell/endpoints.py | adh/appshell | 94901df8045336e3217eb2fd5eae77cb7c639340 | [
"MIT"
] | null | null | null | appshell/endpoints.py | adh/appshell | 94901df8045336e3217eb2fd5eae77cb7c639340 | [
"MIT"
] | 1 | 2015-08-21T22:22:57.000Z | 2015-08-21T22:22:57.000Z | from appshell.base import View
from appshell.templates import confirmation, message
from flask import request, flash, redirect
from flask_babelex import Babel, Domain
# Translation domain for this package; ``_`` translates eagerly,
# ``lazy_gettext`` defers translation until the string is rendered.
mydomain = Domain('appshell')
_ = mydomain.gettext
lazy_gettext = mydomain.lazy_gettext
class ConfirmationEndpoint(View):
    """Two-step endpoint: GET renders a confirmation page, POST performs the
    action and reports the result.

    Subclasses must provide:
      * ``confirmation_message`` — text shown on the confirmation page,
      * ``do_it(**args)`` — the action performed on POST,
    and may override ``prepare``, ``redirect_to`` and ``flash_message``.
    """
    methods = ("GET", "POST")
    # Where to redirect after a successful POST; None -> show a plain
    # translated "Done" message instead.
    redirect_to = None
    # Optional arguments tuple passed to flask.flash() after the action runs.
    # Defaulting it here keeps done() from raising AttributeError when a
    # subclass never sets it (consistent with redirect_to above).
    flash_message = None
    def prepare(self, **args):
        """Hook run before both GET and POST handling.

        Accepts the view's URL arguments: dispatch_request always forwards
        them, so the previous zero-argument signature raised TypeError for
        any routed URL with parameters.
        """
        pass
    def dispatch_request(self, **args):
        self.prepare(**args)
        if request.method == "POST":
            # the confirmed action, then report/redirect
            self.do_it(**args)
            return self.done()
        else:
            return confirmation(self.confirmation_message)
    def done(self):
        """Report the finished action: flash, then redirect or show 'Done'."""
        if self.flash_message:
            flash(*self.flash_message)
        if self.redirect_to:
            return redirect(self.redirect_to)
        return message(_("Done"))
| 25.848485 | 58 | 0.629543 | 594 | 0.696366 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.038687 |
12684e56d013768e4b5f7ed73f9dc1448a2cbf04 | 2,679 | py | Python | flask_cc_api/utils/requests_utils.py | suAdminWen/cc-api | a00d9b82583fae57a3cd92ec0478d434f141f172 | [
"MIT"
] | 6 | 2019-03-04T03:08:07.000Z | 2019-07-16T13:43:12.000Z | flask_cc_api/utils/requests_utils.py | suAdminWen/cc-api | a00d9b82583fae57a3cd92ec0478d434f141f172 | [
"MIT"
] | 1 | 2021-11-22T15:02:09.000Z | 2021-11-22T15:02:09.000Z | flask_cc_api/utils/requests_utils.py | suAdminWen/cc-api | a00d9b82583fae57a3cd92ec0478d434f141f172 | [
"MIT"
] | 1 | 2019-07-01T01:01:27.000Z | 2019-07-01T01:01:27.000Z | from flask import g, request
from flask_restful import reqparse
from werkzeug import datastructures
from ..exceptions.system_error import SystemError
from ..exceptions.system_exception import SystemException
from ..exceptions.service_error import ServiceError
from ..exceptions.service_exception import ServiceException
def _get_request():
    """Return the RequestParser cached on ``flask.g``, creating it on first
    use so one parser instance is shared within a single request context."""
    if 'req' not in g:
        g.req = reqparse.RequestParser()
    return g.req
def get_argument(
    key, *, default=None, type=str, location=None,
    help=None, required=False, action='store'
):
    '''
    Fetch and validate a single request argument via flask-restful's parser.

    :param key: name of the argument to read from the request.
    :param default: The value produced if the argument is absent from the
        request.
    :param type: The type to which the request argument should be
        converted. If a type raises an exception, the message in the
        error will be returned in the response. Defaults to :class:`unicode`
        in python2 and :class:`str` in python3. ``int`` is handled specially
        (read as str, then converted below); ``'file'`` reads an upload.
    :param action: The basic type of action to be taken when this argument
        is encountered in the request. Valid options are "store" and "append".
    :param location: The attributes of the :class:`flask.Request` object
        to source the arguments from (ex: headers, args, etc.), can be an
        iterator. The last item listed takes precedence in the result set.
    :param help: A brief description of the argument, returned in the
        response when the argument is invalid. May optionally contain
        an "{error_msg}" interpolation token, which will be replaced with
        the text of the error raised by the type converter.
    :param required: when True (with action "store"), missing or blank
        values raise ``SystemError.MISSING_REQUIRED_PARAMETER``.
    :raises ServiceException: when an ``int`` argument cannot be converted.
    :raises SystemException: when a required argument is missing or blank.
    '''
    cur_type = type  # remember the type originally requested by the caller
    # For int arguments, fetch the raw value as str first and convert manually
    # below so a clean service-level error can be raised on bad input.
    type = str if type == int else cur_type
    kwargs = dict(default=default, type=type, action=action)
    if location:
        kwargs['location'] = location
    if type == 'file':
        kwargs['type'] = datastructures.FileStorage
        kwargs['location'] = location if location else 'files'
    parser = _get_request()
    parser.add_argument(key, **kwargs)
    args = parser.parse_args()
    if cur_type == int and args[key]:  # convert the str result back to int
        try:
            args[key] = cur_type(args[key])
            type = cur_type
        except (TypeError, ValueError):
            # was a bare ``except:`` — narrowed so unrelated errors
            # (e.g. KeyboardInterrupt) are no longer swallowed
            raise ServiceException(ServiceError.INVALID_VALUE, key)
    if required and action == 'store' and \
            (args[key] is None or type == str and args[key].strip() == '' and key != '_id'):
        raise SystemException(SystemError.MISSING_REQUIRED_PARAMETER, help if help else key)
    return args[key]
def get_request_ip():
    """Return the originating client IP for the current request.

    Behind a reverse proxy the real client address is the first entry of
    the ``X-Forwarded-For`` header; for direct connections (header absent)
    fall back to ``request.remote_addr`` instead of raising KeyError as
    the previous subscript access did.
    """
    if request.remote_addr == '127.0.0.1':
        return '127.0.0.1'
    forwarded = request.headers.get('X-Forwarded-For')
    if not forwarded:
        return request.remote_addr
    # first entry is the original client; later entries are proxies
    return forwarded.split(',')[0].strip()
| 37.208333 | 92 | 0.679731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,233 | 0.449508 |
126906c97bbb7a462f9c0fa424b98a75ba2bd8b2 | 3,422 | py | Python | grouper_lib/parent.py | Saevon/Recipes | ab8ca9b5244805d545da2dd1d80d249f1ec6057d | [
"MIT"
] | null | null | null | grouper_lib/parent.py | Saevon/Recipes | ab8ca9b5244805d545da2dd1d80d249f1ec6057d | [
"MIT"
] | null | null | null | grouper_lib/parent.py | Saevon/Recipes | ab8ca9b5244805d545da2dd1d80d249f1ec6057d | [
"MIT"
] | null | null | null | import itertools
class ParentFinder(object):
    """Lookup table that resolves which parent group an item belongs under."""
    def __init__(self):
        # lowercased lookup key -> parent group
        self._registry = {}
    def hash(self, item):
        """Build the lookup key for ``item``: its prefix when truthy,
        otherwise its group name."""
        return item.prefix if item.prefix else item.group_name
    def add(self, parent):
        """Register ``parent`` under every key it can be resolved by."""
        # Register the bare prefix so unsorted items (prefix only) resolve.
        # Even groups that reject unsorted items need to classify them first.
        if parent.prefix is not None:
            self._registry[parent.prefix.lower()] = parent
        # Register each subgroup, namespaced by the prefix when one exists.
        for group in parent.keys():
            key = parent.prefix + '~' + group if parent.prefix else group
            self._registry[key.lower()] = parent
    def find(self, item):
        """Return the registered parent for ``item``, or None when unknown."""
        return self._registry.get(self.hash(item).lower(), None)
class ParentGroup(object):
    """A named group of subgroups that files can be sorted into.

    :param name: display name of the group (also the canonical subgroup
        returned by :meth:`clean_group` for ``None``).
    :param folder: folder this group maps to.
    :param keys: iterable of valid subgroup names. Copied internally, so
        the caller's list is never mutated (previously 'Misc' was appended
        to the caller's own list when ``allow_unsorted`` was set).
    :param synonyms: optional mapping of alternate subgroup spellings to
        their canonical names (matched case-insensitively).
    :param hide: whether the group is hidden.
    :param prefix: optional namespace prefix identifying this group.
    :param allow_unsorted: when True, unknown subgroups are accepted and
        funnelled into a 'Misc' subgroup.
    """
    def __init__(self, name, folder, keys, synonyms=None, hide=False, prefix=None, allow_unsorted=False):
        self.name = name
        self.folder = folder
        # Copy so appending 'Misc' below never mutates the caller's list;
        # this also accepts any iterable, not just a list.
        self._keys = list(keys)
        if synonyms is None:
            synonyms = {}
        # normalize synonym keys for case-insensitive lookup
        self.synonyms = {key.lower(): val for key, val in synonyms.items()}
        self.hide = hide
        self.prefix = prefix
        self.allow_unsorted = allow_unsorted
        if self.allow_unsorted:
            self._keys.append('Misc')
    def keys(self):
        """Yield every valid subgroup key (including synonyms), lowercased."""
        for key in self._keys:
            yield key.lower()
        for key in self.synonym_keys():
            yield key.lower()
    def synonym_keys(self):
        """Yield the synonym keys, lowercased."""
        for key in self.synonyms.keys():
            yield key.lower()
    def clean_group(self, group):
        """Canonicalize a subgroup name: ``None`` -> the group's own name,
        synonyms -> their canonical spelling, anything else unchanged."""
        if group is None:
            return self.name
        if group.lower() in self.synonym_keys():
            return self.synonyms.get(group.lower())
        return group
    def __str__(self):
        return self.name
    def check_validity(self, file):
        """Validate ``file`` against this group.

        Returns an error-message string when the file does not belong here,
        or None when it is valid. NOTE: when ``allow_unsorted`` is set and
        the subgroup is unknown (but a prefix is present), ``file.group_name``
        is rewritten to 'Misc' as a side effect (pre-existing behaviour).
        """
        has_valid_group_name = file.group_name.lower() in self.keys()
        has_prefix = file.prefix is not None
        if not has_valid_group_name and not has_prefix:
            # Neither the prefix nor the group is recognised: this can only
            # be an unsorted item, which is rejected outright unless allowed.
            if not self.allow_unsorted:
                return "Unsorted item"
            # Unsorted items must use the parent prefix as the group name
            # (unless they're already in the appropriate folder).
            if file.group_name.lower() != self.prefix.lower():
                return "Unsorted item whose prefix isn't the Group Name"
        elif not has_valid_group_name:
            if self.allow_unsorted:
                # FIXME: don't edit here
                file.group_name = 'Misc'
            else:
                # not an unsorted item, so the subgroup must be valid
                return "Invalid Subgroup: {}".format(file.group_name)
        elif has_prefix and self.prefix.lower() != file.prefix.lower():
            # items carrying a prefix must carry the right one
            return "Invalid Parent Group (prefix): {}".format(file.prefix)
        return None
| 29.756522 | 105 | 0.585622 | 3,397 | 0.992694 | 240 | 0.070134 | 0 | 0 | 0 | 0 | 794 | 0.232028 |
126a9e516f4b1e45a50ac7e8798792874a6e0626 | 2,385 | py | Python | tests/test_memory.py | Lewuathe/algernon | 23d3aef69d701ddd33cd90451ec8738032396430 | [
"MIT"
] | null | null | null | tests/test_memory.py | Lewuathe/algernon | 23d3aef69d701ddd33cd90451ec8738032396430 | [
"MIT"
] | null | null | null | tests/test_memory.py | Lewuathe/algernon | 23d3aef69d701ddd33cd90451ec8738032396430 | [
"MIT"
] | null | null | null | from algernon.memory import Memory
import pytest
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense
from keras.optimizers import sgd
class MockModel:
    """Stand-in for a trained model: a single random linear layer."""
    def __init__(self, output_dims, input_dims):
        # weight matrix of shape (output_dims, input_dims), uniform in [0, 1)
        self.w = np.random.random((output_dims, input_dims))
    def predict(self, X):
        """Linear forward pass: (n, input_dims) -> (n, output_dims)."""
        return np.dot(X, self.w.T)
    def predict_proba(self, X):
        """Alias of predict(); no normalization is applied."""
        return self.predict(X)
class TestMemory:
    """Unit tests for algernon's replay-memory buffer."""
    # shared fixture constants used to build Memory instances
    TEST_OBSERVATION_SHAPE = (4, 2)
    TEST_ACTION_DIMS = 3
    TEST_GAMMA = 0.3
    TEST_MAX_MEMORY = 1000
    def test_init_values(self):
        """A fresh Memory stores its constructor arguments and starts empty."""
        m = Memory(TestMemory.TEST_OBSERVATION_SHAPE,
                   TestMemory.TEST_ACTION_DIMS,
                   TestMemory.TEST_GAMMA,
                   TestMemory.TEST_MAX_MEMORY)
        assert m.observation_shape == TestMemory.TEST_OBSERVATION_SHAPE
        assert m.action_dims == TestMemory.TEST_ACTION_DIMS
        assert len(m.memories) == 0
        assert m.max_memory == TestMemory.TEST_MAX_MEMORY
    def test_observation_dims(self):
        """Observation dims is the product of the shape's entries."""
        ret = Memory.get_observation_dims(TestMemory.TEST_OBSERVATION_SHAPE)
        # 4 * 2
        assert ret == 8
    def test_append(self):
        """Appending one (s, a, r, s', done) transition grows the buffer by one."""
        m = Memory(TestMemory.TEST_OBSERVATION_SHAPE,
                   TestMemory.TEST_ACTION_DIMS,
                   TestMemory.TEST_GAMMA,
                   TestMemory.TEST_MAX_MEMORY)
        s = np.random.random(size=TestMemory.TEST_OBSERVATION_SHAPE)
        s_prime = np.random.random(size=TestMemory.TEST_OBSERVATION_SHAPE)
        m.append(s, 1, 0.2, s_prime, False)
        assert len(m.memories) == 1
    def test_get_batch(self):
        """get_batch returns (X, y) with batch-size rows, flattened-observation
        columns for X and action-dims columns for y."""
        m = Memory(TestMemory.TEST_OBSERVATION_SHAPE,
                   TestMemory.TEST_ACTION_DIMS,
                   TestMemory.TEST_GAMMA,
                   TestMemory.TEST_MAX_MEMORY)
        # fill the buffer with 10 random flattened transitions
        for _ in range(10):
            s = np.random.random(size=TestMemory.TEST_OBSERVATION_SHAPE).flatten()
            a = np.random.randint(TestMemory.TEST_ACTION_DIMS)
            r = np.random.random(size=1)[0]
            s_prime = np.random.random(size=TestMemory.TEST_OBSERVATION_SHAPE).flatten()
            m.append(s, a, r, s_prime, False)
        assert len(m.memories) == 10
        # MockModel(3, 8): 8 = flattened observation dims, 3 = action dims
        X, y = m.get_batch(MockModel(3, 8), 3)
        assert X.shape[0] == y.shape[0] == 3
        assert X.shape[1] == 8
        assert y.shape[1] == 3
| 31.381579 | 88 | 0.635639 | 2,204 | 0.924109 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.002935 |
126d6c2189f9741ed9c6b294aeb788442cc5308c | 4,235 | py | Python | m2critic/parse.py | z33kz33k/m2critic | c57914a53286e10a2082406379aa2535164a1e49 | [
"MIT"
] | null | null | null | m2critic/parse.py | z33kz33k/m2critic | c57914a53286e10a2082406379aa2535164a1e49 | [
"MIT"
] | null | null | null | m2critic/parse.py | z33kz33k/m2critic | c57914a53286e10a2082406379aa2535164a1e49 | [
"MIT"
] | null | null | null | """
m2critic.parse
~~~~~~~~~~~~~~~
Scrape page.
@author: z33k
"""
from pathlib import Path
from typing import List, Tuple
from bs4 import BeautifulSoup
from bs4.element import Tag
from m2critic import BasicUser
# Marker substring of a 403 response body; parsers raise RequestBlockedError
# when they encounter it in the fetched markup.
FORBIDDENSTR = "403 Forbidden"
class PageParser:  # abstract
    """Base class for all page parsers; holds the raw markup to be parsed."""
    def __init__(self, markup: str) -> None:
        # subclasses read the page source through self._markup
        self._markup = markup
class UserReviewsPageParser(PageParser):
    """Parse a user-reviews page into a list of (name, score) BasicUsers.

    Raises RequestBlockedError for 403 pages and ValueError when the page
    contains a no-reviews sentinel (i.e. the page index was too high).
    """
    # markers of pages that contain no reviews to parse
    SENTINEL = "There are no user reviews yet"
    MOVIE_PAGE_SENTINEL = "No reviews yet."
    def __init__(self, markup: str) -> None:
        super().__init__(markup)
        if FORBIDDENSTR in markup:
            raise RequestBlockedError("Server has blocked the request")
        if self.SENTINEL in self._markup or self.MOVIE_PAGE_SENTINEL in self._markup:
            raise ValueError("Invalid markup for parsing (page index too high)")
        self.users: List[BasicUser] = self._parse()
    @staticmethod
    def _pre_filter(tag: Tag) -> bool:
        """Pre-filter soup search for relevant 'li' elements: both the id and
        the class attribute must mention 'user_review'.
        """
        id_ = tag.get("id")
        class_ = tag.get("class")
        return tag.name == "li" and id_ and class_ and all("user_review" in a
                                                           for a in (id_, class_))
    @staticmethod
    def _filter_score(tag: Tag) -> bool:
        """Filter relevant 'li' element for the user score ('metascore_w' div).
        """
        class_ = tag.get("class")
        return tag.name == "div" and class_ and "metascore_w" in class_
    @staticmethod
    def _filter_name(tag: Tag) -> bool:
        """Filter relevant 'li' element for the user name ('/user/' link).
        """
        href = tag.get("href")
        return tag.name == "a" and href and "/user/" in href
    def _parse(self) -> List[BasicUser]:
        """Parse input markup for user name and user score coupled in basic struct.
        """
        users = []
        soup = BeautifulSoup(self._markup, "lxml")
        elements = soup.find_all(self._pre_filter)
        for element in elements:
            # first matching link holds the name, first matching div the score
            result = element.find(self._filter_name)
            name = result.text
            result = element.find(self._filter_score)
            score = int(result.text)
            users.append(BasicUser(name, score))
        return users
class UserPageParser(PageParser):
    """Parse a user page for its ratings and reviews counts.
    """
    def __init__(self, markup: str) -> None:
        super().__init__(markup)
        if FORBIDDENSTR in markup:
            raise RequestBlockedError("Server has blocked the request")
        # A blocked/unexpected page has no count elements, which surfaces as
        # AttributeError from _parse (find() returning None). Dump the markup
        # for inspection and report it as a blocked request.
        try:
            self.ratingscount, self.reviewscount = self._parse()
        except AttributeError:
            file = Path("debug/output/error.html")
            with file.open("w", encoding="utf-8") as f:
                f.write(self._markup)
            raise RequestBlockedError("Server has blocked the request")
    @staticmethod
    def _filter_ratingscount(tag: Tag) -> bool:
        """Filter soup for the ratings 'span' element.
        """
        class_ = tag.get("class")
        return tag.name == "span" and class_ and "total_summary_ratings" in class_
    @staticmethod
    def _filter_reviewscount(tag: Tag) -> bool:
        """Filter soup for the reviews 'span' element.
        """
        class_ = tag.get("class")
        # Guard against spans with no class attribute (class_ is None), which
        # previously raised TypeError here; mirrors _filter_ratingscount.
        return tag.name == "span" and class_ and "total_summary_reviews" in class_
    @staticmethod
    def _filter_data(tag: Tag) -> bool:
        """Filter the inner 'span' element holding the actual count.
        """
        return tag.name == "span"
    def _parse(self) -> Tuple[int, int]:
        """Parse input markup for the (ratingscount, reviewscount) pair.
        """
        soup = BeautifulSoup(self._markup, "lxml")
        result = soup.find(self._filter_ratingscount)
        newresult = result.find(self._filter_data)
        ratingscount = int(newresult.text)
        result = soup.find(self._filter_reviewscount)
        newresult = result.find(self._filter_data)
        reviewscount = int(newresult.text)
        return ratingscount, reviewscount
class RequestBlockedError(ValueError):
    """Raised when the server blocks a request (e.g. a 403 Forbidden page).
    """
126f3570c8ca400c14dbf8aadb5953428e90972f | 86 | py | Python | droidlet/dialog/robot/dialogue_objects/__init__.py | CowherdChris/droidlet | 8d965c1ebc38eceb6f8083c52b1146c1bc17d5e1 | [
"MIT"
] | null | null | null | droidlet/dialog/robot/dialogue_objects/__init__.py | CowherdChris/droidlet | 8d965c1ebc38eceb6f8083c52b1146c1bc17d5e1 | [
"MIT"
] | null | null | null | droidlet/dialog/robot/dialogue_objects/__init__.py | CowherdChris/droidlet | 8d965c1ebc38eceb6f8083c52b1146c1bc17d5e1 | [
"MIT"
] | null | null | null | from .loco_dialogue_object import LocoBotCapabilities
__all__ = [LocoBotCapabilities] | 28.666667 | 53 | 0.872093 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
126fc6c0e704141c3768c167da733628103ad816 | 248 | py | Python | yo_fluq_ds/_fluq/_common.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 16 | 2019-09-26T09:05:42.000Z | 2021-02-04T01:39:09.000Z | yo_fluq_ds/_fluq/_common.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 2 | 2019-10-23T19:01:23.000Z | 2020-06-11T09:08:45.000Z | yo_fluq_ds/_fluq/_common.py | okulovsky/yo_ds | 9e1fa2e7a1b9746c3982afc152c024169fec45ca | [
"MIT"
] | 2 | 2019-09-26T09:05:50.000Z | 2019-10-23T18:46:11.000Z | from .._common import *
from yo_fluq import *
# A ``def`` instead of a lambda assignment (PEP 8 / E731): identical call
# signature, but with a proper __name__ and a readable traceback frame.
def Queryable(*args, **kwargs):
    """Build a Queryable through the currently configured factory."""
    return FlupFactory.QueryableFactory(*args, **kwargs)
# Generic type variables used throughout the fluent-query API.
T = TypeVar('T')
TOut = TypeVar('TOut')
TKey = TypeVar('TKey')
TValue = TypeVar('TValue')
TFactory = TypeVar('TFactory') | 31 | 81 | 0.71371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.133065 |
127066403d0bd62ed2b3f09d8857586e3f55d73e | 7,423 | py | Python | tools/BlenderProc/src/object/ObjectPoseSampler.py | GeorgSchenzel/pose-detector | 32aff471e2591726bbe52140915255af6dc4f1a1 | [
"MIT"
] | null | null | null | tools/BlenderProc/src/object/ObjectPoseSampler.py | GeorgSchenzel/pose-detector | 32aff471e2591726bbe52140915255af6dc4f1a1 | [
"MIT"
] | null | null | null | tools/BlenderProc/src/object/ObjectPoseSampler.py | GeorgSchenzel/pose-detector | 32aff471e2591726bbe52140915255af6dc4f1a1 | [
"MIT"
] | null | null | null | import bpy
import mathutils
from src.main.Module import Module
from src.utility.BlenderUtility import check_intersection, check_bb_intersection, get_all_mesh_objects
class ObjectPoseSampler(Module):
    """
    Samples positions and rotations of selected object inside the sampling volume while performing mesh and
    bounding box collision checks.
    Example 1: Sample poses (locations and rotations) for objects with a custom property `sample_pose` set to True.
    .. code-block:: yaml
        {
        "module": "object.ObjectPoseSampler",
        "config":{
            "max_iterations": 1000,
            "objects_to_sample": {
                "provider": "getter.Entity",
                "condition": {
                    "cp_sample_pose": True
                }
            },
            "pos_sampler":{
                "provider": "sampler.Uniform3d",
                "max": [5,5,5],
                "min": [-5,-5,-5]
            },
            "rot_sampler": {
                "provider": "sampler.Uniform3d",
                "max": [0,0,0],
                "min": [6.28,6.28,6.28]
            }
        }
        }
    .. list-table::
        :widths: 25 100 10
        :header-rows: 1
        * - Parameter
          - Description
          - Type
        * - objects_to_sample
          - Here call an appropriate Provider (Getter) in order to select objects. Default: all mesh objects.
          - Provider
        * - max_iterations
          - Amount of tries before giving up on an object and moving to the next one. Default: 1000.
          - int
        * - pos_sampler
          - Here call an appropriate Provider (Sampler) in order to sample position (XYZ 3d vector) for each object.
          - Provider
        * - rot_sampler
          - Here call an appropriate Provider (Sampler) in order to sample rotation (Euler angles 3d vector) for
            each object.
          - Provider
    """
    def __init__(self, config):
        Module.__init__(self, config)
    def run(self):
        """
        Samples positions and rotations of selected object inside the sampling volume while performing mesh and
        bounding box collision checks in the following steps:
        1. While we have objects remaining and have not run out of tries - sample a point.
        2. If no collisions are found keep the point.
        """
        # While we have objects remaining and have not run out of tries - sample a point
        # List of successfully placed objects
        placed = []
        # After this many tries we give up on current object and continue with the rest
        max_tries = self.config.get_int("max_iterations", 1000)
        objects = self.config.get_list("objects_to_sample", get_all_mesh_objects())
        if max_tries <= 0:
            raise ValueError("The value of max_tries must be greater than zero: {}".format(max_tries))
        if not objects:
            raise Exception("The list of objects can not be empty!")
        # cache to fasten collision detection
        bvh_cache = {}
        # for every selected object
        for obj in objects:
            if obj.type == "MESH":
                no_collision = True
                amount_of_tries_done = -1
                # Try max_iter amount of times
                for i in range(max_tries):
                    # Put the top object in queue at the sampled point in space
                    position = self.config.get_vector3d("pos_sampler")
                    rotation = self.config.get_vector3d("rot_sampler")
                    no_collision = ObjectPoseSampler.check_pose_for_object(obj, position, rotation, bvh_cache,
                                                                           placed, [])
                    # If no collision then keep the position
                    if no_collision:
                        amount_of_tries_done = i
                        break
                if amount_of_tries_done == -1:
                    amount_of_tries_done = max_tries
                # NOTE: the object is appended even when no collision-free pose
                # was found, so subsequent objects still check against it.
                placed.append(obj)
                if not no_collision:
                    print("Could not place " + obj.name + " without a collision.")
                else:
                    print("It took " + str(amount_of_tries_done + 1) + " tries to place " + obj.name)
    def insert_key_frames(self, obj, frame_id):
        """ Insert key frames for given object pose (location and euler rotation).
        Not invoked by run() in this module's visible code; kept as a helper.
        :param obj: Loaded object. Type: blender object.
        :param frame_id: The frame number where key frames should be inserted. Type: int.
        """
        obj.keyframe_insert(data_path='location', frame=frame_id)
        obj.keyframe_insert(data_path='rotation_euler', frame=frame_id)
    @staticmethod
    def check_pose_for_object(obj: bpy.types.Object, position: mathutils.Vector, rotation: mathutils.Vector,
                              bvh_cache: dict, objects_to_check_against: list,
                              list_of_objects_with_no_inside_check: list):
        """
        Checks if a object placed at the given pose intersects with any object given in the list.
        The bvh_cache adds all current objects to the bvh tree, which increases the speed.
        If an object is already in the cache it is removed, before performing the check.
        :param obj: Object which should be checked. Type: :class:`bpy.types.Object`
        :param position: 3D Vector of the location of the object. Type: :class:`mathutils.Vector`
        :param rotation: 3D Vector of the rotation in euler angles. If this is None, the rotation is not changed \
                         Type: :class:`mathutils.Vector`
        :param bvh_cache: Dict of all the bvh trees, removes the `obj` from the cache before adding it again. \
                          Type: :class:`dict`
        :param objects_to_check_against: List of objects which the object is checked again \
                                         Type: :class:`list`
        :param list_of_objects_with_no_inside_check: List of objects on which no inside check is performed. \
                                                     This check is only done for the objects in \
                                                     `objects_to_check_against`. Type: :class:`list`
        :return: Type: :class:`bool`, True if no collision was found, false if at least one collision was found
        """
        # assign it a new pose
        obj.location = position
        if rotation:
            obj.rotation_euler = rotation
        bpy.context.view_layer.update()
        # Remove bvh cache, as object has changed
        if obj.name in bvh_cache:
            del bvh_cache[obj.name]
        no_collision = True
        # Now check for collisions
        for already_placed in objects_to_check_against:
            # First check if bounding boxes collides
            intersection = check_bb_intersection(obj, already_placed)
            # if they do
            if intersection:
                skip_inside_check = already_placed in list_of_objects_with_no_inside_check
                # then check for more refined collisions
                intersection, bvh_cache = check_intersection(obj, already_placed, bvh_cache=bvh_cache,
                                                             skip_inside_check=skip_inside_check)
            if intersection:
                no_collision = False
                break
        return no_collision
| 41.937853 | 116 | 0.580224 | 7,253 | 0.977098 | 0 | 0 | 2,716 | 0.36589 | 0 | 0 | 4,469 | 0.602048 |
1271a5b636e4e2a4b34227856118cd1f4633a6d6 | 872 | py | Python | models.py | harmonyinnovationhub/project-beta | f98a08972ffb9c01123aa4849dc06a8b0beec171 | [
"MIT"
] | null | null | null | models.py | harmonyinnovationhub/project-beta | f98a08972ffb9c01123aa4849dc06a8b0beec171 | [
"MIT"
] | null | null | null | models.py | harmonyinnovationhub/project-beta | f98a08972ffb9c01123aa4849dc06a8b0beec171 | [
"MIT"
] | null | null | null | from core import app
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
# Flask-SQLAlchemy handle bound to the application instance.
db = SQLAlchemy(app)
# Flask-Migrate (Alembic) integration for schema migrations.
migrate = Migrate(app, db)
# user table
class User(db.Model):
    # auto-incrementing primary key
    id = db.Column(db.Integer, primary_key=True)
    # per-user token, unique. NOTE(review): presumably used for token-based
    # auth — confirm against the auth layer.
    token = db.Column(db.String(50), unique=True)
    name = db.Column(db.String(50))
    # assumes a hashed password fits in 128 chars — TODO confirm
    password = db.Column(db.String(128))
    # admin flag for privileged users
    admin = db.Column(db.Boolean)
# NOTE(review): 'logitude' looks like a typo for 'longitude', but renaming
# would change both the DB column and the constructor — flagging only.
class LogLag(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    logitude = db.Column(db.Float(128))
    latitude = db.Column(db.Float(128))
    def __init__(self, logitude, latitude):
        self.logitude = logitude
        self.latitude = latitude
class Upload(db.Model):
    # auto-incrementing primary key
    id = db.Column(db.Integer, primary_key=True)
    # uploaded file payload; stored as text — NOTE(review): presumably
    # base64/text-encoded content, confirm against the upload handler
    file = db.Column(db.Text, nullable=False)
    # original file name
    name = db.Column(db.Text, nullable=False)
mimetype = db.Column(db.Text, nullable=False) | 28.129032 | 49 | 0.692661 | 708 | 0.811927 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.013761 |
1272e8c25f35ec0988e71a71e9975672d9b50122 | 6,285 | py | Python | create_swag/lm/load_data.py | gauravkmr/swagaf | 0613674cca3612664c3a77a7d26a888ca7d3b127 | [
"MIT"
] | 182 | 2018-08-10T23:40:41.000Z | 2022-03-25T11:23:17.000Z | create_swag/lm/load_data.py | gauravkmr/swagaf | 0613674cca3612664c3a77a7d26a888ca7d3b127 | [
"MIT"
] | 3 | 2018-10-08T02:35:56.000Z | 2019-11-19T23:24:22.000Z | create_swag/lm/load_data.py | gauravkmr/swagaf | 0613674cca3612664c3a77a7d26a888ca7d3b127 | [
"MIT"
] | 40 | 2018-08-18T14:50:55.000Z | 2022-03-07T23:49:54.000Z | # First make the vocabulary, etc.
import os
import pickle as pkl
import random
import simplejson as json
from allennlp.common.util import get_spacy_model
from allennlp.data import Instance
from allennlp.data import Token
from allennlp.data import Vocabulary
from allennlp.data.dataset import Batch
from allennlp.data.fields import TextField
from allennlp.data.token_indexers import SingleIdTokenIndexer
from allennlp.data.token_indexers.elmo_indexer import ELMoTokenCharactersIndexer
from torch.utils.data import Dataset
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
from raw_data.events import DATA_PATH
from pytorch_misc import pairwise
from create_swag.lm.config import NUM_FOLDS
def load_lm_data(fold=None, mode='train'):
    """
    Turns the sequential data into instances.
    :param fold: cross-validation fold index (0..NUM_FOLDS-1), or None when
        the dataset/vocabulary is being built from scratch.
    :param mode: 'val' loads only ``fold``; anything else loads every fold
        except ``fold``.
    :return: (instances, vocab) — list of (Instance, sent1, sent2, item)
        tuples and the AllenNLP Vocabulary.
    """
    # Get or make vocab
    spacy_model = get_spacy_model("en_core_web_sm", pos_tags=False, parse=False, ner=False)
    if os.path.exists('vocabulary'):
        print("Loading cached vocab. caution if you're building the dataset again!!!!", flush=True)
        vocab = Vocabulary.from_files('vocabulary')
        with open(os.path.join(DATA_PATH, 'events-3.json'), 'r') as f:
            lm_data = json.load(f)
        lm_data = [data_item for s in ('train', 'val', 'test') for data_item in lm_data[s]]
    else:
        # building the vocabulary only makes sense on the full (unsplit) data
        assert fold is None
        with open(os.path.join(DATA_PATH, 'events-3.json'), 'r') as f:
            lm_data = json.load(f)
        lm_data = [data_item for s in ('train', 'val', 'test') for data_item in lm_data[s]]
        # Manually doing this because I don't want to double count things
        vocab = Vocabulary.from_instances(
            [Instance({'story': TextField(
                [Token(x) for x in ['@@bos@@'] + [x.orth_ for x in spacy_model(sent)] + ['@@eos@@']], token_indexers={
                    'tokens': SingleIdTokenIndexer(namespace='tokens', lowercase_tokens=True)})}) for data_item in
                lm_data for sent in
                data_item['sentences']], min_count={'tokens': 3})
        vocab.get_index_to_token_vocabulary('tokens')
        vocab.save_to_files('vocabulary')
    print("VOCABULARY HAS {} ITEMS".format(vocab.get_vocab_size(namespace='tokens')))
    # If every fold pickle already exists, just load the requested split(s).
    if all([os.path.exists('lm-{}-of-{}.pkl'.format(i, NUM_FOLDS)) for i in range(NUM_FOLDS)]):
        print("LOADING CACHED DATASET", flush=True)
        if mode == 'val':
            with open('lm-{}-of-{}.pkl'.format(fold, NUM_FOLDS), 'rb') as f:
                print("Loading split{} for {}".format(fold, mode))
                instances = pkl.load(f)
        else:
            # training mode: concatenate every fold except the held-out one
            instances = []
            for other_fold in range(NUM_FOLDS):
                if other_fold != fold:
                    with open('lm-{}-of-{}.pkl'.format(other_fold, NUM_FOLDS), 'rb') as f:
                        print("Loading split{} for {}".format(other_fold, mode))
                        instances += pkl.load(f)
        return instances, vocab
    print("MAKING THE DATASET", flush=True)
    # dataset construction always operates on all folds at once
    assert fold is None
    for item in tqdm(lm_data):
        item['sentences_tokenized'] = [[st.orth_ for st in spacy_model(sent)] for sent in item['sentences']]
    def _to_instances(data):
        # flatten this: one instance per adjacent sentence pair (s1, s2)
        instances = []
        for item in data:
            for s1, s2 in pairwise(item['sentences_tokenized']):
                instances.append((
                    Instance({'story': TextField([Token(x) for x in ['@@bos@@'] + s1 + s2 + ['@@eos@@']],
                                                 token_indexers={
                                                     'tokens': SingleIdTokenIndexer(namespace='tokens',
                                                                                    lowercase_tokens=True)})}),
                    s1,
                    s2,
                    item,
                ))
        return instances
    # fixed seed so the fold assignment is reproducible across runs
    random.seed(123456)
    random.shuffle(lm_data)
    all_sets = []
    for fold_ in range(NUM_FOLDS):
        val_set = _to_instances(lm_data[len(lm_data) * fold_ // NUM_FOLDS:len(lm_data) * (fold_ + 1) // NUM_FOLDS])
        with open('lm-{}-of-{}.pkl'.format(fold_, NUM_FOLDS), 'wb') as f:
            pkl.dump(val_set, f)
        all_sets.extend(val_set)
    return all_sets, vocab
class RawPassages(Dataset):
    """Dataset over the (instance, s1, s2, item) tuples produced by load_lm_data."""

    def __init__(self, fold, mode):
        self.fold = fold
        self.mode = mode
        self.instances, self.vocab = load_lm_data(fold=self.fold, mode=self.mode)
        is_train = self.mode == 'train'
        # Shuffle and drop the ragged final batch only while training.
        self.dataloader = DataLoader(dataset=self, batch_size=32, shuffle=is_train,
                                     num_workers=0, collate_fn=self.collate,
                                     drop_last=is_train)
        self.indexer = ELMoTokenCharactersIndexer()

    def collate(self, instances_l):
        """Collate a list of (instance, s1, s2, item) tuples into one batch dict."""
        batch = Batch([entry[0] for entry in instances_l])
        batch.index_instances(self.vocab)
        batch_dict = {key: val['tokens'] for key, val in batch.as_tensor_dict().items()}
        batch_dict['story_tokens'] = [entry[0].fields['story'].tokens for entry in instances_l]
        batch_dict['story_full'] = [entry[1] + entry[2] for entry in instances_l]
        batch_dict['items'] = [entry[3] for entry in instances_l]
        return batch_dict

    def __len__(self):
        return len(self.instances)

    def __getitem__(self, index):
        """
        :param index:
        :return: the raw (instance, s1, s2, item) tuple at *index*.
                 To print the instance use e.g. instance.fields['story'].tokens
        """
        return self.instances[index]

    @classmethod
    def splits(cls, fold):
        """Return the (train, val) dataset pair for the given fold."""
        return cls(fold, mode='train'), cls(fold, mode='val')
if __name__ == '__main__':
    # Smoke-run: build (or load from cache) the full dataset and vocabulary.
    instances, vocab = load_lm_data()
    # Debugging snippet kept for reference -- round-trips tokens through the
    # vocabulary to spot out-of-vocabulary remappings (marked with 'D'):
    # train, val = RawPassages.splits()
    # for item in train.dataloader:
    #     for story in item['story_tokens']:
    #         tok_text = [x.text.lower() for x in story]
    #         remapped_text = [vocab.get_token_from_index(vocab.get_token_index(x)) for x in tok_text]
    #         print('({}) {} -> {}'.format('D' if tok_text != remapped_text else ' ',
    #                                      ' '.join(tok_text), ' '.join(remapped_text)), flush=True)
| 41.622517 | 118 | 0.590294 | 1,434 | 0.228162 | 0 | 0 | 101 | 0.01607 | 0 | 0 | 1,540 | 0.245028 |
12732154001636677778fe83629d7b02a01b585b | 148 | py | Python | psana/psana/momentum/Energy.py | JBlaschke/lcls2 | 30523ef069e823535475d68fa283c6387bcf817b | [
"BSD-3-Clause-LBNL"
] | 16 | 2017-11-09T17:10:56.000Z | 2022-03-09T23:03:10.000Z | psana/psana/momentum/Energy.py | JBlaschke/lcls2 | 30523ef069e823535475d68fa283c6387bcf817b | [
"BSD-3-Clause-LBNL"
] | 6 | 2017-12-12T19:30:05.000Z | 2020-07-09T00:28:33.000Z | psana/psana/momentum/Energy.py | JBlaschke/lcls2 | 30523ef069e823535475d68fa283c6387bcf817b | [
"BSD-3-Clause-LBNL"
] | 25 | 2017-09-18T20:02:43.000Z | 2022-03-27T22:27:42.000Z | import numpy as np
def CalcEnergy(m_amu, Px_au, Py_au, Pz_au):
    """Return the kinetic energy p^2 / (2m), converted to eV.

    :param m_amu: particle mass in atomic mass units
    :param Px_au, Py_au, Pz_au: momentum components in atomic units
    """
    AMU_TO_AU = 1836.15   # one atomic mass unit expressed in electron masses
    HARTREE_TO_EV = 27.2  # conversion factor from Hartree to electron-volts
    p_squared = Px_au ** 2 + Py_au ** 2 + Pz_au ** 2
    return HARTREE_TO_EV * p_squared / (2 * AMU_TO_AU * m_amu)
| 24.666667 | 65 | 0.668919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
12732acde8058d66a7523c9ed83348d21998d829 | 357 | py | Python | timer.py | davidbarkhuizen/simagora | 43d44dc058adcde4738574cb1132abaa02e6516e | [
"MIT"
] | 1 | 2017-04-30T23:31:43.000Z | 2017-04-30T23:31:43.000Z | timer.py | davidbarkhuizen/simagora | 43d44dc058adcde4738574cb1132abaa02e6516e | [
"MIT"
] | null | null | null | timer.py | davidbarkhuizen/simagora | 43d44dc058adcde4738574cb1132abaa02e6516e | [
"MIT"
] | null | null | null | from time import clock
import logging
#~ class Timer(object):
#~
#~ def start(self,s):
#~ self.s = s
#~ self.started = clock()
#~
#~ def stop(self):
#~ self.stopped = clock()
#~ t = self.stopped - self.started
#~ self.log(t)
#~
#~ def log(self, t):
#~ line = self.s + ',' + str(t)
#~ logging.info(line)
| 19.833333 | 38 | 0.504202 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 264 | 0.739496 |
89bf882df35c847766d8e10365b7efa85c803827 | 173 | py | Python | prototype/data/datasets/__init__.py | Sense-GVT/BigPretrain | d8d9b43d94dd1364c18c1e5ba21b85a31cdbba9e | [
"Apache-2.0"
] | 8 | 2021-10-18T05:11:55.000Z | 2021-11-10T11:54:13.000Z | prototype/data/datasets/__init__.py | Sense-GVT/BigPretrain | d8d9b43d94dd1364c18c1e5ba21b85a31cdbba9e | [
"Apache-2.0"
] | null | null | null | prototype/data/datasets/__init__.py | Sense-GVT/BigPretrain | d8d9b43d94dd1364c18c1e5ba21b85a31cdbba9e | [
"Apache-2.0"
] | 1 | 2021-09-10T03:17:19.000Z | 2021-09-10T03:17:19.000Z | from .imagenet_dataset import ImageNetDataset, RankedImageNetDataset # noqa
from .custom_dataset import CustomDataset # noqa
from .imagnetc import ImageNet_C_Dataset
| 28.833333 | 76 | 0.82659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.080925 |
89c3edf7f3339956ac7cca062ad64b1b9a40814b | 1,457 | py | Python | setup.py | BCD65/electricityLoadForecasting | 07a6ed060afaf7cc2906c0389b5c9e9b0fede193 | [
"MIT"
] | null | null | null | setup.py | BCD65/electricityLoadForecasting | 07a6ed060afaf7cc2906c0389b5c9e9b0fede193 | [
"MIT"
] | null | null | null | setup.py | BCD65/electricityLoadForecasting | 07a6ed060afaf7cc2906c0389b5c9e9b0fede193 | [
"MIT"
] | null | null | null |
import setuptools
from distutils.core import setup
# Build/installation metadata for the electricityLoadForecasting package.
setup(
    name = 'electricityLoadForecasting',
    version = '0.1.dev0',
    packages = setuptools.find_packages(),
    # Command-line entry points installed onto the user's PATH.
    scripts = ['scripts/main_forecasting.py',
               'scripts/preprocessing_eCO2mix.py',
               ],
    author = 'Ben',
    author_email = 'bcd6591[at]gmail.com',
    license = 'MIT License',
    # NOTE(review): reads README.txt eagerly at setup time -- this fails if the
    # file is missing from the distribution; confirm it is included in sdists.
    long_description = open('README.txt').read(),
    python_requires = ">= 3.6",
    install_requires = [
        'astral==1.2',
        'chardet',
        'cvxpy',
        'datetime',
        'h5py',
        'ipdb',
        'matplotlib',
        'numpy',
        'pandas',
        'prox_tv',
        'pytz',
        'scikit-learn',
        'scipy',
        'seaborn',
        'termcolor',
        'tzlocal',
        'unidecode',
        # Dependencies deliberately left disabled -- kept for reference:
        #'openblas',
        #'sklearn-contrib-py-earth',
        #'spams',
        #'rpy2',
        #'xgboost',
        ],
    )
89c4524ab54cd0dacd5f166c3bbe02a8a3454498 | 5,966 | py | Python | google/ads/google_ads/v3/proto/services/shopping_performance_view_service_pb2.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v3/proto/services/shopping_performance_view_service_pb2.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v3/proto/services/shopping_performance_view_service_pb2.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | 1 | 2020-03-13T00:14:31.000Z | 2020-03-13T00:14:31.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v3/proto/services/shopping_performance_view_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v3.proto.resources import shopping_performance_view_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_shopping__performance__view__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.api import client_pb2 as google_dot_api_dot_client__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v3/proto/services/shopping_performance_view_service.proto',
package='google.ads.googleads.v3.services',
syntax='proto3',
serialized_options=_b('\n$com.google.ads.googleads.v3.servicesB#ShoppingPerformanceViewServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v3/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V3.Services\312\002 Google\\Ads\\GoogleAds\\V3\\Services\352\002$Google::Ads::GoogleAds::V3::Services'),
serialized_pb=_b('\nNgoogle/ads/googleads_v3/proto/services/shopping_performance_view_service.proto\x12 google.ads.googleads.v3.services\x1aGgoogle/ads/googleads_v3/proto/resources/shopping_performance_view.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\"?\n!GetShoppingPerformanceViewRequest\x12\x1a\n\rresource_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x32\xae\x02\n\x1eShoppingPerformanceViewService\x12\xee\x01\n\x1aGetShoppingPerformanceView\x12\x43.google.ads.googleads.v3.services.GetShoppingPerformanceViewRequest\x1a:.google.ads.googleads.v3.resources.ShoppingPerformanceView\"O\x82\xd3\xe4\x93\x02\x39\x12\x37/v3/{resource_name=customers/*/shoppingPerformanceView}\xda\x41\rresource_name\x1a\x1b\xca\x41\x18googleads.googleapis.comB\x8a\x02\n$com.google.ads.googleads.v3.servicesB#ShoppingPerformanceViewServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v3/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V3.Services\xca\x02 Google\\Ads\\GoogleAds\\V3\\Services\xea\x02$Google::Ads::GoogleAds::V3::Servicesb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_shopping__performance__view__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_client__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,])
_GETSHOPPINGPERFORMANCEVIEWREQUEST = _descriptor.Descriptor(
name='GetShoppingPerformanceViewRequest',
full_name='google.ads.googleads.v3.services.GetShoppingPerformanceViewRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v3.services.GetShoppingPerformanceViewRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\002'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=277,
serialized_end=340,
)
DESCRIPTOR.message_types_by_name['GetShoppingPerformanceViewRequest'] = _GETSHOPPINGPERFORMANCEVIEWREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetShoppingPerformanceViewRequest = _reflection.GeneratedProtocolMessageType('GetShoppingPerformanceViewRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSHOPPINGPERFORMANCEVIEWREQUEST,
__module__ = 'google.ads.googleads_v3.proto.services.shopping_performance_view_service_pb2'
,
__doc__ = """Request message for
[ShoppingPerformanceViewService.GetShoppingPerformanceView][google.ads.googleads.v3.services.ShoppingPerformanceViewService.GetShoppingPerformanceView].
Attributes:
resource_name:
Required. The resource name of the Shopping performance view
to fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.services.GetShoppingPerformanceViewRequest)
))
_sym_db.RegisterMessage(GetShoppingPerformanceViewRequest)
DESCRIPTOR._options = None
_GETSHOPPINGPERFORMANCEVIEWREQUEST.fields_by_name['resource_name']._options = None
_SHOPPINGPERFORMANCEVIEWSERVICE = _descriptor.ServiceDescriptor(
name='ShoppingPerformanceViewService',
full_name='google.ads.googleads.v3.services.ShoppingPerformanceViewService',
file=DESCRIPTOR,
index=0,
serialized_options=_b('\312A\030googleads.googleapis.com'),
serialized_start=343,
serialized_end=645,
methods=[
_descriptor.MethodDescriptor(
name='GetShoppingPerformanceView',
full_name='google.ads.googleads.v3.services.ShoppingPerformanceViewService.GetShoppingPerformanceView',
index=0,
containing_service=None,
input_type=_GETSHOPPINGPERFORMANCEVIEWREQUEST,
output_type=google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_shopping__performance__view__pb2._SHOPPINGPERFORMANCEVIEW,
serialized_options=_b('\202\323\344\223\0029\0227/v3/{resource_name=customers/*/shoppingPerformanceView}\332A\rresource_name'),
),
])
_sym_db.RegisterServiceDescriptor(_SHOPPINGPERFORMANCEVIEWSERVICE)
DESCRIPTOR.services_by_name['ShoppingPerformanceViewService'] = _SHOPPINGPERFORMANCEVIEWSERVICE
# @@protoc_insertion_point(module_scope)
| 53.267857 | 1,127 | 0.826852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,983 | 0.5 |
89c4bc4a43842e2dc017090ed080633222516f16 | 18,930 | py | Python | python-client/trustedanalytics/core/graph.py | skavulya/atk | c83f0bee2530282e39bf28d4a15355561b5eca4d | [
"Apache-2.0"
] | null | null | null | python-client/trustedanalytics/core/graph.py | skavulya/atk | c83f0bee2530282e39bf28d4a15355561b5eca4d | [
"Apache-2.0"
] | null | null | null | python-client/trustedanalytics/core/graph.py | skavulya/atk | c83f0bee2530282e39bf28d4a15355561b5eca4d | [
"Apache-2.0"
] | null | null | null | # vim: set encoding=utf-8
#
# Copyright (c) 2015 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from trustedanalytics.core.errorhandle import IaError
f, f2 = {}, {}
import logging
logger = logging.getLogger(__name__)
from trustedanalytics.core.decorators import *
api = get_api_decorator(logger)
from trustedanalytics.meta.metaprog import CommandInstallable as CommandLoadable
from trustedanalytics.meta.docstub import doc_stubs_import
from trustedanalytics.meta.namedobj import name_support
from trustedanalytics.core.files import OrientDBGraph
import uuid
import json
from trustedanalytics.core.column import Column
from trustedanalytics.meta.clientside import raise_deprecation_warning
__all__ = ["drop_frames", "drop_graphs", "Frame", "get_frame", "get_frame_names", "get_graph", "get_graph_names"]
def _get_backend():
    """Return the result of the configured ``get_graph_backend`` factory."""
    # Imported here rather than at module top -- presumably to avoid a circular
    # import with the config module; confirm before hoisting.
    from trustedanalytics.meta.config import get_graph_backend
    return get_graph_backend()
# Graph
try:
    # boilerplate required here for static analysis to pick up the inheritance (the whole point of docstubs)
    from trustedanalytics.core.docstubs1 import _DocStubsGraph
    doc_stubs_import.success(logger, "_DocStubsGraph")
except Exception as e:
    doc_stubs_import.failure(logger, "_DocStubsGraph", e)
    # Fall back to an empty placeholder so this module still imports when the
    # generated doc stubs are unavailable (e.g. before code generation has run).
    class _DocStubsGraph(object): pass
@api
@name_support('graph')
class _BaseGraph(CommandLoadable):
    """Shared behavior for graph entities: repr, status, and last-read date.

    Server interaction goes through ``self._backend``; failures while
    gathering metadata are deliberately swallowed so that repr and the
    metadata properties never raise in an interactive session.
    """
    _entity_type = 'graph'

    def __init__(self):
        CommandLoadable.__init__(self)

    def __repr__(self):
        # Best effort: fall back to the default repr when the server is unreachable.
        try:
            return self._backend.get_repr(self)
        except:
            return super(_BaseGraph, self).__repr__() + " (Unable to collect metadata from server)"

    # NOTE(review): the double-underscore prefix on the properties below is
    # presumably consumed by the @api decorator machinery so they are exposed
    # without it (e.g. as 'status') -- confirm in trustedanalytics.meta.
    @api
    @property
    @returns(data_type=str, description="Status of the graph.")
    def __status(self):
        """
        Read-only property - Current graph life cycle status.

        One of three statuses: Active, Dropped, Finalized
        -   Active: Entity is available for use
        -   Dropped: Entity has been dropped by user or by garbage collection which found it stale
        -   Finalized: Entity's data has been deleted
        """
        try:
            return self._backend.get_status(self)
        except:
            return super(_BaseGraph, self).__repr__() + " (Unable to collect metadata from server)"

    @api
    @property
    @returns(data_type=str, description="Date string of the last time this frame's data was accessed")
    def __last_read_date(self):
        """
        Read-only property - Last time this frame's data was accessed.
        """
        try:
            return self._backend.get_last_read_date(self)
        except:
            return "(Unable to collect metadata from server)"
@api
class Graph(_DocStubsGraph, _BaseGraph):
"""Creates a seamless property graph.
A seamless graph is a collection of vertex and edge lists stored as frames.
This allows frame-like operations against graph data.
Many frame methods are available to work with vertices and edges.
Vertex and edge properties are stored as columns.
A seamless graph is better suited for bulk :term:`OLAP`-type operations
"""
_entity_type = 'graph:'
@api
@arg('source', 'OrientDBGraph | None', "A source of initial data.")
@arg('name', str, """Name for the new graph.
Default is None.""")
def __init__(self, source=None, name=None, _info=None):
"""Initialize the graph.
Examples
--------
This example uses a single source data frame and creates a graph of 'user'
and 'movie' vertices connected by 'rating' edges.
The first step is to bring in some data to create a frame as the source
for a graph:
<hide>
>>> import trustedanalytics as ta
>>> ta.connect()
-etc-
</hide>
>>> schema = [('viewer', str), ('profile', ta.int32), ('movie', str), ('rating', ta.int32)]
>>> data1 = [['fred',0,'Croods',5],
... ['fred',0,'Jurassic Park',5],
... ['fred',0,'2001',2],
... ['fred',0,'Ice Age',4],
... ['wilma',0,'Jurassic Park',3],
... ['wilma',0,'2001',5],
... ['wilma',0,'Ice Age',4],
... ['pebbles',1,'Croods',4],
... ['pebbles',1,'Land Before Time',3],
... ['pebbles',1,'Ice Age',5]]
>>> data2 = [['betty',0,'Croods',5],
... ['betty',0,'Jurassic Park',3],
... ['betty',0,'Land Before Time',4],
... ['betty',0,'Ice Age',3],
... ['barney',0,'Croods',5],
... ['barney',0,'Jurassic Park',5],
... ['barney',0,'Land Before Time',3],
... ['barney',0,'Ice Age',5],
... ['bamm bamm',1,'Croods',5],
... ['bamm bamm',1,'Land Before Time',3]]
>>> frame = ta.Frame(ta.UploadRows(data1, schema))
<progress>
>>> frame2 = ta.Frame(ta.UploadRows(data2, schema))
<progress>
>>> frame.inspect()
[#] viewer profile movie rating
===============================================
[0] fred 0 Croods 5
[1] fred 0 Jurassic Park 5
[2] fred 0 2001 2
[3] fred 0 Ice Age 4
[4] wilma 0 Jurassic Park 3
[5] wilma 0 2001 5
[6] wilma 0 Ice Age 4
[7] pebbles 1 Croods 4
[8] pebbles 1 Land Before Time 3
[9] pebbles 1 Ice Age 5
Now, make an empty graph object:
>>> graph = ta.Graph()
Then, define the types of vertices and edges this graph will be made of:
>>> graph.define_vertex_type('viewer')
<progress>
>>> graph.define_vertex_type('film')
<progress>
>>> graph.define_edge_type('rating', 'viewer', 'film')
<progress>
And finally, add the data to the graph:
>>> graph.vertices['viewer'].add_vertices(frame, 'viewer', ['profile'])
<progress>
>>> graph.vertices['viewer'].inspect()
[#] _vid _label viewer profile
===================================
[0] 1 viewer fred 0
[1] 8 viewer pebbles 1
[2] 5 viewer wilma 0
>>> graph.vertices['film'].add_vertices(frame, 'movie')
<progress>
>>> graph.vertices['film'].inspect()
[#] _vid _label movie
===================================
[0] 19 film Land Before Time
[1] 14 film Ice Age
[2] 12 film Jurassic Park
[3] 11 film Croods
[4] 13 film 2001
>>> graph.edges['rating'].add_edges(frame, 'viewer', 'movie', ['rating'])
<progress>
>>> graph.edges['rating'].inspect()
[#] _eid _src_vid _dest_vid _label rating
==============================================
[0] 24 1 14 rating 4
[1] 22 1 12 rating 5
[2] 21 1 11 rating 5
[3] 23 1 13 rating 2
[4] 29 8 19 rating 3
[5] 30 8 14 rating 5
[6] 28 8 11 rating 4
[7] 27 5 14 rating 4
[8] 25 5 12 rating 3
[9] 26 5 13 rating 5
Explore basic graph properties:
>>> graph.vertex_count
<progress>
8
>>> graph.vertices
viewer : [viewer, profile], count = 3
film : [movie], count = 5
>>> graph.edge_count
<progress>
10
>>> graph.edges
rating : [rating], count = 10
>>> graph.status
u'ACTIVE'
>>> graph.last_read_date
<datetime.datetime>
>>> graph
Graph <unnamed>
status = ACTIVE (last_read_date = -etc-)
vertices =
viewer : [viewer, profile], count = 3
film : [movie], count = 5
edges =
rating : [rating], count = 10
Data from other frames can be added to the graph by making more calls
to `add_vertices` and `add_edges`.
<skip>
>>> frame2 = ta.Frame(ta.CsvFile("/datasets/extra-movie-data.csv", frame.schema))
<progress>
</skip>
>>> graph.vertices['viewer'].add_vertices(frame2, 'viewer', ['profile'])
<progress>
>>> graph.vertices['viewer'].inspect()
[#] _vid _label viewer profile
=====================================
[0] 5 viewer wilma 0
[1] 1 viewer fred 0
[2] 31 viewer betty 0
[3] 35 viewer barney 0
[4] 8 viewer pebbles 1
[5] 39 viewer bamm bamm 1
>>> graph.vertices['film'].add_vertices(frame2, 'movie')
<progress>
>>> graph.vertices['film'].inspect()
[#] _vid _label movie
===================================
[0] 13 film 2001
[1] 14 film Ice Age
[2] 11 film Croods
[3] 19 film Land Before Time
[4] 12 film Jurassic Park
>>> graph.vertex_count
<progress>
11
>>> graph.edges['rating'].add_edges(frame2, 'viewer', 'movie', ['rating'])
<progress>
>>> graph.edges['rating'].inspect(20)
[##] _eid _src_vid _dest_vid _label rating
===============================================
[0] 24 1 14 rating 4
[1] 22 1 12 rating 5
[2] 21 1 11 rating 5
[3] 23 1 13 rating 2
[4] 29 8 19 rating 3
[5] 30 8 14 rating 5
[6] 28 8 11 rating 4
[7] 27 5 14 rating 4
[8] 25 5 12 rating 3
[9] 26 5 13 rating 5
[10] 60 39 19 rating 3
[11] 59 39 11 rating 5
[12] 53 31 19 rating 4
[13] 54 31 14 rating 3
[14] 52 31 12 rating 3
[15] 51 31 11 rating 5
[16] 57 35 19 rating 3
[17] 58 35 14 rating 5
[18] 56 35 12 rating 5
[19] 55 35 11 rating 5
>>> graph.edge_count
<progress>
20
Now we'll copy the graph and then change it.
>>> graph2 = graph.copy()
<progress>
>>> graph2
Graph <unnamed>
status = ACTIVE (last_read_date = -etc-)
vertices =
viewer : [viewer, profile], count = 6
film : [movie], count = 5
edges =
rating : [rating], count = 20
We can rename the columns in the frames representing the vertices and edges,
similar to regular frame operations.
>>> graph2.vertices['viewer'].rename_columns({'viewer': 'person'})
<progress>
>>> graph2.vertices
viewer : [person, profile], count = 6
film : [movie], count = 5
>>> graph2.edges['rating'].rename_columns({'rating': 'score'})
<progress>
>>> graph2.edges
rating : [score], count = 20
We can apply filter and drop functions to the vertex and edge frames.
>>> graph2.vertices['viewer'].filter(lambda v: v.person.startswith("b"))
<progress>
>>> graph2.vertices['viewer'].inspect()
[#] _vid _label person profile
=====================================
[0] 31 viewer betty 0
[1] 35 viewer barney 0
[2] 39 viewer bamm bamm 1
>>> graph2.vertices['viewer'].drop_duplicates("profile")
<progress>
<hide>
>>> graph2.vertices['viewer'].sort('_vid') # sort to ensure final inspect matches
<progress>
</hide>
>>> graph2.vertices['viewer'].inspect()
[#] _vid _label person profile
=====================================
[0] 31 viewer betty 0
[1] 39 viewer bamm bamm 1
Now check our edges to see that they have also be filtered.
>>> graph2.edges['rating'].inspect()
[#] _eid _src_vid _dest_vid _label score
=============================================
[0] 60 39 19 rating 3
[1] 59 39 11 rating 5
[2] 53 31 19 rating 4
[3] 54 31 14 rating 3
[4] 52 31 12 rating 3
[5] 51 31 11 rating 5
Only source vertices 31 and 39 remain.
Drop row for the movie 'Croods' (vid 41) from the film VertexFrame.
>>> graph2.vertices['film'].inspect()
[#] _vid _label movie
===================================
[0] 13 film 2001
[1] 14 film Ice Age
[2] 11 film Croods
[3] 19 film Land Before Time
[4] 12 film Jurassic Park
>>> graph2.vertices['film'].drop_rows(lambda row: row.movie=='Croods')
<progress>
>>> graph2.vertices['film'].inspect()
[#] _vid _label movie
===================================
[0] 13 film 2001
[1] 14 film Ice Age
[2] 19 film Land Before Time
[3] 12 film Jurassic Park
Dangling edges (edges that correspond to the movie 'Croods', vid 41) were also removed:
>>> graph2.edges['rating'].inspect()
[#] _eid _src_vid _dest_vid _label score
=============================================
[0] 52 31 12 rating 3
[1] 54 31 14 rating 3
[2] 60 39 19 rating 3
[3] 53 31 19 rating 4
"""
if not hasattr(self, '_backend'):
self._backend = _get_backend()
from trustedanalytics.rest.graph import GraphInfo
if isinstance(_info, dict):
_info = GraphInfo(_info)
if isinstance(_info, GraphInfo):
self.uri = _info.uri
else:
self.uri = self._backend.create(self, name, 'atk/frame', _info)
if isinstance(source, OrientDBGraph):
self._backend._import_orientdb(self,source)
self._vertices = GraphFrameCollection(self, "vertices", self._get_vertex_frame, self._get_vertex_frames)
self._edges = GraphFrameCollection(self, "edges", self._get_edge_frame, self._get_edge_frames)
_BaseGraph.__init__(self)
@api
def ___get_vertex_frame(self, label):
"""
return a VertexFrame for the associated label
:param label: the label of the frame to return
"""
return self._backend.get_vertex_frame(self.uri, label)
@api
def ___get_vertex_frames(self):
"""
return all VertexFrames for this graph
"""
return self._backend.get_vertex_frames(self.uri)
@api
def ___get_edge_frame(self, label):
"""
return an EdgeFrame for the associated label
:param label: the label of the frame to return
"""
return self._backend.get_edge_frame(self.uri, label)
@api
def ___get_edge_frames(self):
"""
return all EdgeFrames for this graph
"""
return self._backend.get_edge_frames(self.uri)
@api
@property
def __vertices(self):
"""
Vertex frame collection
Acts like a dictionary where the vertex type is the key, returning the particular VertexFrame
Examples
--------
See :doc:`here <__init__>` for example usage in graph construction.
"""
return self._vertices
@api
@property
def __edges(self):
"""
Edge frame collection
Acts like a dictionary where the edge type is the key, returning the particular EdgeFrame
Examples
--------
See :doc:`here <__init__>` for example usage in graph construction.
"""
return self._edges
@api
@property
def __vertex_count(self):
"""
Get the total number of vertices in the graph.
Returns
-------
int32
The number of vertices in the graph.
Examples
--------
See :doc:`here <__init__>` for example usage in graph construction.
"""
return self._backend.get_vertex_count(self)
@api
@property
@returns(int, 'Total number of edges in the graph')
def __edge_count(self):
"""
Get the total number of edges in the graph.
Examples
--------
See :doc:`here <__init__>` for example usage in graph construction.
"""
return self._backend.get_edge_count(self)
class GraphFrameCollection(object):
    """
    A collection of the frames that make up either the vertex types or the
    edge types of a graph.  Behaves like a read-only mapping from label to
    frame and is iterable over all frames in the collection.
    """

    def __init__(self, graph, type_str, get_frame_func, get_frames_func):
        """
        :param graph: the owning graph object
        :param type_str: either "vertices" or "edges"
        :param get_frame_func: method to call to return a single frame in
            the collection
        :param get_frames_func: method to call to return all of the frames
            in the collection
        """
        self._graph = graph
        if type_str not in ["vertices", "edges"]:
            raise ValueError("Bad type_str %s in graph collection" % type_str)
        self._type_str = type_str
        self._get_frame_func = get_frame_func
        self._get_frames_func = get_frames_func

    def __getitem__(self, item):
        """Retrieve a single frame from the collection by its label."""
        return self._get_frame_func(item)

    def __iter__(self):
        """Iterate over all frames in the collection; calls the server."""
        for single_frame in self._get_frames_func():
            yield single_frame

    def __get_props_str(self, info):
        # Render the property names of one vertex/edge type as "[a, b, c]".
        return "[%s]" % ", ".join(info['properties'])

    def __repr__(self):
        """Printable summary: one 'label : [props], count = N' line per type."""
        graph_info = self._graph._backend._get_graph_info(self._graph)
        if self._type_str == "vertices":
            entries = graph_info.vertices
        elif self._type_str == "edges":
            entries = graph_info.edges
        else:
            # Unreachable: __init__ validates type_str.
            return ""
        return "\n".join("%s : %s, count = %d" % (entry['label'], self.__get_props_str(entry), entry['count'])
                         for entry in entries)
| 32.469983 | 134 | 0.539356 | 17,104 | 0.899832 | 192 | 0.010101 | 15,362 | 0.808186 | 0 | 0 | 14,532 | 0.76452 |
89c56780209adbe0387cf368029ab9a32fd8735c | 377 | py | Python | sound_play/scripts/test/test_sound_client.py | iory/audio_common | 1db0394dcef452b93474665ee902ab80c3e7e439 | [
"BSD-3-Clause"
] | 742 | 2017-07-05T02:49:36.000Z | 2022-03-30T12:55:43.000Z | sound_play/scripts/test/test_sound_client.py | iory/audio_common | 1db0394dcef452b93474665ee902ab80c3e7e439 | [
"BSD-3-Clause"
] | 124 | 2015-06-25T22:52:21.000Z | 2022-02-28T15:02:23.000Z | sound_play/scripts/test/test_sound_client.py | iory/audio_common | 1db0394dcef452b93474665ee902ab80c3e7e439 | [
"BSD-3-Clause"
] | 425 | 2017-07-04T22:03:29.000Z | 2022-03-29T06:59:06.000Z | #!/usr/bin/env python
import unittest
import rospy
import rostest
from sound_play.libsoundplay import SoundClient
class TestCase(unittest.TestCase):
def test_soundclient_constructor(self):
s = SoundClient()
self.assertIsNotNone(s)
if __name__ == '__main__':
    # Run under rostest so results are reported to the ROS test framework.
    rostest.rosrun('sound_play', 'test_sound_client', TestCase)

__author__ = 'Felix Duvallet'
| 20.944444 | 63 | 0.745358 | 136 | 0.360743 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.206897 |
89c68600cd84e244db83ebc8d0fd8aa50d19edfb | 2,308 | py | Python | misc/getch.py | Chiel92/tfate | 63632133e3bf5bf0ae481387da71ef81f164f539 | [
"MIT"
] | 3 | 2015-02-20T14:43:54.000Z | 2018-03-21T09:17:18.000Z | misc/getch.py | Chiel92/tfate | 63632133e3bf5bf0ae481387da71ef81f164f539 | [
"MIT"
] | null | null | null | misc/getch.py | Chiel92/tfate | 63632133e3bf5bf0ae481387da71ef81f164f539 | [
"MIT"
] | null | null | null | #!python
"""This module is for messing with input characters."""
import os
import sys
unicurses_path = os.path.dirname(os.path.abspath(__file__)) + '/../libs/unicurses'
sys.path.insert(0, unicurses_path)
import unicurses as curses
def key_info(key):
    """Return a one-line, human-readable description of *key*.

    *key* may be a one-character string or an integer keycode; each probe
    (ord, chr, curses.unctrl, curses.keyname) that does not apply to the
    given type falls back to a placeholder instead of raising.
    """
    def _attempt(probe, fallback):
        # Deliberately bare except: any failure (TypeError, ValueError,
        # missing curses symbol, ...) just yields the placeholder.
        try:
            return probe()
        except:
            return fallback

    ordinal = _attempt(lambda: ord(key), -1)
    character = _attempt(lambda: chr(key), -1)
    unctrl = _attempt(lambda: curses.unctrl(key), 'no unctrl')
    name = _attempt(lambda: curses.keyname(key), 'no name')
    return ('repr: {}, type: {}, ord: {}, chr: {}, unctrl: {}, name: {}\n'
            .format(repr(key), type(key), ordinal, character, unctrl, name))
def getchar(stdscr):
    """Read one keypress from *stdscr* and return it as a readable string.

    Escape becomes 'Esc', control characters become 'Ctrl-X', backspace
    becomes '\\b', other function keys become their curses name with the
    'KEY_' prefix stripped and parentheses removed (e.g. 'F1'), and
    printable characters are returned as-is.

    :raises IOError: if get_wch() returns something that is neither str nor int.
    """
    # get_wch() can raise curses.error (e.g. on interrupt); retry until a
    # real key arrives.
    while 1:
        try:
            char = stdscr.get_wch()
            break
        except curses.error:
            pass
    stdscr.addstr(key_info(char))
    if isinstance(char, str):
        _ord = ord(char)
        # Replace special characters with a readable string
        if _ord == 27:
            result = 'Esc'
        elif _ord == 10:
            result = '\n'
        elif _ord == 9:
            result = '\t'
        elif _ord < 32:
            result = curses.unctrl(char)
            result = result.decode()
            result = 'Ctrl-' + result[1]
        else:
            result = char
    elif isinstance(char, int):
        # char must be some kind of function key
        if char == curses.KEY_BACKSPACE:
            result = '\b'
        else:
            result = curses.keyname(char)
            result = result.decode()
            result = result[4] + result[5:].lower()
            # Remove parentheses for function keys, e.g. 'F(1)' -> 'F1'.
            # BUG FIX: str.replace returns a new string (strings are
            # immutable); the original discarded the return value, so the
            # parentheses were never actually removed.
            result = result.replace('(', '')
            result = result.replace(')', '')
    else:
        raise IOError('Can\'t handle input character type: {}.'
                      .format(str(type(char))))
    stdscr.addstr(key_info(result))
    return result
def main(stdscr):
    """Interactive demo: dump reprs of the ASCII range, then echo decoded
    keypresses (via getchar) until 'q' is pressed."""
    stdscr.keypad(1)
    curses.raw()  # raw mode: deliver Ctrl-C etc. as ordinary key input
    for i in range(127):
        stdscr.addstr(repr(chr(i)))
    stdscr.addstr('\n\n')
    for i in range(127):
        stdscr.addstr(repr(curses.unctrl(chr(i))))
    stdscr.addstr('special characters: {}\n\n'.format('œă好'))
    while 1:
        c = getchar(stdscr)
        if c == 'q':
            break

# wrapper() initializes curses and restores the terminal even on error.
curses.wrapper(main)
| 23.793814 | 82 | 0.52513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 415 | 0.179498 |
89c86b75d76dab47c9165b27b0027d8c0bc0fad1 | 1,598 | py | Python | exercises/linked-list/example.py | haithamk/python-exercism | 8166a98ba771e0d527efdda421d3d9e741f0459b | [
"MIT"
] | 1 | 2021-05-15T19:59:04.000Z | 2021-05-15T19:59:04.000Z | exercises/linked-list/example.py | toroad/python | ce085c81a82ae5fb460fe166323dbbaa5a2588c5 | [
"MIT"
] | null | null | null | exercises/linked-list/example.py | toroad/python | ce085c81a82ae5fb460fe166323dbbaa5a2588c5 | [
"MIT"
class Node(object):
    """A single element of a doubly linked list.

    value: the payload stored at this node.
    next / prev: neighbouring nodes, or None at the ends of the list.
    """
    def __init__(self, value, next=None, prev=None):
        self.value = value
        self.next = next
        self.prev = prev


class LinkedList(object):
    """A doubly linked list with deque-style operations.

    push/pop operate on the tail; unshift/shift operate on the head.
    len() and iteration (head to tail) are supported.
    """

    def __init__(self):
        self.head = None
        self.tail = None
        self.length = 0

    def push(self, value):
        """Append *value* at the tail."""
        new_node = Node(value)
        if not self.head:
            # Empty list: the new node is both head and tail.
            self.head = self.tail = new_node
        else:
            new_node.prev = self.tail
            self.tail.next = new_node
            self.tail = new_node
        self.length += 1

    def pop(self):
        """Remove and return the value at the tail.

        :raises IndexError: if the list is empty.
            BUG FIX: the original decremented ``length`` to -1 and then
            crashed with AttributeError ('NoneType' has no 'value') when
            popping from an empty list.
        """
        node = self.tail
        if node is None:
            raise IndexError('pop from empty list')
        if node.prev is None:
            # Removing the only element empties the list.
            self.head = self.tail = None
        else:
            self.tail = node.prev
            self.tail.next = None
        self.length -= 1
        return node.value

    def shift(self):
        """Remove and return the value at the head.

        :raises IndexError: if the list is empty (same fix as pop()).
        """
        node = self.head
        if node is None:
            raise IndexError('shift from empty list')
        if node.next is None:
            self.head = self.tail = None
        else:
            self.head = node.next
            self.head.prev = None
        self.length -= 1
        return node.value

    def unshift(self, value):
        """Insert *value* at the head."""
        new_node = Node(value)
        if not self.head:
            self.head = self.tail = new_node
        else:
            new_node.next = self.head
            self.head.prev = new_node
            self.head = new_node
        self.length += 1

    def __len__(self):
        return self.length

    def __iter__(self):
        """Yield values from head to tail."""
        current_node = self.head
        while (current_node):
            yield current_node.value
            current_node = current_node.next
89ca0ec001dbf86f13d792104e70016b1f984f59 | 879 | py | Python | tests/unitTest/testBitWiseSupervisor.py | huitredelombre/BERBER | 49ba60f1836e9a7fef89dcadd24d0a812f5fe4bd | [
"Unlicense"
] | null | null | null | tests/unitTest/testBitWiseSupervisor.py | huitredelombre/BERBER | 49ba60f1836e9a7fef89dcadd24d0a812f5fe4bd | [
"Unlicense"
] | null | null | null | tests/unitTest/testBitWiseSupervisor.py | huitredelombre/BERBER | 49ba60f1836e9a7fef89dcadd24d0a812f5fe4bd | [
"Unlicense"
] | null | null | null | import unittest
import sys
sys.path.append("../../src/")
from supervisors.bitWiseSupervisor import BitWiseSupervisor
from senders.scapySender import ScapySender
from simulations.randomSimulation import RandomSimulation
class testArgParser(unittest.TestCase):
    """Exercises BitWiseSupervisor.applyBER at the two BER extremes."""

    def testApplyBER(self):
        scapy_sender = ScapySender(46, "eth0")
        scapy_sender.totalSize = 1000
        random_sim = RandomSimulation(0, 0, 0, 0)
        payload = random_sim.getRandomString(100)
        scapy_sender.setPayload(payload)
        print("payload tested : " + payload)
        supervisor = BitWiseSupervisor(scapy_sender, 0, 0, 8000000)
        # With a bit error rate of 0 the frame must come through untouched.
        self.assertFalse(supervisor.applyBER())
        scapy_sender.resetTrame()
        supervisor.BER = 1
        # With a bit error rate of 1 the frame must be corrupted.
        self.assertTrue(supervisor.applyBER())


if __name__ == '__main__':
    unittest.main()
| 25.852941 | 61 | 0.676906 | 607 | 0.690557 | 0 | 0 | 0 | 0 | 0 | 0 | 109 | 0.124005 |
89cc8c96ba145a294249c4b138a3c09de02694bc | 949 | py | Python | headless_chrome.py | MineRobber9000/discordscript | c9c4eda0e28db72890d0490617ed361722c7c44c | [
"MIT"
] | null | null | null | headless_chrome.py | MineRobber9000/discordscript | c9c4eda0e28db72890d0490617ed361722c7c44c | [
"MIT"
] | null | null | null | headless_chrome.py | MineRobber9000/discordscript | c9c4eda0e28db72890d0490617ed361722c7c44c | [
"MIT"
] | null | null | null | from selenium import webdriver
def _options_factory():
    """Build a ChromeOptions object with the "headless" flag pre-set.

    Internal helper used by get_driver(); not meant to be called directly.
    """
    options = webdriver.ChromeOptions()
    options.add_argument("headless")
    return options
def get_driver(*varargs, args=None):
    """Create a headless selenium.webdriver.Chrome object.

    Extra Chrome command-line options may be supplied positionally
    (*varargs*) or as the *args* list. Duplicates are passed only once and
    "headless" is skipped because _options_factory() already forces it.

    :param varargs: individual command-line option strings.
    :param args: optional list of command-line option strings.
    :return: a configured headless Chrome driver.
    """
    # Fresh list per call: the previous ``args=[]`` default was shared
    # between calls and mutated via extend(), so options from one call
    # leaked into every later call. Copying also avoids mutating a list
    # the caller passed in.
    combined = list(args) if args is not None else []
    combined.extend(varargs)
    opt = _options_factory()
    # dict.fromkeys dedupes while keeping first-seen order; list(set(...))
    # reordered the options nondeterministically.
    for arg in dict.fromkeys(combined):
        if arg == "headless":  # already headless
            continue
        opt.add_argument(arg)
    # NOTE(review): ``chrome_options`` is deprecated in Selenium 4 in favour
    # of ``options``; kept as-is to avoid changing runtime requirements.
    return webdriver.Chrome(chrome_options=opt)
# import other useful things
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
# BeautifulSoup support
from bs4 import BeautifulSoup
def soupify(driver):
    """Parse the driver's current page source into a BeautifulSoup tree."""
    markup = driver.page_source
    return BeautifulSoup(markup, "html.parser")
| 32.724138 | 132 | 0.791359 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 337 | 0.355111 |
89cd7e4794021790017343f1b97d640a8664e0b4 | 2,677 | py | Python | lemonsoap/scent/columns_scent.py | Ekrekr/LemonSoap | 61b86b70a3788486235de2e8baeb7c68b80318a9 | [
"MIT"
] | null | null | null | lemonsoap/scent/columns_scent.py | Ekrekr/LemonSoap | 61b86b70a3788486235de2e8baeb7c68b80318a9 | [
"MIT"
] | 1 | 2019-08-23T18:30:31.000Z | 2019-08-23T18:32:23.000Z | lemonsoap/scent/columns_scent.py | Ekrekr/LemonSoap | 61b86b70a3788486235de2e8baeb7c68b80318a9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
LemonSoap - headers scent.
Deals with column headers.
"""
import pandas as pd
import inflection
import re
import logging
from ..lemon_bar import LemonBar
from .scent_template import ScentTemplate
class ColumnsScent(ScentTemplate):
    """
    Manages headers issue identification and fixing.
    """

    def __init__(self, lf: LemonBar):
        ScentTemplate.__init__(self, lf, "headers",
                               "columns_scent.ColumnsScent")

    def check(self) -> bool:
        """
        Identifies issues with headers in a dataframe.

        Correct format is "snake_case", with no special characters. Numbers
        are however allowed.

        Returns:
            False if no issues otherwise True.
        """
        # NOTE(review): fix() consumes self._issues, which is never
        # populated here -- presumably ScentTemplate/_log wires that up;
        # confirm against the base class.
        columns = self._lb().columns
        for column in columns:
            fixed = self._standardize(column)
            if fixed != column:
                self._log.info(f"* '{column}' incorrect format, "
                               f"should be '{fixed}'.")
        return self._finish_check()

    def fix(self) -> LemonBar:
        """
        Fixes headers in a given LemonBar.

        Returns:
            LemonBar with fixes applied.
        """
        self.check()
        for issue in self._issues:
            # OK to call this here as well as in check as unlikely to be
            # enough headers to cause an overhead.
            fixed = self._standardize(issue[0])
            self._log.info(f"* '{issue[0]}' replaced with '{fixed}'")
            self._lb().rename(columns={issue[0]: fixed}, inplace=True)
        return self._lb

    def _standardize(self, inp: str) -> str:
        """
        Converts input to standard column header format.

        * snake_case.
        * No special characters.
        * At most 24 characters long (before any uniqueness suffix).
        * Unique among the existing columns.

        Args:
            inp: string to fix.

        Returns:
            Converted input.
        """
        # Make underscored, lower case with no special characters.
        fixed = inp.replace(" ", "_")
        fixed = inflection.underscore(fixed)
        # Raw string avoids the invalid "\W" escape warning (SyntaxWarning
        # on Python 3.12+); the regex itself is unchanged.
        fixed = re.sub(r'\W+', '', fixed)

        # Truncate headers to at most 24 chars.
        if len(fixed) > 24:
            fixed = fixed[:24]

        # If not unique then try with repeatedly incrementing numbers.
        # TODO: O(n^2) algorithm, becomes very slow with lots of headers that
        # are the same. Should use precomputation table.
        suffix_num = 0
        fixed_inc = fixed
        while fixed_inc in self._lb().columns:
            fixed_inc = fixed + str(suffix_num)
            suffix_num += 1
        return fixed_inc
| 27.316327 | 77 | 0.566679 | 2,448 | 0.914456 | 0 | 0 | 0 | 0 | 0 | 0 | 1,300 | 0.485618 |
89ce8df07aa57ac1b43e0c11bcfa9d8eaf3ff5df | 2,347 | py | Python | experiments/expression/codex/codex_alignment.py | andrewcharlesjones/spatial-alignment | 70aecf800c5efea6a92990ccf87a1950752a268b | [
"MIT"
] | 14 | 2022-01-11T14:51:17.000Z | 2022-02-26T20:46:58.000Z | experiments/expression/codex/codex_alignment.py | andrewcharlesjones/spatial-alignment | 70aecf800c5efea6a92990ccf87a1950752a268b | [
"MIT"
] | 3 | 2022-01-26T17:16:24.000Z | 2022-02-24T13:22:39.000Z | experiments/expression/codex/codex_alignment.py | andrewcharlesjones/spatial-alignment | 70aecf800c5efea6a92990ccf87a1950752a268b | [
"MIT"
] | 1 | 2022-02-23T09:54:37.000Z | 2022-02-23T09:54:37.000Z | import pandas as pd
from os.path import join as pjoin
import numpy as np
import matplotlib.pyplot as plt
DATA_DIR = "../../../data/codex"

# Full CODEX expression table, one row per cell.
data = pd.read_csv(pjoin(DATA_DIR, "codex_mrl_expression.csv"))  # , nrows=200)
# Columns sliced as markers; first column and trailing eight are assumed to
# be metadata -- TODO confirm against the CSV header.
marker_names = data.columns.values[1:-8]
# Sample id is the prefix of "sample_Xtile_Ytile" (e.g. "BALBc-3_X1_Y2").
sample_names = data.sample_Xtile_Ytile.str.split("_").str[0].values
sample_names_unique = np.unique(sample_names)
# Split off the two samples that get visualized below.
sample1_idx = np.where(sample_names == "BALBc-3")[0]
sample2_idx = np.where(sample_names == "BALBc-2")[0]
data_sample1 = data.iloc[sample1_idx, :]
data_sample2 = data.iloc[sample2_idx, :]
# Pixel extent of a single imaging tile.
xtilespan = 1344
ytilespan = 1008
def tile_spatial_coordinates(data_df, xtilespan=1344, ytilespan=1008):
    """Add absolute "xcoord"/"ycoord" columns to a CODEX frame, in place.

    Tile indices are parsed from the ``sample_Xtile_Ytile`` column (format
    ``<sample>_X<col>_Y<row>``) and combined with the within-tile pixel
    positions stored in the ``X.X`` / ``Y.Y`` columns.

    Args:
        data_df: DataFrame with ``sample_Xtile_Ytile``, ``X.X`` and ``Y.Y``
            columns; modified in place.
        xtilespan: width of one imaging tile in pixels (default matches the
            module-level constant for this dataset).
        ytilespan: height of one imaging tile in pixels.

    Raises:
        Exception: if ``xcoord`` or ``ycoord`` already exist, so the same
            frame is never scaled twice.
    """
    if "xcoord" in data_df.columns or "ycoord" in data_df.columns:
        raise Exception("DataFrame already contains scaled coordinates.")
    tile_nums_split = data_df.sample_Xtile_Ytile.str.split("_")
    # Strip the leading "X"/"Y" before converting the tile index to a number.
    x_tile_nums = tile_nums_split.str[1].str[1:].values.astype(float)
    y_tile_nums = tile_nums_split.str[2].str[1:].values.astype(float)
    # Tile indices are 1-based, so tile 1 starts at offset 0.
    data_df["xcoord"] = (x_tile_nums - 1) * xtilespan + data_df["X.X"].values
    data_df["ycoord"] = (y_tile_nums - 1) * ytilespan + data_df["Y.Y"].values
# Convert per-tile positions to absolute coordinates (in place).
tile_spatial_coordinates(data_sample1)
tile_spatial_coordinates(data_sample2)
# plt.scatter(data_sample1.xcoord, data_sample1.ycoord)
# plt.show()
# import ipdb; ipdb.set_trace()
# Drop cells with any extreme marker reading (|value| >= 10,000),
# presumably acquisition artifacts -- TODO confirm threshold.
normalized_data1 = data_sample1[marker_names].values.copy()
keep_idx = np.where((np.abs(normalized_data1) >= 10_000).sum(1) == 0)[0]
data_sample1 = data_sample1.iloc[keep_idx]
normalized_data2 = data_sample2[marker_names].values.copy()
keep_idx = np.where((np.abs(normalized_data2) >= 10_000).sum(1) == 0)[0]
data_sample2 = data_sample2.iloc[keep_idx]
# import ipdb
# ipdb.set_trace()
# Side-by-side spatial scatter of each marker across the two slices.
for marker in marker_names:
    plt.figure(figsize=(10, 5))
    plt.subplot(121)
    plt.title("Slice 1")
    # NOTE(review): curr_data is z-scored here but never used -- the
    # scatter colours below use the raw data_sample1[marker] values.
    # Confirm whether the normalized values were intended for c=.
    curr_data = data_sample1[marker].values
    curr_data = (curr_data - curr_data.mean()) / curr_data.std()
    plt.scatter(
        data_sample1["xcoord"],
        data_sample1["ycoord"],
        c=data_sample1[marker],
        s=1,
        marker="s",
    )
    plt.subplot(122)
    plt.title("Slice 2")
    # Same unused normalization as above.
    curr_data = data_sample2[marker].values
    curr_data = (curr_data - curr_data.mean()) / curr_data.std()
    plt.scatter(
        data_sample2["xcoord"],
        data_sample2["ycoord"],
        c=data_sample2[marker],
        s=1,
        marker="s",
    )
    plt.show()
# Debug breakpoint left in the script -- drops into ipdb after plotting.
import ipdb
ipdb.set_trace()
| 28.277108 | 79 | 0.734555 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 360 | 0.153387 |
89cf07481d85093cb253e55bad9a9ca217559128 | 78 | py | Python | plugins/pie_branding.py | juergenz/pie | be490b9e18ee3158015a13826ed3caf442c07c51 | [
"MIT"
] | null | null | null | plugins/pie_branding.py | juergenz/pie | be490b9e18ee3158015a13826ed3caf442c07c51 | [
"MIT"
] | null | null | null | plugins/pie_branding.py | juergenz/pie | be490b9e18ee3158015a13826ed3caf442c07c51 | [
"MIT"
] | null | null | null | import pie
@pie.eventhandler('pie.PlayerChat')
async def onLoad():
    """Handle the 'pie.PlayerChat' event; currently a no-op placeholder."""
    # NOTE(review): registered for PlayerChat but named onLoad -- confirm
    # the intended event/name pairing.
    pass
89cfb900a50afd4227256d6f741798ab183a0cd9 | 4,853 | py | Python | teleband/users/api/views.py | JMU-CIME/CPR-Music-Backend | b72b70ed8826595c96c028595181293edcf1e368 | [
"MIT"
] | 2 | 2022-01-08T20:21:43.000Z | 2022-03-18T03:31:30.000Z | teleband/users/api/views.py | JMU-CIME/CPR-Music-Backend | b72b70ed8826595c96c028595181293edcf1e368 | [
"MIT"
] | 16 | 2022-01-08T02:12:54.000Z | 2022-03-02T03:02:59.000Z | teleband/users/api/views.py | JMU-CIME/CPR-Music-Backend | b72b70ed8826595c96c028595181293edcf1e368 | [
"MIT"
] | 2 | 2022-01-08T00:21:37.000Z | 2022-01-18T05:33:15.000Z | import collections
import csv
from io import StringIO
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, UpdateModelMixin
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from invitations.utils import get_invitation_model
from invitations.exceptions import AlreadyAccepted, AlreadyInvited, UserRegisteredEmail
from invitations.forms import CleanEmailMixin
from .serializers import UserSerializer, UserInstrumentSerializer
from teleband.courses.models import Enrollment, Course
User = get_user_model()
Invitation = get_invitation_model()
class IsRelevantTeacherUpdate(permissions.IsAuthenticated):
    """Object permission: only a teacher of a user may update that user.

    Non-update actions are always allowed; for "update"/"partial_update"
    the requester must be enrolled as a Teacher in some course where the
    target user is enrolled as a Student.
    """

    def has_object_permission(self, request, view, obj):
        if view.action not in ("update", "partial_update"):
            return True
        # Courses where the requesting user is a Teacher.
        taught_courses = Course.objects.filter(
            enrollment__user=request.user, enrollment__role__name="Teacher"
        )
        # Target must be a Student in at least one of those courses.
        return Enrollment.objects.filter(
            user=obj,
            course__in=taught_courses,
            role__name="Student",
        ).exists()
class UserViewSet(RetrieveModelMixin, ListModelMixin, UpdateModelMixin, GenericViewSet):
    """Read/update endpoint for users, keyed by username.

    Listing/retrieval is restricted to the requesting user; updates are
    gated by IsRelevantTeacherUpdate (teachers editing their students).
    """
    serializer_class = UserSerializer
    queryset = User.objects.all()
    lookup_field = "username"
    permission_classes = [IsRelevantTeacherUpdate & permissions.IsAuthenticated]

    def get_queryset(self, *args, **kwargs):
        # For updates, expose users enrolled in courses taught by "admin".
        # NOTE(review): the hard-coded user__username="admin" filter looks
        # like a placeholder -- confirm it shouldn't be request.user.
        if self.action in ["update", "partial_update"]:
            return self.queryset.filter(
                enrollment__course__in=[
                    e.course
                    for e in Enrollment.objects.filter(
                        user__username="admin", role__name="Teacher"
                    )
                ]
            )
        # For all other actions a user only ever sees themselves.
        # NOTE(review): assert is stripped under -O; consider raising.
        assert isinstance(self.request.user.id, int)
        return self.queryset.filter(id=self.request.user.id)

    def get_serializer_class(self):
        # Updates use the instrument-aware serializer.
        if self.action in ["update", "partial_update"]:
            return UserInstrumentSerializer
        return self.serializer_class

    @action(detail=False)
    def me(self, request):
        """Return the serialized profile of the requesting user."""
        serializer = UserSerializer(request.user, context={"request": request})
        return Response(status=status.HTTP_200_OK, data=serializer.data)

    @action(
        detail=False, methods=["post"], permission_classes=[permissions.IsAdminUser]
    )
    def bulk_create_teachers(self, request):
        """Admin-only: invite teachers from an uploaded CSV of emails.

        Reads the first column of each CSV row as an email address and sends
        an invitation; returns per-address "valid"/"invalid" outcomes.
        """
        users_file = request.FILES["file"]
        contents = "".join([line.decode("utf-8") for line in users_file.readlines()])
        reader = csv.reader(StringIO(contents))
        teacher_group = Group.objects.get(name="Teacher")

        response = collections.defaultdict(list)
        for row in reader:
            # based on https://github.com/bee-keeper/django-invitations/blob/9069002f1a0572ae37ffec21ea72f66345a8276f/invitations/views.py#L63
            invitee = row[0]
            try:
                validate_email(invitee)
                CleanEmailMixin().validate_invitation(invitee)
                invite = Invitation.create(invitee, group=teacher_group)
            except (ValidationError):
                response["invalid"].append({invitee: "invalid email"})
            except (AlreadyAccepted):
                response["invalid"].append({invitee: "already accepted"})
            except (AlreadyInvited):
                response["invalid"].append({invitee: "pending invite"})
            except (UserRegisteredEmail):
                response["invalid"].append({invitee: "user registered email"})
            else:
                # Only send the email once all validations passed.
                invite.send_invitation(request)
                response["valid"].append({invitee: "invited"})
        return Response(status=status.HTTP_200_OK, data=response)
class IsAuthForDelete(permissions.IsAuthenticated):
    """Require authentication for DELETE requests only; allow all others."""

    def has_permission(self, request, view):
        # Non-DELETE methods pass unconditionally; DELETE defers to the
        # IsAuthenticated base check.
        return request.method != "DELETE" or super().has_permission(request, view)
class ObtainDeleteAuthToken(ObtainAuthToken):
    """Token endpoint that also supports revoking a token via DELETE."""
    # Anonymous users may POST to obtain a token; only authenticated
    # users may DELETE (revoke) one -- see IsAuthForDelete.
    permission_classes = [IsAuthForDelete]
    def delete(self, request, *args, **kwargs):
        """Delete the requesting user's auth token (logout); 404 if none."""
        try:
            Token.objects.get(user=request.user).delete()
            return Response(status=status.HTTP_200_OK)
        except Token.DoesNotExist:
            # No active token for this user -- nothing to revoke.
            return Response(status=status.HTTP_404_NOT_FOUND)
# Module-level view callable used in URL configuration.
obtain_delete_auth_token = ObtainDeleteAuthToken.as_view()
| 37.620155 | 142 | 0.687616 | 3,750 | 0.772718 | 0 | 0 | 1,671 | 0.344323 | 0 | 0 | 493 | 0.101587 |
89cfe6c76017fa136a954dd90da05d73e796cbce | 10,031 | py | Python | models/ri_pcn.py | RexSkywalkerLee/VRCNet | 1dce3be11ed89375665fbbbc7462d56cb66f690b | [
"MIT"
] | null | null | null | models/ri_pcn.py | RexSkywalkerLee/VRCNet | 1dce3be11ed89375665fbbbc7462d56cb66f690b | [
"MIT"
] | null | null | null | models/ri_pcn.py | RexSkywalkerLee/VRCNet | 1dce3be11ed89375665fbbbc7462d56cb66f690b | [
"MIT"
] | null | null | null | from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.utils.data
import torch.nn.functional as F
import math
from utils.model_utils import *
from utils.ri_utils import *
from models.vrcnet import Linear_ResBlock
class PCN_encoder(nn.Module):
    """PointNet-style encoder from PCN.

    Two per-point MLP stages (1x1 Conv1d layers) with max-pool aggregation,
    mapping a (B, input_size, N) point tensor to a (B, output_size) global
    feature vector.
    """

    def __init__(self, input_size=3, output_size=1024):
        super(PCN_encoder, self).__init__()
        # 1x1 convolutions act as shared per-point linear layers.
        self.conv1 = nn.Conv1d(input_size, 128, 1)
        self.conv2 = nn.Conv1d(128, 256, 1)
        self.conv3 = nn.Conv1d(512, 512, 1)
        self.conv4 = nn.Conv1d(512, output_size, 1)

    def forward(self, x):
        batch_size = x.size(0)
        num_points = x.size(2)
        # First per-point MLP: input_size -> 128 -> 256.
        point_feat = self.conv2(F.relu(self.conv1(x)))
        # Max-pool over points, broadcast the pooled vector back and
        # concatenate so every point sees global context (256+256 = 512).
        pooled, _ = torch.max(point_feat, 2)
        tiled = pooled.unsqueeze(2).expand(-1, -1, num_points)
        combined = torch.cat((point_feat, tiled), 1)
        # Second per-point MLP followed by the final max-pool.
        out = self.conv4(F.relu(self.conv3(combined)))
        global_feature, _ = torch.max(out, 2)
        return global_feature.view(batch_size, -1)
class PCN_decoder(nn.Module):
    """PCN decoder: a fully connected branch predicts a coarse cloud, then a
    folding branch refines it by attaching a 2D grid patch of ``scale``
    points to every coarse point (num_fine = num_coarse * scale).
    """

    def __init__(self, num_coarse, num_fine, scale, global_feature_size, output_size=3):
        super(PCN_decoder, self).__init__()
        self.num_coarse = num_coarse
        self.num_fine = num_fine
        self.output_size = output_size
        # Per-point input to the folding MLP: 2 grid dims + 3 coords + global feature.
        self.cat_feature_num = 2 + 3 + global_feature_size

        self.fc1 = nn.Linear(global_feature_size, global_feature_size)
        self.fc2 = nn.Linear(global_feature_size, global_feature_size)
        self.fc3 = nn.Linear(global_feature_size, num_coarse * output_size)
        self.scale = scale
        # Fixed 2D folding grid; presumably shape (2, scale) -- confirm
        # against gen_grid_up. Lives on GPU for the lifetime of the module.
        self.grid = gen_grid_up(2 ** (int(math.log2(scale))), 0.05).cuda().contiguous()
        self.conv1 = nn.Conv1d(self.cat_feature_num, 512, 1)
        self.conv2 = nn.Conv1d(512, 512, 1)
        self.conv3 = nn.Conv1d(512, self.output_size, 1)

    def forward(self, x):
        """Decode a (B, global_feature_size) feature into point clouds.

        Returns:
            coarse: (B, output_size, num_coarse) coarse prediction.
            fine: (B, output_size, num_fine) refined prediction.
        """
        batch_size = x.size()[0]
        coarse = F.relu(self.fc1(x))
        coarse = F.relu(self.fc2(coarse))
        coarse = self.fc3(coarse).view(-1, self.output_size, self.num_coarse)

        grid = self.grid.clone().detach()
        grid_feat = grid.unsqueeze(0).repeat(batch_size, 1, self.num_coarse).contiguous().cuda()

        # Each coarse point repeated ``scale`` times, laid out channels-first.
        # The same tensor serves both as the per-point feature and as the
        # residual center below (the original computed this identical
        # expression twice).
        center = ((coarse.transpose(1, 2).contiguous()).unsqueeze(2).repeat(1, 1, self.scale, 1).view(-1, self.num_fine, self.output_size)).transpose(1, 2).contiguous()
        point_feat = center

        global_feat = x.unsqueeze(2).repeat(1, 1, self.num_fine)
        feat = torch.cat((grid_feat, point_feat, global_feat), 1)

        # Folding MLP predicts an offset; add the center back as a residual.
        fine = self.conv3(F.relu(self.conv2(F.relu(self.conv1(feat))))) + center
        return coarse, fine
'''
class Model(nn.Module):
def __init__(self, args, global_feature_size=1024, feature_append=9):
super(Model, self).__init__()
self.input_size = args.input_size
self.output_size = args.output_size
self.num_coarse = args.num_coarse
self.num_points = args.num_points
self.train_loss = args.loss
self.scale = self.num_points // self.num_coarse
self.encoder = PCN_encoder(output_size=global_feature_size)
self.decoder = PCN_decoder(self.num_coarse, self.num_points, self.scale, global_feature_size+feature_append)
#self.axis_inference = PCN_encoder(input_size=3, output_size=9)
def forward(self, x, gt, is_training=True, mean_feature=None, alpha=None):
#axis = self.axis_inference(x)
#a1, a2, a3 = axis.chunk(3, dim=1)
#a1 = a1.squeeze() / (torch.norm(a1, 2, 1, keepdim=True) + 1e-7)
#a2 = a2.squeeze() / (torch.norm(a2, 2, 1, keepdim=True) + 1e-7)
#a3 = a3.squeeze() / (torch.norm(a3, 2, 1, keepdim=True) + 1e-7)
a1, a2, a3, x = point_ortho_feature(x.transpose(1, 2).contiguous())
#_, _, _, x = point_projection_feature(x.transpose(1, 2).contiguous(), False, a1, a2, a3)
x = x.transpose(1, 2).contiguous()
feat = self.encoder(x)
feat = torch.cat([feat, a1, a2, a3], dim=1)
out1, out2 = self.decoder(feat)
out1 = inverse_point_ortho_feature(a1, a2, a3, out1.transpose(1, 2).contiguous())
out2 = inverse_point_ortho_feature(a1, a2, a3, out2.transpose(1, 2).contiguous())
if is_training:
if self.train_loss == 'emd':
loss1 = calc_emd(out1, gt)
loss2 = calc_emd(out2, gt)
elif self.train_loss == 'cd':
loss1, _ = calc_cd(out1, gt)
loss2, _ = calc_cd(out2, gt)
else:
raise NotImplementedError('Train loss is either CD or EMD!')
total_train_loss = loss1.mean() + loss2.mean() * alpha
return out2, loss2, total_train_loss
else:
#emd = calc_emd(out2, gt, eps=0.004, iterations=3000)
cd_p, cd_t, f1 = calc_cd(out2, gt, calc_f1=True)
return {'out1': out1, 'out2': out2, 'cd_p': cd_p, 'cd_t': cd_t, 'f1': f1}
'''
class Model(nn.Module):
    """Rotation-invariant PCN completion model with a CVAE-style latent.

    A PCN encoder produces a global feature (augmented with the three
    orthogonal-axis vectors from point_ortho_feature); posterior/prior
    networks infer a Gaussian latent; a PCN decoder reconstructs coarse
    and fine point clouds, mapped back to the original frame.
    """
    def __init__(self, args, size_z=128, global_feature_size=1024, feature_append=9):
        super(Model, self).__init__()
        self.input_size = args.input_size
        self.output_size = args.output_size
        self.num_coarse = args.num_coarse
        self.num_points = args.num_points
        # Number of fine points generated per coarse point.
        self.scale = self.num_points // self.num_coarse
        self.size_z = size_z

        self.encoder = PCN_encoder(output_size=global_feature_size)
        # feature_append accounts for the 3x3 = 9 axis components that are
        # concatenated onto the encoder output below.
        self.posterior_infer1 = Linear_ResBlock(input_size=global_feature_size+feature_append, output_size=global_feature_size+feature_append)
        self.posterior_infer2 = Linear_ResBlock(input_size=global_feature_size+feature_append, output_size=size_z * 2)
        self.prior_infer = Linear_ResBlock(input_size=global_feature_size+feature_append, output_size=size_z * 2)
        self.generator = Linear_ResBlock(input_size=size_z, output_size=global_feature_size+feature_append)
        self.decoder = PCN_decoder(self.num_coarse, self.num_points, self.scale, global_feature_size+feature_append)

    def forward(self, x, gt, is_training=True, mean_feature=None, alpha=None):
        """Run completion; returns losses when training, metrics otherwise.

        x: (B, C, N) partial input; gt: ground-truth cloud.
        NOTE(review): mean_feature is accepted but unused here.
        """
        num_input = x.size()[2]

        if is_training:
            # Subsample gt to the input size (pn2 presumably = pointnet2
            # CUDA ops -- TODO confirm) and double the batch: first half
            # conditioned on x, second half on the gt subsample y.
            y = pn2.gather_operation(gt.transpose(1, 2).contiguous(), pn2.furthest_point_sample(gt, num_input))
            gt = torch.cat([gt, gt], dim=0)
            points = torch.cat([x, y], dim=0)
            x = torch.cat([x, x], dim=0)
        else:
            points = x
        # Canonicalize orientation via PCA axes; a1/a2/a3 are the axes.
        a1, a2, a3, points = point_ortho_feature(points.transpose(1, 2).contiguous(), pca=True)
        feat = self.encoder(points.transpose(1, 2).contiguous())
        # Append the 9 axis components so the decoder can also use them.
        feat = torch.cat([feat, a1, a2, a3], dim=1)

        if is_training:
            feat_x, feat_y = feat.chunk(2)
            # Posterior q(z|x) from the partial input...
            o_x = self.posterior_infer2(self.posterior_infer1(feat_x))
            q_mu, q_std = torch.split(o_x, self.size_z, dim=1)
            # ...and prior p(z|y) from the ground-truth subsample.
            o_y = self.prior_infer(feat_y)
            p_mu, p_std = torch.split(o_y, self.size_z, dim=1)
            # softplus keeps the predicted std strictly positive.
            q_std = F.softplus(q_std)
            p_std = F.softplus(p_std)
            q_distribution = torch.distributions.Normal(q_mu, q_std)
            p_distribution = torch.distributions.Normal(p_mu, p_std)
            # Detached copy so the KL(p_fix, q) term only trains q.
            p_distribution_fix = torch.distributions.Normal(p_mu.detach(), p_std.detach())
            m_distribution = torch.distributions.Normal(torch.zeros_like(p_mu), torch.ones_like(p_std))
            z_q = q_distribution.rsample()
            z_p = p_distribution.rsample()
            z = torch.cat([z_q, z_p], dim=0)
            # Both halves decode from the partial-input feature.
            feat = torch.cat([feat_x, feat_x], dim=0)
        else:
            # At inference only the posterior branch is available.
            o_x = self.posterior_infer2(self.posterior_infer1(feat))
            q_mu, q_std = torch.split(o_x, self.size_z, dim=1)
            q_std = F.softplus(q_std)
            q_distribution = torch.distributions.Normal(q_mu, q_std)
            p_distribution = q_distribution
            p_distribution_fix = p_distribution
            m_distribution = p_distribution
            z = q_distribution.rsample()

        # Inject the latent into the feature and decode.
        feat += self.generator(z)
        coarse, fine = self.decoder(feat)
        # Map predictions back from the canonical frame to the input frame.
        coarse = inverse_point_ortho_feature(a1, a2, a3, coarse.transpose(1, 2).contiguous())
        fine = inverse_point_ortho_feature(a1, a2, a3, fine.transpose(1, 2).contiguous())

        if is_training:
            # KL terms: prior-to-standard-normal, posterior-to-fixed-prior,
            # posterior-to-standard-normal.
            dl_rec = torch.distributions.kl_divergence(m_distribution, p_distribution)
            dl_g = torch.distributions.kl_divergence(p_distribution_fix, q_distribution)
            dl_g_ = torch.distributions.kl_divergence(m_distribution, q_distribution)

            # Chamfer distances for coarse and fine predictions; alpha
            # weights the coarse term.
            loss2, _ = calc_cd(coarse, gt)
            loss1, _ = calc_cd(fine, gt)

            total_train_loss = loss1.mean() + loss2.mean() * alpha
            total_train_loss += (dl_rec.mean() + dl_g.mean() + dl_g_.mean()) * 10
            return fine, loss2, total_train_loss
        else:
            #emd = calc_emd(fine, gt, eps=0.004, iterations=3000)
            cd_p, cd_t, f1 = calc_cd(fine, gt, calc_f1=True)
            return {'out1': coarse, 'out2': fine, 'cd_p': cd_p, 'cd_t': cd_t, 'f1': f1}
| 46.873832 | 172 | 0.632539 | 7,450 | 0.742698 | 0 | 0 | 0 | 0 | 0 | 0 | 3,060 | 0.305054 |
89d03fb8a0b8c366fe6c36d8c3d59eb5d6f108ae | 4,267 | py | Python | tests/unit/test_game.py | BrunoASNascimento/ETHGlobal-Project | bbb69d07de17698d716a6990e71bdeedd873582d | [
"MIT"
] | null | null | null | tests/unit/test_game.py | BrunoASNascimento/ETHGlobal-Project | bbb69d07de17698d716a6990e71bdeedd873582d | [
"MIT"
] | null | null | null | tests/unit/test_game.py | BrunoASNascimento/ETHGlobal-Project | bbb69d07de17698d716a6990e71bdeedd873582d | [
"MIT"
] | null | null | null | from scripts.helpful_scripts import (
LOCAL_BLOCKCHAIN_ENVIRONMENTS,
get_account,
fund_with_link,
get_contract,
)
from brownie import Game, accounts, config, network, exceptions
from scripts.deploy_game import deploy_game
from web3 import Web3
import pytest
# if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
# pytest.skip()
class TestGame:
    """Brownie tests for the Game betting contract."""

    @pytest.fixture
    def game(self):
        # Deployed game with a minimum bet of 5 and two team identifiers.
        return deploy_game(
            5, bytes("team1", encoding="utf8"), bytes("team2", encoding="utf8")
        )
    @pytest.fixture
    def team1(self):
        # (player accounts, corresponding bet amounts) for team one.
        return (
            [get_account(i) for i in range(1, 11)],
            [10, 11, 12, 13, 4, 15, 16, 1, 2, 3],
        )
    @pytest.fixture
    def team2(self):
        # (player accounts, corresponding bet amounts) for team two;
        # accounts 7-10 deliberately overlap with team1.
        return ([get_account(i) for i in range(7, 13)], [20, 2, 4, 1, 9, 8])
    def test_check_player_exists(self, game, team1, team2):
        game.playersTeamOne = team1[0]
        game.playersTeamTwo = team2[0]
        assert game.checkPlayerExists(get_account(7))
        assert game.checkPlayerExists(get_account(12))
        assert game.checkPlayerExists(get_account(1))
        assert not game.checkPlayerExists(get_account(19))
    @pytest.mark.skip("internal function")
    def test_get_lowest_bet(self, game, team1, team2):
        assert game.getLowestBet(team1[1]) == 7
        assert game.getLowestBet(team2[1]) == 3
    def test_event_result(self, game):
        assert game.getEventResult() == bytes("team1", encoding="utf-8")
    def test_bet(self):
        pass
        # test that the bettor's address goes into the right team list
        # test that the bettor's address enters the top-5 list when it qualifies
        # test that the bettor's address does NOT enter the top-5 list otherwise
        # test that the same address may appear on both teams
        # test that an existing bettor's amount is increased in the bet-values list
    def test_sum_of_bets(self, game, team1, team2):
        # test that an address already in the top-5 has its bet amount increased there
        account = get_account()
        account2 = get_account(2)
        # game.playersTeamOne = team1[0]
        # game.playersTeamTwo = team2[0]
        game.bet(1, {"from": account, "value": 7 * 10 ** 18})
        game.bet(2, {"from": account2, "value": 7 * 10 ** 18})
        game.bet(2, {"from": account2, "value": 7 * 10 ** 18})
        # account2 bet twice on team two, so its total is doubled.
        assert game.totalBetsOne() == 7 * 10 ** 18
        assert game.totalBetsTwo() == 14 * 10 ** 18
    @pytest.mark.skip("getLowestBet is internal")
    def test_same_bet_value(self, game, team1, team2):
        # test that bets equal to the current minimum do not enter the top-5
        # game.playersTeamOne = team1[0]
        # game.playersTeamTwo = team2[0]
        game.bet(1, {"from": get_account(5), "value": 7 * 10 ** 18})
        game.bet(2, {"from": get_account(7), "value": 7 * 10 ** 18})
        game.bet(2, {"from": get_account(1), "value": 7 * 10 ** 18})
        assert game.getLowestBet((team1[1])) == 7
        assert game.getLowestBet((team2[1])) == 7
    def test_duplication_of_list(self, game, team1, team2):
        # test that a bettor's address is not duplicated in either list
        game.playersTeamOne = team1[0]
        game.playersTeamTwo = team2[0]
        assert (
            len(
                set(
                    [x for x in game.playersTeamOne if game.playersTeamOne.count(x) > 1]
                )
            )
            == 0
        )
        assert (
            len(
                set(
                    [x for x in game.playersTeamTwo if game.playersTeamTwo.count(x) > 1]
                )
            )
            == 0
        )
    def test_distribute_prizes(self):
        # Smoke test: distributing prizes after a few bets must not revert.
        account = get_account()
        game = deploy_game(
            2, bytes("team1", encoding="utf8"), bytes("team2", encoding="utf8")
        )
        game.bet(1, {"from": account, "value": 7 * 10 ** 18})
        game.bet(1, {"from": get_account(2), "value": 7 * 10 ** 18})
        game.bet(2, {"from": get_account(5), "value": 9 * 10 ** 18})
        game.distributePrizes({"from": account})
        # NOTE(review): this only asserts the account object is truthy;
        # it does not verify balances changed.
        assert account
    def test_distribute_prizes_owner_only(self):
        pass
    def test_cant_bet_closed_game(self):
        pass
| 35.264463 | 113 | 0.594563 | 3,905 | 0.915163 | 0 | 0 | 1,172 | 0.274666 | 0 | 0 | 1,013 | 0.237403 |
89d04b9eb8d01d291b7226fc499b9d1ce4373de0 | 466 | py | Python | arquivos_de_exercicios_descubra_o_python/Cap. 04/escreveArquivo_start.py | DiegoDBLe/Python-Linkedin | 0365fb2c83d04c10a2ebd8b56baddb91a4525811 | [
"MIT"
] | null | null | null | arquivos_de_exercicios_descubra_o_python/Cap. 04/escreveArquivo_start.py | DiegoDBLe/Python-Linkedin | 0365fb2c83d04c10a2ebd8b56baddb91a4525811 | [
"MIT"
] | null | null | null | arquivos_de_exercicios_descubra_o_python/Cap. 04/escreveArquivo_start.py | DiegoDBLe/Python-Linkedin | 0365fb2c83d04c10a2ebd8b56baddb91a4525811 | [
"MIT"
] | null | null | null | #
# Escrevendo arquivos com funções do Python
#
def escreveArquivo():
    """Create (or truncate) NovoArquivo.txt and write a single line to it.

    Uses a ``with`` block so the file handle is always closed, even if the
    write fails (the original paired open()/close() manually and would leak
    the handle on an exception).
    """
    with open('NovoArquivo.txt', 'w+') as arquivo:
        arquivo.write('Linha gerada com a função Escrevendo Arquivo \r\n')
#escreveArquivo()]
def alteraArquivo():
    """Append one line to NovoArquivo.txt, creating the file if needed.

    'a+' opens in append mode, so new content goes after existing lines.
    Uses a ``with`` block for a guaranteed close (the original closed the
    handle manually and would leak it on an exception).
    """
    with open('NovoArquivo.txt', 'a+') as arquivo:
        arquivo.write('Linha gerada com a função Altera Arquivo \r\n')
alteraArquivo()
| 18.64 | 106 | 0.690987 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 271 | 0.574153 |
89d0cf9f3e0655b88c1a304fee38eb9854817a94 | 1,150 | py | Python | graphio/queries/query_parameters.py | JTaeger/graphio | e856d4266842540cfe56ba7367d8f97183ae2954 | [
"Apache-2.0"
] | null | null | null | graphio/queries/query_parameters.py | JTaeger/graphio | e856d4266842540cfe56ba7367d8f97183ae2954 | [
"Apache-2.0"
] | null | null | null | graphio/queries/query_parameters.py | JTaeger/graphio | e856d4266842540cfe56ba7367d8f97183ae2954 | [
"Apache-2.0"
] | null | null | null | def params_create_rels_unwind_from_objects(relationships, property_identifier=None):
"""
Format Relationship properties into a one level dictionary matching the query generated in
`query_create_rels_from_list`. This is necessary because you cannot access nested dictionairies
in the UNWIND query.
UNWIND { rels } AS rel
MATCH (a:Gene), (b:GeneSymbol)
WHERE a.sid = rel.start_sid AND b.sid = rel.end_sid AND b.taxid = rel.end_taxid
CREATE (a)-[r:MAPS]->(b)
SET r = rel.properties
Call with params:
{'start_sid': 1, 'end_sid': 2, 'end_taxid': '9606', 'properties': {'foo': 'bar} }
:param relationships: List of Relationships.
:return: List of parameter dictionaries.
"""
if not property_identifier:
property_identifier = 'rels'
output = []
for r in relationships:
d = {}
for k, v in r.start_node_properties.items():
d['start_{}'.format(k)] = v
for k, v in r.end_node_properties.items():
d['end_{}'.format(k)] = v
d['properties'] = r.properties
output.append(d)
return {property_identifier: output}
| 33.823529 | 99 | 0.646087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.589565 |
89d191bec6b5c9759bd10872394402ac762205b9 | 477 | py | Python | user/migrations/0002_userprofile_relations.py | Trippr-dwoc/Trippr-backend | 69a8bb8e1a742b64b4eaf8612f97806e6191e8fb | [
"MIT"
] | null | null | null | user/migrations/0002_userprofile_relations.py | Trippr-dwoc/Trippr-backend | 69a8bb8e1a742b64b4eaf8612f97806e6191e8fb | [
"MIT"
] | null | null | null | user/migrations/0002_userprofile_relations.py | Trippr-dwoc/Trippr-backend | 69a8bb8e1a742b64b4eaf8612f97806e6191e8fb | [
"MIT"
] | null | null | null | # Generated by Django 3.2.3 on 2021-10-19 18:54
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the many-to-many "relations"
    # field (to AUTH_USER_MODEL) on UserProfile. Do not edit by hand.
    dependencies = [
        ('user', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='relations',
            # The '+' suffix in related_name disables the reverse accessor.
            field=models.ManyToManyField(related_name='_user_userprofile_relations_+', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 23.85 | 116 | 0.649895 | 351 | 0.735849 | 0 | 0 | 0 | 0 | 0 | 0 | 122 | 0.255765 |
89d29a1fa605d5be215d81daea59d7f25ded9268 | 6,239 | py | Python | src/utils/testing_utils.py | rluiseugenio/dpa_rita | 4da52a103b455c97d7f3bc6253f1c455c340dfc4 | [
"MIT"
] | null | null | null | src/utils/testing_utils.py | rluiseugenio/dpa_rita | 4da52a103b455c97d7f3bc6253f1c455c340dfc4 | [
"MIT"
] | 10 | 2020-05-06T14:30:44.000Z | 2022-03-12T00:33:00.000Z | src/utils/testing_utils.py | rluiseugenio/dpa_rita | 4da52a103b455c97d7f3bc6253f1c455c340dfc4 | [
"MIT"
] | 6 | 2020-04-15T04:30:36.000Z | 2020-10-23T16:02:24.000Z | #python -m marbles test_semantic_columns.py
import unittest
from marbles.mixins import mixins
import pandas as pd
import requests
from pyspark.sql import SparkSession
import psycopg2 as pg
import pandas as pd
import marbles
from pyspark.sql.types import StructType, StructField, StringType
import psycopg2 as pg
#from src.features.build_features import crear_features
from src import(
MY_USER,
MY_PASS,
MY_HOST,
MY_PORT,
MY_DB,
)
def get_clean_data_test():
    """Pull one sample row from the ``clean.rita`` table as a Spark DataFrame.

    Connects to Postgres with psycopg2 (credentials from ``src``), reads a
    single row through pandas, and wraps it in a Spark DataFrame whose 57
    RITA columns are all typed as nullable strings.
    """
    # Every column is a nullable StringType, so the schema can be generated
    # from the ordered column-name list instead of 57 hand-written fields.
    column_names = [
        'year', 'quarter', 'month', 'dayofmonth', 'dayofweek', 'flightdate',
        'reporting_airline', 'dot_id_reporting_airline',
        'iata_code_reporting_airline', 'tail_number',
        'flight_number_reporting_airline', 'originairportid',
        'originairportseqid', 'origincitymarketid', 'origin',
        'origincityname', 'originstate', 'originstatefips', 'originstatename',
        'originwac', 'destairportid', 'destairportseqid', 'destcitymarketid',
        'dest', 'destcityname', 'deststate', 'deststatefips', 'deststatename',
        'destwac', 'crsdeptime', 'deptime', 'depdelay', 'depdelayminutes',
        'depdel15', 'departuredelaygroups', 'deptimeblk', 'taxiout',
        'wheelsoff', 'wheelson', 'taxiin', 'crsarrtime', 'arrtime',
        'arrdelay', 'arrdelayminutes', 'arrdel15', 'arrivaldelaygroups',
        'arrtimeblk', 'cancelled', 'diverted', 'crselapsedtime',
        'actualelapsedtime', 'airtime', 'flights', 'distance',
        'distancegroup', 'divairportlandings', 'rangoatrasohoras',
    ]
    clean_rita = StructType(
        [StructField(name, StringType(), True) for name in column_names])
    config_psyco = "host='{0}' dbname='{1}' user='{2}' password='{3}'".format(MY_HOST,MY_DB,MY_USER,MY_PASS)
    db_conn = pg.connect(config_psyco)
    sample_pdf = pd.read_sql_query('select * from clean.rita limit 1;',con=db_conn)
    spark = SparkSession.builder.config('spark.driver.extraClassPath', 'postgresql-9.4.1207.jar').getOrCreate()
    return spark.createDataFrame(sample_pdf, schema=clean_rita)
def crear_features_test(base):
    """Append binary indicator columns (findesemana, quincena, dephour,
    seishoras) to a Spark DataFrame of RITA flight rows and return it.
    """
    from pyspark.sql import functions as f
    # findesemana: 1 when dayofweek is 5, 6 or 7 (weekend flag; presumably
    # a 1=Monday encoding so 5-7 = Fri-Sun -- confirm against the data).
    base = base.withColumn('findesemana', f.when(f.col('dayofweek') == 5, 1).when(f.col('dayofweek') == 6, 1).when(f.col('dayofweek') == 7, 1).otherwise(0))
    # quincena: 1 around the mid-month (14-16) and month-turn (29-31, 1-3) days.
    base = base.withColumn('quincena', f.when(f.col('dayofmonth') == 15, 1).when(f.col('dayofmonth') == 14, 1).when(f.col('dayofmonth') == 16, 1).when(f.col('dayofmonth') == 29, 1).when(f.col('dayofmonth') == 30, 1).when(f.col('dayofmonth') == 31, 1).when(f.col('dayofmonth') == 1, 1).when(f.col('dayofmonth') == 2, 1).when(f.col('dayofmonth') == 3, 1).otherwise(0))
    # NOTE(review): 'dephour' is derived from dayofweek == 5, so it only
    # ever holds 0 or 1; it looks like a copy-paste slip and was presumably
    # meant to be the departure hour (e.g. derived from deptime). As written,
    # the 'seishoras' checks against 6/12/18 below can never match -- only
    # the == 0 branch can fire. Confirm intent before relying on these two.
    base = base.withColumn('dephour', f.when(f.col('dayofweek') == 5, 1).otherwise(0))
    base = base.withColumn('seishoras', f.when(f.col('dephour') == 6, 1).when(f.col('dephour') == 12, 1).when(f.col('dephour') == 18, 1).when(f.col('dephour') == 0, 1).otherwise(0))
    return base
| 58.858491 | 366 | 0.536624 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,263 | 0.202436 |
89d312b14f6d96924f16b029fd0ee12233883f44 | 6,572 | py | Python | polygon.py | darinamazur/Math-modeling- | 8de5913fd8314dc51641700c9a1088c6f34fd2a3 | [
"MIT"
] | null | null | null | polygon.py | darinamazur/Math-modeling- | 8de5913fd8314dc51641700c9a1088c6f34fd2a3 | [
"MIT"
] | null | null | null | polygon.py | darinamazur/Math-modeling- | 8de5913fd8314dc51641700c9a1088c6f34fd2a3 | [
"MIT"
] | null | null | null | import math
class polygon:
    """Closed polygon over (x, y) vertices with border/containment queries.

    The constructor rotates the vertex list so it starts at the minimum-x
    vertex and appends that first vertex again to close the contour.
    get_top_border / get_bottom_border then interpolate y linearly along the
    two index chains [x_min_ind..x_max_ind] and [x_max_ind..end].
    NOTE(review): this assumes the caller supplies vertices in a winding
    order for which each of those chains is x-monotone -- confirm with
    callers before reuse.
    """
    def __init__(self, arr):
        # arr: sequence of (x, y) vertex pairs.
        self.original_arr = arr
        self.size = len(self.original_arr)
        self.__set_min_max_by_original__()
        self.__refactor_original_seq__()
        # Close the contour by repeating the first (min-x) vertex.
        self.sorted_arr.append(self.sorted_arr[0])
        self.size += 1
    def __set_min_max_by_original__(self):
        """Record the indices of the extreme-x/extreme-y vertices of
        original_arr."""
        self.x_min_ind = 0
        self.x_max_ind = 0
        self.y_min_ind = 0
        self.y_max_ind = 0
        for i in range(1, self.size):
            if self.original_arr[i][0] > self.original_arr[self.x_max_ind][0]:
                self.x_max_ind = i
            if self.original_arr[i][0] < self.original_arr[self.x_min_ind][0]:
                self.x_min_ind = i
            if self.original_arr[i][1] > self.original_arr[self.y_max_ind][1]:
                self.y_max_ind = i
            if self.original_arr[i][1] < self.original_arr[self.y_min_ind][1]:
                self.y_min_ind = i
    def __refactor_original_seq__(self):
        """Build sorted_arr: original_arr rotated so that it starts at the
        min-x vertex, and shift the recorded extreme indices accordingly."""
        self.sorted_arr = []
        for i in range(self.x_min_ind, self.size):
            self.sorted_arr.append(self.original_arr[i])
        for i in range(0, self.x_min_ind):
            self.sorted_arr.append(self.original_arr[i])
        # Re-express the extreme indices relative to the rotated sequence.
        self.x_max_ind = (self.x_max_ind - self.x_min_ind) % self.size
        self.y_max_ind = (self.y_max_ind - self.x_min_ind) % self.size
        self.y_min_ind = (self.y_min_ind - self.x_min_ind) % self.size
        self.x_min_ind = 0
    def __equal__(x1, x2):
        # Approximate scalar equality (tolerance 1e-4). Deliberately has no
        # `self`: it is always called as a plain function via
        # polygon.__equal__(a, b).
        return abs(x1 - x2) < 1E-4
    def get_top_border(self, x):
        """Return the y of the upper chain at abscissa x by linear
        interpolation along indices x_min_ind..x_max_ind.

        At the extreme abscissae a vertical closing edge may be present; in
        that case the upper endpoint is returned.
        NOTE(review): falls through to exit(3) (process exit) when x lies on
        no segment -- raising ValueError would be friendlier to callers.
        """
        if polygon.__equal__(x, self.sorted_arr[self.x_max_ind][0]):
            if polygon.__equal__(self.sorted_arr[self.x_max_ind][0], self.sorted_arr[self.x_max_ind + 1][0]):
                return max(self.sorted_arr[self.x_max_ind][1], self.sorted_arr[self.x_max_ind + 1][1])
            else:
                return self.sorted_arr[self.x_max_ind][1]
        if polygon.__equal__(x, self.sorted_arr[self.x_min_ind][0]):
            if polygon.__equal__(self.sorted_arr[self.x_min_ind][0], self.sorted_arr[self.x_min_ind + 1][0]):
                return max(self.sorted_arr[self.x_min_ind][1], self.sorted_arr[self.x_min_ind + 1][1])
            else:
                return self.sorted_arr[self.x_min_ind][1]
        for i in range(self.x_min_ind, self.x_max_ind):
            if x >= self.sorted_arr[i][0] and x < self.sorted_arr[i + 1][0]:
                if self.sorted_arr[i][0] != self.sorted_arr[i + 1][0]:
                    # Linear interpolation along the segment (x1,y1)-(x2,y2).
                    x1 = self.sorted_arr[i][0]
                    x2 = self.sorted_arr[i + 1][0]
                    y1 = self.sorted_arr[i][1]
                    y2 = self.sorted_arr[i + 1][1]
                    return y1 + (x - x1) * (y2 - y1) / (x2 - x1)
                else:
                    # Vertical segment: take the upper endpoint.
                    return max(self.sorted_arr[i][1], self.sorted_arr[i + 1][1])
        exit(3)
    def get_bottom_border(self, x):
        """Return the y of the lower chain at abscissa x by linear
        interpolation along indices x_max_ind..end (x decreasing).

        Mirror of get_top_border, taking the lower endpoint on vertical
        edges; also terminates the process via exit(3) when x is not covered.
        """
        if polygon.__equal__(x, self.sorted_arr[self.x_max_ind][0]):
            if polygon.__equal__(self.sorted_arr[self.x_max_ind][0], self.sorted_arr[self.x_max_ind + 1][0]):
                return min(self.sorted_arr[self.x_max_ind][1], self.sorted_arr[self.x_max_ind + 1][1])
            else:
                return self.sorted_arr[self.x_max_ind][1]
        if polygon.__equal__(x, self.sorted_arr[self.x_min_ind][0]):
            if polygon.__equal__(self.sorted_arr[self.x_min_ind][0], self.sorted_arr[self.x_min_ind + 1][0]):
                return min(self.sorted_arr[self.x_min_ind][1], self.sorted_arr[self.x_min_ind + 1][1])
            else:
                return self.sorted_arr[self.x_min_ind][1]
        for i in range(self.x_max_ind, self.size - 1):
            if x < self.sorted_arr[i][0] and x >= self.sorted_arr[i + 1][0]:
                if self.sorted_arr[i][0] != self.sorted_arr[i + 1][0]:
                    # Linear interpolation along the segment (x1,y1)-(x2,y2).
                    x1 = self.sorted_arr[i][0]
                    x2 = self.sorted_arr[i + 1][0]
                    y1 = self.sorted_arr[i][1]
                    y2 = self.sorted_arr[i + 1][1]
                    return y1 + (x - x1) * (y2 - y1) / (x2 - x1)
                else:
                    # Vertical segment: take the lower endpoint.
                    return min(self.sorted_arr[i][1], self.sorted_arr[i + 1][1])
        exit(3)
    def get_x_min(self):
        """Minimum x coordinate over the vertices."""
        return self.sorted_arr[self.x_min_ind][0]
    def get_x_max(self):
        """Maximum x coordinate over the vertices."""
        return self.sorted_arr[self.x_max_ind][0]
    def get_y_min(self):
        """Minimum y coordinate over the vertices."""
        return self.sorted_arr[self.y_min_ind][1]
    def get_y_max(self):
        """Maximum y coordinate over the vertices."""
        return self.sorted_arr[self.y_max_ind][1]
    def get_contour_length(self):
        """Total Euclidean length of the closed contour."""
        res = 0
        for i in range(0, self.size - 1):
            res += math.sqrt((self.sorted_arr[i][0] - self.sorted_arr[i + 1][0]) ** 2 + (self.sorted_arr[i][1] - self.sorted_arr[i + 1][1]) ** 2)
        return res
    def get_contour_sequence(self, dpi=10):
        """Sample roughly dpi points per unit length along each edge.

        Returns a list of [x, y, step_len] triples where step_len is the arc
        length represented by each sample (so it can be used as a quadrature
        weight).  Samples are inserted at the front, so the list runs in
        reverse edge order.
        """
        # returns 2d-array with 1-dimension length same as points in array,
        # second dimension have length 3, contains x, y, multiplier constant
        # NOTE(review): this initial n is overwritten per-edge below and the
        # inner loops shadow the outer loop variable i (harmless in Python,
        # since the outer for reassigns i from its own iterator).
        n = math.ceil(self.get_contour_length() * dpi)
        res_arr = []
        for i in range(0, self.size - 1):
            x_cur = self.sorted_arr[i][0]
            x_next = self.sorted_arr[i + 1][0]
            y_cur = self.sorted_arr[i][1]
            y_next = self.sorted_arr[i + 1][1]
            if not polygon.__equal__(x_cur, x_next):
                # Non-vertical edge: sample at segment midpoint offsets in x.
                y_x = lambda x: y_cur + (x - x_cur) * (y_next - y_cur) / (x_next - x_cur)
                section_length = math.sqrt((x_next - x_cur) ** 2 + (y_next - y_cur) ** 2)
                n = math.ceil(section_length * dpi)
                if n != 0:
                    step_x = (x_next - x_cur) / float(n)
                    step_len = section_length / float(n)
                    for i in range(0, n):
                        tmp_x = x_cur + step_x * (i + 0.5)
                        tmp_y = y_x(tmp_x)
                        res_arr.insert(0, [tmp_x, tmp_y, step_len])
            else:
                # (Nearly) vertical edge: sample along y at fixed x.
                section_length = math.sqrt((x_next - x_cur) ** 2 + (y_next - y_cur) ** 2)
                n = math.ceil(section_length * dpi)
                if n != 0:
                    step_p = (y_next - y_cur) / float(n)
                    step_len = section_length / float(n)
                    for i in range(0, n):
                        tmp_y = y_cur + step_p * (i + 0.5)
                        res_arr.insert(0, [x_cur, tmp_y, step_len])
        return res_arr
    def contains_point(self, x, y):
        """True when (x, y) lies between the bottom and top borders at x."""
        if x > self.get_x_max() or x < self.get_x_min():
            return False
        return self.get_top_border(x) >= y and self.get_bottom_border(x) <= y
| 46.609929 | 145 | 0.549452 | 6,558 | 0.99787 | 0 | 0 | 0 | 0 | 0 | 0 | 135 | 0.020542 |
89d33b22e0bc047ce85d01052a68ca4989846e8f | 20,352 | py | Python | post_processing/utils.py | fywalter/TorchSeg | 729eb22d8c5d607466055552fd82e0819d5f29e2 | [
"MIT"
] | null | null | null | post_processing/utils.py | fywalter/TorchSeg | 729eb22d8c5d607466055552fd82e0819d5f29e2 | [
"MIT"
] | null | null | null | post_processing/utils.py | fywalter/TorchSeg | 729eb22d8c5d607466055552fd82e0819d5f29e2 | [
"MIT"
] | 2 | 2020-07-31T14:40:49.000Z | 2020-07-31T17:52:30.000Z |
# coding: utf-8
# In[20]:
import numpy as np
import pydensecrf.densecrf as dcrf
import os
import cv2
import random
from tqdm import tqdm
# In[21]:
from skimage.color import gray2rgb
from skimage.color import rgb2gray
import matplotlib.pyplot as plt
from sklearn.metrics import f1_score, accuracy_score
from pydensecrf.utils import unary_from_labels, create_pairwise_bilateral, create_pairwise_gaussian, unary_from_softmax
#from osgeo import gdal
get_ipython().run_line_magic('matplotlib', 'inline')
# In[22]:
# Color maps for direction map.
# Each local direction is rendered with a distinct RGB color; note the first
# channel alone already distinguishes the four (0 / 128 / 255 / 1).
COLOR_LR = [0,128,128]     # left-right (horizontal)
COLOR_UD = [128,0,128]     # up-down (vertical)
COLOR_DIAG = [255,215,0]   # main diagonal
COLOR_ADIAG = [1,255,255]  # anti-diagonal
# Sentinel used as an "infinite" intercept coordinate in dir_to_features.
INF = 10000
# In[23]:
# NOTE(review): the four flags below are not referenced anywhere in this
# file's visible code; they look like mode selectors for the
# propagate_{max,sum}_{vec,mat} variants -- confirm before removing.
MAX = 0
SUM = 1
VEC = 0
MAT = 1
# In[24]:
def dir_to_features(dir_map):
    """Converts a direction color map to the feature used for a CRF kernel.

    The feature of a pixel is obtained by computing the intersections of the
    x, y axes and the line determined by the position of the point and its
    direction (more details in the report).

    Parameters
    ____________
    dir_map: numpy.array
        (h, w, 3) color image mapping each pixel to one of
        [left_right, up_down, diagonal, anti-diagonal]; each direction is
        encoded by one of the COLOR_* constants.

    Returns
    ____________
    numpy.array of shape (h, w, 2) holding the per-pixel intercept feature
    (INF marks an intercept at infinity).
    """
    (h, w, c) = dir_map.shape
    feature_map = np.zeros((h, w, 2))
    for i in range(h):
        for j in range(w):
            dir_color = dir_map[i, j]
            # The four COLOR_* constants are distinguishable by their first
            # channel alone: LR=0, UD=128, DIAG=255, ADIAG=1.
            # BUG FIX: this previously referenced the undefined name
            # COLOR_UP (NameError) and tested channel 1 against
            # COLOR_DIAG[0]/COLOR_ADIAG[0], which never matched an actual
            # diagonal/anti-diagonal pixel.
            if dir_color[0] == COLOR_LR[0]:  # horizontal line y = i
                feature_map[i, j] = np.array([INF, i])
            elif dir_color[0] == COLOR_UD[0]:  # vertical line x = j
                feature_map[i, j] = np.array([j, INF])
            elif dir_color[0] == COLOR_DIAG[0]:  # slope +1 diagonal
                feature_map[i, j] = np.array([j - i, i - j])
            elif dir_color[0] == COLOR_ADIAG[0]:  # slope -1 anti-diagonal
                feature_map[i, j] = np.array([i + j, i + j])
    return feature_map
# In[25]:
def gen_dir_map(img):
    """Assign each pixel of a square image a dominant local direction.

    Returns an (h, w) array with entries 1=left-right, 2=up-down,
    3=diagonal, 4=anti-diagonal: for each pixel, bilateral (color+distance)
    Gaussian weights are summed along the four lines through the pixel
    inside a 101x101 window, and the direction with the largest total wins.
    """
    window = 101
    half = int((window - 1) / 2)
    sigma_color = 2
    sigma_pos = 40
    (h, w, _) = img.shape
    assert h == w, "h and w are not equal"
    # Per-pixel (row, col) coordinate grid, padded like the image.
    coords = np.zeros((h, w, 2))
    for r in range(h):
        for c in range(w):
            coords[r, c, 0] = r
            coords[r, c, 1] = c
    pad_spec = ((half, half), (half, half), (0, 0))
    coords_padded = np.pad(coords, pad_spec)
    img_padded = np.pad(img, pad_spec)
    # Boolean masks selecting the four lines through the window center.
    mask_lr = np.zeros((window, window)).astype("bool")
    mask_lr[half, :] = True
    mask_ud = np.zeros((window, window)).astype("bool")
    mask_ud[:, half] = True
    mask_diag = np.identity(window).astype("bool")
    mask_adiag = np.fliplr(np.identity(window)).astype("bool")
    masks = [mask_lr, mask_ud, mask_diag, mask_adiag]
    dir_map = np.zeros((h, w))
    for r in range(h):
        for c in range(w):
            # Color/position offsets of the whole window w.r.t. the center.
            color_diff = img_padded[r:r + window, c:c + window] - img[r, c, :]
            pos_diff = coords_padded[r:r + window, c:c + window] - np.array([r, c])
            scores = np.zeros(4)
            for d, mask in enumerate(masks):
                color_term = np.sum(color_diff[mask] ** 2, axis=1) / (2 * sigma_color ** 2)
                pos_term = np.sum(pos_diff[mask] ** 2, axis=1) / (2 * sigma_pos ** 2)
                scores[d] = np.sum(np.exp(-color_term - pos_term))
            # Directions are 1-based in the output map.
            dir_map[r, c] = np.argmax(scores) + 1
    return dir_map
# In[26]:
def visualize_dir_map(img, dir_map, save_file=False,
                      filename=None, vis_path=None, dir_path=None):
    """Display an image next to its direction map.

    When save_file is True, the side-by-side figure is written to
    vis_path/filename and a color-coded direction image to
    dir_path/filename.
    """
    # Color-coded rendering of the direction map (labels 1..4).
    vis_dir = np.zeros(img.shape)
    for label, color in zip((1, 2, 3, 4),
                            (COLOR_LR, COLOR_UD, COLOR_DIAG, COLOR_ADIAG)):
        vis_dir[dir_map == label] = np.array(color)
    plt.figure(figsize=(10, 5))
    plt.subplot(1, 2, 1)
    plt.imshow(img)
    plt.title('Original Image (blurred)')
    plt.axis('off')
    plt.subplot(1, 2, 2)
    plt.imshow(dir_map)
    plt.title('Direction map')
    plt.axis('off')
    if save_file:
        plt.savefig(os.path.join(vis_path, filename), dpi=300)
        plt.close()
        cv2.imwrite(os.path.join(dir_path, filename), vis_dir)
# In[27]:
def gen_dir_map_and_visualize(image_path= './images/',
                              vis_path='./vis_dir_blur_/',
                              dir_path='./dir_map_/',
                              process_all=True):
    """Generate direction color maps for the images in image_path.

    With process_all=True every image is blurred, mapped and saved (figures
    under vis_path, direction maps under dir_path); otherwise a single demo
    image is processed and only displayed.
    """
    # Make sure both output directories exist.
    for path in (dir_path, vis_path):
        if not os.path.exists(path):
            os.mkdir(path)
    if not process_all:
        # One-off demo: display without saving anything.
        demo = cv2.imread('./images/satImage_001.png')
        demo = cv2.GaussianBlur(demo,(5,5),0)
        visualize_dir_map(demo, gen_dir_map(demo), save_file=False)
        return
    for fname in tqdm(os.listdir(image_path)):
        img = cv2.imread(os.path.join(image_path, fname))
        # Blur first so the direction estimate is less noise-sensitive.
        img = cv2.GaussianBlur(img,(5,5),0)
        visualize_dir_map(img, gen_dir_map(img), filename=fname, save_file=True,
                          vis_path=vis_path, dir_path=dir_path)
# In[28]:
def crf_with_dir_kernel(original_img, dir_feature, prob,
                        iter_num, compat_smooth, compat_appearance, compat_struct,
                        w_smooth, w_appearance, w_struct,
                        sigma_smooth, sigma_app_color, sigma_app_pos,
                        sigma_struct_pos, sigma_struct_feat):
    """CRF with a Gaussian smoothing kernel, an appearance kernel and a
    structural (direction-feature) kernel.

    Parameters
    ____________
    original_img: numpy.array
        Image the probabilities refer to.
    dir_feature: numpy.array
        (h, w, 2) per-pixel direction feature (see dir_to_features).
    prob: numpy.array
        (h, w) foreground probability map.

    Returns the refined (h, w) foreground probability map.
    """
    (h, w) = prob.shape
    # Two-class softmax stack: channel 0 = background, channel 1 = foreground.
    y = np.zeros((h, w, 2))
    y[:, :, 1] = prob
    y[:, :, 0] = 1 - y[:, :, 1]
    annotated_image = y.transpose((2, 0, 1))
    # Gives no of class labels in the annotated image
    n_labels = 2
    # Setting up the CRF model
    d = dcrf.DenseCRF2D(original_img.shape[1], original_img.shape[0], n_labels)
    # get unary potentials (neg log probability)
    U = unary_from_softmax(annotated_image)
    unary = np.ascontiguousarray(U)
    d.setUnaryEnergy(unary)
    # Scale each compatibility matrix by its kernel weight.
    compat_smooth = compat_smooth * w_smooth
    compat_appearance = compat_appearance * w_appearance
    compat_struct = compat_struct * w_struct
    # Smooth kernel (location only)
    d.addPairwiseGaussian(sxy=(sigma_smooth, sigma_smooth), compat=compat_smooth.astype(np.float32),
                          kernel=dcrf.DIAG_KERNEL,
                          normalization=dcrf.NORMALIZE_SYMMETRIC)
    # Appearance kernel (location + color).
    # BUG FIX: this previously passed the undefined name `original_image`
    # as rgbim (NameError at runtime); the parameter is `original_img`.
    d.addPairwiseBilateral(sxy=(sigma_app_pos, sigma_app_pos),
                           srgb=(sigma_app_color, sigma_app_color, sigma_app_color),
                           rgbim=original_img,
                           compat=compat_appearance.astype(np.float32),
                           kernel=dcrf.DIAG_KERNEL,
                           normalization=dcrf.NORMALIZE_SYMMETRIC)
    # Structural kernel built from the direction features.
    pairwise_energy = create_pairwise_bilateral(sdims=(sigma_struct_pos,sigma_struct_pos),
                                                schan=(sigma_struct_feat,sigma_struct_feat),
                                                img=dir_feature, chdim=2)
    d.addPairwiseEnergy(pairwise_energy, compat=compat_struct.astype(np.float32))
    Q = d.inference(iter_num)
    proba = np.array(Q)
    # Channel 1 holds the foreground marginals.
    return proba[1].reshape((dir_feature.shape[0], dir_feature.shape[1]))
# In[29]:
def crf(original_image, prob,
        iter_num=4, compat_smooth = np.array([[-0.4946432, 1.27117338],[0.59452892, 0.23182234]]),
        compat_appearance = np.array([[-0.30571318, 0.83015124],[1.3217825, -0.13046645]]),
        w_smooth=3.7946478055761963, w_appearance=1.8458537690881878,
        sigma_smooth=8.575103751642672, sigma_color=2.0738539891571977, sigma_color_pos=20):
    """Dense-CRF refinement of a foreground probability map.

    Uses a location-only smoothness kernel plus a color-dependent appearance
    kernel; returns the refined foreground probability map, shaped like the
    image.
    """
    n_labels = 2
    (h, w) = prob.shape
    # Two-class softmax stack: channel 0 = background, channel 1 = foreground.
    stacked = np.zeros((h, w, 2))
    stacked[:, :, 1] = prob
    stacked[:, :, 0] = 1 - prob
    dense_crf = dcrf.DenseCRF2D(original_image.shape[1], original_image.shape[0], n_labels)
    # Unary potentials are the negative log of the stacked probabilities.
    unary = np.ascontiguousarray(unary_from_softmax(stacked.transpose((2, 0, 1))))
    dense_crf.setUnaryEnergy(unary)
    # Compatibility matrices scaled by their kernel weights.
    smooth_compat = (compat_smooth * w_smooth).astype(np.float32)
    appearance_compat = (compat_appearance * w_appearance).astype(np.float32)
    # Color-independent (smoothness) term: features are the locations only.
    dense_crf.addPairwiseGaussian(sxy=(sigma_smooth, sigma_smooth), compat=smooth_compat,
                                  kernel=dcrf.DIAG_KERNEL,
                                  normalization=dcrf.NORMALIZE_SYMMETRIC)
    # Color-dependent (appearance) term: features are (x, y, r, g, b).
    dense_crf.addPairwiseBilateral(sxy=(sigma_color_pos, sigma_color_pos),
                                   srgb=(sigma_color, sigma_color, sigma_color),
                                   rgbim=original_image,
                                   compat=appearance_compat,
                                   kernel=dcrf.DIAG_KERNEL,
                                   normalization=dcrf.NORMALIZE_SYMMETRIC)
    marginals = np.array(dense_crf.inference(iter_num))
    # Channel 1 holds the foreground marginals.
    return marginals[1].reshape((original_image.shape[0], original_image.shape[1]))
# In[30]:
def crf_smooth(original_image, prob, use_2d = True, iter_num=1, w=4.921522279119057, sigma_sm=4.325251720130304):
    """CRF with only a Gaussian smoothing kernel.

    Parameters
    ____________
    prob: numpy.array
        (h, w) foreground probability map.
    w: float
        Weight (compat) of the smoothness kernel.
    sigma_sm: float
        Spatial std-dev of the smoothness kernel.

    Returns the refined (h, w) foreground probability map.
    NOTE(review): with use_2d=False no model is built and the final return
    raises UnboundLocalError -- kept as-is since no caller appears to use it.
    """
    # BUG FIX: unpack into `w_p`, not `w` -- the original code clobbered the
    # kernel-weight parameter `w` with the map width here, so `compat=w`
    # below received the image width instead of the intended weight.
    (h, w_p) = prob.shape
    # Two-class softmax stack: channel 0 = background, channel 1 = foreground.
    y = np.zeros((h, w_p, 2))
    y[:, :, 1] = prob
    y[:, :, 0] = 1 - y[:, :, 1]
    annotated_image = y.transpose((2, 0, 1))
    # Gives no of class labels in the annotated image
    n_labels = 2
    # Setting up the CRF model
    if use_2d:
        d = dcrf.DenseCRF2D(original_image.shape[1], original_image.shape[0], n_labels)
        # get unary potentials (neg log probability)
        U = unary_from_softmax(annotated_image)
        unary = np.ascontiguousarray(U)
        d.setUnaryEnergy(unary)
        # This adds the color-independent term, features are the locations only.
        d.addPairwiseGaussian(sxy=(sigma_sm, sigma_sm), compat=w, kernel=dcrf.DIAG_KERNEL,
                              normalization=dcrf.NORMALIZE_SYMMETRIC)
        Q = d.inference(iter_num)
        proba = np.array(Q)
    return proba[1].reshape((original_image.shape[0], original_image.shape[1]))
# In[31]:
def propagate_max_mat(img, prob):
    """Propagate probabilities along 4 directions (row, column, diagonal,
    anti-diagonal), keeping per-pixel maxima.

    For every pixel with probability >= 0.01, its probability is pushed to
    the neighbors lying on the four lines through it inside a 51x51 window,
    attenuated by a bilateral (color + distance) Gaussian; each receiver
    keeps the maximum of its current value and the pushed one.  Three sweeps
    are performed.  Returns the propagated (h, w) probability map.
    """
    prob_out = prob.copy()
    # Fixed propagation hyper-parameters.
    prop_size = 51      # window side: propagation reaches 25 px each way
    half_size = int((prop_size-1)/2)
    prop_num = 3        # number of propagation sweeps
    sigma_1 = 5         # color std-dev
    sigma_2 = 42        # distance std-dev
    (h, w) = prob.shape
    # Per-pixel (row, col) coordinates, padded like the image.
    pos_mat = np.zeros((h,w,2))
    for i in range(h):
        for j in range(w):
            pos_mat[i,j,0]=i
            pos_mat[i,j,1]=j
    padded_pos = np.pad(pos_mat, ((half_size, half_size), (half_size, half_size), (0,0)))
    padded_img = np.pad(img, ((half_size, half_size), (half_size, half_size), (0,0)))
    # Mask selecting the 4 lines (horizontal, vertical, diagonal,
    # anti-diagonal) through the window center.
    index_mask = np.zeros((prop_size, prop_size)).astype("bool")
    for i in range(prop_size):
        index_mask[i,half_size]=1
        index_mask[half_size,i]=1
        index_mask[i,i]=1
        index_mask[prop_size-1-i,i]=1
    for iteration in range(prop_num):
        padded_prob = np.pad(prob_out, ((half_size, half_size), (half_size, half_size)))
        # propagate prob (maximum)
        for i in range(h):
            for j in range(w):
                if prob_out[i,j]<0.01:
                    continue  # near-zero sources cannot raise any neighbor
                img_nbr = padded_img[i:i+prop_size,j:j+prop_size]
                pos_nbr = padded_pos[i:i+prop_size,j:j+prop_size]
                # The subtractions below produce copies, so zeroing the
                # masked-out entries does not touch the padded arrays.
                img_nbr = img_nbr - img[i,j,:]
                pos_nbr = pos_nbr - np.array([i,j])
                img_nbr[~index_mask]=0
                pos_nbr[~index_mask]=0
                img_nbr = np.sum(img_nbr**2, axis=2)/(2*sigma_1**2)
                pos_nbr = np.sum(pos_nbr**2, axis=2)/(2*sigma_2**2)
                # Bilateral weight times the source probability, restricted
                # to the four directional lines.
                k = np.exp(-img_nbr-pos_nbr)*prob_out[i,j]
                k = k*index_mask
                padded_prob[i:i+prop_size,j:j+prop_size] = np.maximum(padded_prob[i:i+prop_size,j:j+prop_size], k)
        prob_out = padded_prob[half_size:h+half_size,half_size:w+half_size]
    return prob_out
# In[32]:
def propagate_max_vec(img, prob, prop_size=11,
                      prop_num=16, sigma_1=1.039316347691348, sigma_2=40):
    """Iteratively propagate probabilities along rows and columns, keeping
    the elementwise maximum ("vec" = axis-aligned only, "max" = max-combine).

    Each of the prop_num sweeps pushes every pixel's probability to the
    prop_size neighbors directly above/below and left/right of it, weighted
    by a bilateral (color + distance) Gaussian, and keeps the larger value.

    Args:
        prop_size: neighborhood size
        prop_num: number of iterations/propagations
        sigma_1: color variance
        sigma_2: distance variance
    """
    half = int((prop_size - 1) / 2)
    (h, w, c) = img.shape
    # Per-pixel (row, col) coordinate grid, padded like the image.
    rows, cols = np.indices((h, w))
    coords = np.stack((rows, cols), axis=-1).astype(float)
    pad2d = ((half, half), (half, half))
    coords_pad = np.pad(coords, pad2d + ((0, 0),))
    img_pad = np.pad(img, pad2d + ((0, 0),))
    out = prob.copy()
    for _step in range(prop_num):
        buf = np.pad(out, pad2d)
        snapshot = buf.copy()  # fixed source values for this sweep
        assert h == w, "h and w are not equal"
        for i in range(h):
            # Push row i's values vertically onto its prop_size-row band.
            color_d = img_pad[i:i + prop_size, :] - img_pad[i + half, :, :]
            pos_d = coords_pad[i:i + prop_size, :] - coords_pad[i + half, :, :]
            color_term = np.sum(color_d ** 2, axis=2) / (2 * sigma_1 ** 2)
            pos_term = np.sum(pos_d ** 2, axis=2) / (2 * sigma_2 ** 2)
            gain = np.exp(-color_term - pos_term) * snapshot[i + half, :]
            buf[i:i + prop_size, :] = np.maximum(buf[i:i + prop_size, :], gain)
            # Push column i's values horizontally onto its column band.
            color_d = img_pad[:, i:i + prop_size] - img_pad[:, i + half, :].reshape((img_pad.shape[0], 1, c))
            pos_d = coords_pad[:, i:i + prop_size] - coords_pad[:, i + half, :].reshape((img_pad.shape[0], 1, 2))
            color_term = np.sum(color_d ** 2, axis=2) / (2 * sigma_1 ** 2)
            pos_term = np.sum(pos_d ** 2, axis=2) / (2 * sigma_2 ** 2)
            gain = np.exp(-color_term - pos_term) * snapshot[:, i + half].reshape((-1, 1))
            buf[:, i:i + prop_size] = np.maximum(buf[:, i:i + prop_size], gain)
        out = buf[half:h + half, half:w + half]
    return out
# In[33]:
def propagate_sum_vec(img, prob, prop_size=11, prop_num=1, sigma_1=1.5319569104856783, sigma_2=80):
    """
    vec means only do propagation along x and y axis
    sum means propagate in an additive schema (intended to keep the total
    probability fixed, per the original author's note)

    Each sweep first accumulates, per pixel, the total bilateral
    (color + distance) weight of its row band and column band (deg_mat),
    then redistributes every pixel's probability to those neighbors with
    weights normalized by that degree.  The result is clipped to 1 at the
    end.  NOTE(review): the row and column redistributions each sum to the
    source mass, so the two directions together appear to double it --
    confirm the intended scaling before relying on mass conservation.

    Args:
        prop_size: neighborhood size
        prop_num: number of iteration/propagation
        sigma_1: variance of color
        sigma_2: variance of distance
    """
    # print(np.sum(prob))
    prob_out = prob.copy()
    half_size = int((prop_size-1)/2)
    (h, w, c) = img.shape
    pos_mat = np.zeros((h,w,2)) # position matrix
    for i in range(h):
        for j in range(w):
            pos_mat[i,j,0]=i
            pos_mat[i,j,1]=j
    padded_pos = np.pad(pos_mat, ((half_size, half_size), (half_size, half_size), (0,0)))
    padded_img = np.pad(img, ((half_size, half_size), (half_size, half_size), (0,0)))
    padded_prob = np.pad(prob, ((half_size, half_size), (half_size, half_size)))
    for iteration in range(prop_num):
        # Freeze the incoming values; the new sweep accumulates from zero.
        padded_prob_fix = padded_prob.copy()
        padded_prob = np.pad(np.zeros((h,w)), ((half_size, half_size), (half_size, half_size)))
        # propagate prob (sum)
        assert h==w, "h and w are not equal"
        # compute the degree mat
        deg_mat = np.zeros((h+2*half_size,w+2*half_size))
        for i in range(h):
            # prop along y for row i
            img_nbr = padded_img[i:i+prop_size,:]
            pos_nbr = padded_pos[i:i+prop_size,:]
            img_nbr = img_nbr - padded_img[i+half_size,:,:]
            pos_nbr = pos_nbr - padded_pos[i+half_size,:,:]
            img_nbr = np.sum(img_nbr**2, axis=2)/(2*sigma_1**2)
            pos_nbr = np.sum(pos_nbr**2, axis=2)/(2*sigma_2**2)
            k = np.exp(-img_nbr-pos_nbr)
            deg_mat[i+half_size,:] = deg_mat[i+half_size,:]+np.sum(k,axis=0)
            # prop along x for col i
            img_nbr = padded_img[:,i:i+prop_size]
            pos_nbr = padded_pos[:,i:i+prop_size]
            img_nbr = img_nbr - padded_img[:,i+half_size,:].reshape((padded_img.shape[0],1,c))
            pos_nbr = pos_nbr - padded_pos[:,i+half_size,:].reshape((padded_img.shape[0],1,2))
            img_nbr = np.sum(img_nbr**2, axis=2)/(2*sigma_1**2)
            pos_nbr = np.sum(pos_nbr**2, axis=2)/(2*sigma_2**2)
            k = np.exp(-img_nbr-pos_nbr)
            deg_mat[:,i+half_size] = deg_mat[:,i+half_size]+np.sum(k,axis=1)
        # Second pass: redistribute mass using the degree-normalized weights.
        for i in range(h):
            # prop along y for row i
            img_nbr = padded_img[i:i+prop_size,:]
            pos_nbr = padded_pos[i:i+prop_size,:]
            img_nbr = img_nbr - padded_img[i+half_size,:,:]
            pos_nbr = pos_nbr - padded_pos[i+half_size,:,:]
            img_nbr = np.sum(img_nbr**2, axis=2)/(2*sigma_1**2)
            pos_nbr = np.sum(pos_nbr**2, axis=2)/(2*sigma_2**2)
            k = np.exp(-img_nbr-pos_nbr) # similarity matrix
            k = k/deg_mat[i+half_size,:] #devided by degree
            prop_prob = k * padded_prob_fix[i+half_size,:]
            padded_prob[i:i+prop_size,:] = padded_prob[i:i+prop_size,:] + prop_prob
            # prop along x for col i
            img_nbr = padded_img[:,i:i+prop_size]
            pos_nbr = padded_pos[:,i:i+prop_size]
            img_nbr = img_nbr - padded_img[:,i+half_size,:].reshape((padded_img.shape[0],1,c))
            pos_nbr = pos_nbr - padded_pos[:,i+half_size,:].reshape((padded_img.shape[0],1,2))
            img_nbr = np.sum(img_nbr**2, axis=2)/(2*sigma_1**2)
            pos_nbr = np.sum(pos_nbr**2, axis=2)/(2*sigma_2**2)
            k = np.exp(-img_nbr-pos_nbr) # similarity matrix
            k = k/deg_mat[:,i+half_size].reshape((-1,1)) #devided by degree
            prop_prob = k * padded_prob_fix[:,i+half_size].reshape((-1,1))
            padded_prob[:,i:i+prop_size] = padded_prob[:,i:i+prop_size]+ prop_prob
        # padded_prob = padded_prob + 0.5 * padded_prob_fix # lazy propagation
        prob_out = padded_prob[half_size:h+half_size,half_size:w+half_size]
    # print(np.sum(prob_out))
    # Clamp overshoot so the result stays a valid probability map.
    prob_out[prob_out>1]=1
    return prob_out
# In[34]:
def prob_to_patch(im):
    """Average a pixel-level probability map over 16x16 patches.

    Patches are visited column-block by column-block (all row blocks of the
    first column block first); the per-patch means are returned as a 1-D
    numpy array in that order.
    """
    patch_size = 16
    means = [np.mean(im[r:r + patch_size, col:col + patch_size])
             for col in range(0, im.shape[1], patch_size)
             for r in range(0, im.shape[0], patch_size)]
    return np.array(means)
| 37.071038 | 134 | 0.604068 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,831 | 0.188237 |
89d3bf0df91591781ea747a7755b48cfdfadf9ad | 783 | py | Python | lists/management/commands/seed_list.py | nasir733/airbnb-clone | 9ac746b6f3f3c8fc45f97773266e6f5f182d14b9 | [
"MIT"
] | null | null | null | lists/management/commands/seed_list.py | nasir733/airbnb-clone | 9ac746b6f3f3c8fc45f97773266e6f5f182d14b9 | [
"MIT"
] | null | null | null | lists/management/commands/seed_list.py | nasir733/airbnb-clone | 9ac746b6f3f3c8fc45f97773266e6f5f182d14b9 | [
"MIT"
] | null | null | null | import random
from django.core.management.base import BaseCommand
from django.contrib.admin.utils import flatten
from django_seed import Seed
from lists import models as list_models
from users import models as user_models
from rooms import models as room_models
NAME = "lists"
class Command(BaseCommand):
help = f"This command creates {NAME}"
def handle(self, *args, **options):
users = user_models.User.objects.all()
rooms = room_models.Room.objects.all()
for user in users:
list_model = list_models.List.objects.create(user=user, name="Favs.")
to_add = rooms[random.randint(0, 5) : random.randint(6, 30)]
list_model.rooms.add(*to_add)
self.stdout.write(self.style.SUCCESS(f"{0} {NAME} created!"))
| 29 | 81 | 0.696041 | 501 | 0.639847 | 0 | 0 | 0 | 0 | 0 | 0 | 66 | 0.084291 |
89d4a95a515f918c5e953ba7e49e998786a0f9cc | 4,181 | py | Python | rdkit/DataStructs/UnitTestBitEnsemble.py | kazuyaujihara/rdkit | 06027dcd05674787b61f27ba46ec0d42a6037540 | [
"BSD-3-Clause"
] | 1,609 | 2015-01-05T02:41:13.000Z | 2022-03-30T21:57:24.000Z | rdkit/DataStructs/UnitTestBitEnsemble.py | kazuyaujihara/rdkit | 06027dcd05674787b61f27ba46ec0d42a6037540 | [
"BSD-3-Clause"
] | 3,412 | 2015-01-06T12:13:33.000Z | 2022-03-31T17:25:41.000Z | rdkit/DataStructs/UnitTestBitEnsemble.py | bp-kelley/rdkit | e0de7c9622ce73894b1e7d9568532f6d5638058a | [
"BSD-3-Clause"
] | 811 | 2015-01-11T03:33:48.000Z | 2022-03-28T11:57:49.000Z | # $Id$
#
# Copyright (C) 2003-2006 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" unit testing code for BitEnsembles
"""
import os
import shutil
import tempfile
import unittest
from rdkit import RDConfig
from rdkit.DataStructs import SparseBitVect
# This import is important to initialize the BitEnsemble module
from rdkit.DataStructs import BitEnsembleDb
from rdkit.DataStructs.BitEnsemble import BitEnsemble
class TestCase(unittest.TestCase):
def test1(self):
ensemble = BitEnsemble()
ensemble.SetBits([1, 11, 21, 31])
self.assertEqual(ensemble.GetNumBits(), 4)
bv = SparseBitVect(100)
bv.SetBit(1)
bv.SetBit(11)
bv.SetBit(13)
score = ensemble.ScoreWithOnBits(bv)
assert score == 2, 'bad score: %d' % (score)
score = ensemble.ScoreWithIndex(bv)
assert score == 2, 'bad score: %d' % (score)
def test2(self):
ensemble = BitEnsemble([1, 11, 21, 31])
bv = SparseBitVect(100)
bv.SetBit(1)
bv.SetBit(11)
bv.SetBit(13)
score = ensemble.ScoreWithOnBits(bv)
assert score == 2, 'bad score: %d' % (score)
score = ensemble.ScoreWithIndex(bv)
assert score == 2, 'bad score: %d' % (score)
def test3(self):
ensemble = BitEnsemble()
for bit in [1, 11, 21, 31]:
ensemble.AddBit(bit)
bv = SparseBitVect(100)
bv.SetBit(1)
bv.SetBit(11)
bv.SetBit(13)
score = ensemble.ScoreWithOnBits(bv)
assert score == 2, 'bad score: %d' % (score)
score = ensemble.ScoreWithIndex(bv)
assert score == 2, 'bad score: %d' % (score)
def _setupDb(self):
from rdkit.Dbase.DbConnection import DbConnect
fName = RDConfig.RDTestDatabase
if RDConfig.useSqlLite:
_, tempName = tempfile.mkstemp(suffix='sqlt')
self.tempDbName = tempName
shutil.copyfile(fName, tempName)
else: # pragma: nocover
tempName = '::RDTests'
self.conn = DbConnect(tempName)
self.dbTblName = 'bit_ensemble_test'
return self.conn
def tearDown(self):
if hasattr(self, 'tempDbName') and RDConfig.useSqlLite and os.path.exists(self.tempDbName):
try:
os.unlink(self.tempDbName)
except: # pragma: nocover
import traceback
traceback.print_exc()
def testdb1(self):
""" test the sig - db functionality """
conn = self._setupDb()
ensemble = BitEnsemble()
for bit in [1, 3, 4]:
ensemble.AddBit(bit)
sigBs = [([0, 0, 0, 0, 0, 0], (0, 0, 0)),
([0, 1, 0, 1, 0, 0], (1, 1, 0)),
([0, 1, 0, 0, 1, 0], (1, 0, 1)),
([0, 1, 0, 0, 1, 1], (1, 0, 1)), ]
ensemble.InitScoreTable(conn, self.dbTblName)
for bs, tgt in sigBs:
ensemble.ScoreToDb(bs, conn)
conn.Commit()
d = conn.GetData(table=self.dbTblName)
assert len(d) == len(sigBs), 'bad number of results returned'
for i in range(len(sigBs)):
bs, tgt = tuple(sigBs[i])
dbRes = tuple(d[i])
assert dbRes == tgt, 'bad bits returned: %s != %s' % (str(dbRes), str(tgt))
d = None
self.conn = None
  def testdb2(self):
    """ test the sig - db functionality """
    conn = self._setupDb()

    ensemble = BitEnsemble()
    for bit in [1, 3, 4]:
      ensemble.AddBit(bit)
    # same signatures/expectations as testdb1, but the score table now also
    # carries leading ``id`` and trailing ``act`` columns
    sigBs = [([0, 0, 0, 0, 0, 0], (0, 0, 0)),
             ([0, 1, 0, 1, 0, 0], (1, 1, 0)),
             ([0, 1, 0, 0, 1, 0], (1, 0, 1)),
             ([0, 1, 0, 0, 1, 1], (1, 0, 1)), ]
    ensemble.InitScoreTable(conn, self.dbTblName, idInfo='id varchar(10)', actInfo='act int')
    for bs, tgt in sigBs:
      ensemble.ScoreToDb(bs, conn, id='foo', act=1)
    conn.Commit()

    d = conn.GetData(table=self.dbTblName)
    assert len(d) == len(sigBs), 'bad number of results returned'
    for i in range(len(sigBs)):
      bs, tgt = tuple(sigBs[i])
      dbRes = tuple(d[i])
      # slice off the id (first) and act (last) columns before comparing
      assert dbRes[1:-1] == tgt, 'bad bits returned: %s != %s' % (str(dbRes[1:-1]), str(tgt))

    # drop references so the sqlite file can be unlinked in tearDown
    d = None
    self.conn = None
# allow running this test module directly
if __name__ == '__main__':  # pragma: nocover
  unittest.main()
| 29.443662 | 95 | 0.608467 | 3,482 | 0.832815 | 0 | 0 | 0 | 0 | 0 | 0 | 821 | 0.196365 |
89d4b8941114154f6cc741048f8332377d3a8b83 | 1,678 | py | Python | simple_classroom/apps/classroom/migrations/0003_auto_20150207_1835.py | maxicecilia/simple_classroom | 1f1e21414476923952f612a35ce3ff26ac053090 | [
"MIT"
] | 7 | 2015-05-08T22:43:15.000Z | 2020-12-04T15:38:17.000Z | simple_classroom/apps/classroom/migrations/0003_auto_20150207_1835.py | maxicecilia/simple_classroom | 1f1e21414476923952f612a35ce3ff26ac053090 | [
"MIT"
] | 7 | 2020-06-05T17:40:40.000Z | 2022-03-11T23:17:13.000Z | simple_classroom/apps/classroom/migrations/0003_auto_20150207_1835.py | maxicecilia/simple_classroom | 1f1e21414476923952f612a35ce3ff26ac053090 | [
"MIT"
] | 4 | 2015-05-13T05:47:32.000Z | 2020-05-08T18:06:24.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the evaluation/scoring workflow fields to ``Assignment``
    # (evaluation_date, is_evaluated, is_scored, score_date) and refreshes
    # the help text of ``is_published``.
    # NOTE: verbose_name/help_text values are user-facing Spanish strings
    # and must not be altered here.

    dependencies = [
        ('classroom', '0002_assignment_description'),
    ]

    operations = [
        migrations.AddField(
            model_name='assignment',
            name='evaluation_date',
            field=models.DateTimeField(null=True, verbose_name='Fecha de evaluaci\xf3n', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='assignment',
            name='is_evaluated',
            field=models.BooleanField(default=False, help_text='Tildar para indicar que la evaluaci\xf3n ya fue tomada y est\xe1 disponible.', verbose_name='Evaluado'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='assignment',
            name='is_scored',
            field=models.BooleanField(default=False, help_text='Tildar para indicar que la evaluaci\xf3n ya fue corregida y las notas est\xe1n disponibles.', verbose_name='Corregido'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='assignment',
            name='score_date',
            field=models.DateTimeField(null=True, verbose_name='Fecha de Notas', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='assignment',
            name='is_published',
            field=models.BooleanField(default=False, help_text='Tildar para mostrar la asignaci\xf3n a los inscriptos.', verbose_name='Publicado'),
            preserve_default=True,
        ),
    ]
| 37.288889 | 184 | 0.628129 | 1,569 | 0.935042 | 0 | 0 | 0 | 0 | 0 | 0 | 490 | 0.292014 |
89d564054c9eaa814b62ed98ed13a84890b170f0 | 6,484 | py | Python | pyburst/misc/resolution.py | zacjohnston/pyburst | f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c | [
"MIT"
] | 4 | 2019-05-01T07:30:15.000Z | 2021-08-04T15:04:38.000Z | pyburst/misc/resolution.py | zacjohnston/pyburst | f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c | [
"MIT"
] | null | null | null | pyburst/misc/resolution.py | zacjohnston/pyburst | f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c | [
"MIT"
] | 4 | 2019-03-26T12:38:37.000Z | 2022-03-09T05:30:18.000Z | import numpy as np
import matplotlib.pyplot as plt
import os
from pyburst.grids import grid_analyser, grid_strings, grid_tools
# resolution tests
y_factors = {'dt': 3600,
'fluence': 1e39,
'peak': 1e38,
}
y_labels = {'dt': '$\Delta t$',
'rate': 'Burst rate',
'fluence': '$E_b$',
'peak': '$L_{peak}$',
'length': 'Burst length',
}
y_units = {'dt': 'hr',
'rate': 'day$^{-1}$',
'fluence': '$10^39$ erg',
'peak': '$10^38$ erg s$^{-1}$',
'length': 's',
}
reference_params = {'accmass': 1e16,
'accdepth': 1e20}
other_param = {'accmass': 'accdepth',
'accdepth': 'accmass'}
x_bounds = {'accmass': [1e15, 1e17],
'accdepth': [1e19, 1e21]}
colors = {True: 'C1',
False: 'C0'}
# TODO add save plot, iterate over params
def save_all_plots(sources, ref_source, grid_version,
                   params=('x', 'z', 'mass', 'accrate'), **kwargs):
    """Save a resolution plot for every combination of grid parameters.

    Enumerates all unique combinations of ``params`` found in the
    comparison source's grid, and calls plot(save=True) for each.
    """
    kgrids = get_multigrids(sources, grid_version=grid_version)
    compare_source = get_not(sources, ref_source)
    unique_all = kgrids[compare_source].unique_params

    unique_subset = {p: unique_all[p] for p in params}
    params_full = grid_tools.enumerate_params(unique_subset)
    n_combinations = len(params_full[params[0]])

    for i in range(n_combinations):
        params_sub = {p: params_full[p][i] for p in params}
        plot(params=params_sub, sources=sources, ref_source=ref_source,
             kgrids=kgrids, save=True, display=False, title=False, **kwargs)
def plot(params, sources, ref_source, grid_version,
         bprops=('rate', 'fluence', 'peak', 'length'), figsize=(9, 10), shaded=False,
         display=True, save=False, kgrids=None, title=True, show_nbursts=True):
    """Plot burst properties for given resolution parameter

    parameters
    ----------
    params : dict
    ref_source : str
        source from which the reference model comes
    sources: set(str)
        list of source(s) to get models from
    kgrids : {source: Kgrid}
        dict of grid_analyser.Kgrid objects for each source
    bprops : [str]
    figsize : [int, int]
    shaded : bool
        shade between y_values of reference model
    display : bool
        show the figure interactively (only honoured when save=False)
    save : bool
        save the figure to the source's plots/resolution directory
    """
    check_params(params)
    n = len(bprops)
    fig, ax = plt.subplots(n, 2, sharex=False, figsize=figsize)

    if kgrids is None:
        kgrids = get_multigrids(sources, grid_version=grid_version)

    # one column per resolution parameter (accmass, accdepth)
    for i, res_param in enumerate(reference_params):
        ref_value = reference_params[res_param]
        other_res_param = other_param[res_param]

        # hold the non-varied resolution parameter at its reference value
        full_params = dict(params)
        full_params[other_res_param] = reference_params[other_res_param]
        sub_summ, sub_params = get_subgrids(kgrids, params=full_params)

        for j, bprop in enumerate(bprops):
            u_bprop = f'u_{bprop}'
            y_label = f'{y_labels[bprop]} ({y_units[bprop]})'
            y_factor = y_factors.get(bprop, 1)

            set_axes(ax[j, i], xscale='log',
                     ylabel=y_label if i == 0 else '',
                     xlabel=res_param if j == n-1 else '',
                     yticks=True if i == 0 else False)

            for source in sources:
                ref = source == ref_source
                x = sub_params[source][res_param]
                y = sub_summ[source][bprop] / y_factor
                yerr = sub_summ[source][u_bprop] / y_factor

                if show_nbursts:
                    # annotate each point with the number of bursts used
                    n_bursts = sub_summ[source]['n_used']
                    for k in range(len(n_bursts)):
                        x_offset = 1.15
                        nb = n_bursts.iloc[k]
                        ax[j, i].text(x.iloc[k] * x_offset, y.iloc[k], f'{nb:.0f}',
                                      verticalalignment='center')

                if shaded and ref:
                    # shade the reference model's 1-sigma band across the panel
                    idx = np.where(x == ref_value)[0]
                    y_ref = y.iloc[idx]
                    yerr_ref = yerr.iloc[idx]
                    ax[j, i].fill_between(x_bounds[res_param],
                                          np.full(2, y_ref + yerr_ref),
                                          np.full(2, y_ref - yerr_ref), color='0.85')

                ax[j, i].errorbar(x=x, y=y, yerr=yerr, ls='none',
                                  marker='o', capsize=3, color=colors[ref])
    if title:
        ax[0, 0].set_title(params, fontsize=11)
    plt.tight_layout()

    if save:
        source = get_not(sources, ref_source)
        precisions = {'z': 4, 'x': 2, 'qb': 3, 'mass': 1, 'accrate': 2}
        fixed_str = ''
        for p, v in params.items():
            precision = precisions.get(p, 3)
            fixed_str += f'_{p}={v:.{precision}f}'

        filename = f'resolution_{source}{fixed_str}.png'
        path = os.path.join(grid_strings.plots_path(source), 'resolution')
        filepath = os.path.join(path, filename)

        print(f'Saving {filepath}')
        plt.savefig(filepath)
        plt.close(fig)
    elif display:
        # BUGFIX: ``display`` was previously ignored and the figure was
        # always shown whenever save=False
        plt.show(block=False)
def get_not(array, var):
    """Return the first element of `array` left after removing one
    occurrence of `var` (intended for length-2 arrays).

    Raises ValueError if `var` is not present, as list.remove would.
    """
    items = list(array)
    idx = items.index(var)
    del items[idx]
    return items[0]
def get_multigrids(sources, grid_version):
    """Return a dict mapping each source name to its Kgrid object."""
    return {source: grid_analyser.Kgrid(source, grid_version=grid_version)
            for source in sources}
def get_subgrids(kgrids, params):
    """Return (summ, params) sub-tables of each source's Kgrid,
    filtered by the given params dict.
    """
    sub_summ = {}
    sub_params = {}
    for src, kgrid in kgrids.items():
        sub_params[src] = kgrid.get_params(params=params)
        sub_summ[src] = kgrid.get_summ(params=params)
    return sub_summ, sub_params
def set_axes(ax, title='', xlabel='', ylabel='', yscale='linear', xscale='linear',
             fontsize=14, yticks=True, xticks=True):
    """Apply title, axis labels, scales and optional tick hiding to an axes."""
    ax.set_title(title, fontsize=fontsize)
    ax.set_xlabel(xlabel, fontsize=fontsize)
    ax.set_ylabel(ylabel, fontsize=fontsize)
    ax.set_xscale(xscale)
    ax.set_yscale(yscale)

    # optionally hide tick marks/labels on either axis
    if not yticks:
        ax.axes.tick_params(axis='both', left='off', labelleft='off')
    if not xticks:
        ax.axes.tick_params(axis='both', bottom='off', labelbottom='off')
def check_params(params, must_specify=('x', 'z', 'accrate', 'mass')):
    """Raise ValueError if any required key is missing from `params`."""
    missing = [p for p in must_specify if p not in params]
    if missing:
        raise ValueError(f'{missing[0]} not specified in params')
| 33.42268 | 85 | 0.566626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,290 | 0.198951 |
89d59366b3b61d950e0acc7ff13715341f62b69d | 47,267 | py | Python | scielomanager/journalmanager/models.py | jamilatta/scielo-manager | d506c6828ba9b1089faa164bc42ba29a0f228e61 | [
"BSD-2-Clause"
] | null | null | null | scielomanager/journalmanager/models.py | jamilatta/scielo-manager | d506c6828ba9b1089faa164bc42ba29a0f228e61 | [
"BSD-2-Clause"
] | null | null | null | scielomanager/journalmanager/models.py | jamilatta/scielo-manager | d506c6828ba9b1089faa164bc42ba29a0f228e61 | [
"BSD-2-Clause"
] | null | null | null | # -*- encoding: utf-8 -*-
import urllib
import hashlib
import logging
import choices
import caching.base
from scielomanager import tools
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from django.db import (
models,
transaction,
IntegrityError,
DatabaseError,
)
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext as __
from django.conf import settings
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.template.defaultfilters import slugify
from django.core.exceptions import ImproperlyConfigured
from scielo_extensions import modelfields
from tastypie.models import create_api_key
import jsonfield
from scielomanager.utils import base28
from . import modelmanagers
User.__bases__ = (caching.base.CachingMixin, models.Model)
User.add_to_class('objects', caching.base.CachingManager())
logger = logging.getLogger(__name__)
EVENT_TYPES = [(ev_type, ev_type) for ev_type in ['added', 'deleted', 'updated']]
ISSUE_DEFAULT_LICENSE_HELP_TEXT = _(u"If not defined, will be applied the related journal's use license. \
The SciELO default use license is BY-NC. Please visit: http://ref.scielo.org/jf5ndd (5.2.11. Política de direitos autorais) for more details.")
def get_user_collections(user_id):
    """
    Return all the collections of a given user. These are the collections
    the user can reach through the collections bar.
    """
    user = User.objects.get(pk=user_id)
    return user.usercollections_set.all().order_by('collection__name')
def get_journals_default_use_license():
    """
    Return the use license applied by default to all new Journals.

    This callable is used as the default value of Journal.use_license.
    Per SciELO criteria the default is currently BY-NC; see
    http://ref.scielo.org/jf5ndd for details.

    Raises ImproperlyConfigured when no UseLicense is flagged as default.
    """
    qset = UseLicense.objects
    try:
        return qset.get(is_default=True)
    except UseLicense.DoesNotExist:
        raise ImproperlyConfigured("There is no UseLicense set as default")
class AppCustomManager(caching.base.CachingManager):
    """
    Domain specific model managers.
    """

    def available(self, is_available=True):
        """
        Filter the queryset based on its availability.

        ``is_available`` may also be anything int()-convertible: 0 means
        unavailable, any other number means available; values that cannot
        be converted fall back to available.
        """
        queryset = self.get_query_set()

        if not isinstance(is_available, bool):
            try:
                is_available = bool(int(is_available))
            except (ValueError, TypeError):
                is_available = True

        return queryset.filter(is_trashed=not is_available)
class JournalCustomManager(AppCustomManager):
    # Journal-specific lookups, contextualized by user/collection.

    def all_by_user(self, user, is_available=True, pub_status=None):
        """
        Retrieves all the user's journals, contextualized by
        their default collection.

        ``pub_status`` optionally narrows the result; unknown status
        values are silently ignored.
        """
        default_collection = Collection.objects.get_default_by_user(user)

        objects_all = self.available(is_available).filter(
            collections=default_collection).distinct()

        if pub_status:
            # only apply the filter when the status is a declared choice
            # NOTE(review): publication status appears to live on Membership
            # in this model layout -- confirm Journal still exposes a
            # ``pub_status`` field/annotation for this filter to work.
            if pub_status in [stat[0] for stat in choices.JOURNAL_PUBLICATION_STATUS]:
                objects_all = objects_all.filter(pub_status=pub_status)

        return objects_all

    def recents_by_user(self, user):
        """
        Retrieves the recently modified objects related to the given user.
        """
        default_collection = Collection.objects.get_default_by_user(user)

        recents = self.filter(
            collections=default_collection).distinct().order_by('-updated')[:5]

        return recents

    def all_by_collection(self, collection, is_available=True):
        # All journals of a collection, filtered by availability.
        objects_all = self.available(is_available).filter(
            collections=collection)
        return objects_all

    def by_issn(self, issn):
        """
        Get the journal assigned to `issn`, being electronic or print.

        In some cases more than one instance of the same journal will be
        returned due to the fact that journals present in more than one
        collection is handled separately.
        """
        if issn == '':
            # short-circuit: an empty ISSN would otherwise match journals
            # with blank print/electronic ISSN fields
            return Journal.objects.none()

        journals = Journal.objects.filter(
            models.Q(print_issn=issn) | models.Q(eletronic_issn=issn)
        )

        return journals
class SectionCustomManager(AppCustomManager):
    """Section lookups contextualized by the user's default collection."""

    def all_by_user(self, user, is_available=True):
        """Return available sections whose journal belongs to the user's
        default collection."""
        collection = Collection.objects.get_default_by_user(user)
        return self.available(is_available).filter(
            journal__collections=collection).distinct()
class IssueCustomManager(AppCustomManager):
    """Issue lookups contextualized by collection."""

    def all_by_collection(self, collection, is_available=True):
        """Return available issues whose journal belongs to ``collection``."""
        return self.available(is_available).filter(
            journal__collections=collection)
class InstitutionCustomManager(AppCustomManager):
    """
    Add capabilities to Institution subclasses to retrieve querysets
    based on user's collections.
    """
    def all_by_user(self, user, is_available=True):
        """Return available institutions bound to the user's default collection."""
        collection = Collection.objects.get_default_by_user(user)
        return self.available(is_available).filter(
            collections__in=[collection]).distinct()
class CollectionCustomManager(AppCustomManager):
    # User-centric Collection lookups (membership, default, managed).

    def all_by_user(self, user):
        """
        Returns all the Collections related to the given
        user.
        """
        collections = self.filter(usercollections__user=user).order_by(
            'name')

        return collections

    def get_default_by_user(self, user):
        """
        Returns the Collection marked as default by the given user.
        If none satisfies this condition, the first
        instance is then returned.

        Like any manager method that does not return Querysets,
        `get_default_by_user` raises DoesNotExist if there is no
        result for the given parameter.
        """
        collections = self.filter(usercollections__user=user,
            usercollections__is_default=True).order_by('name')

        if not collections.count():
            try:
                collection = self.all_by_user(user)[0]
            except IndexError:
                raise Collection.DoesNotExist()
            else:
                # side effect: persist the fallback choice so subsequent
                # calls find an explicit default
                collection.make_default_to_user(user)
                return collection

        return collections[0]

    def get_managed_by_user(self, user):
        """
        Returns all collections managed by a given user.
        """
        collections = self.filter(usercollections__user=user,
            usercollections__is_manager=True).order_by('name')

        return collections
class RegularPressReleaseCustomManager(caching.base.CachingManager):
    # Lookups for press releases bound to specific issues.

    def by_journal_pid(self, journal_pid):
        """
        Returns all PressReleases related to a Journal, given its
        PID.

        The PID is matched against both the print and electronic ISSN.
        """
        journals = Journal.objects.filter(
            models.Q(print_issn=journal_pid) | models.Q(eletronic_issn=journal_pid))

        preleases = self.filter(issue__journal__in=journals.values('id')).select_related('translations')

        return preleases

    def all_by_journal(self, journal):
        """
        Returns all PressReleases related to a Journal
        """
        preleases = self.filter(issue__journal=journal)
        return preleases

    def by_issue_pid(self, issue_pid):
        """
        Returns all PressReleases related to an Issue, given its
        PID.

        Issue PID layout (by position): chars 0-8 are the journal ISSN,
        chars 9-12 the publication year, and the remainder the issue order.
        """
        issn_slice = slice(0, 9)
        year_slice = slice(9, 13)
        order_slice = slice(13, None)

        issn = issue_pid[issn_slice]
        year = issue_pid[year_slice]
        order = int(issue_pid[order_slice])

        preleases_qset = self.by_journal_pid(issn)

        return preleases_qset.filter(issue__publication_year=year).filter(issue__order=order)
class AheadPressReleaseCustomManager(caching.base.CachingManager):
    """Lookups for ahead-of-print press releases (bound to a journal)."""

    def by_journal_pid(self, journal_pid):
        """
        Returns all PressReleases related to a Journal, given its
        PID (matched against print and electronic ISSN).
        """
        by_print = models.Q(journal__print_issn=journal_pid)
        by_electronic = models.Q(journal__eletronic_issn=journal_pid)
        return self.filter(by_print | by_electronic)
class Language(caching.base.CachingMixin, models.Model):
    """
    Represents ISO 639-1 Language Code and its language name in English. Django
    automaticaly translates language names, if you write them right.

    http://en.wikipedia.org/wiki/ISO_639-1_language_matrix
    """
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()
    iso_code = models.CharField(_('ISO 639-1 Language Code'), max_length=2)
    name = models.CharField(_('Language Name (in English)'), max_length=64)

    def __unicode__(self):
        # the English name is run through ugettext so it displays translated
        return __(self.name)

    class Meta:
        ordering = ['name']
class UserProfile(caching.base.CachingMixin, models.Model):
    """Per-user profile data; the e-mail also drives Gravatar integration."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()
    user = models.OneToOneField(User)
    email = models.EmailField(_('E-mail'), blank=False, unique=True, null=False)

    @property
    def gravatar_id(self):
        """MD5 of the normalized e-mail, as required by the Gravatar API."""
        return hashlib.md5(self.email.lower().strip()).hexdigest()

    @property
    def avatar_url(self):
        """Absolute Gravatar URL (18px, 'mystery man' fallback)."""
        params = urllib.urlencode({'s': 18, 'd': 'mm'})
        return '{0}/avatar/{1}?{2}'.format(getattr(settings, 'GRAVATAR_BASE_URL',
            'https://secure.gravatar.com'), self.gravatar_id, params)

    @property
    def get_default_collection(self):
        """
        Return the default collection for this user
        """
        return Collection.objects.get_default_by_user(self.user)

    def save(self, *args, **kwargs):
        # keep the related auth.User's e-mail in sync with the profile's.
        # BUGFIX: forward *args/**kwargs instead of the previous hardcoded
        # (force_insert, force_update) pair, which silently dropped
        # arguments such as ``using`` and ``update_fields``.
        self.user.email = self.email
        self.user.save()
        return super(UserProfile, self).save(*args, **kwargs)
class Collection(caching.base.CachingMixin, models.Model):
    """A SciELO collection: an operating instance with its own journals and
    managing users (related through the UserCollections through model)."""
    objects = CollectionCustomManager()
    nocacheobjects = models.Manager()
    collection = models.ManyToManyField(User, related_name='user_collection',
        through='UserCollections', null=True, blank=True, )
    name = models.CharField(_('Collection Name'), max_length=128, db_index=True, )
    # regenerated from ``name`` on every save (see save() below)
    name_slug = models.SlugField(unique=True, db_index=True, blank=True, null=True)
    url = models.URLField(_('Instance URL'), )
    logo = models.ImageField(_('Logo'), upload_to='img/collections_logos', null=True, blank=True, )
    acronym = models.CharField(_('Sigla'), max_length=16, db_index=True, blank=True, )
    country = models.CharField(_('Country'), max_length=32,)
    state = models.CharField(_('State'), max_length=32, null=False, blank=True,)
    city = models.CharField(_('City'), max_length=32, null=False, blank=True,)
    address = models.TextField(_('Address'),)
    address_number = models.CharField(_('Number'), max_length=8,)
    address_complement = models.CharField(_('Complement'), max_length=128, null=False, blank=True,)
    zip_code = models.CharField(_('Zip Code'), max_length=16, null=True, blank=True, )
    phone = models.CharField(_('Phone Number'), max_length=16, null=False, blank=True, )
    fax = models.CharField(_('Fax Number'), max_length=16, null=False, blank=True, )
    email = models.EmailField(_('Email'), )

    def __unicode__(self):
        return unicode(self.name)

    class Meta:
        ordering = ['name']
        permissions = (("list_collection", "Can list Collections"),)

    def save(self, *args, **kwargs):
        """Recompute ``name_slug`` from ``name`` before persisting."""
        self.name_slug = slugify(self.name)
        super(Collection, self).save(*args, **kwargs)

    def add_user(self, user, is_default=False, is_manager=False):
        """
        Add the user to the current collection.
        """
        UserCollections.objects.create(collection=self,
                                       user=user,
                                       is_default=is_default,
                                       is_manager=is_manager)

    def remove_user(self, user):
        """
        Removes the user from the current collection.
        If the user isn't already related to the given collection,
        it will do nothing, silently.
        """
        try:
            uc = UserCollections.objects.get(collection=self, user=user)
        except UserCollections.DoesNotExist:
            return None
        else:
            uc.delete()

    def make_default_to_user(self, user):
        """
        Makes the current collection, the user's default.
        """
        # clear any previous default before (re)creating the relation
        UserCollections.objects.filter(user=user).update(is_default=False)
        uc, created = UserCollections.objects.get_or_create(
            collection=self, user=user)
        uc.is_default = True
        uc.save()

    def is_default_to_user(self, user):
        """
        Returns a boolean value depending if the current collection
        is set as default to the given user.
        """
        try:
            uc = UserCollections.objects.get(collection=self, user=user)
            return uc.is_default
        except UserCollections.DoesNotExist:
            return False

    def is_managed_by_user(self, user):
        """
        Returns a boolean value depending if the current collection
        is managed by the given user.
        """
        try:
            uc = UserCollections.objects.get(collection=self, user=user)
            return uc.is_manager
        except UserCollections.DoesNotExist:
            return False
class UserCollections(caching.base.CachingMixin, models.Model):
    """Through model of the User<->Collection M2M, carrying the per-user
    default and manager flags."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()
    user = models.ForeignKey(User)
    collection = models.ForeignKey(Collection)
    is_default = models.BooleanField(_('Is default'), default=False, null=False, blank=False)
    is_manager = models.BooleanField(_('Is manager of the collection?'), default=False, null=False,
        blank=False)

    class Meta:
        unique_together = ("user", "collection", )
class Institution(caching.base.CachingMixin, models.Model):
    """Base model for institutions (e.g. Sponsor inherits from it via
    Django multi-table inheritance); holds contact/address data."""
    #Custom manager
    objects = AppCustomManager()
    nocacheobjects = models.Manager()

    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    name = models.CharField(_('Institution Name'), max_length=256, db_index=True)
    complement = models.TextField(_('Institution Complements'), blank=True, default="")
    acronym = models.CharField(_('Sigla'), max_length=16, db_index=True, blank=True)
    country = models.CharField(_('Country'), max_length=32)
    state = models.CharField(_('State'), max_length=32, null=False, blank=True)
    city = models.CharField(_('City'), max_length=32, null=False, blank=True)
    address = models.TextField(_('Address'))
    address_number = models.CharField(_('Number'), max_length=8)
    address_complement = models.CharField(_('Address Complement'), max_length=128, null=False, blank=True)
    zip_code = models.CharField(_('Zip Code'), max_length=16, null=True, blank=True)
    phone = models.CharField(_('Phone Number'), max_length=16, null=False, blank=True)
    fax = models.CharField(_('Fax Number'), max_length=16, null=False, blank=True)
    cel = models.CharField(_('Cel Number'), max_length=16, null=False, blank=True)
    email = models.EmailField(_('E-mail'))
    # soft-delete flag used by AppCustomManager.available()
    is_trashed = models.BooleanField(_('Is trashed?'), default=False, db_index=True)

    def __unicode__(self):
        return u'%s' % (self.name)

    class Meta:
        ordering = ['name']
class Sponsor(Institution):
    """An Institution that sponsors journals; bound to one or more collections."""
    objects = InstitutionCustomManager()
    nocacheobjects = models.Manager()
    userobjects = modelmanagers.SponsorManager()

    collections = models.ManyToManyField(Collection)

    class Meta:
        permissions = (("list_sponsor", "Can list Sponsors"),)
class SubjectCategory(caching.base.CachingMixin, models.Model):
    """A subject category term that journals can be tagged with."""
    #Custom manager
    # NOTE(review): JournalCustomManager here looks copy-pasted from Journal;
    # its methods filter on journal-specific fields — confirm it is intended
    # for this model.
    objects = JournalCustomManager()
    nocacheobjects = models.Manager()

    term = models.CharField(_('Term'), max_length=256, db_index=True)

    def __unicode__(self):
        return self.term
class StudyArea(caching.base.CachingMixin, models.Model):
    """A broad study area (choices come from choices.SUBJECTS)."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()

    study_area = models.CharField(_('Study Area'), max_length=256,
        choices=sorted(choices.SUBJECTS, key=lambda SUBJECTS: SUBJECTS[1]))

    def __unicode__(self):
        return self.study_area
class Journal(caching.base.CachingMixin, models.Model):
"""
Represents a Journal that is managed by one SciELO Collection.
`editor_address` references the institution who operates the
process.
`publisher_address` references the institution who is responsible
for the Journal.
"""
#Custom manager
objects = JournalCustomManager()
nocacheobjects = models.Manager()
userobjects = modelmanagers.JournalManager()
#Relation fields
creator = models.ForeignKey(User, related_name='enjoy_creator', editable=False)
sponsor = models.ManyToManyField('Sponsor', verbose_name=_('Sponsor'), related_name='journal_sponsor', null=True, blank=True)
previous_title = models.ForeignKey('Journal', verbose_name=_('Previous title'), related_name='prev_title', null=True, blank=True)
use_license = models.ForeignKey('UseLicense', verbose_name=_('Use license'))
collections = models.ManyToManyField('Collection', through='Membership')
languages = models.ManyToManyField('Language',)
national_code = models.CharField(_('National Code'), max_length=64, null=True, blank=True)
abstract_keyword_languages = models.ManyToManyField('Language', related_name="abstract_keyword_languages", )
subject_categories = models.ManyToManyField(SubjectCategory, verbose_name=_("Subject Categories"), related_name="journals", null=True)
study_areas = models.ManyToManyField(StudyArea, verbose_name=_("Study Area"), related_name="journals_migration_tmp", null=True)
editors = models.ManyToManyField(User, related_name='user_editors', null=True, blank=True)
#Fields
current_ahead_documents = models.IntegerField(_('Total of ahead of print documents for the current year'), max_length=3, default=0, blank=True, null=True)
previous_ahead_documents = models.IntegerField(_('Total of ahead of print documents for the previous year'), max_length=3, default=0, blank=True, null=True)
twitter_user = models.CharField(_('Twitter User'), max_length=128, null=True, blank=True)
title = models.CharField(_('Journal Title'), max_length=256, db_index=True)
title_iso = models.CharField(_('ISO abbreviated title'), max_length=256, db_index=True)
short_title = models.CharField(_('Short Title'), max_length=256, db_index=True, null=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
acronym = models.CharField(_('Acronym'), max_length=16, blank=False)
scielo_issn = models.CharField(_('The ISSN used to build the Journal PID.'), max_length=16,
choices=sorted(choices.SCIELO_ISSN, key=lambda SCIELO_ISSN: SCIELO_ISSN[1]))
print_issn = models.CharField(_('Print ISSN'), max_length=9, db_index=True)
eletronic_issn = models.CharField(_('Electronic ISSN'), max_length=9, db_index=True)
subject_descriptors = models.CharField(_('Subject / Descriptors'), max_length=1024)
init_year = models.CharField(_('Initial Year'), max_length=4)
init_vol = models.CharField(_('Initial Volume'), max_length=16)
init_num = models.CharField(_('Initial Number'), max_length=16)
final_year = models.CharField(_('Final Year'), max_length=4, null=True, blank=True)
final_vol = models.CharField(_('Final Volume'), max_length=16, null=False, blank=True)
final_num = models.CharField(_('Final Number'), max_length=16, null=False, blank=True)
medline_title = models.CharField(_('Medline Title'), max_length=256, null=True, blank=True)
medline_code = models.CharField(_('Medline Code'), max_length=64, null=True, blank=True)
frequency = models.CharField(_('Frequency'), max_length=16,
choices=sorted(choices.FREQUENCY, key=lambda FREQUENCY: FREQUENCY[1]))
editorial_standard = models.CharField(_('Editorial Standard'), max_length=64,
choices=sorted(choices.STANDARD, key=lambda STANDARD: STANDARD[1]))
ctrl_vocabulary = models.CharField(_('Controlled Vocabulary'), max_length=64,
choices=choices.CTRL_VOCABULARY)
pub_level = models.CharField(_('Publication Level'), max_length=64,
choices=sorted(choices.PUBLICATION_LEVEL, key=lambda PUBLICATION_LEVEL: PUBLICATION_LEVEL[1]))
secs_code = models.CharField(_('SECS Code'), max_length=64, null=False, blank=True)
copyrighter = models.CharField(_('Copyrighter'), max_length=254)
url_online_submission = models.CharField(_('URL of online submission'), max_length=128, null=True, blank=True)
url_journal = models.CharField(_('URL of the journal'), max_length=128, null=True, blank=True)
notes = models.TextField(_('Notes'), max_length=254, null=True, blank=True)
index_coverage = models.TextField(_('Index Coverage'), null=True, blank=True)
cover = models.ImageField(_('Journal Cover'), upload_to='img/journal_cover/', null=True, blank=True)
logo = models.ImageField(_('Journal Logo'), upload_to='img/journals_logos', null=True, blank=True)
is_trashed = models.BooleanField(_('Is trashed?'), default=False, db_index=True)
other_previous_title = models.CharField(_('Other Previous Title'), max_length=255, blank=True)
editor_name = models.CharField(_('Editor Names'), max_length=512)
editor_address = models.CharField(_('Editor Address'), max_length=512)
editor_address_city = models.CharField(_('Editor City'), max_length=256)
editor_address_state = models.CharField(_('Editor State/Province/Region'), max_length=128)
editor_address_zip = models.CharField(_('Editor Zip/Postal Code'), max_length=64)
editor_address_country = modelfields.CountryField(_('Editor Country'))
editor_phone1 = models.CharField(_('Editor Phone 1'), max_length=32)
editor_phone2 = models.CharField(_('Editor Phone 2'), null=True, blank=True, max_length=32)
editor_email = models.EmailField(_('Editor E-mail'))
publisher_name = models.CharField(_('Publisher Name'), max_length=256)
publisher_country = modelfields.CountryField(_('Publisher Country'))
publisher_state = models.CharField(_('Publisher State/Province/Region'), max_length=64)
publication_city = models.CharField(_('Publication City'), max_length=64)
is_indexed_scie = models.BooleanField(_('SCIE'), default=False)
is_indexed_ssci = models.BooleanField(_('SSCI'), default=False)
is_indexed_aehci = models.BooleanField(_('A&HCI'), default=False)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
permissions = (("list_journal", "Can list Journals"),
("list_editor_journal", "Can list editor Journals"))
def issues_as_grid(self, is_available=True):
objects_all = self.issue_set.available(is_available).order_by(
'-publication_year', '-volume')
grid = OrderedDict()
for issue in objects_all:
year_node = grid.setdefault(issue.publication_year, OrderedDict())
volume_node = year_node.setdefault(issue.volume, [])
volume_node.append(issue)
for year, volume in grid.items():
for vol, issues in volume.items():
issues.sort(key=lambda x: x.order)
return grid
def has_issues(self, issues):
"""
Returns ``True`` if all the given issues are bound to the journal.
``issues`` is a list of Issue pk.
"""
issues_to_test = set(int(issue) for issue in issues)
bound_issues = set(issue.pk for issue in self.issue_set.all())
return issues_to_test.issubset(bound_issues)
def reorder_issues(self, new_order, publication_year, volume=None):
"""
Make persistent the ordering received as a list of ``pk``,
to all the issues in a given ``publication_year`` and ``volume``.
The lenght of ``new_order`` must match with the subset of
issues by ``publication_year`` and ``volume``.
"""
filters = {'publication_year': publication_year}
if volume:
filters['volume'] = volume
issues = self.issue_set.filter(**filters)
issues_count = issues.count()
new_order_count = len(new_order)
if new_order_count != issues_count:
raise ValueError('new_order lenght does not match. %s:%s' % (new_order_count, issues_count))
with transaction.commit_on_success():
for i, pk in enumerate(new_order):
order = i + 1
issue = issues.get(pk=pk)
issue.order = order
issue.save()
def is_editor(self, user):
"""
Returns a boolean value depending if the given user is an editor
of the current journal.
"""
try:
self.editors.get(id=user.id)
except ObjectDoesNotExist:
return False
return True
@property
def scielo_pid(self):
"""
Returns the ISSN used as PID on SciELO public catalogs.
"""
attr = u'print_issn' if self.scielo_issn == u'print' else u'eletronic_issn'
return getattr(self, attr)
def join(self, collection, responsible):
"""Make this journal part of the collection.
"""
Membership.objects.create(journal=self,
collection=collection,
created_by=responsible,
status='inprogress')
def membership_info(self, collection, attribute=None):
"""Retrieve info about the relation of this journal with a
given collection.
"""
rel = self.membership_set.get(collection=collection)
if attribute:
return getattr(rel, attribute)
else:
return rel
    def change_status(self, collection, new_status, reason, responsible):
        """Update the publication status of this journal in ``collection``.

        Saving the Membership also snapshots the change into
        JournalTimeline (see Membership.save).

        NOTE(review): ``responsible`` is accepted but never used here —
        the timeline entry keeps the Membership's original ``created_by``.
        Confirm whether the responsible user should be recorded instead.
        """
        rel = self.membership_info(collection)
        rel.status = new_status
        rel.reason = reason
        rel.save()
class Membership(models.Model):
    """
    Represents the many-to-many relation
    between Journal and Collection.
    """
    journal = models.ForeignKey('Journal')
    collection = models.ForeignKey('Collection')
    # Publication status of the journal inside this collection.
    status = models.CharField(max_length=16, default="inprogress",
                              choices=choices.JOURNAL_PUBLICATION_STATUS)
    # NOTE(review): auto_now=True refreshes this timestamp on *every* save,
    # not only on status changes — confirm that is intended.
    since = models.DateTimeField(auto_now=True)
    reason = models.TextField(_('Why are you changing the publication status?'),
                              blank=True, default="")
    created_by = models.ForeignKey(User, editable=False)

    def save(self, *args, **kwargs):
        """
        Always save a copy at JournalTimeline
        """
        super(Membership, self).save(*args, **kwargs)
        # Every save appends a snapshot to the journal's status history.
        JournalTimeline.objects.create(journal=self.journal,
                                       collection=self.collection,
                                       status=self.status,
                                       reason=self.reason,
                                       created_by=self.created_by,
                                       since=self.since)

    class Meta():
        unique_together = ("journal", "collection")
class JournalTimeline(models.Model):
    """
    Represents the status history of a journal.

    Entries are appended automatically by Membership.save.
    """
    journal = models.ForeignKey('Journal', related_name='statuses')
    collection = models.ForeignKey('Collection')
    status = models.CharField(max_length=16,
                              choices=choices.JOURNAL_PUBLICATION_STATUS)
    # Timestamp copied from the Membership at save time (not auto-set here).
    since = models.DateTimeField()
    reason = models.TextField(default="")
    created_by = models.ForeignKey(User)
class JournalTitle(caching.base.CachingMixin, models.Model):
    """An alternative title of a Journal, classified by category."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    journal = models.ForeignKey(Journal, related_name='other_titles')
    title = models.CharField(_('Title'), null=False, max_length=128)
    # Choices are presented sorted by their human-readable label.
    category = models.CharField(_('Title Category'), null=False, max_length=128, choices=sorted(choices.TITLE_CATEGORY, key=lambda TITLE_CATEGORY: TITLE_CATEGORY[1]))
class JournalMission(caching.base.CachingMixin, models.Model):
    """A mission statement of a Journal, in a given language."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    journal = models.ForeignKey(Journal, related_name='missions')
    description = models.TextField(_('Mission'))
    # NOTE(review): blank=False but null=True — the language is required in
    # forms yet optional at the DB level; confirm this asymmetry is intended.
    language = models.ForeignKey('Language', blank=False, null=True)
class UseLicense(caching.base.CachingMixin, models.Model):
    """A use license that journals/issues may adopt.

    Exactly one license is kept flagged as the default (see save()).
    """
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    license_code = models.CharField(_('License Code'), unique=True, null=False, blank=False, max_length=64)
    reference_url = models.URLField(_('License Reference URL'), null=True, blank=True)
    disclaimer = models.TextField(_('Disclaimer'), null=True, blank=True, max_length=512)
    is_default = models.BooleanField(_('Is Default?'), default=False)

    def __unicode__(self):
        return self.license_code

    class Meta:
        ordering = ['license_code']

    def save(self, *args, **kwargs):
        """
        Keep the invariant that exactly one UseLicense is the default.

        - If no license is currently the default, or ``self`` is the current
          default, ``self`` is (re)flagged as default — so unsetting the only
          default is forced back on.
        - If ``self`` ends up as default, every other default is demoted
          before saving.
        """
        # Lazy queryset: evaluated by count()/`in` below, before any update.
        qs = UseLicense.objects.filter(is_default=True)
        if (qs.count() == 0 ) or (self in qs):
            # no other was default, or ``self`` is the current default one,
            # so ``self`` will be set as default
            self.is_default = True
        if self.is_default:
            if self.pk:
                # Don't demote ourselves when already persisted.
                qs = qs.exclude(pk=self.pk)
            if qs.count() != 0:
                qs.update(is_default=False)
        super(UseLicense, self).save(*args, **kwargs)
class TranslatedData(caching.base.CachingMixin, models.Model):
    """A free-form translation of one field of one model, in one language."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    translation = models.CharField(_('Translation'), null=True, blank=True, max_length=512)
    language = models.CharField(_('Language'), choices=sorted(choices.LANGUAGE, key=lambda LANGUAGE: LANGUAGE[1]), null=False, blank=False, max_length=32)
    model = models.CharField(_('Model'), null=False, blank=False, max_length=32)
    field = models.CharField(_('Field'), null=False, blank=False, max_length=32)

    def __unicode__(self):
        # Fall back to a diagnostic label when no translation is stored.
        if self.translation is None:
            return 'Missing trans: {0}.{1}'.format(self.model, self.field)
        return self.translation
class SectionTitle(caching.base.CachingMixin, models.Model):
    """A title of a Section, in a given language."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    section = models.ForeignKey('Section', related_name='titles')
    title = models.CharField(_('Title'), max_length=256, null=False)
    language = models.ForeignKey('Language')

    class Meta:
        ordering = ['title']
class Section(caching.base.CachingMixin, models.Model):
    """
    Represents a multilingual section of one/many Issues of
    a given Journal.

    ``legacy_code`` contains the section code used by the old
    title manager. We've decided to store this value just by
    historical reasons, and we don't know if it will last forever.
    """
    #Custom manager
    objects = SectionCustomManager()
    nocacheobjects = models.Manager()
    userobjects = modelmanagers.SectionManager()
    journal = models.ForeignKey(Journal)
    # Unique code generated on first save (see _create_code).
    code = models.CharField(unique=True, max_length=21, blank=True)
    legacy_code = models.CharField(null=True, blank=True, max_length=16)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    is_trashed = models.BooleanField(_('Is trashed?'), default=False, db_index=True)

    def __unicode__(self):
        # Join all titles (one per language) into a single display string.
        return ' / '.join([sec_title.title for sec_title in self.titles.all().order_by('language')])

    @property
    def actual_code(self):
        """The generated code; raises AttributeError if not saved yet."""
        if not self.pk or not self.code:
            raise AttributeError('section must be saved in order to have a code')
        return self.code

    def is_used(self):
        """Whether any Issue is bound to this section."""
        try:
            return True if self.issue_set.all().count() else False
        except ValueError:  # raised when the object is not yet saved
            return False

    def add_title(self, title, language):
        """
        Adds a section title in the given language.
        A Language instance must be passed as the language argument.
        """
        SectionTitle.objects.create(section=self,
                                    title=title, language=language)

    def _suggest_code(self, rand_generator=base28.genbase):
        """
        Suggests a code for the section instance.

        The code is formed by the journal acronym + 4 pseudo-random
        base 28 chars.

        ``rand_generator`` is the callable responsible for the pseudo-random
        chars sequence. It may accept the number of chars as argument.
        """
        num_chars = getattr(settings, 'SECTION_CODE_TOTAL_RANDOM_CHARS', 4)
        fmt = '{0}-{1}'.format(self.journal.acronym, rand_generator(num_chars))
        return fmt

    def _create_code(self, *args, **kwargs):
        """Generate a unique code with retry, then save.

        Retries up to ``max_tries`` (default 5) on IntegrityError
        (code collision); raises DatabaseError when exhausted.
        """
        if not self.code:
            tries = kwargs.pop('max_tries', 5)
            while tries > 0:
                self.code = self._suggest_code()
                try:
                    super(Section, self).save(*args, **kwargs)
                except IntegrityError:
                    # Suggested code already taken — try another one.
                    tries -= 1
                    logger.warning('conflict while trying to generate a section code. %i tries remaining.' % tries)
                    continue
                else:
                    logger.info('code created successfully for %s' % unicode(self))
                    break
            else:
                # while/else: reached only when tries are exhausted.
                msg = 'max_tries reached while trying to generate a code for the section %s.' % unicode(self)
                logger.error(msg)
                raise DatabaseError(msg)

    class Meta:
        permissions = (("list_section", "Can list Sections"),)

    def save(self, *args, **kwargs):
        """
        If ``code`` already exists, the section is saved. Else,
        the ``code`` will be generated before the save process is
        performed.
        """
        if self.code:
            super(Section, self).save(*args, **kwargs)
        else:
            # the call to super().save is delegated to _create_code
            # because there are needs to control saving max tries.
            self._create_code(*args, **kwargs)
class Issue(caching.base.CachingMixin, models.Model):
    """A single issue (volume/number) of a Journal."""
    #Custom manager
    objects = IssueCustomManager()
    nocacheobjects = models.Manager()
    section = models.ManyToManyField(Section, blank=True)
    journal = models.ForeignKey(Journal)
    volume = models.CharField(_('Volume'), blank=True, max_length=16)
    number = models.CharField(_('Number'), blank=True, max_length=16)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    publication_start_month = models.IntegerField(_('Start Month'), blank=True, null=True, choices=choices.MONTHS)
    publication_end_month = models.IntegerField(_('End Month'), blank=True, null=True, choices=choices.MONTHS)
    publication_year = models.IntegerField(_('Year'))
    is_marked_up = models.BooleanField(_('Is Marked Up?'), default=False, null=False, blank=True)
    # Falls back to the journal's license when left empty (see save()).
    use_license = models.ForeignKey(UseLicense, null=True, help_text=ISSUE_DEFAULT_LICENSE_HELP_TEXT)
    total_documents = models.IntegerField(_('Total of Documents'), default=0)
    ctrl_vocabulary = models.CharField(_('Controlled Vocabulary'), max_length=64,
                                       choices=sorted(choices.CTRL_VOCABULARY, key=lambda CTRL_VOCABULARY: CTRL_VOCABULARY[1]), null=False, blank=True)
    editorial_standard = models.CharField(_('Editorial Standard'), max_length=64,
                                          choices=sorted(choices.STANDARD, key=lambda STANDARD: STANDARD[1]))
    cover = models.ImageField(_('Issue Cover'), upload_to='img/issue_cover/', null=True, blank=True)
    is_trashed = models.BooleanField(_('Is trashed?'), default=False, db_index=True)
    # Denormalized display label; regenerated on every save from __unicode__.
    label = models.CharField(db_index=True, blank=True, null=True, max_length=64)
    # 1-based position inside the publication year (see _suggest_order).
    order = models.IntegerField(_('Issue Order'), blank=True)
    type = models.CharField(_('Type'), max_length=15, choices=choices.ISSUE_TYPES, default='regular', editable=False)
    suppl_text = models.CharField(_('Suppl Text'), max_length=15, null=True, blank=True)

    class Meta:
        permissions = (("list_issue", "Can list Issues"),
                       ("reorder_issue", "Can Reorder Issues"))

    @property
    def scielo_pid(self):
        """
        Returns the PID used on SciELO public catalogs, in the form:
        ``journal_issn + year + order``
        """
        jissn = self.journal.scielo_pid
        return ''.join(
            [
                jissn,
                unicode(self.publication_year),
                u'%04d' % self.order,  # zero-padded to 4 digits
            ]
        )

    @property
    def identification(self):
        """Human-readable identification (number + supplement text)."""
        values = [self.number]
        if self.type == 'supplement':
            values.append('suppl.%s' % self.suppl_text)
        # Expand the legacy abbreviations used in the raw data.
        return ' '.join([val for val in values if val]).strip().replace(
            'spe', 'special').replace('ahead', 'ahead of print')

    def __unicode__(self):
        return "{0} ({1})".format(self.volume, self.identification).replace('()', '')

    @property
    def publication_date(self):
        """Display string 'start / end - year' of the publication period."""
        return '{0} / {1} - {2}'.format(self.publication_start_month,
                                        self.publication_end_month,
                                        self.publication_year)

    @property
    def suppl_type(self):
        """'number' or 'volume' for supplements; AttributeError otherwise.

        Note: returns None implicitly when a supplement has both (or
        neither) of number/volume set.
        """
        if self.type == 'supplement':
            if self.number != '' and self.volume == '':
                return 'number'
            elif self.number == '' and self.volume != '':
                return 'volume'
        else:
            raise AttributeError('Issues of type %s do not have an attribute named: suppl_type' % self.get_type_display())

    def _suggest_order(self, force=False):
        """
        Based on ``publication_year``, ``journal`` and a pre defined
        ``order``, this method suggests the subsequent ``order`` value.

        If the Issue already has an ``order``, it is returned as-is. Else,
        a query is made for the given ``publication_year`` and ``journal``
        and the ``order`` of the last instance + 1 is used (1 when none
        exist).

        When ``force`` is True this method ignores the instance's order
        attribute and returns the suggested order.
        """
        if self.order and force == False:
            return self.order
        filters = {
            'publication_year': self.publication_year,
            'journal': self.journal,
        }
        try:
            # Highest existing order for this journal/year.
            last = Issue.objects.filter(**filters).order_by('order').reverse()[0]
            next_order = last.order + 1
        except IndexError:
            next_order = 1
        return next_order

    def _get_default_use_license(self):
        """Default license inherited from the parent journal."""
        return self.journal.use_license

    def save(self, *args, **kwargs):
        """Refresh label, default the license, and maintain ``order``."""
        self.label = unicode(self)
        if self.use_license is None and self.journal:
            self.use_license = self._get_default_use_license()
        if not self.pk:
            self.order = self._suggest_order()
        else:
            # the ordering control is based on publication year attr.
            # if an issue is moved between pub years, the order must be reset.
            if tools.has_changed(self, 'publication_year'):
                self.order = self._suggest_order(force=True)
        super(Issue, self).save(*args, **kwargs)
class IssueTitle(caching.base.CachingMixin, models.Model):
    """A title of an Issue, in a given language."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    issue = models.ForeignKey(Issue)
    language = models.ForeignKey('Language')
    title = models.CharField(_('Title'), max_length=128)
class PendedForm(caching.base.CachingMixin, models.Model):
    """A partially filled form saved by a user to be resumed later."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    view_name = models.CharField(max_length=128)
    form_hash = models.CharField(max_length=32)
    user = models.ForeignKey(User, related_name='pending_forms')
    # NOTE(review): auto_now=True updates this on every save; for a pure
    # creation timestamp auto_now_add would be expected — confirm intent.
    created_at = models.DateTimeField(auto_now=True)
class PendedValue(caching.base.CachingMixin, models.Model):
    """A single name/value pair belonging to a PendedForm."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    form = models.ForeignKey(PendedForm, related_name='data')
    name = models.CharField(max_length=255)
    value = models.TextField()
class DataChangeEvent(models.Model):
    """
    Tracks data changes to make possible for consumer apps to know
    what to sync.
    """
    changed_at = models.DateTimeField(auto_now=True)
    user = models.ForeignKey(User)
    # Generic relation: (content_type, object_id) -> any model instance.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    event_type = models.CharField(max_length=16, choices=EVENT_TYPES)
    collection = models.ForeignKey(Collection)
class PressRelease(caching.base.CachingMixin, models.Model):
    """
    Base model for press-releases.

    The binding target is defined by the concrete subclasses: a press-release
    may refer to an Issue (regular) or to a Journal's ahead articles.
    Translations may exist in one or many languages (restricted by the
    Journal publishing policy).
    """
    nocacheobjects = models.Manager()
    objects = models.Manager()
    doi = models.CharField(_("Press release DOI number"),
                           max_length=128, null=True, blank=True)

    def add_article(self, article):
        """
        ``article`` is a string of the article pid.
        """
        PressReleaseArticle.objects.create(article_pid=article,
                                           press_release=self)

    def remove_article(self, article):
        """Unbind the article pid from this press-release, if bound."""
        try:
            bound = PressReleaseArticle.objects.get(press_release=self,
                                                    article_pid=article)
        except PressReleaseArticle.DoesNotExist:
            return None
        bound.delete()

    def add_translation(self, title, content, language):
        """
        Adds a new press-release translation.

        ``language`` is an instance of Language.
        """
        PressReleaseTranslation.objects.create(title=title,
                                               content=content,
                                               language=language,
                                               press_release=self)

    def remove_translation(self, language):
        """
        Removes the translation for the given press-release.

        ``language`` may be a Language instance or an iso-code string.
        If the translation doesn't exist, nothing happens silently.
        """
        lookup = {'press_release': self}
        key = 'language__iso_code' if isinstance(language, basestring) else 'language'
        lookup[key] = language
        try:
            translation = PressReleaseTranslation.objects.get(**lookup)
        except PressReleaseTranslation.DoesNotExist:
            return None
        translation.delete()

    def get_trans(self, language):
        """
        Syntatic suggar for retrieving translations in a given language
        """
        return self.translations.get(language__iso_code=language)

    def __unicode__(self):
        """
        Try to get the first title of the Press Release.
        The form ensures at least one title.
        """
        translations = PressReleaseTranslation.objects.filter(
            press_release=self).order_by('language')
        try:
            return translations[0].title
        except IndexError:
            return __('No Title')

    class Meta:
        abstract = False
        permissions = (("list_pressrelease", "Can list PressReleases"),)
class PressReleaseTranslation(caching.base.CachingMixin, models.Model):
    """
    Represents a press-release in a given language.
    """
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    press_release = models.ForeignKey(PressRelease, related_name='translations')
    language = models.ForeignKey('Language')
    title = models.CharField(_('Title'), max_length=128)
    content = models.TextField(_('Content'))
class PressReleaseArticle(caching.base.CachingMixin, models.Model):
    """
    Represents press-releases bound to Articles.

    Articles are referenced by pid string, not by foreign key.
    """
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()  # bypasses the caching layer
    press_release = models.ForeignKey(PressRelease, related_name='articles')
    article_pid = models.CharField(_('PID'), max_length=32, db_index=True)
class RegularPressRelease(PressRelease):
    """A press-release bound to a specific Issue."""
    objects = RegularPressReleaseCustomManager()
    userobjects = modelmanagers.RegularPressReleaseManager()
    issue = models.ForeignKey(Issue, related_name='press_releases')
class AheadPressRelease(PressRelease):
    """A press-release for ahead-of-print articles, bound to a Journal."""
    objects = AheadPressReleaseCustomManager()
    userobjects = modelmanagers.AheadPressReleaseManager()
    journal = models.ForeignKey(Journal, related_name='press_releases')
class Article(caching.base.CachingMixin, models.Model):
    """An article of an Issue; its metadata lives in the ``front`` JSON."""
    objects = caching.base.CachingManager()
    nocacheobjects = models.Manager()
    issue = models.ForeignKey(Issue, related_name='articles')
    front = jsonfield.JSONField()
    xml_url = models.CharField(_('XML URL'), max_length=256)
    pdf_url = models.CharField(_('PDF URL'), max_length=256)
    images_url = models.CharField(_('Images URL'), max_length=256)

    def __unicode__(self):
        return u' - '.join([self.title, str(self.issue)])

    class Meta:
        permissions = (("list_article", "Can list Article"),)

    @property
    def title(self):
        """Title in the default language, or any title, or None."""
        if 'title-group' not in self.front:
            return None
        title_group = self.front['title-group']
        default_language = self.front.get('default-language', None)
        if default_language in title_group:
            return title_group[default_language]
        # No default-language entry: fall back to an arbitrary title.
        return title_group.values()[0]

    @property
    def titles(self):
        """Mapping of language -> title, or None when absent."""
        if 'title-group' not in self.front:
            return None
        return self.front['title-group']
models.signals.post_save.connect(create_api_key, sender=User)
| 38.428455 | 166 | 0.66393 | 44,669 | 0.945016 | 0 | 0 | 2,770 | 0.058602 | 0 | 0 | 11,635 | 0.24615 |
89d735fe97fdd6718d6f7ee2b956443498bb36dc | 7,323 | py | Python | award/forms.py | aiventimptner/stura | 268e50d08ec975dd1c974ea27cea5c5b638df686 | [
"MIT"
] | null | null | null | award/forms.py | aiventimptner/stura | 268e50d08ec975dd1c974ea27cea5c5b638df686 | [
"MIT"
] | null | null | null | award/forms.py | aiventimptner/stura | 268e50d08ec975dd1c974ea27cea5c5b638df686 | [
"MIT"
] | null | null | null | from datetime import timedelta
from django import forms
from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.http import HttpRequest
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from .models import Lecturer, Nomination, Verification, validate_domain
def strip_email_subdomain(email: str) -> (str, bool):
    """Drop a leading ``st.`` subdomain from the email's host part.

    Returns the (possibly rewritten) address together with a flag telling
    whether the ``st.`` student subdomain was present.
    """
    local_part, host = email.split('@')
    is_student = host.startswith('st.')
    if is_student:
        host = host[3:]
    return f"{local_part}@{host}", is_student
def send_verification_email(nomination: Nomination, request: HttpRequest):
    """Create a 24h Verification token and email its link to the nominator.

    The email restates the submitted nomination so the recipient can check
    what they are confirming.
    """
    # Token is valid for 24 hours from now.
    expiration = timezone.now() + timedelta(hours=24)
    verification = Verification.objects.create(nomination=nomination, expiration=expiration)
    message = _("""Hello %(name)s,
thank you for your nomination. As a last step, please confirm via this email that
the nomination has been submitted by you. All you have to do is click on the link
below. The link is valid until %(expiry)s!
Confirm nomination: %(url)s
For matching purposes, you can find your submitted details again below.
Lecturer: %(lecturer)s
Faculty: %(faculty)s
Reason:
%(reason)s
Best regards,
your student council
--
Student Council of the Otto-von-Guericke-University Magdeburg
Building 26, Room 002
Universitätsplatz 2
39106 Magdeburg
Phone: 0391/67-58971
Email: stura@ovgu.de
Twitter: https://twitter.com/sturaOVGU
Instagram: https://www.instagram.com/stura_ovgu/
""" % {
        'name': nomination.get_username(),
        'expiry': timezone.make_naive(expiration),
        'url': request.build_absolute_uri(reverse('verify-token', kwargs={'token': verification.token})),
        'lecturer': nomination.lecturer.get_full_name(),
        'faculty': nomination.lecturer.get_faculty_display(),
        'reason': nomination.reason,
    })
    send_mail(
        subject=_("Your nomination for the teaching award of the student body"),
        message=message,  # TODO Does not get translated?!?!?
        from_email=None,  # falls back to settings.DEFAULT_FROM_EMAIL
        recipient_list=[nomination.get_valid_email()],
    )
class SubmissionForm(forms.Form):
    """Form to submit a teaching-award nomination for a lecturer."""

    first_name = forms.CharField(widget=forms.TextInput(attrs={
        'class': 'input',
        'autocomplete': 'off',
    }), label=_("First name"))
    last_name = forms.CharField(widget=forms.TextInput(attrs={
        'class': 'input',
        'autocomplete': 'off',
    }), label=_("Last name"))
    faculty = forms.CharField(widget=forms.Select(choices=Lecturer.FACULTIES), label=_("Faculty"))
    reason = forms.CharField(widget=forms.Textarea(attrs={'class': 'textarea', 'rows': 3}), label=_("Reason"))
    sub_email = forms.EmailField(widget=forms.EmailInput(attrs={'class': 'input'}), label=_("Email address"))

    def clean_sub_email(self):
        """Lowercase the address and restrict it to the allowed domains."""
        data = self.cleaned_data['sub_email'].lower()
        try:
            # Override default validation message because email host hasn't been cleaned yet
            validate_domain(strip_email_subdomain(data)[0])
        except ValidationError:
            raise ValidationError(_("Only email addresses of the following domains are allowed: st.ovgu.de, ovgu.de"))
        return data

    def clean(self):
        """Cross-field validation: faculty consistency and duplicate check."""
        cleaned_data = super().clean()
        first_name = cleaned_data.get('first_name')
        last_name = cleaned_data.get('last_name')
        faculty = cleaned_data.get('faculty')
        sub_email = cleaned_data.get('sub_email')

        # BUG FIX: ``lecturer`` was only bound inside the name check below, so
        # a failed name field combined with a valid email raised NameError.
        lecturer = None
        if first_name and last_name:
            try:
                lecturer = Lecturer.objects.get(first_name=first_name, last_name=last_name)
            except Lecturer.DoesNotExist:
                lecturer = None
            if lecturer and lecturer.faculty != faculty:
                msg = _("Students before you have indicated this lecturer as part "
                        "of the F%(lecturer_faculty)s. If this is not correct, please "
                        "email us at 'verwaltung@stura-md.de'.\n"
                        "If there should be a person with this name at both "
                        "faculties (F%(lecturer_faculty)s, F%(faculty)s), please contact "
                        "us via email as well." % {'lecturer_faculty': lecturer.faculty, 'faculty': faculty})
                self.add_error('faculty', msg)
                return

        if sub_email:
            email, is_student = strip_email_subdomain(sub_email)
            # One nomination per (lecturer, submitter email) pair.
            nomination = Nomination.objects.filter(lecturer=lecturer, sub_email=email)
            if nomination.exists():
                raise ValidationError(
                    _("A nomination for this teacher in combination with the "
                      "given email address were already received."),
                    code='ambiguous')

    def save(self, request: HttpRequest):
        """Persist the nomination and trigger the verification email."""
        lecturer, create = Lecturer.objects.get_or_create(first_name=self.cleaned_data['first_name'],
                                                          last_name=self.cleaned_data['last_name'],
                                                          faculty=self.cleaned_data['faculty'])
        sub_email, is_student = strip_email_subdomain(self.cleaned_data['sub_email'])

        # Check if submitter already nominated once and use same email host instead of provided value
        is_student = Nomination.objects.filter(
            sub_email=sub_email
        ).values_list('is_student', flat=True).first() or is_student

        nomination = Nomination.objects.create(lecturer=lecturer,
                                               reason=self.cleaned_data['reason'],
                                               sub_email=sub_email,
                                               is_student=is_student)
        send_verification_email(nomination, request)
class RenewTokenForm(forms.Form):
    """Form to re-issue verification tokens for unverified nominations."""

    email = forms.EmailField(widget=forms.EmailInput(attrs={'class': 'input'}), label=_("Email address"))

    def clean_email(self):
        # Email addresses are treated case-insensitively throughout.
        return self.cleaned_data['email'].lower()

    def clean(self):
        """Validate that unverified nominations exist for the address."""
        cleaned_data = super().clean()
        # BUG FIX: the field is named 'email'; the old code looked up
        # 'sub_email' (always None) so this validation was silently skipped.
        email = cleaned_data.get('email')
        sub_email = None
        if email:
            sub_email, is_student = strip_email_subdomain(email)
        if sub_email:
            pending_nominations = Nomination.objects.filter(
                sub_email=sub_email,
                is_verified=False,
            ).exists()
            if not pending_nominations:
                raise ValidationError(
                    _("No nomination with the specified email address could be found or "
                      "all nominations with this email address are already confirmed."),
                    code='unknown')

    def renew_tokens(self, request: HttpRequest):
        """Delete stale tokens and send fresh verification emails."""
        sub_email, is_student = strip_email_subdomain(self.cleaned_data['email'])
        # Drop all outstanding tokens for this submitter's open nominations.
        verifications = Verification.objects.filter(
            nomination__sub_email=sub_email,
            nomination__is_verified=False,
        ).all()
        for verification in verifications:
            verification.delete()
        # Re-send a verification email (with a new token) per open nomination.
        nominations = Nomination.objects.filter(
            sub_email=sub_email,
            is_verified=False,
        ).all()
        for nomination in nominations:
            send_verification_email(nomination, request)
| 40.016393 | 118 | 0.632937 | 5,200 | 0.709995 | 0 | 0 | 0 | 0 | 0 | 0 | 2,070 | 0.282632 |
89d82542f7b1351b6e3de4ad757581394ba2df33 | 6,971 | py | Python | src/train-transformer.py | ARLab-VT/VT-Natural-Motion-Processing | 56622dd3a7ed90e3791601dc569a0312e889eaaf | [
"MIT"
] | 11 | 2020-12-07T11:29:18.000Z | 2022-02-25T05:04:46.000Z | src/train-transformer.py | ARLab-VT/VT-Natural-Motion-Processing | 56622dd3a7ed90e3791601dc569a0312e889eaaf | [
"MIT"
] | 1 | 2021-10-16T01:37:23.000Z | 2021-10-16T01:37:23.000Z | src/train-transformer.py | ARLab-VT/VT-Natural-Motion-Processing | 56622dd3a7ed90e3791601dc569a0312e889eaaf | [
"MIT"
] | 2 | 2020-08-15T23:27:23.000Z | 2021-02-11T19:06:27.000Z | # Copyright (c) 2020-present, Assistive Robotics Lab
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from transformers.training_utils import fit
from transformers.transformers import (
InferenceTransformerEncoder,
InferenceTransformer
)
from common.data_utils import load_dataloader
from common.logging import logger
from common.losses import QuatDistance
import torch
from torch import nn, optim
import numpy as np
import argparse
# Seed RNGs for reproducible runs.
torch.manual_seed(42)
np.random.seed(42)
# NOTE(review): cudnn.deterministic is explicitly False, so GPU results may
# still vary between runs despite the fixed seeds — confirm this is intended.
torch.backends.cudnn.deterministic = False
torch.backends.cudnn.benchmark = False
def parse_args(argv=None):
    """Parse arguments for module.

    Args:
        argv (list[str] | None): argument strings to parse; when None
            (the default) ``sys.argv[1:]`` is used, preserving the
            original behavior.

    Returns:
        argparse.Namespace: contains accessible arguments passed in to module
    """
    parser = argparse.ArgumentParser()

    parser.add_argument("--task",
                        help=("task for neural network to train on; "
                              "either prediction or conversion"))
    parser.add_argument("--data-path",
                        help=("path to h5 files containing data "
                              "(must contain training.h5 and validation.h5)"))
    parser.add_argument("--representation",
                        help=("will normalize if quaternions, will use expmap "
                              "to quat validation loss if expmap"),
                        default="quaternion")
    parser.add_argument("--full-transformer",
                        help=("will use Transformer with both encoder and "
                              "decoder if true, will only use encoder "
                              "if false"),
                        default=False,
                        action="store_true")
    parser.add_argument("--model-file-path",
                        help="path to model file for saving it after training")
    parser.add_argument("--batch-size",
                        help="batch size for training", default=32)
    parser.add_argument("--learning-rate",
                        help="initial learning rate for training",
                        default=0.001)
    parser.add_argument("--beta-one",
                        help="beta1 for adam optimizer (momentum)",
                        default=0.9)
    parser.add_argument("--beta-two",
                        help="beta2 for adam optimizer", default=0.999)
    parser.add_argument("--seq-length",
                        help=("sequence length for model, will be divided "
                              "by downsample if downsample is provided"),
                        default=20)
    parser.add_argument("--downsample",
                        help=("reduce sampling frequency of recorded data; "
                              "default sampling frequency is 240 Hz"),
                        default=1)
    parser.add_argument("--in-out-ratio",
                        help=("ratio of input/output; "
                              "seq_length / downsample = input length = 10, "
                              "output length = input length / in_out_ratio"),
                        default=1)
    parser.add_argument("--stride",
                        help=("stride used when reading data in "
                              "for running prediction tasks"),
                        default=3)
    parser.add_argument("--num-epochs",
                        help="number of epochs for training", default=1)
    parser.add_argument("--num-heads",
                        help="number of heads in Transformer")
    parser.add_argument("--dim-feedforward",
                        help=("number of dimensions in feedforward layer "
                              "in Transformer"))
    parser.add_argument("--dropout",
                        help="dropout percentage in Transformer")
    parser.add_argument("--num-layers",
                        help="number of layers in Transformer")

    args = parser.parse_args(argv)

    # --data-path is effectively required; show usage when it's missing.
    if args.data_path is None:
        parser.print_help()

    return args
if __name__ == "__main__":
    args = parse_args()

    # Log every CLI option for reproducibility.
    for arg in vars(args):
        logger.info(f"{arg} - {getattr(args, arg)}")

    logger.info("Starting Transformer training...")
    logger.info(f"Device count: {torch.cuda.device_count()}")
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    logger.info(f"Training on {device}...")

    # Effective input length after downsampling; must divide evenly
    # by the input/output ratio.
    seq_length = int(args.seq_length)//int(args.downsample)
    assert seq_length % int(args.in_out_ratio) == 0
    lr = float(args.learning_rate)

    normalize = True
    # Validation data reuses the normalization statistics of training data.
    train_dataloader, norm_data = load_dataloader(args, "training", normalize)
    val_dataloader, _ = load_dataloader(args, "validation", normalize,
                                        norm_data=norm_data)

    # Feature sizes inferred from the first (input, target) sample pair.
    encoder_feature_size = train_dataloader.dataset[0][0].shape[1]
    decoder_feature_size = train_dataloader.dataset[0][1].shape[1]

    num_heads = int(args.num_heads)
    dim_feedforward = int(args.dim_feedforward)
    dropout = float(args.dropout)
    num_layers = int(args.num_layers)
    # NOTE(review): compares against "quaternions" (plural) while the CLI
    # default for --representation is "quaternion" (singular), so this is
    # False for the default — confirm which spelling is intended.
    quaternions = (args.representation == "quaternions")

    if args.full_transformer:
        model = InferenceTransformer(decoder_feature_size, num_heads,
                                     dim_feedforward, dropout,
                                     num_layers, quaternions=quaternions)
    else:
        model = InferenceTransformerEncoder(encoder_feature_size, num_heads,
                                            dim_feedforward, dropout,
                                            num_layers, decoder_feature_size,
                                            quaternions=quaternions)

    num_params = sum(p.numel() for p in model.parameters() if p.requires_grad)

    # Data-parallel across all visible GPUs; training runs in float64.
    if torch.cuda.device_count() > 1:
        model = nn.DataParallel(model)
    model = model.to(device).double()

    epochs = int(args.num_epochs)
    beta1 = float(args.beta_one)
    beta2 = float(args.beta_two)
    optimizer = optim.AdamW(model.parameters(),
                            lr=lr,
                            betas=(beta1, beta2),
                            weight_decay=0.03)
    # Learning rate decays by 10x after epochs 1 and 3.
    scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
                                               milestones=[1, 3],
                                               gamma=0.1)

    dataloaders = (train_dataloader, val_dataloader)
    training_criterion = nn.L1Loss()
    validation_criteria = [nn.L1Loss(), QuatDistance()]

    logger.info(f"Model for training: {model}")
    logger.info(f"Number of parameters: {num_params}")
    logger.info(f"Optimizer for training: {optimizer}")
    logger.info(f"Criterion for training: {training_criterion}")

    fit(model, optimizer, scheduler, epochs, dataloaders, training_criterion,
        validation_criteria, device, args.model_file_path,
        full_transformer=args.full_transformer)

    logger.info("Completed Training...")
    logger.info("\n")
| 39.384181 | 79 | 0.579544 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,025 | 0.290489 |
89d83015d934717ac40944dfdbcc9a9c8718e02e | 3,226 | py | Python | amocrm_asterisk_ng/integration/Integration.py | iqtek/amocrn_asterisk_ng | 429a8d0823b951c855a49c1d44ab0e05263c54dc | [
"MIT"
] | null | null | null | amocrm_asterisk_ng/integration/Integration.py | iqtek/amocrn_asterisk_ng | 429a8d0823b951c855a49c1d44ab0e05263c54dc | [
"MIT"
] | null | null | null | amocrm_asterisk_ng/integration/Integration.py | iqtek/amocrn_asterisk_ng | 429a8d0823b951c855a49c1d44ab0e05263c54dc | [
"MIT"
] | null | null | null | from typing import Collection
from typing import Sequence
from glassio.initializable_components import InitializableComponent
from glassio.logger import InitializableLogger
from amocrm_asterisk_ng.scenario import IScenario
__all__ = [
"Integration",
]
class Integration:
    """Orchestrates the lifecycle of all components of the integration.

    On startup, components are initialized in the order:
    infrastructure -> control -> listening; on shutdown they are torn
    down as listening -> infrastructure (reversed) -> control.
    """

    __slots__ = (
        "__scenario",
        "__listening_components",
        "__control_components",
        "__infrastructure_components",
        "__logger",
    )

    def __init__(
        self,
        scenario: IScenario,
        listening_components: Collection[InitializableComponent],
        control_components: Collection[InitializableComponent],
        infrastructure_components: Sequence[InitializableComponent],
        logger: InitializableLogger,
    ) -> None:
        self.__scenario = scenario
        self.__listening_components = listening_components
        self.__control_components = control_components
        self.__infrastructure_components = infrastructure_components
        self.__logger = logger

    async def __initialize_component(
        self,
        component: InitializableComponent,
    ) -> None:
        # Initialize one component, logging success or failure by class name.
        name = type(component).__name__
        try:
            await component.initialize()
        except Exception as exc:
            await self.__logger.critical(
                f"Error of initialization: `{name}`.",
                exception=exc,
            )
            raise Exception("Error of initialization.") from exc
        await self.__logger.info(
            f"Component: `{name}` initialized."
        )

    async def __deinitialize_component(
        self,
        component: InitializableComponent,
    ) -> None:
        # Deinitialize one component, logging success or failure by class name.
        name = type(component).__name__
        try:
            await component.deinitialize()
        except Exception as exc:
            await self.__logger.critical(
                f"Error of deinitialization: `{name}`.",
                exception=exc,
            )
            raise Exception("Error of deinitialization.") from exc
        await self.__logger.info(
            f"Component: `{name}` deinitialized."
        )

    async def handle_startup(self) -> None:
        """Initialize every component, then upload the scenario."""
        await self.__logger.initialize()
        await self.__logger.info("Integration initialization started.")
        startup_order = [
            *self.__infrastructure_components,
            *self.__control_components,
            *self.__listening_components,
        ]
        for component in startup_order:
            await self.__initialize_component(component)
        await self.__scenario.upload()
        await self.__logger.info("Integration initialization finished.")

    async def handle_shutdown(self) -> None:
        """Deinitialize every component, then unload the scenario."""
        await self.__logger.info("Integration deinitialization started.")
        # The infrastructure is shut down in reverse order.
        shutdown_order = [
            *self.__listening_components,
            *reversed(self.__infrastructure_components),
            *self.__control_components,
        ]
        for component in shutdown_order:
            await self.__deinitialize_component(component)
        await self.__scenario.unload()
        await self.__logger.info("Integration deinitialization finished.")
        await self.__logger.deinitialize()
| 33.257732 | 93 | 0.66181 | 2,963 | 0.918475 | 0 | 0 | 0 | 0 | 2,187 | 0.677929 | 557 | 0.17266 |
89d89227f288e0f65c724c9ca47de9d7c7896fe6 | 1,685 | py | Python | src/pyro_util/modules/__init__.py | MacoskoLab/pyro-util | 6ea5e1dfd082abed8e675743c59efe9b548671ab | [
"MIT"
] | null | null | null | src/pyro_util/modules/__init__.py | MacoskoLab/pyro-util | 6ea5e1dfd082abed8e675743c59efe9b548671ab | [
"MIT"
] | null | null | null | src/pyro_util/modules/__init__.py | MacoskoLab/pyro-util | 6ea5e1dfd082abed8e675743c59efe9b548671ab | [
"MIT"
] | null | null | null | from typing import Tuple
import torch
import torch.nn as nn
from pyro.distributions.util import broadcast_shape
from pyro_util.modules.weight_scaling import GammaReLU, WSLinear
# Shorthand alias for tensor type annotations in this module.
T = torch.Tensor
def make_ws_fc(*dims: int) -> nn.Module:
    """Build a fully connected network from weight-scaled linear layers.

    Consecutive WSLinear layers are separated by gamma-scaled ReLUs.

    :param dims: The size of the layers in the network (at least 2)
    :return: nn.Sequential containing all the layers
    """
    modules = [WSLinear(dims[0], dims[1])]
    for prev_dim, next_dim in zip(dims[1:], dims[2:]):
        modules += [GammaReLU(), WSLinear(prev_dim, next_dim)]
    return nn.Sequential(*modules)
def make_bn_fc(*dims: int) -> nn.Module:
    """Build a fully connected network with BatchNorm + ReLU between layers.

    :param dims: The size of the layers in the network (at least 2)
    :return: nn.Sequential containing all the layers
    """
    modules = [nn.Linear(dims[0], dims[1])]
    for prev_dim, next_dim in zip(dims[1:], dims[2:]):
        modules.extend([nn.BatchNorm1d(prev_dim), nn.ReLU(), nn.Linear(prev_dim, next_dim)])
    return nn.Sequential(*modules)
def split_in_half(t: T) -> Tuple[T, T]:
    """Split a tensor into two equal chunks along its final dimension."""
    paired = t.reshape(*t.shape[:-1], 2, -1)
    return paired.unbind(dim=-2)
def broadcast_inputs(input_args):
    """Expand every input tensor to the common broadcast shape of all inputs,
    leaving the final (feature) dimension untouched."""
    common = broadcast_shape(*(t.shape[:-1] for t in input_args)) + (-1,)
    return [t.expand(common) for t in input_args]
| 29.561404 | 72 | 0.68546 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 622 | 0.369139 |
89d9126f3e319e5ec89708f1f4c286d5fd016235 | 356 | py | Python | catalog_harvesting/util.py | ioos/catalog-harvesting | 7674f9d0aba21032737f09364db6cd0afa3664cc | [
"MIT"
] | null | null | null | catalog_harvesting/util.py | ioos/catalog-harvesting | 7674f9d0aba21032737f09364db6cd0afa3664cc | [
"MIT"
] | 17 | 2016-07-01T18:52:20.000Z | 2021-12-13T19:48:50.000Z | catalog_harvesting/util.py | ioos/catalog-harvesting | 7674f9d0aba21032737f09364db6cd0afa3664cc | [
"MIT"
] | 4 | 2016-06-28T15:29:28.000Z | 2017-05-16T18:54:04.000Z | #!/usr/bin/env python
'''
catalog_harvesting/util.py
General utilities for the project
'''
import random
def unique_id():
    '''
    Return a random 17-character string that works well for mongo IDs
    '''
    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    # One independent random.choice per character (17 alphanumeric chars).
    return ''.join(random.choice(alphabet) for _ in range(17))
| 19.777778 | 78 | 0.72191 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 237 | 0.66573 |
89d965296e8228cfbfe6118b69532269f5a9bce1 | 10,442 | py | Python | render.py | ondrejkoren/WeatherFrame | 2f9da22e5fa162e9ec7af218f353c16eb1250572 | [
"MIT"
] | 2 | 2020-10-26T19:18:11.000Z | 2020-10-27T17:27:40.000Z | render.py | ondrejkoren/WeatherFrame | 2f9da22e5fa162e9ec7af218f353c16eb1250572 | [
"MIT"
] | null | null | null | render.py | ondrejkoren/WeatherFrame | 2f9da22e5fa162e9ec7af218f353c16eb1250572 | [
"MIT"
] | 1 | 2020-12-18T03:42:42.000Z | 2020-12-18T03:42:42.000Z | from WeatherScreens.RingScreen import RingScreen
from WeatherScreens.QuadrantScreen import QuadrantScreen
from WeatherScreens.ImageScreen import ImageScreen
from WeatherScreens.ScreenBase import ScreenBase
from datetime import datetime, timedelta
from suntime import Sun, SunTimeException
from dateutil import tz
import pyowm
import argparse
if __name__ == "__main__":
    # Command-line interface: location, OWM API token, and screen selection.
    parser = argparse.ArgumentParser(description="WeatherFrame CLI Utility")
    parser.add_argument("--lat", type=float,
                        help="Latitude in decimal form")
    parser.add_argument("--long", type=float,
                        help="Longitude in decimal form")
    parser.add_argument("--owm", type=str,
                        help="OpenWeatherMap API Token")
    parser.add_argument("--type", type=str,
                        help="Screen type")
    parser.add_argument("--image", type=str,
                        help="Image path")
    args = parser.parse_args()

    latitude = args.lat
    longitude = args.long
    owm_token = args.owm
    screen_type = args.type
    image_path = args.image

    # NOTE: a large hard-coded mock `weather_data` dict (with a forecast list
    # and a date-shifting loop) used to live here. It was unconditionally
    # overwritten by the live OpenWeatherMap query below and never read
    # afterwards, so it has been removed as dead code.

    # Query current conditions from OpenWeatherMap (legacy pyowm 2.x API).
    owm = pyowm.OWM(owm_token)
    observation = owm.weather_at_coords(latitude, longitude)
    w = observation.get_weather()
    weather_data = {
        'wind': w.get_wind(),
        'humidity': w.get_humidity(),
        'temp': w.get_temperature('celsius'),
        'clouds': w.get_clouds(),
        'pressure': w.get_pressure(),
        'status': w.get_status(),
        'observation_time': observation.get_reception_time(timeformat="iso")
    }

    # Select the screen implementation to render; fall back to the base screen.
    screen = None
    if screen_type == "ring":
        screen = RingScreen(coordinates=(latitude, longitude),
                            weather_data=weather_data)
    elif screen_type == "quadrant":
        screen = QuadrantScreen(coordinates=(latitude, longitude),
                                weather_data=weather_data)
    elif screen_type == "image":
        screen = ImageScreen(path=image_path)
    else:
        screen = ScreenBase()

    image = screen.render()
    image.show()
| 83.536 | 187 | 0.538977 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,843 | 0.4638 |
89da74e72512e7844f3d0419ef0411f667024554 | 254 | py | Python | evergreen/manage/website/page/urls.py | craigsander/evergreen | 73c8e1ed546e3ac480add3e30e8696f4eb052496 | [
"MIT"
] | null | null | null | evergreen/manage/website/page/urls.py | craigsander/evergreen | 73c8e1ed546e3ac480add3e30e8696f4eb052496 | [
"MIT"
] | 6 | 2016-05-09T02:56:05.000Z | 2016-05-26T18:36:41.000Z | evergreen/manage/website/page/urls.py | craigsander/evergreen | 73c8e1ed546e3ac480add3e30e8696f4eb052496 | [
"MIT"
] | null | null | null | from django.conf.urls import url, include
from django.conf import settings
from . import views
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
# NOTE(review): the pattern has no leading '^' anchor, so 'manage/' matches
# anywhere in the requested path — confirm this is intended.
urlpatterns = [
    url(r'manage/', views.index),
]
| 23.090909 | 60 | 0.755906 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 116 | 0.456693 |
89dabd1fdd786adb281a58df018fbc280d6e2986 | 9,563 | py | Python | algos/td3.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | null | null | null | algos/td3.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | null | null | null | algos/td3.py | SrikarValluri/hidden-state-rrl-sl | 80c90ce3e07fc61e6b910a763b9608debeb1e5c4 | [
"MIT"
] | 1 | 2021-12-07T13:45:33.000Z | 2021-12-07T13:45:33.000Z | import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import random
from algos.dpg import eval_policy, collect_experience
from algos.dpg import ReplayBuffer
class TD3():
    """Twin-Delayed Deep Deterministic Policy Gradient (TD3).

    Keeps behavioral and target copies of one actor and two Q critics and
    implements the TD3 update: clipped double-Q targets, target-policy
    smoothing noise, and delayed actor/target-network updates.
    """

    def __init__(self, actor, q1, q2, a_lr, c_lr, discount=0.99, tau=0.001, center_reward=False, policy_noise=0.2, update_freq=2, noise_clip=0.5, normalize=False):
        """Store networks/hyperparameters and build target copies + optimizers.

        NOTE(review): the `noise_clip` constructor argument is never stored;
        `update_policy` uses its own `noise_clip` parameter (default 0.2).
        `center_reward` is stored but not used inside this class.
        """
        # Trajectory-based replay sampling is used when any network is recurrent.
        if actor.is_recurrent or q1.is_recurrent or q2.is_recurrent:
            self.recurrent = True
        else:
            self.recurrent = False
        self.behavioral_actor = actor
        self.behavioral_q1 = q1
        self.behavioral_q2 = q2
        self.target_actor = copy.deepcopy(actor)
        self.target_q1 = copy.deepcopy(q1)
        self.target_q2 = copy.deepcopy(q2)
        # tau=1.0 copies behavioral weights into the targets exactly.
        self.soft_update(1.0)
        self.actor_optimizer = torch.optim.Adam(self.behavioral_actor.parameters(), lr=a_lr)
        self.q1_optimizer = torch.optim.Adam(self.behavioral_q1.parameters(), lr=c_lr, weight_decay=1e-2)
        self.q2_optimizer = torch.optim.Adam(self.behavioral_q2.parameters(), lr=c_lr, weight_decay=1e-2)
        self.discount = discount
        self.tau = tau
        self.center_reward = center_reward
        self.update_every = update_freq
        self.policy_noise = policy_noise
        self.normalize = normalize
        self.n = 0  # update_policy() call counter; drives the delayed updates

    def soft_update(self, tau):
        """Polyak-average behavioral weights into the target networks."""
        for param, target_param in zip(self.behavioral_q1.parameters(), self.target_q1.parameters()):
            target_param.data.copy_(tau * param.data + (1 - tau) * target_param.data)
        for param, target_param in zip(self.behavioral_q2.parameters(), self.target_q2.parameters()):
            target_param.data.copy_(tau * param.data + (1 - tau) * target_param.data)
        for param, target_param in zip(self.behavioral_actor.parameters(), self.target_actor.parameters()):
            target_param.data.copy_(tau * param.data + (1 - tau) * target_param.data)

    def update_policy(self, replay_buffer, batch_size=256, traj_len=1000, grad_clip=None, noise_clip=0.2):
        """Run one critic update and, every `update_every` calls, one actor
        plus target-network update.

        Returns (critic_loss_value, sampled_steps).
        NOTE(review): `grad_clip` is currently unused (clipping commented out).
        """
        self.n += 1
        states, actions, next_states, rewards, not_dones, steps, mask = replay_buffer.sample(batch_size, sample_trajectories=self.recurrent, max_len=traj_len)
        with torch.no_grad():
            if self.normalize:
                states = self.behavioral_actor.normalize_state(states, update=False)
                next_states = self.behavioral_actor.normalize_state(next_states, update=False)
            # Target-policy smoothing: clipped Gaussian noise on target actions.
            # NOTE(review): the noisy actions are not re-clamped to the action
            # bounds here — confirm the actor's output range makes this safe.
            noise = (torch.randn_like(actions) * self.policy_noise).clamp(-noise_clip, noise_clip)
            next_actions = (self.target_actor(next_states) + noise)
            # Clipped double-Q target; `mask` zeroes padded trajectory steps.
            target_q1 = self.target_q1(next_states, next_actions)
            target_q2 = self.target_q2(next_states, next_actions)
            target_q = rewards + not_dones * self.discount * torch.min(target_q1, target_q2) * mask
        current_q1 = self.behavioral_q1(states, actions) * mask
        current_q2 = self.behavioral_q2(states, actions) * mask
        critic_loss = F.mse_loss(current_q1, target_q) + F.mse_loss(current_q2, target_q)
        self.q1_optimizer.zero_grad()
        self.q2_optimizer.zero_grad()
        critic_loss.backward()
        self.q1_optimizer.step()
        self.q2_optimizer.step()
        # Delayed policy update: actor/targets move only every `update_every` calls.
        if self.n % self.update_every == 0:
            # NOTE(review): the inner `* mask` multiplies the *actions* before
            # they reach Q1 — verify the mask was not meant to apply only to
            # the Q output.
            actor_loss = -(self.behavioral_q1(states, self.behavioral_actor(states) * mask) * mask).mean()
            self.actor_optimizer.zero_grad()
            actor_loss.backward()
            #if grad_clip is not None:
            #    torch.nn.utils.clip_grad_norm_(self.behavioral_actor.parameters(), grad_clip)
            self.actor_optimizer.step()
            self.soft_update(self.tau)
        return critic_loss.item(), steps
def run_experiment(args):
    """Train a TD3 agent on args.env_name, periodically evaluate it, and
    checkpoint the best actor.

    Builds the environment, actor and critics (feed-forward or LSTM depending
    on args.recurrent), fills a replay buffer, and runs the collect/update
    loop until args.timesteps environment steps have been taken.
    """
    from time import time
    from util.log import create_logger
    from util.env import env_factory
    from policies.critic import FF_Q, LSTM_Q
    from policies.actor import FF_Actor, LSTM_Actor
    import locale, os
    # Locale-aware thousands separators for the "{:n}" format specs below.
    locale.setlocale(locale.LC_ALL, '')
    # wrapper function for creating parallelized envs
    env = env_factory(args.env_name)()
    eval_env = env_factory(args.env_name)()
    # Seed every RNG in play for reproducibility.
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if hasattr(env, 'seed'):
        env.seed(args.seed)
    obs_space = env.observation_space.shape[0]
    act_space = env.action_space.shape[0]
    # Recurrent (LSTM) or feed-forward actor/critics, chosen by flag.
    if args.recurrent:
        actor = LSTM_Actor(obs_space, act_space, env_name=args.env_name, max_action=args.max_action)
        Q1 = LSTM_Q(obs_space, act_space, env_name=args.env_name)
        Q2 = LSTM_Q(obs_space, act_space, env_name=args.env_name)
    else:
        actor = FF_Actor(obs_space, act_space, env_name=args.env_name, max_action=args.max_action)
        Q1 = FF_Q(obs_space, act_space, env_name=args.env_name)
        Q2 = FF_Q(obs_space, act_space, env_name=args.env_name)
    algo = TD3(actor, Q1, Q2, args.a_lr, args.c_lr,
               discount=args.discount,
               tau=args.tau,
               center_reward=args.center_reward,
               policy_noise=args.policy_noise,
               update_freq=args.update_every,
               noise_clip=args.noise_clip,
               normalize=args.normalize)
    # Replay buffer sized to hold every transition of the run.
    replay_buff = ReplayBuffer(obs_space, act_space, int(args.timesteps))
    if algo.recurrent:
        print("Recurrent Twin-Delayed Deep Deterministic Policy Gradient:")
    else:
        print("Twin-Delayed Deep Deterministic Policy Gradient:")
    print(args)
    print("\tenv: {}".format(args.env_name))
    print("\tseed: {}".format(args.seed))
    print("\ttimesteps: {:n}".format(int(args.timesteps)))
    print("\tactor_lr: {}".format(args.a_lr))
    print("\tcritic_lr: {}".format(args.c_lr))
    print("\tdiscount: {}".format(args.discount))
    print("\ttau: {}".format(args.tau))
    print("\tnorm reward: {}".format(args.center_reward))
    print("\tnorm states: {}".format(args.normalize))
    print("\tbatch_size: {}".format(args.batch_size))
    print("\twarmup period: {:n}".format(args.start_timesteps))
    print()
    # NOTE: `iter` shadows the builtin; here it counts completed episodes.
    iter = 0
    episode_reward = 0
    episode_timesteps = 0
    # create a tensorboard logging object
    logger = create_logger(args)
    if args.save_actor is None:
        args.save_actor = os.path.join(logger.dir, 'actor.pt')
    # Keep track of some statistics for each episode
    training_start = time()
    episode_start = time()
    episode_loss = 0
    update_steps = 0
    best_reward = None
    # Fill replay buffer, update policy until n timesteps have passed
    timesteps = 0
    state = env.reset().astype(np.float32)
    while timesteps < args.timesteps:
        # Recurrent mode samples whole trajectories, so readiness is counted
        # in episodes; feed-forward mode counts stored transitions.
        buffer_ready = (algo.recurrent and iter > args.batch_size) or (not algo.recurrent and replay_buff.size > args.batch_size)
        # During warmup, actions are uniformly random exploration.
        warmup = timesteps < args.start_timesteps
        state, r, done = collect_experience(algo.behavioral_actor, env, replay_buff, state, episode_timesteps,
                                            max_len=args.traj_len,
                                            random_action=warmup,
                                            noise=args.expl_noise,
                                            do_trajectory=algo.recurrent,
                                            normalize=algo.normalize)
        episode_reward += r
        episode_timesteps += 1
        timesteps += 1
        # Update the policy once our replay buffer is big enough
        if buffer_ready and done and not warmup:
            update_steps = 0
            # One update per episode in recurrent mode; one per collected
            # step otherwise.
            if algo.recurrent:
                num_updates = 1
            else:
                num_updates = episode_timesteps
            for _ in range(num_updates):
                u_loss, u_steps = algo.update_policy(replay_buff, args.batch_size, traj_len=args.traj_len)
                episode_loss += u_loss / num_updates
                update_steps += u_steps
        if done:
            # Per-episode logging plus a rough time-remaining estimate.
            episode_elapsed = (time() - episode_start)
            episode_secs_per_sample = episode_elapsed / episode_timesteps
            logger.add_scalar(args.env_name + ' episode length', episode_timesteps, iter)
            logger.add_scalar(args.env_name + ' episode reward', episode_reward, iter)
            logger.add_scalar(args.env_name + ' critic loss', episode_loss, iter)
            completion = 1 - float(timesteps) / args.timesteps
            avg_sample_r = (time() - training_start)/timesteps
            secs_remaining = avg_sample_r * args.timesteps * completion
            hrs_remaining = int(secs_remaining//(60*60))
            min_remaining = int(secs_remaining - hrs_remaining*60*60)//60
            # Periodic evaluation; checkpoint the actor on a new best return.
            if iter % args.eval_every == 0 and iter != 0:
                eval_reward = eval_policy(algo.behavioral_actor, eval_env, max_traj_len=args.traj_len)
                logger.add_scalar(args.env_name + ' eval episode', eval_reward, iter)
                logger.add_scalar(args.env_name + ' eval timestep', eval_reward, timesteps)
                print("evaluation after {:4d} episodes | return: {:7.3f} | timesteps {:9n}{:100s}".format(iter, eval_reward, timesteps, ''))
                if best_reward is None or eval_reward > best_reward:
                    torch.save(algo.behavioral_actor, args.save_actor)
                    #torch.save(algo.behavioral_critic, args.save_critic)
                    best_reward = eval_reward
                    print("\t(best policy so far! saving to {})".format(args.save_actor))
            # NOTE(review): the NameError guard looks vestigial — every name
            # used below is assigned before this point.
            try:
                print("episode {:5d} | episode timestep {:5d}/{:5d} | return {:5.1f} | update timesteps: {:7n} | {:3.1f}s/1k samples | approx. {:3d}h {:02d}m remain\t\t\t\t".format(
                    iter,
                    episode_timesteps,
                    args.traj_len,
                    episode_reward,
                    update_steps,
                    1000*episode_secs_per_sample,
                    hrs_remaining,
                    min_remaining), end='\r')
            except NameError:
                pass
        if done:
            # Reset per-episode bookkeeping (and LSTM hidden state if present).
            if hasattr(algo.behavioral_actor, 'init_hidden_state'):
                algo.behavioral_actor.init_hidden_state()
            episode_start, episode_reward, episode_timesteps, episode_loss = time(), 0, 0, 0
            iter += 1
| 37.065891 | 171 | 0.680958 | 3,322 | 0.347381 | 0 | 0 | 0 | 0 | 0 | 0 | 1,157 | 0.120987 |
89dae401f8334c13497ff1b437626cfde768def7 | 2,021 | py | Python | Hackathon 4.0_2021-01-08_07-22-55.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | Hackathon 4.0_2021-01-08_07-22-55.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | Hackathon 4.0_2021-01-08_07-22-55.py | ClointFusion-Community/CFC-Projects | c6381738ade07e6e8979bbae37400ec2b4e626c5 | [
"MIT"
] | null | null | null | # This code is generated automatically by ClointFusion BOT Builder Tool.
import ClointFusion as cf
import time
# Replay of a recorded sequence of screen interactions: minimize all windows,
# click the screen center, then perform the captured clicks. Each try/except
# first attempts an image-based locate (snip screenshot) and falls back to the
# absolute screen coordinates recorded at capture time.
# NOTE(review): the snip paths are absolute, machine-specific temp paths, and
# the bare `except:` clauses swallow every error — typical of generated code.
cf.window_show_desktop()
cf.mouse_click(int(cf.pg.size()[0]/2),int(cf.pg.size()[1]/2))
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\1--1788_368.png',conf=0.7, wait=12),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(1788,368,left_or_right='left', single_double_triple = 'single')
time.sleep(2)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\2--246_938.png',conf=0.7, wait=10),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(246,938,left_or_right='left', single_double_triple = 'single')
time.sleep(0)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\3--246_938.png',conf=0.7, wait=13),left_or_right='left', single_double_triple = 'double')
except:
    cf.mouse_click(246,938,left_or_right='left', single_double_triple = 'double')
time.sleep(3)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\4-NewTabGoogleChrome-385_77.png',conf=0.7, wait=11),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(385,77,left_or_right='left', single_double_triple = 'single')
time.sleep(1)
# Type a search query and submit it.
cf.key_write_enter('modi')
time.sleep(0)
cf.key_press('enter')
time.sleep(3)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\5-modiGoogleSearchGoogleChrome-1905_57.png',conf=0.7, wait=10),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(1905,57,left_or_right='left', single_double_triple = 'single')
time.sleep(0)
| 48.119048 | 256 | 0.784265 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 721 | 0.356754 |
89db6ccc91759a48501578936605ad3449526dbf | 4,524 | py | Python | examples/2d/obsolete/gravity/generate_statedb.py | Grant-Block/pylith | f6338261b17551eba879da998a5aaf2d91f5f658 | [
"MIT"
] | 93 | 2015-01-08T16:41:22.000Z | 2022-02-25T13:40:02.000Z | examples/2d/obsolete/gravity/generate_statedb.py | Grant-Block/pylith | f6338261b17551eba879da998a5aaf2d91f5f658 | [
"MIT"
] | 277 | 2015-02-20T16:27:35.000Z | 2022-03-30T21:13:09.000Z | examples/2d/obsolete/gravity/generate_statedb.py | Grant-Block/pylith | f6338261b17551eba879da998a5aaf2d91f5f658 | [
"MIT"
] | 71 | 2015-03-24T12:11:08.000Z | 2022-03-03T04:26:02.000Z | #!/usr/bin/env nemesis
"""
This script creates a spatial database for the initial stress and state
variables for a Maxwell plane strain material.
"""
# Simulation name and material labels; used to build the input HDF5 and
# output spatialdb file names below.
sim = "gravity_vardensity"
materials = ["crust","mantle"]
import numpy
import h5py
from spatialdata.spatialdb.SimpleIOAscii import SimpleIOAscii
from spatialdata.geocoords.CSCart import CSCart
# Coordinate system for the spatial database output (2-D Cartesian).
cs = CSCart()
cs._configure()
cs.setSpaceDim(2)

# Basis functions for quad4 cell evaluated at quadrature points. Use
# to compute coordinate of quadrature points in each cell from
# coordinates of vertices. Note the order must correspond to the order
# of the data at the quadrature points in the output.
qpts = numpy.array([[ 0.62200847, 0.16666667, 0.0446582, 0.16666667],
                    [ 0.16666667, 0.62200847, 0.16666667, 0.0446582 ],
                    [ 0.16666667, 0.0446582, 0.16666667, 0.62200847],
                    [ 0.0446582, 0.16666667, 0.62200847, 0.16666667]], dtype=numpy.float64)
def calcQuadCoords(vertices, cells, qpts):
    """Compute coordinates of quadrature points.

    vertices: (nvertices, spaceDim) vertex coordinates
    cells:    (ncells, ncorners) vertex indices per cell
    qpts:     (nqpts, ncorners) basis-function values at each quad point
    returns:  (ncells*nqpts, spaceDim) float64 array of quad-point coords
    """
    numCells = cells.shape[0]
    numQuadPts = qpts.shape[0]
    spaceDim = vertices.shape[1]
    # Corner coordinates per cell: (ncells, ncorners, spaceDim).
    cornerCoords = vertices[cells]
    # coords[c, q, d] = sum_k qpts[q, k] * cornerCoords[c, k, d]
    coords = numpy.einsum("qk,ckd->cqd", qpts, cornerCoords).astype(numpy.float64, copy=False)
    return coords.reshape((numCells * numQuadPts, spaceDim))
for material in materials:
    # Input HDF5 (PyLith output) and output spatial-database file names.
    filenameH5 = "output/%s-%s.h5" % (sim, material)
    filenameDB = "%s_statevars-%s.spatialdb" % (sim, material)
    # Open HDF5 file and get coordinates, cells, and stress.
    h5 = h5py.File(filenameH5, "r")
    vertices = h5['geometry/vertices'][:]
    # tindex selects the last time step stored in the file.
    tindex = -1
    # NOTE(review): numpy.int was removed in NumPy >= 1.24; this script
    # appears to target the Python-2-era `nemesis` interpreter (see shebang).
    cells = numpy.array(h5['topology/cells'][:], dtype=numpy.int)
    stress = h5['cell_fields/stress'][tindex,:,:]
    if "mantle" in material:
        # Viscous strain state variables exist only for the mantle material.
        vstrain = h5['cell_fields/viscous_strain'][tindex,:,:]
    h5.close()
    # Compute coordinates of quadrature points.
    quadCoords = calcQuadCoords(vertices, cells, qpts)
    nqpts = qpts.shape[0]
    ncells = cells.shape[0]
    # NOTE(review): `/` is integer division under Python 2; under Python 3
    # this yields a float and the reshape below would fail.
    nvalues = stress.shape[1]/nqpts
    # Check to make sure output included all quadrature points (CellFilterAvg was not used).
    if stress.shape[1] == 3:
        raise ValueError("Found %d stress values for each cell. Expected 12 stress values (stress_xx, stress_yy, and stress_xy at 4 quadrature points) for each cell. Turn off CellFilterAvg in pylithapp.cfg." % stress.shape[1])
    if stress.shape[1] != nqpts*3:
        raise ValueError("Found %d stress values for each cell. Expected 12 stress values (stress_xx, stress_yy, and stress_xy at 4 quadrature points) for each cell. Did you turn off CellFilterAvg in pylithapp.cfg?" % stress.shape[1])
    # Flatten to one row per quadrature point.
    stress = stress.reshape((ncells*nqpts, nvalues))
    # Create writer for spatial database file
    writer = SimpleIOAscii()
    writer.inventory.filename = filenameDB
    writer._configure()
    # Plane-strain stress components, one record per field.
    values = [{'name': "stress-xx",
               'units': "Pa",
               'data': stress[:,0]},
              {'name': "stress-yy",
               'units': "Pa",
               'data': stress[:,1]},
              {'name': "stress-xy",
               'units': "Pa",
               'data': stress[:,2]},
              ]
    if "mantle" in material:
        nvalues = vstrain.shape[1]/nqpts
        vstrain = vstrain.reshape((ncells*nqpts, nvalues))
        # Initial out-of-plane stress: average of the in-plane normal stresses.
        stressZZ = 0.5*(stress[:,0]+stress[:,1])
        zeros = numpy.zeros(stressZZ.shape)
        # Extra state variables for the Maxwell (viscoelastic) mantle material.
        values += [{'name': "stress-zz-initial",
                    'units': "Pa",
                    'data': stressZZ},
                   {'name': "total-strain-xx",
                    'units': "None",
                    'data': zeros},
                   {'name': "total-strain-yy",
                    'units': "None",
                    'data': zeros},
                   {'name': "total-strain-xy",
                    'units': "None",
                    'data': zeros},
                   {'name': "viscous-strain-xx",
                    'units': "None",
                    'data': vstrain[:,0]},
                   {'name': "viscous-strain-yy",
                    'units': "None",
                    'data': vstrain[:,1]},
                   {'name': "viscous-strain-zz",
                    'units': "None",
                    'data': vstrain[:,2]},
                   {'name': "viscous-strain-xy",
                    'units': "None",
                    'data': vstrain[:,3]},
                   ]
    writer.write({'points': quadCoords,
                  'coordsys': cs,
                  'data_dim': 2,
                  'values': values})
# End of file
| 35.34375 | 230 | 0.593722 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,724 | 0.381079 |
89de0f572a2f8179acd41fd1a3556c20df2554f1 | 923 | py | Python | Python_Examples/BehaviorPolicy.py | dquail/GVFMinecraft | 5eae9ea9974ec604194b32cdb235765ea3fe7fb3 | [
"MIT"
] | null | null | null | Python_Examples/BehaviorPolicy.py | dquail/GVFMinecraft | 5eae9ea9974ec604194b32cdb235765ea3fe7fb3 | [
"MIT"
] | null | null | null | Python_Examples/BehaviorPolicy.py | dquail/GVFMinecraft | 5eae9ea9974ec604194b32cdb235765ea3fe7fb3 | [
"MIT"
] | null | null | null | from random import randint
import numpy as np
import random
class BehaviorPolicy:
    """Maps an observed state to a Malmo action-command string."""

    def __init__(self):
        self.lastAction = 0
        self.i = 0  # number of policy queries served (incremented by most policies)
        # Malmo command strings for each abstract action.
        self.ACTIONS = {
            'forward': "move 1",
            'back': "move -1",
            'turn_left': "turn 1",
            'extend_hand':"attack 1"
        }

    def policy(self, state):
        """Move forward until facing a wall, then turn left."""
        self.i = self.i + 1
        isFacingWall = state[len(state) - 1] == 1 #Last bit in the feature representation represents facing the wall
        if isFacingWall:
            # BUG FIX: the ACTIONS dict has no 'look_left' key (the original
            # lookup raised KeyError); the avoidance action is 'turn_left'.
            return self.ACTIONS['turn_left']
        else:
            return self.ACTIONS['forward']

    def randomPolicy(self, state):
        """Return a uniformly random action (does not increment the counter)."""
        return self.ACTIONS[random.choice(list(self.ACTIONS.keys()))]

    def moveForwardPolicy(self, state):
        """Always move forward."""
        self.i = self.i+1
        return self.ACTIONS['forward']

    def turnLeftPolicy(self, state):
        """Always turn left."""
        self.i = self.i + 1
        return self.ACTIONS['turn_left']

    def epsilonGreedyPolicy(self, state):
        # NOTE(review): unfinished stub — prints a placeholder and returns None.
        print("Do something here")
89de59b92801ba07cf84ab98bf1071d2e2892971 | 311 | py | Python | Lists/In_Lists.py | obareau/python_travaux_pratiques | 2205f4c253e96e409b56f5c23d6e23387ab46524 | [
"MIT"
] | 1 | 2021-11-01T12:45:50.000Z | 2021-11-01T12:45:50.000Z | Lists/In_Lists.py | obareau/python_travaux_pratiques | 2205f4c253e96e409b56f5c23d6e23387ab46524 | [
"MIT"
] | null | null | null | Lists/In_Lists.py | obareau/python_travaux_pratiques | 2205f4c253e96e409b56f5c23d6e23387ab46524 | [
"MIT"
] | null | null | null | # Check if the value is in the list?
words = ['apple', 'banana', 'peach', '42']
if 'apple' in words:
print('found apple')
if 'a' in words:
print('found a')
else:
print('NOT found a')
if 42 in words:
print('found 42')
else:
print('NOT found 42')
# found apple
# NOT found a
# NOT found 42 | 16.368421 | 42 | 0.604502 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.549839 |
89dfe3895be78648508f25f822750a4e38dd966a | 46,445 | py | Python | src/genesis_api_wrapper/catalogue.py | j-suchard/destatis-genesis-api | b0bbec0283ba41707d543b11e556ab0997e5f792 | [
"BSD-3-Clause"
] | null | null | null | src/genesis_api_wrapper/catalogue.py | j-suchard/destatis-genesis-api | b0bbec0283ba41707d543b11e556ab0997e5f792 | [
"BSD-3-Clause"
] | null | null | null | src/genesis_api_wrapper/catalogue.py | j-suchard/destatis-genesis-api | b0bbec0283ba41707d543b11e556ab0997e5f792 | [
"BSD-3-Clause"
] | null | null | null | import datetime
import typing
from . import enums, tools
class CatalogueAPIWrapper:
"""Methods for listing objects"""
def __init__(
self, username: str, password: str, language: enums.Language = enums.Language.GERMAN
):
"""Create a new Wrapper containing functions for listing different object types
:param username: The username which will be used for authenticating at the database. Due
to constraints of the database the username needs to be exactly 10 characters long and
may not contain any whitespaces
:type username: str
:param password: The password which will be used for authenticating at the database. Due
to constraints of the database the password needs to be at least 10 characters long,
may not exceed 20 characters and may not contain any whitespaces
:type password: str
:param language: The language in which the responses are returned by the database.
:py:enum:mem:`~genesis_api_wrapper.enums.Language.GERMAN` has the most compatibility
with the database
since most of the tables are on German. Therefore, this parameter defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.Language.GERMAN`
:type language: enums.Language
:raise ValueError: The username or the password did not match the constraints stated in
their description.
"""
if " " in username:
raise ValueError("The username may not contain any whitespaces")
if len(username) != 10:
raise ValueError("The username may only be 10 characters long")
if " " in password:
raise ValueError("The password may not contain any whitespaces")
if len(password) < 10:
raise ValueError(
f"The password may not be shorter than 10 characters. Current "
f"length: {len(password)}"
)
if len(password) > 20:
raise ValueError(
f"The password may not be longer that 20 characters. Current "
f"length: {len(password)}"
)
self._username = username
self._password = password
self._language = language
self._service_url = "/catalogue"
self._base_parameter = {
"username": self._username,
"password": self._password,
"language": self._language.value,
}
async def cubes(
self,
object_name: str,
storage_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""
**PREMIUM ACCESS REQUIRED**
List the datacubes matching the ``object_name``
:param object_name: The identifier code of the data cubes. The usage of an asterisk
(``*``) is permitted as wildcard
:type object_name: str
:param storage_location: The storage location of the object, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.ObjectStorage.ALL`
:type storage_location: enums.ObjectStorage, optional
:param result_count: The maximal amount of results which are returned by the database,
defaults to 100
:type result_count: int, optional
:return: The response from the database parsed into a dict. If the ``Content-Type``
header indicated a non-JSON response the response is stored in a temporary file and
the file path will be returned
:rtype: dict, os.PathLike
:raises exceptions.GENESISPermissionError: The supplied account does not have the
permissions to access data cubes.
:raises ValueError: One of the parameters does not contain a valid value. Please check
the message of the exception for further information
"""
if " " in object_name:
raise ValueError("The object_name parameter may not contain whitespaces")
if len(object_name) == 0:
raise ValueError("The object_name parameter may not be empty")
if len(object_name) > 10:
raise ValueError("The object_name parameter may not exceed 10 characters")
if type(storage_location) is not enums.ObjectStorage:
raise ValueError(
f"The storage_location parameter only accepts "
f"{repr(enums.ObjectStorage)} values"
)
if result_count < 1:
raise ValueError("The result_count parameter value may not be below 0")
if result_count > 2500:
raise ValueError("The result_count parameter value may not exceed 2500")
query_parameters = self._base_parameter | {
"selection": object_name,
"area": storage_location.value,
"pagelength": result_count,
}
query_path = self._service_url + "/cubes"
return await tools.get_database_response(query_path, query_parameters)
async def cubes2statistic(
self,
object_name: str,
cube_code: typing.Optional[str] = None,
storage_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""
**PREMIUM ACCESS REQUIRED**
List the datacubes matching the ``object_name``
:param object_name: The identifier code of the statistic
:type object_name: str
:param cube_code: The identifier code of the cube. The usage of an asterisk
(``*``) is permitted as wildcard. This value acts as filter, only showing the data
cubes matching this code
:type cube_code: str, optional
:param storage_location: The storage location of the object, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.ObjectStorage.ALL`
:type storage_location: enums.ObjectStorage
:param result_count: The maximal amount of results which are returned by the database,
defaults to 100
:type result_count: int
:return: The response from the database parsed into a dict. If the ``Content-Type``
header indicated a non-JSON response the response is stored in a temporary file and
the file path will be returned
:rtype: dict, os.PathLike
:raises exceptions.GENESISPermissionError: The supplied account does not have the
permissions to access data cubes.
:raises ValueError: One of the parameters does not contain a valid value. Please check
the message of the exception for further information
"""
if " " in object_name:
raise ValueError("The object_name parameter may not contain whitespaces")
if "*" in object_name:
raise ValueError(
"The object_name parameter may not contain asterisks. Wildcards are "
"not permitted"
)
if len(object_name) == 0:
raise ValueError("The object_name parameter may not be empty")
if len(object_name) > 6:
raise ValueError("The object_name parameter may not exceed 6 characters")
if cube_code is not None and " " in cube_code:
raise ValueError("The cube_code parameter may not contain whitespaces")
if cube_code is not None and len(cube_code) == 0:
raise ValueError("The cube_code parameter may not be empty")
if cube_code is not None and len(cube_code) > 10:
raise ValueError("The cube_code parameter may not exceed 10 characters")
if type(storage_location) is not enums.ObjectStorage:
raise ValueError(
f"The storage_location parameter only accepts "
f"{repr(enums.ObjectStorage)} values"
)
if result_count < 1:
raise ValueError("The result_count parameter value may not be below 0")
if result_count > 2500:
raise ValueError("The result_count parameter value may not exceed 2500")
query_parameters = self._base_parameter | {
"name": object_name,
"selection": "" if cube_code is None else cube_code,
"area": storage_location.value,
"pagelength": result_count,
}
query_path = self._service_url + "/cubes2statistic"
return await tools.get_database_response(query_path, query_parameters)
async def cubes2variable(
self,
object_name: str,
cube_code: str,
storage_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""
**PREMIUM ACCESS REQUIRED**
List the datacubes matching the ``object_name``
:param object_name: The identifier code of the variable
:type object_name: str
:param cube_code: The identifier code of the cube. The usage of an asterisk
(``*``) is permitted as wildcard. This value acts as filter, only showing the
data cubes matching this code
:type cube_code: str, optional
:param storage_location: The storage location of the object, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.ObjectStorage.ALL`
:type storage_location: enums.ObjectStorage
:param result_count: The maximal amount of results which are returned by the
database,
defaults to 100
:type result_count: int
:return: The response from the database parsed into a dict. If the ``Content-Type``
header indicated a non-JSON response the response is stored in a temporary
file and
the file path will be returned
:rtype: dict, os.PathLike
:raises exceptions.GENESISPermissionError: The supplied account does not have the
permissions to access data cubes.
:raises ValueError: One of the parameters does not contain a valid value. Please check
the message of the exception for further information
"""
if " " in object_name:
raise ValueError("The object_name parameter may not contain whitespaces")
if "*" in object_name:
raise ValueError(
"The object_name parameter may not contain asterisks. Wildcards are "
"not permitted"
)
if len(object_name) == 0:
raise ValueError("The object_name parameter may not be empty")
if len(object_name) > 6:
raise ValueError("The object_name parameter may not exceed 6 characters")
if cube_code is not None and " " in cube_code:
raise ValueError("The cube_code parameter may not contain whitespaces")
if cube_code is not None and len(cube_code) == 0:
raise ValueError("The cube_code parameter may not be empty")
if cube_code is not None and len(cube_code) > 10:
raise ValueError("The cube_code parameter may not exceed 10 characters")
if type(storage_location) is not enums.ObjectStorage:
raise ValueError(
f"The storage_location parameter only accepts "
f"{repr(enums.ObjectStorage)} values"
)
if result_count < 1:
raise ValueError("The result_count parameter value may not be below 0")
if result_count > 2500:
raise ValueError("The result_count parameter value may not exceed 2500")
query_parameters = self._base_parameter | {
"name": object_name,
"selection": "" if cube_code is None else cube_code,
"area": storage_location.value,
"pagelength": result_count,
}
query_path = self._service_url + "/cubes2variable"
return await tools.get_database_response(query_path, query_parameters)
    async def jobs(
        self,
        object_name: str,
        search_by: enums.JobCriteria,
        sort_by: enums.JobCriteria,
        job_type: enums.JobType = enums.JobType.ALL,
        result_count: int = 100,
    ) -> dict:
        """
        Get a list of the jobs that match the parameters

        :param object_name: The identifier code of the job, acting as a filter for
            the returned jobs (1-50 characters, no whitespace). NOTE: unlike most
            other catalogue endpoints, this method rejects wildcard asterisks
            (``*``) in the filter
        :type object_name: str
        :param search_by: Criteria which shall be applied to the object_name
        :type search_by: enums.JobCriteria
        :param sort_by: Criteria by which the output shall be sorted
        :type sort_by: enums.JobCriteria
        :param job_type: The type of jobs which shall be returned, defaults to
            :py:enum:mem:`~genesis_api_wrapper.enums.JobType.ALL`
        :type job_type: enums.JobType
        :param result_count: The maximal amount of results which are returned by the
            database, defaults to 100 (1-2500)
        :type result_count: int
        :rtype: dict, os.PathLike
        :raises exceptions.GENESISPermissionError: The supplied account does not have the
            permissions to this resource.
        :raises ValueError: One of the parameters does not contain a valid value. Please check
            the message of the exception for further information
        """
        # ---- validate the filter and the enum parameters ----
        if " " in object_name:
            raise ValueError("The object_name parameter may not contain whitespaces")
        if "*" in object_name:
            raise ValueError(
                "The object_name parameter may not contain asterisks. Wildcards are "
                "not permitted"
            )
        if len(object_name) == 0:
            raise ValueError("The object_name parameter may not be empty")
        if len(object_name) > 50:
            raise ValueError("The object_name parameter may not exceed 50 characters")
        if type(search_by) is not enums.JobCriteria:
            raise ValueError(
                f"The search_by parameter only accepts values from the following enumeration: "
                f"{repr(enums.JobCriteria)}"
            )
        if type(sort_by) is not enums.JobCriteria:
            raise ValueError(
                f"The sort_by parameter only accepts values from the following enumeration: "
                f"{repr(enums.JobCriteria)}"
            )
        if type(job_type) is not enums.JobType:
            raise ValueError(
                f"The job_type parameter only accepts values from the following enumeration: "
                f"{repr(enums.JobType)}"
            )
        if result_count < 1:
            raise ValueError("The result_count parameter value may not be below 0")
        if result_count > 2500:
            raise ValueError("The result_count parameter value may not exceed 2500")
        # ---- build and send the query ----
        query_parameter = self._base_parameter | {
            'selection': object_name,
            'searchcriterion': search_by.value,
            'sortcriterion': sort_by.value,
            'type': job_type.value,
            'pagelength': result_count
        }
        query_path = self._service_url + '/jobs'
        return await tools.get_database_response(query_path, query_parameter)
async def modified_data(
self,
object_filter: str,
object_type: enums.ObjectType = enums.ObjectType.ALL,
updated_after: datetime.date = datetime.date.today() - datetime.timedelta(days=-7),
result_count: int = 100
) -> dict:
"""
**Due to an error in the database the parameter** ``result_count`` **is ignored by the
database**
Get a list of modified objects which were modified or uploaded after ``updated_after``.
The following objects are returned by this query:
- Tables
- Statistics
- Statistic updates
:param object_filter: The identifier code of the object. The usage of an asterisk
(``*``) is permitted as wildcard. This value acts as filter, only showing the
jobs matching this code
:type object_filter: str
:param object_type: The type of object that shall be listed
Allowed types (enums):
- :py:enum:mem:`~genesis_api_wrapper.enums.ObjectType.ALL`
- :py:enum:mem:`~genesis_api_wrapper.enums.ObjectType.TABLES`
- :py:enum:mem:`~genesis_api_wrapper.enums.ObjectType.STATISTICS`
- :py:enum:mem:`~genesis_api_wrapper.enums.ObjectType.STATISTIC_UPDATE`
:type object_type: enums.ObjectType
:param updated_after: The date after which the object needs to be modified or uploaded to
be returned by the database, defaults to 7 days before today
:type updated_after: datetime.date
:param result_count: The number of results that will be returned
:type result_count: int
"""
if " " in object_filter:
raise ValueError("The object_filter parameter may not contain whitespaces")
if len(object_filter) == 0:
raise ValueError("The object_filter parameter may not be empty")
if len(object_filter) > 50:
raise ValueError("The object_filter parameter may not exceed 50 characters")
if type(object_type) is not enums.ObjectType:
raise ValueError(
f"The object_type parameter only accepts values from the following enumeration: "
f"{repr(enums.ObjectType)}"
)
if object_type not in [enums.ObjectType.ALL, enums.ObjectType.TABLES,
enums.ObjectType.STATISTICS, enums.ObjectType.STATISTICS_UPDATE]:
raise ValueError(
f"The supplied object_type ({object_type}) is not allowed at this resource"
)
if updated_after > datetime.date.today():
raise ValueError(
f'The updated_after parameter is in the future'
)
# ==== Build the query data ====
query_path = self._service_url + '/modifieddata'
query_parameters = self._base_parameter | {
'selection': object_filter,
'type': object_type.value,
'date': tools.convert_date_to_string(updated_after),
'pagelength': result_count
}
# ==== Return the query data ====
return await tools.get_database_response(query_path, query_parameters)
async def quality_signs(self) -> dict:
"""
Get the list of quality signs from the database
:return: The Response containing the quality signs present in the database
:rtype: dict
"""
query_path = self._service_url + '/qualitysigns'
query_parameters = self._base_parameter
return await tools.get_database_response(query_path, query_parameters)
async def results(
self,
object_name: str,
storage_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100
) -> dict:
"""
Get a list of result tables matching the ``object_name``
:param object_name: The identifier code of the result tables. The usage of an asterisk
(``*``) is permitted as wildcard
:type object_name: str
:param storage_location: The storage location of the object, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.ObjectStorage.ALL`
:type storage_location: enums.ObjectStorage, optional
:param result_count: The maximal amount of results which are returned by the database,
defaults to 100
:type result_count: int, optional
:return: The response from the database parsed into a dict. If the ``Content-Type``
header indicated a non-JSON response the response is stored in a temporary file and
the file path will be returned
:rtype: dict, os.PathLike
:raises exceptions.GENESISPermissionError: The supplied account does not have the
permissions to access data cubes.
:raises ValueError: One of the parameters does not contain a valid value. Please check
the message of the exception for further information
"""
if " " in object_name:
raise ValueError("The object_name parameter may not contain whitespaces")
if len(object_name) == 0:
raise ValueError("The object_name parameter may not be empty")
if len(object_name) > 10:
raise ValueError("The object_name parameter may not exceed 10 characters")
if type(storage_location) is not enums.ObjectStorage:
raise ValueError(
f"The storage_location parameter only accepts "
f"{repr(enums.ObjectStorage)} values"
)
if result_count < 1:
raise ValueError("The result_count parameter value may not be below 0")
if result_count > 2500:
raise ValueError("The result_count parameter value may not exceed 2500")
# ==== Build the query path and parameters ====
query_path = self._service_url + '/results'
query_parameters = self._base_parameter | {
'selection': object_name,
'area': storage_location.value,
'pagelength': result_count
}
# ==== Get the response ====
return await tools.get_database_response(query_path, query_parameters)
async def statistics(
self,
object_name: str,
storage_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
search_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
sort_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
result_count: int = 100
) -> dict:
"""
Get a list of statistics matching the supplied code
:param object_name: The identifier code of the data cubes. The usage of an asterisk
(``*``) is permitted as wildcard
:type object_name: str
:param storage_location: The storage location of the object, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.ObjectStorage.ALL`
:type storage_location: enums.ObjectStorage, optional
:param search_by: Criteria which shall be applied to the ``object_name``, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.GenericCriteria.CODE`
:type search_by: enums.GenericCriteria, optional
:param sort_by: Criteria by which the result shall be sorted, defaults to
:py:enum:mem:`~genesis_api_wrapper.enums.GenericCriteria.CODE`
:type sort_by: enums.GenericCriteria, optional
:param result_count: The number of results that the response shall contain at it's maximum
:type result_count: int
:return: The response from the database parsed into a dict. If the ``Content-Type``
header indicated a non-JSON response the response is stored in a temporary file and
the file path will be returned
:rtype: dict, os.PathLike
:raises exceptions.GENESISPermissionError: The supplied account does not have the
permissions to access data cubes.
:raises ValueError: One of the parameters does not contain a valid value. Please check
the message of the exception for further information
"""
if " " in object_name:
raise ValueError("The object_name parameter may not contain whitespaces")
if len(object_name) == 0:
raise ValueError("The object_name parameter may not be empty")
if len(object_name) > 15:
raise ValueError("The object_name parameter may not exceed 15 characters")
if type(storage_location) is not enums.ObjectStorage:
raise ValueError(
f"The storage_location parameter only accepts "
f"{repr(enums.ObjectStorage)} values"
)
if type(search_by) is not enums.GenericCriteria:
raise ValueError(
f"The search_by parameter only accepts "
f"{repr(enums.GenericCriteria)} values"
)
if type(sort_by) is not enums.GenericCriteria:
raise ValueError(
f"The sort_by parameter only accepts "
f"{repr(enums.GenericCriteria)} values"
)
if result_count < 1:
raise ValueError("The result_count parameter value may not be below 0")
if result_count > 2500:
raise ValueError("The result_count parameter value may not exceed 2500")
# ==== Build query path and parameters ====
query_path = self._service_url + '/statistics'
query_parameters = self._base_parameter | {
'selection': object_name,
'searchcriterion': search_by.value,
'sortcriterion': sort_by.value,
'pagelength': result_count
}
return await tools.get_database_response(query_path, query_parameters)
async def statistics2variable(
self,
variable_name: str,
statistic_selector: str = None,
search_by: enums.StatisticCriteria = enums.StatisticCriteria.CODE,
sort_by: enums.StatisticCriteria = enums.StatisticCriteria.CODE,
object_area: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
):
"""Get a list of statistics which are referenced by the selected variable
:param variable_name: The name of the variable [required]
:type variable_name: str
:param statistic_selector: Filter for the statistics by the code of them, [optional,
stars allowed to wildcard, max. length 15]
:type statistic_selector: str
:param search_by: The field on which the code shall be applied, [optional, defaults
to `GENESISenums.StatisticCriteria.CODE`]
:type search_by: enums.StatisticCriteria
:param sort_by: The field by which the results are to be sorted, [optional, defaults
to `GENESISenums.StatisticCriteria.CODE`]
:type sort_by: enums.StatisticCriteria
:param object_area: The area in which the object is stored
:type object_area: enums.ObjectStorage
:param result_count: The number of results which are returned by the request
:type result_count: int
:return: The response returned by the server
"""
if variable_name is None:
raise ValueError("The variable name needs to be set to run a successful query")
if not 1 <= len(variable_name.strip()) <= 15:
raise ValueError("The variable names length needs to be between 1 and 15 signs")
if statistic_selector and not (1 <= len(statistic_selector.strip()) <= 15):
raise ValueError("The selectors length may not exceed 15 characters")
# Create the parameters object
_param = self._base_parameter | {
"name": variable_name,
"selection": "" if statistic_selector is None else statistic_selector,
"searchcriterion": search_by.value,
"sortcriterion": sort_by.value,
"pagelength": result_count,
"area": object_area.value,
}
_url = self._service_url + "/statistics2variable"
return await tools.get_database_response(_url, _param)
async def tables(
self,
table_selector: str,
object_area: enums.ObjectStorage = enums.ObjectStorage.ALL,
sort_by: enums.TableCriteria = enums.TableCriteria.CODE,
result_count: int = 100,
) -> dict:
"""Get a list of tables matching the selector from the selected object area
:param table_selector: The code of the table [required, stars (*) allowed for wildcards]
:param object_area: The area in which the table is stored [defaults to ALL]
:param sort_by: The criteria by which the results shall be sorted [defaults to CODE]
:param result_count: The number of results that shall be returned
:return: A list of tables matching the request
"""
if table_selector and not (1 <= len(table_selector.strip()) <= 15):
raise ValueError(
"The table selector needs to be at least 1 character and max 15 " "characters"
)
_param = self._base_parameter | {
"selection": table_selector,
"area": object_area.value,
"searchcriterion": "Code",
"sortcriterion": sort_by.value,
"pagelength": result_count,
}
_url = self._service_url + "/tables"
return await tools.get_database_response(_url, _param)
async def tables2statistics(
self,
statistics_name: str,
table_selector: str = None,
object_area: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of tables matching the table selector which are assigned to the
:param statistics_name: Name of the statistic [required, 1-15 characters]
:param table_selector: Filter for the tables code [optional, wildcards allowed]
:param object_area: The location of the statistic/tables
:param result_count: The number of tables in the response
:return:
"""
if statistics_name is None:
raise ValueError("The name of the statistic is required to get the tables")
if not 1 <= len(statistics_name.strip()) <= 15:
raise ValueError("The length of the statistics name needs to be between 1 and 15")
if table_selector and not (1 <= len(table_selector.strip()) <= 15):
raise ValueError(
"The table selector needs to be at least 1 character and max 15 " "characters"
)
_param = self._base_parameter | {
"name": statistics_name,
"selection": table_selector,
"area": object_area.value,
"pagelength": result_count,
}
_url = self._service_url + "/tables2statistic"
return await tools.get_database_response(_url, _param)
async def tables2variable(
self,
variable_name: str,
table_selector: str = None,
object_area: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of tables matching the table selector which are assigned to the
:param variable_name: Name of the statistic [required, 1-15 characters]
:param table_selector: Filter for the tables code [optional, wildcards allowed]
:param object_area: The location of the statistic/tables
:param result_count: The number of tables in the response
:return:
"""
if variable_name is None:
raise ValueError("The name of the statistic is required to get the tables")
if not 1 <= len(variable_name) <= 15:
raise ValueError("The length of the statistics name needs to be between 1 and 15")
if table_selector and not (1 <= len(table_selector.strip()) <= 15):
raise ValueError(
"The table selector needs to be at least 1 character and max 15 " "characters"
)
_param = self._base_parameter | {
"name": variable_name,
"selection": table_selector,
"area": object_area.value,
"pagelength": result_count,
}
_url = self._service_url + "/tables2variable"
return await tools.get_database_response(_url, _param)
async def terms(self, term_selector: str, result_count: int = 100):
"""Get a list of terms according to the selector
:param term_selector: The selector for the terms [required, wildcards allowed]
:param result_count: The number of terms which shall be returned
:return: The parsed response from the server
"""
if term_selector is None:
raise ValueError("The selector for the terms is a required parameter")
if not 1 <= len(term_selector.strip()) <= 15:
raise ValueError("The length of the selector needs to be between 1 and 15")
_param = self._base_parameter | {"selection": term_selector, "pagelength": result_count}
_url = self._service_url + "/terms"
return await tools.get_database_response(_url, _param)
async def timeseries(
self,
timeseries_selector: str,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of timeseries according to the selector and the location of the object
:param timeseries_selector: The selector for the timeseries [required, wildcards
allowed]
:param object_location: The area in which the object is stored [default:
``enums.ObjectStorage.ALL``]
:param result_count: The number of results that shall be returned
:return: The list of found timeseries
"""
if timeseries_selector is None:
raise ValueError("The selector is required for a successful database request")
if not 1 <= len(timeseries_selector.strip()) <= 15:
raise ValueError(
"The length of the selector needs to be between 1 and 15 " "characters"
)
_param = self._base_parameter | {
"selection": timeseries_selector,
"area": object_location.value,
"pagelength": result_count,
}
_url = self._service_url + "/timeseries"
return await tools.get_database_response(_url, _param)
async def timeseries2statistic(
self,
statistic_name: str,
timeseries_selector: typing.Optional[str] = None,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
):
"""Get a list of timeseries which are related to the selected statistic
:param statistic_name: Code of the statistic [required, length: 1-15 characters]
:param timeseries_selector: Filter for the timeseries by their code [optional,
wildcards allowed]
:param object_location: The storage location of the object
:param result_count: The number of results that shall be returned
:return: A response containing the list of timeseries which match the supplied
parameters
"""
if statistic_name is None:
raise ValueError("The name of the statistic is a required parameter")
if timeseries_selector and not (1 <= len(timeseries_selector.strip()) <= 15):
raise ValueError(
"If a timeseries_selector is supplied its length may not exceed " "15 characters"
)
# Build the query parameters
param = self._base_parameter | {
"name": statistic_name,
"selection": "" if timeseries_selector is None else timeseries_selector,
"area": object_location.value,
"pagelength": result_count,
}
url = self._service_url + "/timeseries2statistic"
return await tools.get_database_response(url, param)
async def timeseries2variable(
self,
variable_name: str,
timeseries_selector: typing.Optional[str] = None,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of timeseries which are related to the specified variable
:param variable_name: The code of the variable [required]
:param timeseries_selector: A filter for the returned timeseries [optional, wildcards
allowed]
:param object_location: The storage location in which the search shall be executed [
optional, defaults to ``enums.ObjectStorage.ALL``]
:param result_count: The number of results that shall be returned
:return: A parsed response containing the list of timeseries, if any were found
"""
if variable_name is None:
raise ValueError("The variable_name is a required parameter")
if not (1 <= len(variable_name.strip()) <= 15):
raise ValueError("The length of the variable name may not exceed 15 characters")
if timeseries_selector and not (1 <= len(timeseries_selector.strip()) <= 15):
raise ValueError(
"If a timeseries_selector is supplied its length may not exceed " "15 characters"
)
# Build the query parameters
_query_parameter = self._base_parameter | {
"name": variable_name,
"selection": "" if timeseries_selector is None else timeseries_selector,
"area": object_location.value,
"pagelength": result_count,
}
_url = self._service_url + "/timeseries2variable"
return await tools.get_database_response(_url, _query_parameter)
async def values(
self,
value_filter: str,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
search_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
sort_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
result_count: int = 100,
) -> dict:
"""Get a list of values specified by the filter
:param value_filter: The filter for the value identifications [optional, wildcards
allowed]
:param object_location: The storage location which shall be used during the search [
optional, defaults to ``GenericCriteria.CODE``]
:param search_by: The criteria which is used in combination to the value_filter [
optional, defaults to ``GenericCriteria.CODE``]
:param sort_by: The criteria by which the results are sorted [optional, defaults to
``GenericCriteria.CODE``]
:param result_count: The number of results returned
:return: A parsed response containing the list of values
"""
# Check the received variables
if value_filter is None:
raise ValueError("The value_filter is a required parameter")
if not 1 <= len(value_filter.strip()) <= 15:
raise ValueError(
"The length of the value_filter needs to be at least 1 character "
"and may not exceed 15 characters"
)
if not 1 <= result_count <= 2500:
raise ValueError(
"The number of results returned needs to be greater than 1, "
"but may not exceed 2500"
)
# Build the query parameters
params = self._base_parameter | {
"selection": value_filter,
"area": object_location.value,
"searchcriterion": search_by.value,
"sortcriterion": sort_by.value,
"pagelength": result_count,
}
_url = self._service_url + "/values"
return await tools.get_database_response(_url, params)
async def values2variable(
self,
variable_name: str,
value_filter: typing.Optional[str] = None,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
search_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
sort_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
result_count: int = 100,
) -> dict:
"""Get a list of characteristic values for the supplied variable
:param variable_name: The code of the variable
:param value_filter: A filter for the returned values [optional, wildcards allowed]
:param object_location: The storage location of the variable
:param search_by: Criteria which is applied to the ``value_filter``
:param sort_by: Criteria which is used to sort the results
:param result_count: The number of characteristic values which may be returned
:return: A parsed response from the server containing the list of characteristic values
"""
# Check if the variable name is set correctly
if not variable_name or len(variable_name.strip()) == 0:
raise ValueError("The variable_name is a required parameter and may not be empty")
if not (1 <= len(variable_name.strip()) <= 15):
raise ValueError(
"The length of the variable_name may not exceed 15 characters "
"and may not be below 1 character"
)
if "*" in variable_name:
raise ValueError("The variable_name may not contain any wildcards (*)")
# Check the value filter
if value_filter and not (1 <= len(value_filter.strip()) <= 15):
raise ValueError(
"The length of the value_filter may not exceed 15 characters and "
"may not be below 1"
)
# Check the number of results returned
if not 1 <= result_count <= 2500:
raise ValueError(
"The number of results returned needs to be greater than 1, "
"but may not exceed 2500"
)
# Create the query parameter
_param = self._base_parameter | {
"name": variable_name,
"selection": value_filter,
"area": object_location.value,
"searchcriterion": search_by.value,
"sortcriterion": sort_by.value,
"pagelength": result_count,
}
# Build the url for the call
_url = self._service_url + "/values2variable"
# Make the call and await the response
return await tools.get_database_response(_url, _param)
async def variables(
self,
variable_filter: str,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
search_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
sort_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
variable_type: enums.VariableType = enums.VariableType.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of variables matching the filter and object location
:param variable_filter: Identification Code of the variable [required, wildcards
allowed]
:param object_location: The storage location of the object [optional]
:param search_by: Criteria which is applied to the variable filter [optional]
:param sort_by: Criteria by which the result is sorted [optional]
:param variable_type: The type of variable [optional]
:param result_count: The number of results that may be returned [optional]
:return: A parsed response from the server containing the variables
"""
# Check if the filter is supplied correctly
if not variable_filter or len(variable_filter.strip()) == 0:
raise ValueError("The variable_filter is a required parameter any may not be empty")
if not (1 <= len(variable_filter.strip()) <= 6):
raise ValueError("The variable_filter may only contain up to 6 characters")
# Check if the result count is set properly
if not (1 <= result_count <= 2500):
raise ValueError("The number of possible results needs to be between 1 and 2500")
# Build the query parameters
_param = self._base_parameter | {
"selection": variable_filter,
"area": object_location.value,
"searchcriterion": search_by.value,
"sortcriterion": sort_by.value,
"type": variable_type.value,
"pagelength": result_count,
}
# Build the url
_url = self._service_url + "/variables"
# Return the parsed result
return await tools.get_database_response(_url, _param)
async def variables2statistic(
self,
statistic_name: str,
variable_filter: typing.Optional[str] = None,
object_location: enums.ObjectStorage = enums.ObjectStorage.ALL,
search_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
sort_by: enums.GenericCriteria = enums.GenericCriteria.CODE,
variable_type: enums.VariableType = enums.VariableType.ALL,
result_count: int = 100,
) -> dict:
"""Get a list of variables related to the supplied statistic
:param statistic_name: The identification of the statistic [required]
:param variable_filter: Filter for the returned variables [optional, wildcards allowed]
:param object_location: Storage location which is used for the search [optional]
:param search_by: Criteria which is applied to the variable_filter [optional]
:param sort_by: Criteria specifying how the results are to be sorted [optional]
:param variable_type: The type of variables that shall be returned [optional]
:param result_count: Max. amount of results returned by the server [optional]
:return: A parsed response containing a list of variables
"""
# Check if the statistic_name is set correctly
if not statistic_name or len(statistic_name.strip()) == 0:
raise ValueError("The statistic_name is a required parameter")
if not (1 <= len(statistic_name.strip()) <= 15):
raise ValueError("The length of statistic_name may not exceed 15 characters")
if "*" in statistic_name:
raise ValueError("The statistic_name may not contain wildcards (*)")
# Check if the variable_filter is set correctly if set
if variable_filter and not (1 <= len(variable_filter.strip()) <= 6):
raise ValueError(
"The variable_filter may not exceed the length of 6 characters, "
"if it is supplied"
)
# Build the query parameters
_param = self._base_parameter | {
"name": statistic_name,
"selection": variable_filter,
"area": object_location.value,
"searchcriterion": search_by.value,
"sortcriterion": sort_by.value,
"type": variable_type.value,
"pagelength": result_count,
}
# Build the query path
_path = self._service_url + "/variables2statistic"
return await tools.get_database_response(_path, _param)
| 48.53187 | 98 | 0.636279 | 46,384 | 0.998687 | 0 | 0 | 0 | 0 | 43,836 | 0.943826 | 25,858 | 0.556745 |
89dffdff4ab352309290e6973585dc0df6b2adbd | 2,440 | py | Python | python/rsa_encrypt_decrypt.py | hipro/hipro | 49e8c9751109839fdef9a6bc812e3b92fdff7d4e | [
"Apache-2.0"
] | null | null | null | python/rsa_encrypt_decrypt.py | hipro/hipro | 49e8c9751109839fdef9a6bc812e3b92fdff7d4e | [
"Apache-2.0"
] | null | null | null | python/rsa_encrypt_decrypt.py | hipro/hipro | 49e8c9751109839fdef9a6bc812e3b92fdff7d4e | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""加密算法:公钥(私钥)加密,私钥解密"""
from Crypto.PublicKey import RSA
from Crypto import Random
DATA = 'Hello, word!'
PRIVATE_KEY_PEM = """-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQDB3c0nwVs6koPkpt6REeT07jK7m9qE9BmDw1Zl55T66rGfKM3g
1DFBq7jtcZ+xcgYAGgvJWPW16nylag/1lVNUxMShm2jlp3MwuBNKRvrXP2u29j9v
AAlM9lMLXzt0Ui4ZfLF9abpti5oD9tWy29Sp9Lt+0OWHKxp1QRazmykQeQIDAQAB
AoGAdL4FMcB9GFtscz+NXVyiPGBISrOCtndr+e2iVIFNNIAp8AcZWx9MfhhTpyC6
IpfgRyVoHZqldCO9Zbrl22RNpfybrP/2BeHx9xJWDXLXNAvDkZNCokCtc/bZYaQU
XCSYHUAmV078E0xZShwMwGu1YgZlz9er3XsqqBrT9ujDjIECQQDTOt+ukShtMJQd
6soNTA5+LU/kA+MKRB7oNPoviEMRRGeonD2ZXbjmzY6i1XJ/YsKPVuMkkvYCtPEY
KcvtCSApAkEA6vTMUBViRTr1Db63WBGpobAr9V8kiiMn6q2TuRBITsyijOgL6u+X
CrpRf+KDVyWC06ZHS/UFPPi+lubIgAU30QJAKtMp3HOTlaeer/4VHuMHoS9AnkLn
egJbncp32sEuj8almXqrxndI8IpGW98YipkURwlfnd+pvty+cJ6wuIr8GQJBAN/2
33cLGzSQ4ZzrigtqMr+Mlip8OfFvV5JtSR4kdjie+efFHe8h2WGBf0SfH8GHYTDt
FJNECW04Uzy22rKlxrECQQCtOkedu7SDr4tb3miKPNy5jyoVBRIR4QElE6DfZoDX
sxf4NowzBDwLbhYHNzSCl0xlIAA/xvFtRkEDtlYjq58n
-----END RSA PRIVATE KEY-----"""
PUBLIC_KEY_PEM = """-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDB3c0nwVs6koPkpt6REeT07jK7
m9qE9BmDw1Zl55T66rGfKM3g1DFBq7jtcZ+xcgYAGgvJWPW16nylag/1lVNUxMSh
m2jlp3MwuBNKRvrXP2u29j9vAAlM9lMLXzt0Ui4ZfLF9abpti5oD9tWy29Sp9Lt+
0OWHKxp1QRazmykQeQIDAQAB
-----END PUBLIC KEY-----"""
def _encrypt_by_public():
random_func = Random.new().read
public_key = RSA.importKey(PUBLIC_KEY_PEM)
encrypted = public_key.encrypt(DATA, random_func)
return encrypted
def _encrypt_by_private():
random_func = Random.new().read
private_key = RSA.importKey(PRIVATE_KEY_PEM)
encrypted = private_key.encrypt(DATA, random_func)
return encrypted
def _decrypt_by_private(msg_encrypt):
private_key = RSA.importKey(PRIVATE_KEY_PEM)
decrypted = private_key.decrypt(msg_encrypt)
return decrypted
def _decrypt_by_public_err(msg_encrypt):
"""无效"""
public_key = RSA.importKey(PUBLIC_KEY_PEM)
decrypted = public_key.decrypt(msg_encrypt)
return decrypted
if __name__ == '__main__':
print(DATA, _decrypt_by_private(_encrypt_by_public()))
print(DATA, _decrypt_by_private(_encrypt_by_private()))
try:
print(DATA, _decrypt_by_public_err(_encrypt_by_public()))
except TypeError as e1:
print(DATA, e1)
try:
print(DATA, _decrypt_by_public_err(_encrypt_by_private()))
except TypeError as e2:
print(DATA, e2)
| 35.362319 | 66 | 0.812295 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,276 | 0.515347 |
89e0907a323d313394f560697c26056210dde316 | 375 | py | Python | users/views.py | migleankstutyte/kaavapino | 1fd0b642a66f1ec7c61decf46433dc9f0bf3ed8e | [
"MIT"
] | 3 | 2019-02-07T14:47:00.000Z | 2022-02-15T14:09:38.000Z | users/views.py | migleankstutyte/kaavapino | 1fd0b642a66f1ec7c61decf46433dc9f0bf3ed8e | [
"MIT"
] | 74 | 2017-12-13T09:18:04.000Z | 2022-03-11T23:29:59.000Z | users/views.py | migleankstutyte/kaavapino | 1fd0b642a66f1ec7c61decf46433dc9f0bf3ed8e | [
"MIT"
] | 8 | 2017-12-13T09:31:20.000Z | 2022-02-15T13:10:34.000Z | from django.contrib.auth import get_user_model
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet
from users.serializers import UserSerializer
class UserViewSet(mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet):
queryset = get_user_model().objects.all()
serializer_class = UserSerializer
lookup_field = "uuid"
| 31.25 | 84 | 0.824 | 194 | 0.517333 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.016 |
89e10d148c9fdea8e999e4febe67aedd65bd1469 | 59 | py | Python | interpreted/python.py | bupboi1337/Hello-World-Collection | 989a25cb2916a3fa0c8d5e21c7b857a0f89ab49f | [
"MIT"
] | 2 | 2021-12-03T10:49:15.000Z | 2021-12-03T17:28:17.000Z | interpreted/python.py | bupboi1337/Hello-World-Collection | 989a25cb2916a3fa0c8d5e21c7b857a0f89ab49f | [
"MIT"
] | null | null | null | interpreted/python.py | bupboi1337/Hello-World-Collection | 989a25cb2916a3fa0c8d5e21c7b857a0f89ab49f | [
"MIT"
] | null | null | null | print("Hello, World!")
print("This uses the MIT Licence!")
| 19.666667 | 35 | 0.694915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.728814 |
89e1e0fdcc58fa3523c28ea0543829f7666a6db0 | 5,597 | py | Python | survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py | alisiahkoohi/survae_flows | e1747b05524c7ab540a211ed360ab3e67bc3e96d | [
"MIT"
] | 262 | 2020-07-05T20:57:44.000Z | 2022-03-28T02:24:43.000Z | survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py | alisiahkoohi/survae_flows | e1747b05524c7ab540a211ed360ab3e67bc3e96d | [
"MIT"
] | 17 | 2020-08-15T05:43:34.000Z | 2022-01-31T12:24:21.000Z | survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py | alisiahkoohi/survae_flows | e1747b05524c7ab540a211ed360ab3e67bc3e96d | [
"MIT"
] | 35 | 2020-08-24T06:55:37.000Z | 2022-02-11T05:17:58.000Z | import numpy as np
import torch
import torch.nn as nn
import torchtestcase
import unittest
from survae.transforms.bijections.conditional.coupling import *
from survae.nn.layers import ElementwiseParams, ElementwiseParams2d, scale_fn
from survae.tests.transforms.bijections.conditional import ConditionalBijectionTest
class ConditionalGaussianMixtureCouplingBijectionTest(ConditionalBijectionTest):
def test_bijection_is_well_behaved(self):
num_mix = 8
batch_size = 10
elementwise_params = 3 * num_mix
self.eps = 5e-5
for shape in [(6,),
(6,4,4)]:
for num_condition in [None, 1]:
with self.subTest(shape=shape, num_condition=num_condition):
x = torch.randn(batch_size, *shape)
context = torch.randn(batch_size, *shape)
if num_condition is None:
if len(shape) == 1: net = nn.Sequential(nn.Linear(3+6,3*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(3+6,3*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
else:
if len(shape) == 1: net = nn.Sequential(nn.Linear(1+6,5*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(1+6,5*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
bijection = ConditionalGaussianMixtureCouplingBijection(net, num_mixtures=num_mix, num_condition=num_condition)
self.assert_bijection_is_well_behaved(bijection, x, context, z_shape=(batch_size, *shape))
z, _ = bijection.forward(x, context=context)
if num_condition is None:
self.assertEqual(x[:,:3], z[:,:3])
else:
self.assertEqual(x[:,:1], z[:,:1])
class ConditionalLogisticMixtureCouplingBijectionTest(ConditionalBijectionTest):
def test_bijection_is_well_behaved(self):
num_mix = 8
batch_size = 10
elementwise_params = 3 * num_mix
self.eps = 5e-5
for shape in [(6,),
(6,4,4)]:
for num_condition in [None, 1]:
with self.subTest(shape=shape, num_condition=num_condition):
x = torch.randn(batch_size, *shape)
context = torch.randn(batch_size, *shape)
if num_condition is None:
if len(shape) == 1: net = nn.Sequential(nn.Linear(3+6,3*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(3+6,3*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
else:
if len(shape) == 1: net = nn.Sequential(nn.Linear(1+6,5*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(1+6,5*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
bijection = ConditionalLogisticMixtureCouplingBijection(net, num_mixtures=num_mix, num_condition=num_condition)
self.assert_bijection_is_well_behaved(bijection, x, context, z_shape=(batch_size, *shape))
z, _ = bijection.forward(x, context=context)
if num_condition is None:
self.assertEqual(x[:,:3], z[:,:3])
else:
self.assertEqual(x[:,:1], z[:,:1])
class ConditionalCensoredLogisticMixtureCouplingBijectionTest(ConditionalBijectionTest):
def test_bijection_is_well_behaved(self):
num_bins = 16
num_mix = 8
batch_size = 10
elementwise_params = 3 * num_mix
self.eps = 1e-6
for shape in [(6,),
(6,4,4)]:
for num_condition in [None, 1]:
with self.subTest(shape=shape, num_condition=num_condition):
x = torch.rand(batch_size, *shape)
context = torch.randn(batch_size, *shape)
if num_condition is None:
if len(shape) == 1: net = nn.Sequential(nn.Linear(3+6,3*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(3+6,3*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
else:
if len(shape) == 1: net = nn.Sequential(nn.Linear(1+6,5*elementwise_params), ElementwiseParams(elementwise_params))
if len(shape) == 3: net = nn.Sequential(nn.Conv2d(1+6,5*elementwise_params, kernel_size=3, padding=1), ElementwiseParams2d(elementwise_params))
bijection = ConditionalCensoredLogisticMixtureCouplingBijection(net, num_mixtures=num_mix, num_bins=num_bins, num_condition=num_condition)
self.assert_bijection_is_well_behaved(bijection, x, context, z_shape=(batch_size, *shape))
z, _ = bijection.forward(x, context=context)
if num_condition is None:
self.assertEqual(x[:,:3], z[:,:3])
else:
self.assertEqual(x[:,:1], z[:,:1])
if __name__ == '__main__':
unittest.main()
| 55.415842 | 167 | 0.60586 | 5,222 | 0.933 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.001787 |
89e266ad4a5dbc97ce90b8acd022e9cd85eb5d18 | 408 | py | Python | shop_website/shop/migrations/0002_auto_20200228_1533.py | omar00070/django-shopping-website | af2741b900b60631349ea2e6de17586994e31680 | [
"MIT"
] | null | null | null | shop_website/shop/migrations/0002_auto_20200228_1533.py | omar00070/django-shopping-website | af2741b900b60631349ea2e6de17586994e31680 | [
"MIT"
] | null | null | null | shop_website/shop/migrations/0002_auto_20200228_1533.py | omar00070/django-shopping-website | af2741b900b60631349ea2e6de17586994e31680 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.3 on 2020-02-28 15:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shop', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='product',
name='photo',
field=models.ImageField(default='default.jpg', upload_to='product_image'),
),
]
| 21.473684 | 86 | 0.60049 | 315 | 0.772059 | 0 | 0 | 0 | 0 | 0 | 0 | 111 | 0.272059 |
89e27c59e9c3624f60d5be2d955f2c4ed96ee92e | 4,812 | py | Python | machines/migrations/0001_initial.py | minikdo/domino | 16ccc5b36c730c8bee223024e02b4984feedef26 | [
"Apache-2.0"
] | null | null | null | machines/migrations/0001_initial.py | minikdo/domino | 16ccc5b36c730c8bee223024e02b4984feedef26 | [
"Apache-2.0"
] | 1 | 2022-02-10T10:54:20.000Z | 2022-02-10T10:54:20.000Z | machines/migrations/0001_initial.py | minikdo/domino | 16ccc5b36c730c8bee223024e02b4984feedef26 | [
"Apache-2.0"
] | 1 | 2018-11-19T23:17:52.000Z | 2018-11-19T23:17:52.000Z | # Generated by Django 2.2.3 on 2019-07-21 01:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Device',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField(blank=True, null=True)),
('name', models.CharField(max_length=150, null=True)),
('price', models.DecimalField(blank=True, decimal_places=0, max_digits=5, null=True)),
('company', models.CharField(blank=True, max_length=150, null=True)),
('invoice', models.CharField(blank=True, max_length=150, null=True)),
],
),
migrations.CreateModel(
name='DeviceType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, null=True, unique=True)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, null=True)),
('address', models.CharField(max_length=150, null=True)),
],
),
migrations.CreateModel(
name='Machine',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('FQDN', models.CharField(blank=True, max_length=50, null=True)),
('date', models.DateField(blank=True, null=True)),
('form', models.CharField(blank=True, max_length=50, null=True)),
('bios', models.CharField(blank=True, max_length=50, null=True)),
('prod', models.CharField(blank=True, max_length=150, null=True)),
('vendor', models.CharField(blank=True, max_length=150, null=True)),
('OS', models.CharField(blank=True, max_length=150, null=True)),
('kernel', models.CharField(blank=True, max_length=150, null=True)),
('CPU', models.CharField(blank=True, max_length=150, null=True)),
('cores', models.CharField(blank=True, max_length=150, null=True)),
('arch', models.CharField(blank=True, max_length=150, null=True)),
('mem', models.CharField(blank=True, max_length=250, null=True)),
('HDD', models.CharField(blank=True, max_length=250, null=True)),
('disk', models.CharField(blank=True, max_length=250, null=True)),
('diskfree', models.CharField(blank=True, max_length=250, null=True)),
('IPs', models.CharField(blank=True, max_length=350, null=True)),
('gateway', models.CharField(blank=True, max_length=250, null=True)),
('gate_iface', models.CharField(blank=True, max_length=250, null=True)),
('location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='machines.Location')),
],
),
migrations.CreateModel(
name='Service',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('description', models.CharField(max_length=300)),
('device', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='machines.Device')),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='machines.Machine')),
],
),
migrations.AddField(
model_name='device',
name='location',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='machines.Location'),
),
migrations.AddField(
model_name='device',
name='machine',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='device', to='machines.Machine'),
),
migrations.AddField(
model_name='device',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='machines.DeviceType'),
),
]
| 50.125 | 151 | 0.579177 | 4,686 | 0.973815 | 0 | 0 | 0 | 0 | 0 | 0 | 550 | 0.114298 |
89e28e497194558b332c7d2514764dab8aee4b55 | 813 | py | Python | src/openweather/OpenWeatherParser.py | ralfe/wpi | ee077fb9fd1b2e8759e6195b3c36f24848eea6f5 | [
"MIT"
] | null | null | null | src/openweather/OpenWeatherParser.py | ralfe/wpi | ee077fb9fd1b2e8759e6195b3c36f24848eea6f5 | [
"MIT"
] | null | null | null | src/openweather/OpenWeatherParser.py | ralfe/wpi | ee077fb9fd1b2e8759e6195b3c36f24848eea6f5 | [
"MIT"
] | null | null | null | __author__ = 'renderle'
class OpenWeatherParser:
def __init__(self, data):
self.data = data
def getValueFor(self, idx):
return self.data['list'][idx]
def getTemperature(self):
earlymorningValue = self.getValueFor(0)['main']['temp_max']
morningValue = self.getValueFor(1)['main']['temp_max']
mixedTemp = (earlymorningValue + 2*morningValue)/3
return mixedTemp;
def getCloudFactor(self):
earlymorningValue = self.getValueFor(0)['clouds']['all']
morningValue = self.getValueFor(1)['clouds']['all']
mixedCloudFactor = (earlymorningValue + 3*morningValue)/4
return mixedCloudFactor
def getOverallWeatherCondition(self):
morningValue = self.getValueFor(1)['weather'][0]['id']
return morningValue
| 30.111111 | 67 | 0.654367 | 785 | 0.96556 | 0 | 0 | 0 | 0 | 0 | 0 | 87 | 0.107011 |
89e34b71011f62cacdee9a2e71e2e2ea742bcf2b | 8,300 | py | Python | docs/make_docs.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | docs/make_docs.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | docs/make_docs.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | # Convert examples in this folder to their corresponding .md files in docs/examples
import re
import inspect
import textwrap
import datetime
import yaml
from pathlib import Path
def hide(line):
return ":hide:" in line
def build_examples():
current = Path(__file__)
folder = current.parent
for fname in folder.rglob("*.py"):
if fname.name.startswith("_"):
continue
if fname.name == current.name:
continue
process(fname)
class Markdown:
def __init__(self, content):
while content:
if not content[0].strip():
content.pop(0)
else:
break
while content:
if not content[-1].strip():
content.pop()
else:
break
self.content = content
def print(self, fp):
for line in self.content:
if line.startswith("# "):
fp.write(line[2:])
else:
fp.write("\n")
fp.write("\n")
class Python(Markdown):
def print(self, fp):
if not self.content:
return
fp.write("```python\n")
for line in self.content:
fp.write(line)
fp.write("```\n\n")
def process(fname: Path):
print(fname)
content = []
with fname.open("r") as fp:
current = []
state = "markdown"
for line in fp:
if hide(line):
continue
if line.startswith("#"):
if state == "python":
if current:
content.append(Python(current))
current = []
state = "markdown"
current.append(line)
else:
if state == "markdown":
if current:
content.append(Markdown(current))
current = []
state = "python"
current.append(line)
if current:
if state == "markdown":
content.append(Markdown(current))
else:
content.append(Python(current))
output = fname.parent / (fname.name[:-3] + ".md")
with output.open("w") as fp:
for c in content:
c.print(fp)
def build_api():
import autogoal
import autogoal.contrib
import autogoal.datasets
import autogoal.grammar
import autogoal.kb
import autogoal.ml
import autogoal.sampling
import autogoal.search
index = []
generate(autogoal, index)
lines = yaml.dump(index)
with open(Path(__file__).parent.parent / "mkdocs-base.yml", "r") as fr:
with open(Path(__file__).parent.parent / "mkdocs.yml", "w") as fw:
for line in fr:
fw.write(line)
fw.write(" - API:\n")
for line in lines.splitlines():
fw.write(f" {line}\n")
def generate(module, index, visited=set()):
name = module.__name__
if name in visited:
return
visited.add(name)
print(name)
path = Path(__file__).parent / "api" / (name + ".md")
submodules = inspect.getmembers(
module,
lambda m: inspect.ismodule(m)
and m.__name__.startswith("autogoal")
and not "._" in m.__name__,
)
classes = inspect.getmembers(
module,
lambda m: inspect.isclass(m)
and m.__module__.startswith(module.__name__)
and not m.__name__.startswith("_"),
)
functions = inspect.getmembers(
module,
lambda m: inspect.isfunction(m)
and m.__module__.startswith(module.__name__)
and not m.__name__.startswith("_"),
)
members_index = [{"Index": f"api/{name}.md"}]
index.append({name: members_index})
with open(path, "w") as fp:
generate_module(module, name, fp)
if submodules:
fp.write("\n## Submodules\n\n")
for _, submodule in submodules:
fp.write(f"* [{submodule.__name__}](../{submodule.__name__}/)\n")
generate(submodule, index)
if classes:
fp.write("\n## Classes\n\n")
for _, clss in classes:
generate_class(clss, name, fp)
members_index.append({clss.__name__: f"api/{name}.{clss.__name__}.md"})
if functions:
fp.write("\n## Functions\n\n")
for _, func in functions:
generate_func(func, name, fp)
members_index.append({func.__name__: f"api/{name}.{func.__name__}.md"})
def format_param(p: inspect.Parameter) -> str:
if p.default != p.empty:
return f"{p.name}={repr(p.default)}"
if p.kind == inspect.Parameter.VAR_POSITIONAL:
return f"*{p.name}"
if p.kind == inspect.Parameter.VAR_KEYWORD:
return f"**{p.name}"
return f"{p.name}"
def format_signature(obj, name=None) -> str:
if name is None:
name = obj.__name__
signature = inspect.signature(obj)
params = ", ".join(format_param(p) for p in signature.parameters.values())
return f"{name}({params})"
def generate_class(clss, name, fp):
print(name, clss)
doc = inspect.getdoc(clss)
fp.write(f"### [`{clss.__name__}`](../{name}.{clss.__name__})\n")
if doc:
fp.write(f"> {doc.splitlines()[0]}\n\n")
fp = open(Path(__file__).parent / "api" / f"{name}.{clss.__name__}.md", "w")
fp.write(f"# `{name}.{clss.__name__}`\n\n")
src = inspect.getsourcefile(clss)
if src:
line = inspect.getsourcelines(clss)[1]
src = src.replace(
"/usr/lib/python3/dist-packages/",
"https://github.com/autogal/autogoal/blob/main/",
)
src_link = f"> [📝]({src}#L{line})\n"
fp.write(src_link)
fp.write(f"> `{format_signature(clss.__init__, clss.__name__)}`\n\n")
if doc:
fp.write(doc)
fp.write("\n")
members = inspect.getmembers(
clss, lambda m: inspect.isfunction(m) and not m.__name__.startswith("_")
)
for _, member in members:
generate_func(member, name, fp, indent="###", new_file=False)
def generate_func(func, name, fp, indent="###", new_file=True):
print(name, func)
doc = inspect.getdoc(func)
if new_file:
fp.write(f"{indent} [`{func.__name__}`](../{name}.{func.__name__})\n")
if doc:
fp.write(f"> {doc.splitlines()[0]}\n\n")
fp = open(Path(__file__).parent / "api" / f"{name}.{func.__name__}.md", "w")
fp.write(f"# `{name}.{func.__name__}`\n\n")
else:
fp.write(f"{indent} `{func.__name__}`\n\n")
src = inspect.getsourcefile(func)
if src:
line = inspect.getsourcelines(func)[1]
src = src.replace(
"/usr/lib/python3/dist-packages/",
"https://github.com/autogoal/autogoal/blob/main/",
)
src_link = f"> [📝]({src}#L{line})\n"
fp.write(src_link)
fp.write(f"> `{format_signature(func)}`\n\n")
if doc:
fp.write(doc)
fp.write("\n")
if new_file:
fp.close()
def generate_module(module, name, fp):
doc = module.__doc__
fp.write(f"# `{module.__name__}`\n")
if doc is not None:
fp.write(doc)
def build_schemas():
from autogoal.kb._data import draw_data_hierarchy
draw_data_hierarchy(str(Path(__file__).parent / "guide" / "datatypes"))
def make_algorithms_table():
from autogoal.contrib import find_classes
all_classes = find_classes()
with open(Path(__file__).parent / "guide" / "algorithms.md", "w") as fp:
fp.write(textwrap.dedent(
"""
|Algorithm|Dependencies|Input|Output|
|--|--|--|--|
"""
))
for clss in all_classes:
print(clss)
signature = inspect.signature(clss.run)
dependency = clss.__module__.split('.')[2]
if dependency.startswith('_'):
dependency = ""
fp.write(f"| {clss.__name__} | {dependency} | {signature.parameters['input'].annotation} | {signature.return_annotation} | \n")
if __name__ == "__main__":
build_examples()
build_schemas()
make_algorithms_table()
build_api()
| 25.696594 | 139 | 0.546145 | 773 | 0.093065 | 0 | 0 | 0 | 0 | 0 | 0 | 1,573 | 0.189381 |
89e6cd7ae91131db466757761efa7d854f285ccc | 21,017 | py | Python | MVMOO/multi_mixed_optimiser.py | jmanson377/MVMOO | 0e3d5ca5c1a9dd0fb9ff949e8e0d0448ae78a535 | [
"Apache-2.0"
] | 5 | 2021-07-10T10:22:14.000Z | 2022-03-28T05:16:21.000Z | MVMOO/multi_mixed_optimiser.py | jmanson377/MVMOO | 0e3d5ca5c1a9dd0fb9ff949e8e0d0448ae78a535 | [
"Apache-2.0"
] | 2 | 2021-01-19T15:28:32.000Z | 2021-08-16T14:21:47.000Z | MVMOO/multi_mixed_optimiser.py | jmanson377/MVMOO | 0e3d5ca5c1a9dd0fb9ff949e8e0d0448ae78a535 | [
"Apache-2.0"
] | 1 | 2022-03-28T05:16:25.000Z | 2022-03-28T05:16:25.000Z | import numpy as np
from scipy.stats import norm
from .mixed_optimiser import MVO
from scipy.optimize import shgo, differential_evolution, dual_annealing
import scipy as stats
class MVMOO(MVO):
"""
Multi variate mixed variable optimisation
"""
def __init__(self, input_dim=1, num_qual=0, num_obj=2, bounds=None, k_type='matern3', dist='manhattan', scale='bounds'):
"""
Initialisation of the class
"""
super().__init__(input_dim=input_dim, num_qual=num_qual, bounds=bounds, dist=dist, k_type=k_type)
self.num_obj = num_obj
self.scale = scale
def generatemodels(self, X, Y, scale=True, variance=1.0):
    """Fit one surrogate model per objective and return them as a list.

    With ``scale=True`` the targets and inputs are normalised first and the
    scaled copies are cached on ``self`` (``Xscaled`` / ``Yscaled``) for
    later reuse; note that ``variance`` is only forwarded on this path,
    matching the original behaviour of the unscaled branch.
    """
    self.nsamples, n_objectives = np.shape(Y)
    fitted = []
    if scale is True:
        self.Yscaled = self.scaley(Y)
        self.Xscaled = self.scaleX(X, mode=self.scale)
        for idx in range(n_objectives):
            self.fitmodel(self.Xscaled, self.Yscaled[:, idx].reshape((-1, 1)),
                          variance=variance)
            fitted.append(self.model)
        return fitted
    for idx in range(n_objectives):
        self.fitmodel(X, Y[:, idx].reshape((-1, 1)))
        fitted.append(self.model)
    return fitted
def is_pareto_efficient(self, costs, return_mask=True):
    """
    Find the Pareto-efficient points of a minimisation problem.

    :param costs: An (n_points, n_costs) array
    :param return_mask: True to return a boolean mask
    :return: If ``return_mask`` is True, an (n_points,) boolean array;
        otherwise an integer array with the indices of the efficient points.
    """
    candidate_idx = np.arange(costs.shape[0])
    total_points = costs.shape[0]
    probe = 0  # next row of the (shrinking) cost array to test against
    while probe < len(costs):
        # Keep points strictly better than the probe in at least one
        # objective, plus the probe itself.
        keep = np.any(costs < costs[probe], axis=1)
        keep[probe] = True
        candidate_idx = candidate_idx[keep]
        costs = costs[keep]
        probe = np.sum(keep[:probe]) + 1
    if not return_mask:
        return candidate_idx
    mask = np.zeros(total_points, dtype=bool)
    mask[candidate_idx] = True
    return mask
def paretofront(self, Y):
    """Return the non-dominated rows of ``Y`` (minimisation convention)."""
    efficient = self.is_pareto_efficient(Y, return_mask=False)
    return Y[efficient, :]
def EIM(self, X, mode='euclidean'):
    """
    Expected improvement matrix (EIM) infill criterion for candidates ``X``.

    Modes: 'euclidean' (distance form), 'hypervolume', 'maxmin',
    'combine' (euclidean + hypervolume); any other value returns the
    euclidean and hypervolume values stacked column-wise.

    Reference:
        D. Zhan, Y. Cheng and J. Liu, "Expected Improvement Matrix-Based
        Infill Criteria for Expensive Multiobjective Optimization",
        IEEE Transactions on Evolutionary Computation, 21(6):956-975,
        Dec 2017. doi:10.1109/TEVC.2017.2697503

    BUG FIX: the hypervolume-based branches reshaped the reference point
    with ``r.reshape(1, 2, 1)``, hard-coding two objectives; they now use
    ``r.reshape(1, nobj, 1)`` so any objective count works (identical
    results for nobj == 2).
    """
    f = self.currentfront          # current (scaled) Pareto front, (nfx, nobj)
    nfx = np.shape(f)[0]
    nobj = np.shape(f)[1]
    nx = np.shape(X)[0]
    r = 1.1 * np.ones((1, nobj))   # hypervolume reference point
    ulist = []
    varlist = []
    X = self.scaleX(X, mode='bounds')
    for iobj in range(nobj):
        u, var = self.models[iobj].predict_y(X)
        ulist.append(u)
        varlist.append(var)
    u = np.concatenate(ulist, axis=1)
    var = np.concatenate(varlist, axis=1)
    std = np.sqrt(np.maximum(0, var))  # clamp tiny negative variances
    # Broadcast to (nfx, nobj, nx): front point x objective x candidate.
    u_matrix = np.reshape(u.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    s_matrix = np.reshape(std.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    f_matrix = f.reshape((nfx, nobj, 1)) * np.ones((1, 1, nx))
    Z_matrix = (f_matrix - u_matrix) / s_matrix
    EI_matrix = np.multiply((f_matrix - u_matrix), norm.cdf(Z_matrix)) \
        + np.multiply(s_matrix, norm.pdf(Z_matrix))
    if mode == 'euclidean':
        y = np.min(np.sqrt(np.sum(EI_matrix ** 2, axis=1)), axis=0).reshape(-1, 1)
    elif mode == 'hypervolume':
        y = np.min(np.prod(r.reshape(1, nobj, 1) - f_matrix + EI_matrix, axis=1)
                   - np.prod(r - f, axis=1).reshape((-1, 1)), axis=0).reshape((-1, 1))
    elif mode == 'maxmin':
        y = np.min(np.max(EI_matrix, axis=1), axis=0).reshape(-1, 1)
    elif mode == 'combine':
        y = np.min(np.sqrt(np.sum(EI_matrix ** 2, axis=1)), axis=0).reshape(-1, 1) \
            + np.min(np.prod(r.reshape(1, nobj, 1) - f_matrix + EI_matrix, axis=1)
                     - np.prod(r - f, axis=1).reshape((-1, 1)), axis=0).reshape((-1, 1))
    else:
        # Fall-through: euclidean and hypervolume values side by side.
        y1 = np.min(np.sqrt(np.sum(EI_matrix ** 2, axis=1)), axis=0).reshape(-1, 1)
        y2 = np.min(np.prod(r.reshape(1, nobj, 1) - f_matrix + EI_matrix, axis=1)
                    - np.prod(r - f, axis=1).reshape((-1, 1)), axis=0).reshape((-1, 1))
        return np.hstack((y1, y2))
    return y
def CEIM_Hypervolume(self, X):
    """
    Constrained expected improvement matrix (hypervolume form) for ``X``.

    The unconstrained hypervolume EIM is computed per candidate and then
    multiplied by the product of ``norm.cdf((0 - mean) / std)`` over the
    constraint models (the variable is named ``PoF``, i.e. a probability
    of feasibility for constraints of the form g(x) <= 0).

    Reference:
        D. Zhan, Y. Cheng and J. Liu, IEEE Trans. Evolutionary
        Computation 21(6):956-975, 2017. doi:10.1109/TEVC.2017.2697503

    NOTE(review): unlike EIM(), ``X`` is NOT rescaled via self.scaleX here
    — confirm callers pass appropriately scaled candidates.
    """
    f = self.currentfront
    nobj = np.shape(f)[1]
    nx = np.shape(X)[0]
    r = 1.1 * np.ones((1, nobj))  # hypervolume reference point
    y = np.zeros((nx, 1))
    ulist = []
    varlist = []
    for iobj in range(nobj):
        u, var = self.models[iobj].predict_y(X)
        ulist.append(u)
        varlist.append(var)
    u = np.concatenate(ulist, axis=1)
    var = np.concatenate(varlist, axis=1)
    std = np.sqrt(np.maximum(0, var))  # clamp tiny negative variances
    # Per-candidate hypervolume improvement against the current front.
    for ix in range(nx):
        Z = (f - u[ix, :]) / std[ix, :]
        EIM = np.multiply((f - u[ix, :]), norm.cdf(Z)) + np.multiply(std[ix, :], norm.pdf(Z))
        y[ix] = np.min(np.prod(r - f + EIM, axis=1) - np.prod(r - f, axis=1))
    # Constraints
    ncon = len(self.constrainedmodels)
    uconlist = []
    varconlist = []
    for iobj in range(ncon):
        ucon, varcon = self.constrainedmodels[iobj].predict_y(X)
        uconlist.append(ucon)
        varconlist.append(varcon)
    ucon = np.concatenate(uconlist, axis=1)
    varcon = np.concatenate(varconlist, axis=1)
    stdcon = np.sqrt(np.maximum(0, varcon))
    PoF = np.prod(norm.cdf((0 - ucon) / stdcon), axis=1).reshape(-1, 1)
    return y * PoF
def AEIM_Hypervolume(self, X):
    """
    Adaptive expected improvement matrix (hypervolume form) for ``X``.

    Same construction as the hypervolume EIM but with the front shifted by
    ``self.contextual`` inside the standardised improvement (adaptive
    offset based on https://arxiv.org/pdf/1807.01279.pdf).

    BUG FIX: the reference point was reshaped with ``r.reshape(1, 2, 1)``,
    hard-coding two objectives; it now uses ``r.reshape(1, nobj, 1)``
    (identical results for nobj == 2). Dead initialisation and
    commented-out code were removed.

    NOTE(review): unlike EIM()/AEIM_Euclidean(), ``X`` is NOT rescaled via
    self.scaleX here — confirm callers pass appropriately scaled inputs.
    """
    f = self.currentfront
    c = self.contextual
    nfx = np.shape(f)[0]
    nobj = np.shape(f)[1]
    nx = np.shape(X)[0]
    r = 1.1 * np.ones((1, nobj))  # hypervolume reference point
    ulist = []
    varlist = []
    for iobj in range(nobj):
        u, var = self.models[iobj].predict_y(X)
        ulist.append(u)
        varlist.append(var)
    u = np.concatenate(ulist, axis=1)
    var = np.concatenate(varlist, axis=1)
    std = np.sqrt(np.maximum(0, var))  # clamp tiny negative variances
    # Broadcast to (nfx, nobj, nx): front point x objective x candidate.
    u_matrix = np.reshape(u.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    s_matrix = np.reshape(std.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    f_matrix = f.reshape((nfx, nobj, 1)) * np.ones((1, 1, nx))
    c_matrix = c.reshape((nfx, nobj, 1)) * np.ones((1, 1, nx))
    Z_matrix = (f_matrix - u_matrix - c_matrix) / s_matrix
    EI_matrix = np.multiply((f_matrix - u_matrix), norm.cdf(Z_matrix)) \
        + np.multiply(s_matrix, norm.pdf(Z_matrix))
    y = np.min(np.prod(r.reshape(1, nobj, 1) - f_matrix + EI_matrix, axis=1)
               - np.prod(r - f, axis=1).reshape((-1, 1)), axis=0).reshape((-1, 1))
    return y
def AEIM_Euclidean(self, X):
    """
    Adaptive expected improvement matrix (euclidean form) for ``X``.

    Same construction as the euclidean EIM but with the front shifted by
    ``self.contextual`` inside the standardised improvement.

    Reference:
        D. Zhan, Y. Cheng and J. Liu, IEEE Trans. Evolutionary
        Computation 21(6):956-975, 2017. doi:10.1109/TEVC.2017.2697503

    NOTE(review): this method calls predict_f (latent mean/variance)
    whereas the other criteria use predict_y — confirm this difference
    is intentional.
    """
    f = self.currentfront
    c = self.contextual
    nfx = np.shape(f)[0]
    nobj = np.shape(f)[1]
    nx = np.shape(X)[0]
    y = np.zeros((nx, 1))
    ulist = []
    varlist = []
    X = self.scaleX(X, mode='bounds')
    for iobj in range(nobj):
        u, var = self.models[iobj].predict_f(X)
        ulist.append(u)
        varlist.append(var)
    u = np.concatenate(ulist, axis=1)
    var = np.concatenate(varlist, axis=1)
    std = np.sqrt(np.maximum(0, var))  # clamp tiny negative variances
    # Broadcast to (nfx, nobj, nx): front point x objective x candidate.
    u_matrix = np.reshape(u.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    s_matrix = np.reshape(std.T, (1, nobj, nx)) * np.ones((nfx, 1, 1))
    f_matrix = f.reshape((nfx, nobj, 1)) * np.ones((1, 1, nx))
    c_matrix = c.reshape((nfx, nobj, 1)) * np.ones((1, 1, nx))
    Z_matrix = (f_matrix - u_matrix - c_matrix) / s_matrix
    EI_matrix = np.multiply((f_matrix - u_matrix), norm.cdf(Z_matrix)) + np.multiply(s_matrix, norm.pdf(Z_matrix))
    y = np.min(np.sqrt(np.sum(EI_matrix ** 2, axis=1)), axis=0).reshape(-1, 1)
    return y
def EIMoptimiserWrapper(self, Xcont, Xqual, constraints=False, mode='euclidean'):
    """Negated EIM as a scalar objective for SciPy minimisers.

    The continuous and qualitative parts are stitched back into a single
    candidate row; the sign is flipped because the optimisers minimise.
    """
    candidate = np.concatenate(
        (Xcont.reshape((1, -1)), Xqual.reshape((1, -1))), axis=1)
    if constraints is False:
        return -self.EIM(candidate, mode).reshape(-1)
    return -self.CEIM_Hypervolume(candidate)
def AEIMoptimiserWrapper(self, Xcont, Xqual, constraints=False):
    # Negated adaptive EIM as a scalar objective for SciPy minimisers.
    # NOTE(review): `constraints` is accepted for signature parity with
    # EIMoptimiserWrapper but is unused — there is no constrained AEIM
    # path in this file.
    X = np.concatenate((Xcont.reshape((1, -1)), Xqual.reshape((1, -1))), axis=1)
    return -self.AEIM_Euclidean(X).reshape(-1)
def EIMmixedoptimiser(self, constraints, algorithm='Random Local', values=None, mode='euclidean'):
    """
    Maximise the EIM acquisition over the whole mixed domain.

    'Random' evaluates the criterion on a 10k-point Halton design and
    returns the best sample. 'Random Local' additionally refines the
    continuous dimensions of the best sample with SLSQP while holding the
    qualitative dimensions fixed. (`stats` is the whole scipy package,
    aliased via ``import scipy as stats`` at the top of the file.)
    """
    if algorithm == 'Random':
        Xsamples = self.sample_design(samples=10000, design='halton')
        if constraints is False:
            fvals = self.EIM(Xsamples, mode=mode)
        else:
            fvals = self.CEIM_Hypervolume(Xsamples)
        fmax = np.amax(fvals)
        indymax = np.argmax(fvals)
        xmax = Xsamples[indymax, :]
        if values is None:
            return fmax, xmax
        return fmax, xmax, fvals, Xsamples
    elif algorithm == 'Random Local':
        Xsamples = self.sample_design(samples=10000, design='halton')
        if constraints is False:
            fvals = self.EIM(Xsamples, mode=mode)
        else:
            fvals = self.CEIM_Hypervolume(Xsamples)
        if mode == 'all':
            # 'all' carries one column per criterion (euclidean,
            # hypervolume); refine the best sample of each separately.
            fmax = np.max(fvals, axis=0)
            # NOTE(review): leftover debug prints — consider removing or
            # routing through logging.
            print(fvals.shape)
            print(fmax.shape)
            indmax = np.argmax(fvals, axis=0)
            print(indmax)
            xmax = Xsamples[indmax, :]
            qual = xmax[:, -self.num_qual:].reshape(-1)
            bnd = list(self.bounds[:, :self.num_quant].T)
            bndlist = []
            for element in bnd:
                bndlist.append(tuple(element))
            modes = ['euclidean', 'hypervolume']
            results = []
            for i in range(2):
                results.append(stats.optimize.minimize(self.EIMoptimiserWrapper, xmax[i, :-self.num_qual].reshape(-1), args=(qual[i], constraints, modes[i]), bounds=bndlist, method='SLSQP'))
            xmax = np.concatenate((results[0].x, qual[0]), axis=None)
            xmax = np.vstack((xmax, np.concatenate((results[1].x, qual[1]), axis=None)))
            fmax = np.array((results[0].fun, results[1].fun))
            return fmax, xmax
        # Single-criterion path: local refinement of the continuous part.
        fmax = np.amax(fvals)
        indymax = np.argmax(fvals)
        xmax = Xsamples[indymax, :]
        qual = xmax[-self.num_qual:]
        bnd = list(self.bounds[:, :self.num_quant].T)
        bndlist = []
        for element in bnd:
            bndlist.append(tuple(element))
        result = stats.optimize.minimize(self.EIMoptimiserWrapper, xmax[:-self.num_qual].reshape(-1), args=(qual, constraints, mode), bounds=bndlist, method='SLSQP')
        if values is None:
            return result.fun, np.concatenate((result.x, qual), axis=None)
        # NOTE(review): when `values` is given, the *pre-refinement* sample
        # data is returned, not the SLSQP result — confirm intent.
        return fmax, xmax, fvals, Xsamples
    else:
        raise NotImplementedError()
def AEIMmixedoptimiser(self, constraints, algorithm='Random', values=None):
    """
    Maximise the adaptive EIM acquisition over the mixed domain.

    First estimates the mean predictive variance on a 10k Halton design
    and stores ``self.contextual = meanvar / front`` (the adaptive offset
    used by the AEIM criteria), then optimises with the chosen algorithm.
    Falls through and returns None for unknown algorithm names.
    """
    # Get estimate for mean variance of model using halton sampling.
    X = self.sample_design(samples=10000, design='halton')
    X = self.scaleX(X, mode='bounds')
    varlist = []
    for iobj in range(self.num_obj):
        _, var = self.models[iobj].predict_y(X)
        varlist.append(var)
    var = np.concatenate(varlist, axis=1)
    meanvar = np.mean(var, axis=0)
    f = self.currentfront
    self.contextual = np.divide(meanvar, f)
    # Optimise acquisition.
    if algorithm == 'Random':
        Xsamples = self.sample_design(samples=10000, design='halton')
        fvals = self.AEIM_Hypervolume(Xsamples)
        fmax = np.amax(fvals)
        indymax = np.argmax(fvals)
        xmax = Xsamples[indymax, :]
        if values is None:
            return fmax, xmax
        return fmax, xmax, fvals, Xsamples
    elif algorithm == 'Random Local':
        Xsamples = self.sample_design(samples=10000, design='halton')
        if constraints is False:
            fvals = self.AEIM_Euclidean(Xsamples)
        else:
            # No constrained adaptive criterion is implemented.
            raise NotImplementedError()
        fmax = np.amax(fvals)
        indymax = np.argmax(fvals)
        xmax = Xsamples[indymax, :]
        qual = xmax[-self.num_qual:]
        bnd = list(self.bounds[:, :self.num_quant].T)
        bndlist = []
        for element in bnd:
            bndlist.append(tuple(element))
        # SLSQP refinement of the continuous dims, qualitative dims fixed.
        result = stats.optimize.minimize(self.AEIMoptimiserWrapper, xmax[:-self.num_qual].reshape(-1), args=(qual, constraints), bounds=bndlist, method='SLSQP')
        if values is None:
            return result.fun, np.concatenate((result.x, qual), axis=None)
        return fmax, xmax, fvals, Xsamples
    elif algorithm == 'SHGO':
        if self.num_qual < 1:
            bnd = list(self.bounds.T)
            bndlist = []
            for element in bnd:
                bndlist.append(tuple(element))
            result = shgo(self.AEIM_Hypervolume, bndlist, sampling_method='sobol', n=30, iters=2)
            # NOTE(review): this branch returns (x, fun), the others
            # return (fun, x) — confirm callers handle the ordering.
            return result.x, result.fun
        else:
            sample = self.sample_design(samples=1, design='random')
            contbnd = list(self.bounds[:, :self.num_quant].T)
            contbndlist = []
            qual = sample[:, -self.num_qual:]
            for element in contbnd:
                contbndlist.append(tuple(element))
            resXstore = []
            resFstore = []
            for i in range(np.shape(qual)[0]):
                # NOTE(review): args=(qual[i, :]) is NOT a tuple (the
                # parentheses are grouping); scipy wraps non-tuple args,
                # but args=(qual[i, :],) would be explicit — confirm.
                result = shgo(self.AEIMoptimiserWrapper, contbndlist, args=(qual[i, :]), sampling_method='sobol', n=30, iters=2)
                resXstore.append(result.x)
                resFstore.append(result.fun)
            # Sort for each discrete combination and get best point.
            ind = resFstore.index(min(resFstore))
            xmax = np.concatenate((resXstore[ind], qual[ind, :]))
            fval = min(resFstore)
            return fval, xmax
    elif algorithm == 'DE':
        if self.num_qual < 1:
            bnd = list(self.bounds.T)
            bndlist = []
            for element in bnd:
                bndlist.append(tuple(element))
            result = differential_evolution(self.AEIM_Hypervolume, bndlist)
            return result.x, result.fun
        else:
            sample = self.sample_design(samples=1, design='random')
            contbnd = list(self.bounds[:, :self.num_quant].T)
            contbndlist = []
            qual = sample[:, -self.num_qual:]
            for element in contbnd:
                contbndlist.append(tuple(element))
            resXstore = []
            resFstore = []
            for i in range(np.shape(qual)[0]):
                # NOTE(review): despite the 'DE' label this qualitative
                # branch uses dual_annealing, not differential_evolution.
                result = dual_annealing(self.AEIMoptimiserWrapper, contbndlist, args=(qual[i, :]))
                resXstore.append(result.x)
                resFstore.append(result.fun)
            # Sort for each discrete combination and get best point.
            ind = resFstore.index(min(resFstore))
            xmax = np.concatenate((resXstore[ind], qual[ind, :]))
            fval = min(resFstore)
            return fval, xmax
    return
def multinextcondition(self, X, Y, constraints=False, values=None, method='EIM', mode='euclidean'):
    """
    Suggest the next condition(s) for evaluation.

    Fits one model per objective — retrying with an alternative kernel and
    then a range of starting variances if optimisation fails — and then
    maximises the selected infill criterion ('EIM' or 'AEIM').

    BUG FIX: the NaN sanity check used ``means == np.nan``, which is
    always False (NaN never compares equal to anything); it now uses
    ``np.isnan``. Bare ``except:`` clauses were narrowed to
    ``except Exception:`` and the variance-retry loop stops at the first
    successful fit.
    """
    if constraints is False:
        try:
            self.k_type = 'matern3'
            self.models = self.generatemodels(X, Y)
        except Exception:
            print('Initial model optimisation failed, retrying with new kernel')
            try:
                self.k_type = 'matern5'
                self.models = self.generatemodels(X, Y)
            except Exception:
                print('Model optimisation failed, retrying with new value of variance')
                for variance in [0.1, 1, 2, 10]:
                    try:
                        self.models = self.generatemodels(X, Y, variance=variance)
                    except Exception:
                        print('Model optimisation failed, retrying with new value of variance')
                    else:
                        break  # first variance that fits successfully wins
        self.currentfront = self.paretofront(self.Yscaled)
        # Sanity-check the fitted models on a couple of probe points; refit
        # with a small variance if any prediction came back NaN.
        means = []
        for model in self.models:
            mean, _ = model.predict_y(self.sample_design(samples=2, design='halton'))
            means.append(mean.numpy())
        if np.any([np.isnan(m).any() for m in means]):
            print("Retraining model with new starting variance")
            self.models = self.generatemodels(X, Y, variance=0.1)
        if method == 'AEIM':
            fmax, xmax = self.AEIMmixedoptimiser(constraints, algorithm='Random Local')
        else:
            fmax, xmax = self.EIMmixedoptimiser(constraints, algorithm='Random Local', mode=mode)
        if values is None and mode != 'all':
            return xmax.reshape(1, -1), fmax
        elif values is None and mode == 'all':
            # 'all' yields one candidate per criterion; collapse duplicates.
            if np.allclose(xmax[0, :], xmax[1, :], rtol=1e-3, atol=1e-5):
                return xmax[0, :].reshape(1, -1), fmax[0]
            return np.unique(xmax.round(6), axis=0), fmax
    # Constrained path: additionally fit one model per constraint column.
    self.models = self.generatemodels(X, Y)
    self.currentfront = self.paretofront(self.Yscaled)
    self.constrainedmodels = self.generatemodels(X, constraints, scale=False)
    # NOTE(review): 'Simplical' is not handled by EIMmixedoptimiser, which
    # raises NotImplementedError for unknown algorithms — confirm intent.
    fmax, xmax = self.EIMmixedoptimiser(constraints, algorithm='Simplical')
    if values is None:
        return xmax.reshape(1, -1), fmax
89e8f1e4189f0729dfe92491a135e0998857cfdb | 2,306 | py | Python | chainer/links/connection/mgu.py | Qwinpin/chainer | 1dca01bc8a1aceec6ee53a66d24970b203a9fc51 | [
"MIT"
] | 1 | 2019-02-12T23:10:16.000Z | 2019-02-12T23:10:16.000Z | chainer/links/connection/mgu.py | nolfwin/chainer | 8d776fcc1e848cb9d3800a6aab356eb91ae9d088 | [
"MIT"
] | 1 | 2018-06-26T08:16:09.000Z | 2018-06-26T08:16:09.000Z | chainer/links/connection/mgu.py | nolfwin/chainer | 8d776fcc1e848cb9d3800a6aab356eb91ae9d088 | [
"MIT"
] | 1 | 2018-05-28T22:43:34.000Z | 2018-05-28T22:43:34.000Z | import numpy
import chainer
from chainer.backends import cuda
from chainer.functions.activation import sigmoid
from chainer.functions.activation import tanh
from chainer.functions.array import concat
from chainer.functions.math import linear_interpolate
from chainer import link
from chainer.links.connection import linear
class MGUBase(link.Chain):
    """Weights and update rule shared by the minimal gated unit (MGU) links.

    An MGU has a single forget gate and a candidate-state transform, both
    operating on the concatenation of the hidden state and the input.
    """

    def __init__(self, n_inputs, n_units):
        super(MGUBase, self).__init__()
        with self.init_scope():
            # Both transforms consume [state, input] concatenated.
            self.W_f = linear.Linear(n_inputs + n_units, n_units)
            self.W_h = linear.Linear(n_inputs + n_units, n_units)

    def _call_mgu(self, h, x):
        # Forget gate computed from the previous state and the input.
        forget = sigmoid.sigmoid(self.W_f(concat.concat([h, x])))
        # Candidate state from the gated previous state and the input.
        candidate = tanh.tanh(self.W_h(concat.concat([forget * h, x])))
        # Interpolate between candidate and previous state, weighted by
        # the forget gate.
        return linear_interpolate.linear_interpolate(forget, candidate, h)
class StatelessMGU(MGUBase):
    # Stateless variant: the caller supplies the previous hidden state on
    # every call, so forward is simply the raw MGU update rule.
    forward = MGUBase._call_mgu
class StatefulMGU(MGUBase):
    """MGU link that keeps its hidden state between calls.

    The state ``self.h`` is lazily zero-initialised on the first forward
    pass and can be set/cleared explicitly via set_state()/reset_state().
    """

    def __init__(self, in_size, out_size):
        super(StatefulMGU, self).__init__(in_size, out_size)
        # Width of the hidden state, needed for lazy zero-initialisation.
        self._state_size = out_size
        self.reset_state()

    def _to_device(self, device, skip_between_cupy_devices=False):
        # Overrides Link._to_device
        # TODO(niboshi): Avoid forcing concrete links to override _to_device
        device = chainer.get_device(device)
        super(StatefulMGU, self)._to_device(
            device, skip_between_cupy_devices=skip_between_cupy_devices)
        if self.h is not None:
            # Move the cached state along with the parameters, unless we
            # were asked to skip cupy-to-cupy transfers for a cupy state.
            if not (skip_between_cupy_devices
                    and device.xp is cuda.cupy
                    and isinstance(self.h, cuda.ndarray)):
                self.h.to_device(device)
        return self

    def set_state(self, h):
        # Adopt an externally provided state, moving it onto this link's
        # device first.
        # NOTE(review): Variable.to_cpu/to_gpu are deprecated in newer
        # Chainer releases — confirm the targeted version.
        assert isinstance(h, chainer.Variable)
        h_ = h
        if self.xp is numpy:
            h_.to_cpu()
        else:
            h_.to_gpu()
        self.h = h_

    def reset_state(self):
        # Forget the carried state; the next forward() starts from zeros.
        self.h = None

    def forward(self, x):
        if self.h is None:
            # Lazily create a zero state matching the incoming batch size.
            n_batch = x.shape[0]
            dtype = chainer.get_dtype()
            h_data = self.xp.zeros(
                (n_batch, self._state_size), dtype=dtype)
            h = chainer.Variable(h_data)
        else:
            h = self.h
        self.h = self._call_mgu(h, x)
        return self.h
| 29.948052 | 76 | 0.630095 | 1,973 | 0.855594 | 0 | 0 | 0 | 0 | 0 | 0 | 95 | 0.041197 |
89e93dde7c1c5cc0955c1c9e582b41a9104efdc2 | 2,920 | py | Python | backend/api/v1/rest.py | aroraenterprise/projecteos | e1fb0438af8cb59b77792523c6616c480b23a6f8 | [
"MIT"
] | null | null | null | backend/api/v1/rest.py | aroraenterprise/projecteos | e1fb0438af8cb59b77792523c6616c480b23a6f8 | [
"MIT"
] | null | null | null | backend/api/v1/rest.py | aroraenterprise/projecteos | e1fb0438af8cb59b77792523c6616c480b23a6f8 | [
"MIT"
] | null | null | null | """
Project: flask-rest
Author: Saj Arora
Description: Initializes the rest app
"""
from collections import OrderedDict
import flask
from api.v1 import Api
class _SageRest(flask.Flask):
    """Flask application that registers SageRest modules in dependency order.

    Modules are linked into the REST API only after every module they
    declare as a dependency (via get_dependencies()) has been linked, and
    circular dependencies are rejected with an explicit error.

    BUG FIXES:
    * ``_api``/``__modules``/``_modules``/``_ordered_modules`` were mutable
      *class* attributes shared by every instance; they are now created
      per instance in __init__.
    * ``add_modules``'s ``self.__modules + modules or []`` parsed as
      ``(self.__modules + modules) or []`` and raised TypeError for the
      default ``modules=None``; the intended grouping is now explicit.
    * ``_load_dependencies``'s ``dependent_modules or [] + [name]`` parsed
      as ``dependent_modules or ([] + [name])`` and so never extended a
      non-empty chain, defeating the circular-dependency check.
    * ``iteritems()`` replaced with ``items()`` (works on Python 2 and 3).
    """

    def __init__(self, name, config, version, base_url):
        super(_SageRest, self).__init__(name)
        # NOTE(review): flask.Flask.name is normally a read-only property
        # derived from import_name — confirm this assignment works with the
        # pinned Flask version.
        self.name = name
        self.config.from_object(config or {})
        # Per-instance module registries (previously class-level state).
        self.__modules = []
        self._modules = {}
        self._ordered_modules = OrderedDict()
        self._auth_module = None
        # init flask_restful
        self._api = Api(self, version, base_url)

    def get_api(self):
        """Return the flask_restful Api wrapper."""
        return self._api

    def set_auth(self, auth_module):
        """Register the auth module; it is always linked with the others."""
        self._auth_module = auth_module
        self.__modules.append(auth_module)

    def add_modules(self, modules=None):
        """Register ``modules`` (plus any auth module) and link them all."""
        modules = self.__modules + (modules or [])
        for module in modules:
            self._modules[module.name] = module
        self._sort_module_dependencies()
        for name, module in self._ordered_modules.items():
            module.link(self.get_api(), auth=self._auth_module)

    def _sort_module_dependencies(self):
        # Populate self._ordered_modules so that dependencies precede the
        # modules that require them.
        for module_name, module in self._modules.items():
            if module_name in self._ordered_modules:  # already loaded dependencies for this module
                continue
            self._load_dependencies(module)
            # add to ordered modules list
            self._ordered_modules[module_name] = module

    def _load_dependencies(self, module, dependent_modules=None):
        # Depth-first load of `module`'s dependencies; `dependent_modules`
        # is the chain of names currently being resolved, used to detect
        # circular dependencies.
        dependent_modules = dependent_modules or []
        for dependency_name in module.get_dependencies():
            # fetch the dependency:
            if dependency_name not in self._modules:
                raise Exception('%s module not found as an import for this app. Please create or register '
                                'this module during intialization to continue.' % dependency_name)
            dependency = self._modules.get(dependency_name)
            if dependency.name in self._ordered_modules:  # already loaded, awesome
                continue
            # check for circular depencies
            if dependency_name in dependent_modules:
                raise Exception('%s is a dependency of %s which in turn '
                                'itself is a dependency of %s. Please fix this circular '
                                'dependency.' % (
                                    dependency_name, module.name, dependency_name
                                ))
            # not a circular dependency, extend the chain with this one
            _dependent_modules = (dependent_modules or []) + [dependency.name]
            # load all dependencies of the current dependency
            self._load_dependencies(dependency, _dependent_modules)
            # add to ordered modules list
            self._ordered_modules[dependency.name] = dependency
89ea4df004678d25f396f7be142ad28d35ef4d77 | 1,575 | py | Python | torchreid/losses/log_euclid_loss.py | fremigereau/MTDA_KD_REID | 354645990c9ff0e1eff363ddddccd0d208b08269 | [
"MIT"
] | null | null | null | torchreid/losses/log_euclid_loss.py | fremigereau/MTDA_KD_REID | 354645990c9ff0e1eff363ddddccd0d208b08269 | [
"MIT"
] | null | null | null | torchreid/losses/log_euclid_loss.py | fremigereau/MTDA_KD_REID | 354645990c9ff0e1eff363ddddccd0d208b08269 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchreid.metrics import compute_distance_matrix
import scipy.linalg
def adjoint(A, E, f):
    """Helper for matrix-function gradients (used by Logm.backward).

    Evaluates ``f`` on the block matrix ``[[A^H, E], [0, A^H]]`` and
    returns its upper-right n x n block, cast back to ``A``'s dtype.
    """
    A_H = A.T.conj().to(E.dtype)
    top = torch.cat((A_H, E), dim=1)
    bottom = torch.cat((torch.zeros_like(E), A_H), dim=1)
    block = torch.cat((top, bottom), dim=0)
    n = A.size(0)
    return f(block)[:n, n:].to(A.dtype)
def logm_scipy(A):
    """Matrix logarithm of a torch matrix, evaluated by SciPy on the CPU.

    ``disp=False`` makes scipy return ``(logm, error_estimate)``; only the
    logarithm is kept, and the result is moved back to ``A``'s device.
    """
    log_np, _errest = scipy.linalg.logm(A.cpu(), disp=False)
    return torch.from_numpy(log_np).to(A.device)
class Logm(torch.autograd.Function):
    """Differentiable matrix logarithm.

    Forward delegates to SciPy (logm_scipy); the backward pass applies the
    same function through the block-matrix construction in adjoint().
    """

    @staticmethod
    def forward(ctx, A):
        # Only square 2-D matrices of the supported dtypes are accepted.
        assert len(A.shape) == 2 and A.shape[0] == A.shape[1]  # Square matrix
        assert A.dtype in (torch.float32, torch.float64, torch.complex64, torch.complex128)
        ctx.save_for_backward(A)
        return logm_scipy(A)

    @staticmethod
    def backward(ctx, G):
        A, = ctx.saved_tensors
        # Propagate the incoming gradient G through logm via adjoint().
        return adjoint(A, G, logm_scipy)
def logm(x):
    # Convenience wrapper so the autograd Function reads like a plain op.
    return Logm.apply(x)
class LogEuclidLoss(nn.Module):
    """Discrepancy between the pairwise-distance structures of two feature sets.

    Builds the cosine self-distance matrix of the source features and of
    the target features and returns the norm of their difference; with
    ``log=True`` the (differentiable) matrix logarithm is applied to both
    distance matrices first.
    """

    def __init__(self, use_gpu=True, log=False):
        super(LogEuclidLoss, self).__init__()
        self.log = log
        self.use_gpu = use_gpu

    def forward(self, feats_source, feats_target):
        source_dist = compute_distance_matrix(feats_source, feats_source, metric='cosine')
        target_dist = compute_distance_matrix(feats_target, feats_target, metric='cosine')
        if self.log:
            source_dist = logm(source_dist)
            target_dist = logm(target_dist)
        return torch.norm(source_dist - target_dist)
| 28.125 | 91 | 0.652063 | 985 | 0.625397 | 0 | 0 | 382 | 0.24254 | 0 | 0 | 31 | 0.019683 |
89ebaa5775c0dea38d5246fe1024840283181c4b | 4,460 | py | Python | app/rockband/tests/test_member_api.py | solattila/rock-band-api | 1521b2913b75c53310ba1b71d77d599966237483 | [
"MIT"
] | null | null | null | app/rockband/tests/test_member_api.py | solattila/rock-band-api | 1521b2913b75c53310ba1b71d77d599966237483 | [
"MIT"
] | null | null | null | app/rockband/tests/test_member_api.py | solattila/rock-band-api | 1521b2913b75c53310ba1b71d77d599966237483 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Member, Band
from rockband.serializers import MemberSerializer
MEMBERS_URL = reverse('rockband:member-list')
class PublicMembersApiTests(TestCase):
    """Tests for the members API when no user is authenticated."""

    def setUp(self):
        self.client = APIClient()

    def test_login_required(self):
        """An unauthenticated request to the endpoint is rejected."""
        response = self.client.get(MEMBERS_URL)

        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateMemberApiTests(TestCase):
    """
    Tests for the members API with an authenticated user.
    """

    def setUp(self):
        # Every test in this class runs as this freshly created,
        # force-authenticated user.
        self.client = APIClient()
        self.user = get_user_model().objects.create_user(
            'test@rockbanddev.com',
            'testpass'
        )
        self.client.force_authenticate(self.user)
def test_retrieve_member_list(self):
    """
    Test retrieving a list of members
    """
    Member.objects.create(user=self.user, name='Hendrix')
    Member.objects.create(user=self.user, name='Satriani')

    res = self.client.get(MEMBERS_URL)

    # Expect all members back, ordered by name descending, serialized
    # exactly like MemberSerializer would serialize them.
    members = Member.objects.all().order_by('-name')
    serializer = MemberSerializer(members, many=True)
    self.assertEqual(res.status_code, status.HTTP_200_OK)
    self.assertEqual(res.data, serializer.data)
def test_members_limited_to_user(self):
    """
    Test that members for the authenticated user are returned
    """
    # A member belonging to a different user must not appear in the list.
    user2 = get_user_model().objects.create_user(
        'other@rockbanddev.com',
        'testpass'
    )
    Member.objects.create(user=user2, name='Lemmy')
    member = Member.objects.create(user=self.user, name='Elvis')

    res = self.client.get(MEMBERS_URL)

    self.assertEqual(res.status_code, status.HTTP_200_OK)
    # Only the authenticated user's single member should come back.
    self.assertEqual(len(res.data), 1)
    self.assertEqual(res.data[0]['name'], member.name)
def test_create_member_successful(self):
    """POSTing a valid payload creates the member for the current user."""
    payload = {'name': 'Petrucci'}

    self.client.post(MEMBERS_URL, payload)

    created = Member.objects.filter(
        user=self.user,
        name=payload['name'],
    ).exists()
    self.assertTrue(created)
def test_create_member_invalid(self):
    """
    Test creating invalid member fails
    """
    payload = {'name': ''}  # blank name should fail serializer validation

    res = self.client.post(MEMBERS_URL, payload)

    self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve_members_assigned_to_bands(self):
    """
    Test filtering members by those assigned to bands
    """
    member1 = Member.objects.create(
        user=self.user, name='Joakim'
    )
    member2 = Member.objects.create(
        user=self.user, name='Tony'
    )
    band = Band.objects.create(
        title='Sabaton',
        band_members=5,
        tickets=55.5,
        user=self.user
    )
    band.members.add(member1)

    res = self.client.get(MEMBERS_URL, {'assigned_only': 1})

    # Only the member attached to a band should be serialized back.
    serializer1 = MemberSerializer(member1)
    serializer2 = MemberSerializer(member2)
    self.assertIn(serializer1.data, res.data)
    self.assertNotIn(serializer2.data, res.data)
def test_retrieve_members_assigned_unique(self):
    """
    Test filtering members by assigned returns unique items.

    BUG FIX: this method was named ``Test_retrieve_members_assigned_unique``
    (capital T), so unittest/pytest test discovery silently skipped it.
    """
    # One member assigned to two bands must appear only once in the list.
    member = Member.objects.create(
        user=self.user, name='Joakim'
    )
    Member.objects.create(
        user=self.user, name='Tony'
    )
    band1 = Band.objects.create(
        title='Sabaton',
        band_members=5,
        tickets=55.5,
        user=self.user
    )
    band1.members.add(member)
    band2 = Band.objects.create(
        title='Sonata',
        band_members=5,
        tickets=45.5,
        user=self.user
    )
    band2.members.add(member)

    res = self.client.get(MEMBERS_URL, {'assigned_only': 1})

    self.assertEqual(len(res.data), 1)
| 28.050314 | 71 | 0.602018 | 4,128 | 0.925561 | 0 | 0 | 0 | 0 | 0 | 0 | 942 | 0.211211 |
89ebb0adb00564e2ea1a6c39febb2b498caf378e | 372 | py | Python | bot.py | bufgix/slave | 9c71035d06e1a714db43ac7c4503a807f0baa185 | [
"MIT"
] | 8 | 2019-06-16T17:03:28.000Z | 2021-10-10T19:48:34.000Z | bot.py | bufgix/slave | 9c71035d06e1a714db43ac7c4503a807f0baa185 | [
"MIT"
] | 4 | 2019-08-22T23:28:07.000Z | 2021-02-02T22:15:29.000Z | bot.py | bufgix/slave | 9c71035d06e1a714db43ac7c4503a807f0baa185 | [
"MIT"
] | 2 | 2021-04-02T21:00:15.000Z | 2021-04-03T07:02:26.000Z | from slave.playground.bots import BotInformation
from slave.lib.bots import BotBasic, BotV2
# IRC connection settings for the bot pool.
config = {
    'host': 'chat.freenode.net',
    'port': 6667,
    'channel': "#slavebotpool666",
    'boss_name': 'boss666',
    'bot_prefix': "SLAVEBOT"
}

# Configure the shared bot pool, swap in the BotV2 command set, and start.
# NOTE(review): the semantics of safe=True are defined in slave.lib —
# confirm against that module. BotBasic is imported at the top of the file
# but unused here.
BotInformation.read_config_from_dict(config)
BotInformation.use_other_bot_commands(BotV2)
BotInformation.start(safe=True)
89ec0f6ca81160f0a6c25c125bae56759feda8ef | 363 | py | Python | Desafios-intermediarios-em-Python/Crescente e Decrescente.py | Alexsandramaran/Desafios-Intermedi-rios-Python | 5013953d4d5b051bf5f9009141f1e1a61d9798f4 | [
"MIT"
] | null | null | null | Desafios-intermediarios-em-Python/Crescente e Decrescente.py | Alexsandramaran/Desafios-Intermedi-rios-Python | 5013953d4d5b051bf5f9009141f1e1a61d9798f4 | [
"MIT"
] | null | null | null | Desafios-intermediarios-em-Python/Crescente e Decrescente.py | Alexsandramaran/Desafios-Intermedi-rios-Python | 5013953d4d5b051bf5f9009141f1e1a61d9798f4 | [
"MIT"
# Reads pairs of integers from stdin and prints, for each pair, whether it
# is increasing ('Crescente') or decreasing ('Decrescente'); an equal pair
# terminates the input and is not classified (beecrowd/URI 1113-style).
X = []
Y = []
cont = 0
n = True
while n:
    a, b = input().split(" ")
    a = int(a)
    b = int(b)
    if a == b:
        # Terminating pair: stop reading, classify nothing for it.
        # BUG FIX: the original also did `cont -= 1` here, which dropped
        # the classification of the last valid pair read before the
        # terminator.
        n = False
    else:
        X.append(a)
        Y.append(b)
        cont += 1
i = 0
while i < cont:
    if X[i] > Y[i]:
        print('Decrescente')
    elif X[i] < Y[i]:
        print('Crescente')
    i += 1
89ecf59e43b45e631155311a80e6f746b263deaf | 1,482 | py | Python | utils/generate-sha256.py | dskrvk/anteater | bce2dba767d661e9245da9dd662fec49554b374f | [
"Apache-2.0"
] | 177 | 2018-03-28T18:50:26.000Z | 2022-02-27T00:36:58.000Z | utils/generate-sha256.py | dskrvk/anteater | bce2dba767d661e9245da9dd662fec49554b374f | [
"Apache-2.0"
] | 30 | 2016-08-20T08:58:57.000Z | 2018-02-01T16:56:54.000Z | utils/generate-sha256.py | dskrvk/anteater | bce2dba767d661e9245da9dd662fec49554b374f | [
"Apache-2.0"
] | 8 | 2019-06-28T07:45:57.000Z | 2021-11-13T06:06:36.000Z | ##############################################################################
# Copyright (c) 2017 Luke Hinds <lhinds@redhat.com>, Red Hat
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# python generate-sha256.py --project /home/user/opnfv/infra
# output made to working directory, file `output.yaml`
import os
import sys
import hashlib
import argparse
from binaryornot.check import is_binary
parser = argparse.ArgumentParser()
parser.add_argument('--project', help="Full path to project folder",
                    required=True)
args = parser.parse_args()

ignore_dirs = ['.git']

# All output goes to ./output.yaml (stdout is redirected for the rest of
# the script, as documented in the file header).
sys.stdout = open('output.yaml', 'w')

print("binaries:")
for root, dirs, files in os.walk(args.project):
    dirs[:] = [d for d in dirs if d not in ignore_dirs]
    for file in files:
        full_path = os.path.join(root, file)
        if is_binary(full_path):
            # BUG FIX: a single module-level hasher accumulated bytes from
            # every previously hashed file, so only the first digest
            # matched its file; each binary now gets a fresh sha256.
            hasher = hashlib.sha256()
            with open(full_path, 'rb') as afile:
                hasher.update(afile.read())
            # Path relative to the project root, used as the YAML key.
            split_path = full_path.split(args.project + '/', 1)[-1]
            print(" {}:".format(split_path))
            digest = hasher.hexdigest()  # renamed: `sum` shadowed a builtin
            print(" - {}".format(digest))
| 36.146341 | 78 | 0.579622 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 667 | 0.450067 |